gcc/tree-ssa-sccvn.c
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2019 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "splay-tree.h"
25 #include "backend.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "memmodel.h"
33 #include "emit-rtl.h"
34 #include "cgraph.h"
35 #include "gimple-pretty-print.h"
36 #include "alias.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "cfganal.h"
40 #include "tree-inline.h"
41 #include "internal-fn.h"
42 #include "gimple-fold.h"
43 #include "tree-eh.h"
44 #include "gimplify.h"
45 #include "flags.h"
46 #include "dojump.h"
47 #include "explow.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "stmt.h"
51 #include "expr.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "dumpfile.h"
55 #include "cfgloop.h"
56 #include "params.h"
57 #include "tree-ssa-propagate.h"
58 #include "tree-cfg.h"
59 #include "domwalk.h"
60 #include "gimple-iterator.h"
61 #include "gimple-match.h"
62 #include "stringpool.h"
63 #include "attribs.h"
64 #include "tree-pass.h"
65 #include "statistics.h"
66 #include "langhooks.h"
67 #include "ipa-utils.h"
68 #include "dbgcnt.h"
69 #include "tree-cfgcleanup.h"
70 #include "tree-ssa-loop.h"
71 #include "tree-scalar-evolution.h"
72 #include "tree-ssa-loop-niter.h"
73 #include "builtins.h"
74 #include "tree-ssa-sccvn.h"
76 /* This algorithm is based on the SCC algorithm presented by Keith
77 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
78 (http://citeseer.ist.psu.edu/41805.html). In
79 straight line code, it is equivalent to a regular hash based value
80 numbering that is performed in reverse postorder.
82 For code with cycles, there are two alternatives, both of which
83 require keeping the hashtables separate from the actual list of
84 value numbers for SSA names.
86 1. Iterate value numbering in an RPO walk of the blocks, removing
87 all the entries from the hashtable after each iteration (but
88 keeping the SSA name->value number mapping between iterations).
89 Iterate until it does not change.
91 2. Perform value numbering as part of an SCC walk on the SSA graph,
92 iterating only the cycles in the SSA graph until they do not change
93 (using a separate, optimistic hashtable for value numbering the SCC
94 operands).
96 The second is not just faster in practice (because most SSA graph
97 cycles do not involve all the variables in the graph), it also has
98 some nice properties.
100 One of these nice properties is that when we pop an SCC off the
101 stack, we are guaranteed to have processed all the operands coming from
102 *outside of that SCC*, so we do not need to do anything special to
103 ensure they have value numbers.
105 Another nice property is that the SCC walk is done as part of a DFS
106 of the SSA graph, which makes it easy to perform combining and
107 simplifying operations at the same time.
109 The code below is deliberately written in a way that makes it easy
110 to separate the SCC walk from the other work it does.
112 In order to propagate constants through the code, we track which
113 expressions contain constants, and use those while folding. In
114 theory, we could also track expressions whose value numbers are
115 replaced, in case we end up folding based on expression
116 identities.
118 In order to value number memory, we assign value numbers to vuses.
119 This enables us to note that, for example, stores to the same
120 address of the same value from the same starting memory states are
121 equivalent.
122 TODO:
124 1. We can iterate only the changing portions of the SCCs, but
125 I have not seen an SCC big enough for this to be a win.
126 2. If you differentiate between phi nodes for loops and phi nodes
127 for if-then-else, you can properly consider phi nodes in different
128 blocks for equivalence.
129 3. We could value number vuses in more cases, particularly, whole
130 structure copies. */
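/* Illustrative sketch, not part of GCC: a minimal hash-based value
   numbering over a hypothetical three-address IR, iterated until no
   value number changes, in the spirit of alternative 1 above (a
   simplified, pessimistic variant).  The expression table is cleared
   for every round while the name->value-number map is kept.  All
   names and types below (toy_insn, string operands) are made up for
   the example.

     #include <map>
     #include <string>
     #include <tuple>
     #include <vector>

     // One instruction of a toy IR: lhs = rhs1 op rhs2.
     struct toy_insn { std::string lhs; char op; std::string rhs1, rhs2; };

     // One round of hash-based value numbering.  Returns true if any
     // value number changed, so the caller iterates to a fixpoint:
     //   while (vn_round (insns, val, next_vn)) ;
     static bool
     vn_round (const std::vector<toy_insn> &insns,
               std::map<std::string, int> &val, int &next_vn)
     {
       // Expression hashtable, fresh for each round.
       std::map<std::tuple<char, int, int>, int> exprs;
       bool changed = false;
       for (const toy_insn &in : insns)
         {
           // Operands and results without a value number get a fresh one.
           if (!val.count (in.rhs1)) val[in.rhs1] = next_vn++;
           if (!val.count (in.rhs2)) val[in.rhs2] = next_vn++;
           if (!val.count (in.lhs)) val[in.lhs] = next_vn++;
           std::tuple<char, int, int> key (in.op, val[in.rhs1], val[in.rhs2]);
           // An unseen expression values to the defining name itself;
           // a seen one reuses the recorded value number.
           int vn = exprs.count (key) ? exprs[key] : (exprs[key] = val[in.lhs]);
           if (val[in.lhs] != vn)
             {
               val[in.lhs] = vn;
               changed = true;
             }
         }
       return changed;
     }  */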
133 /* There's no BB_EXECUTABLE but we can use BB_VISITED. */
134 #define BB_EXECUTABLE BB_VISITED
136 static vn_lookup_kind default_vn_walk_kind;
138 /* vn_nary_op hashtable helpers. */
140 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
142 typedef vn_nary_op_s *compare_type;
143 static inline hashval_t hash (const vn_nary_op_s *);
144 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
147 /* Return the computed hashcode for nary operation P1. */
149 inline hashval_t
150 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
152 return vno1->hashcode;
155 /* Compare nary operations P1 and P2 and return true if they are
156 equivalent. */
158 inline bool
159 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
161 return vno1 == vno2 || vn_nary_op_eq (vno1, vno2);
164 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
165 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
168 /* vn_phi hashtable helpers. */
170 static int
171 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
173 struct vn_phi_hasher : nofree_ptr_hash <vn_phi_s>
175 static inline hashval_t hash (const vn_phi_s *);
176 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
179 /* Return the computed hashcode for phi operation P1. */
181 inline hashval_t
182 vn_phi_hasher::hash (const vn_phi_s *vp1)
184 return vp1->hashcode;
187 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
189 inline bool
190 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
192 return vp1 == vp2 || vn_phi_eq (vp1, vp2);
195 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
196 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
199 /* Compare two reference operands P1 and P2 for equality. Return true if
200 they are equal, and false otherwise. */
202 static int
203 vn_reference_op_eq (const void *p1, const void *p2)
205 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
206 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
208 return (vro1->opcode == vro2->opcode
209 /* We do not care for differences in type qualification. */
210 && (vro1->type == vro2->type
211 || (vro1->type && vro2->type
212 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
213 TYPE_MAIN_VARIANT (vro2->type))))
214 && expressions_equal_p (vro1->op0, vro2->op0)
215 && expressions_equal_p (vro1->op1, vro2->op1)
216 && expressions_equal_p (vro1->op2, vro2->op2));
219 /* Free a reference operation structure VP. */
221 static inline void
222 free_reference (vn_reference_s *vr)
224 vr->operands.release ();
228 /* vn_reference hashtable helpers. */
230 struct vn_reference_hasher : nofree_ptr_hash <vn_reference_s>
232 static inline hashval_t hash (const vn_reference_s *);
233 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
236 /* Return the hashcode for a given reference operation P1. */
238 inline hashval_t
239 vn_reference_hasher::hash (const vn_reference_s *vr1)
241 return vr1->hashcode;
244 inline bool
245 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
247 return v == c || vn_reference_eq (v, c);
250 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
251 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
254 /* The set of VN hashtables. */
256 typedef struct vn_tables_s
258 vn_nary_op_table_type *nary;
259 vn_phi_table_type *phis;
260 vn_reference_table_type *references;
261 } *vn_tables_t;
264 /* vn_constant hashtable helpers. */
266 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
268 static inline hashval_t hash (const vn_constant_s *);
269 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
272 /* Hash table hash function for vn_constant_t. */
274 inline hashval_t
275 vn_constant_hasher::hash (const vn_constant_s *vc1)
277 return vc1->hashcode;
280 /* Hash table equality function for vn_constant_t. */
282 inline bool
283 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
285 if (vc1->hashcode != vc2->hashcode)
286 return false;
288 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
291 static hash_table<vn_constant_hasher> *constant_to_value_id;
292 static bitmap constant_value_ids;
295 /* Obstack we allocate the vn-tables elements from. */
296 static obstack vn_tables_obstack;
297 /* Special obstack we never unwind. */
298 static obstack vn_tables_insert_obstack;
300 static vn_reference_t last_inserted_ref;
301 static vn_phi_t last_inserted_phi;
302 static vn_nary_op_t last_inserted_nary;
304 /* Valid hashtables storing information we have proven to be
305 correct. */
306 static vn_tables_t valid_info;
309 /* Valueization hook. Valueize NAME if it is an SSA name, otherwise
310 just return it. */
311 tree (*vn_valueize) (tree);
314 /* This represents the top of the VN lattice, which is the universal
315 value. */
317 tree VN_TOP;
319 /* Unique counter for our value ids. */
321 static unsigned int next_value_id;
324 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
325 are allocated on an obstack for locality reasons, and to free them
326 without looping over the vec. */
328 struct vn_ssa_aux_hasher : typed_noop_remove <vn_ssa_aux_t>
330 typedef vn_ssa_aux_t value_type;
331 typedef tree compare_type;
332 static inline hashval_t hash (const value_type &);
333 static inline bool equal (const value_type &, const compare_type &);
334 static inline void mark_deleted (value_type &) {}
335 static inline void mark_empty (value_type &e) { e = NULL; }
336 static inline bool is_deleted (value_type &) { return false; }
337 static inline bool is_empty (value_type &e) { return e == NULL; }
340 hashval_t
341 vn_ssa_aux_hasher::hash (const value_type &entry)
343 return SSA_NAME_VERSION (entry->name);
346 bool
347 vn_ssa_aux_hasher::equal (const value_type &entry, const compare_type &name)
349 return name == entry->name;
352 static hash_table<vn_ssa_aux_hasher> *vn_ssa_aux_hash;
353 typedef hash_table<vn_ssa_aux_hasher>::iterator vn_ssa_aux_iterator_type;
354 static struct obstack vn_ssa_aux_obstack;
356 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *, tree);
357 static unsigned int vn_nary_length_from_stmt (gimple *);
358 static vn_nary_op_t alloc_vn_nary_op_noinit (unsigned int, obstack *);
359 static vn_nary_op_t vn_nary_op_insert_into (vn_nary_op_t,
360 vn_nary_op_table_type *, bool);
361 static void init_vn_nary_op_from_stmt (vn_nary_op_t, gimple *);
362 static void init_vn_nary_op_from_pieces (vn_nary_op_t, unsigned int,
363 enum tree_code, tree, tree *);
364 static tree vn_lookup_simplify_result (gimple_match_op *);
365 static vn_reference_t vn_reference_lookup_or_insert_for_pieces
366 (tree, alias_set_type, tree, vec<vn_reference_op_s, va_heap>, tree);
368 /* Return whether there is value numbering information for a given SSA name. */
370 bool
371 has_VN_INFO (tree name)
373 return vn_ssa_aux_hash->find_with_hash (name, SSA_NAME_VERSION (name));
376 vn_ssa_aux_t
377 VN_INFO (tree name)
379 vn_ssa_aux_t *res
380 = vn_ssa_aux_hash->find_slot_with_hash (name, SSA_NAME_VERSION (name),
381 INSERT);
382 if (*res != NULL)
383 return *res;
385 vn_ssa_aux_t newinfo = *res = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
386 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
387 newinfo->name = name;
388 newinfo->valnum = VN_TOP;
389 /* We are using the visited flag to handle uses with defs not within the
390 region being value-numbered. */
391 newinfo->visited = false;
393 /* Given we create the VN_INFOs on-demand now we have to do initialization
394 to something other than VN_TOP here for default defs. */
395 if (SSA_NAME_IS_DEFAULT_DEF (name))
396 switch (TREE_CODE (SSA_NAME_VAR (name)))
398 case VAR_DECL:
399 /* All undefined vars are VARYING. */
400 newinfo->valnum = name;
401 newinfo->visited = true;
402 break;
404 case PARM_DECL:
405 /* Parameters are VARYING but we can record a condition
406 if we know it is a non-NULL pointer. */
407 newinfo->visited = true;
408 newinfo->valnum = name;
409 if (POINTER_TYPE_P (TREE_TYPE (name))
410 && nonnull_arg_p (SSA_NAME_VAR (name)))
412 tree ops[2];
413 ops[0] = name;
414 ops[1] = build_int_cst (TREE_TYPE (name), 0);
415 vn_nary_op_t nary;
416 /* Allocate from non-unwinding stack. */
417 nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
418 init_vn_nary_op_from_pieces (nary, 2, NE_EXPR,
419 boolean_type_node, ops);
420 nary->predicated_values = 0;
421 nary->u.result = boolean_true_node;
422 vn_nary_op_insert_into (nary, valid_info->nary, true);
423 gcc_assert (nary->unwind_to == NULL);
424 /* Also do not link it into the undo chain. */
425 last_inserted_nary = nary->next;
426 nary->next = (vn_nary_op_t)(void *)-1;
427 nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
428 init_vn_nary_op_from_pieces (nary, 2, EQ_EXPR,
429 boolean_type_node, ops);
430 nary->predicated_values = 0;
431 nary->u.result = boolean_false_node;
432 vn_nary_op_insert_into (nary, valid_info->nary, true);
433 gcc_assert (nary->unwind_to == NULL);
434 last_inserted_nary = nary->next;
435 nary->next = (vn_nary_op_t)(void *)-1;
436 if (dump_file && (dump_flags & TDF_DETAILS))
438 fprintf (dump_file, "Recording ");
439 print_generic_expr (dump_file, name, TDF_SLIM);
440 fprintf (dump_file, " != 0\n");
443 break;
445 case RESULT_DECL:
446 /* If the result is passed by invisible reference the default
447 def is initialized, otherwise it's uninitialized. Still
448 undefined is varying. */
449 newinfo->visited = true;
450 newinfo->valnum = name;
451 break;
453 default:
454 gcc_unreachable ();
456 return newinfo;
459 /* Return the SSA value of X. */
461 inline tree
462 SSA_VAL (tree x, bool *visited = NULL)
464 vn_ssa_aux_t tem = vn_ssa_aux_hash->find_with_hash (x, SSA_NAME_VERSION (x));
465 if (visited)
466 *visited = tem && tem->visited;
467 return tem && tem->visited ? tem->valnum : x;
470 /* Return the SSA value of the VUSE x, supporting released VDEFs
471 during elimination which will value-number the VDEF to the
472 associated VUSE (but not substitute in the whole lattice). */
474 static inline tree
475 vuse_ssa_val (tree x)
477 if (!x)
478 return NULL_TREE;
482 x = SSA_VAL (x);
483 gcc_assert (x != VN_TOP);
485 while (SSA_NAME_IN_FREE_LIST (x));
487 return x;
490 /* Similar to the above but used as callback for walk_non_aliased_vuses
491 and thus should stop at unvisited VUSE to not walk across region
492 boundaries. */
494 static tree
495 vuse_valueize (tree vuse)
499 bool visited;
500 vuse = SSA_VAL (vuse, &visited);
501 if (!visited)
502 return NULL_TREE;
503 gcc_assert (vuse != VN_TOP);
505 while (SSA_NAME_IN_FREE_LIST (vuse));
506 return vuse;
510 /* Return the vn_kind the expression computed by the stmt should be
511 associated with. */
513 enum vn_kind
514 vn_get_stmt_kind (gimple *stmt)
516 switch (gimple_code (stmt))
518 case GIMPLE_CALL:
519 return VN_REFERENCE;
520 case GIMPLE_PHI:
521 return VN_PHI;
522 case GIMPLE_ASSIGN:
524 enum tree_code code = gimple_assign_rhs_code (stmt);
525 tree rhs1 = gimple_assign_rhs1 (stmt);
526 switch (get_gimple_rhs_class (code))
528 case GIMPLE_UNARY_RHS:
529 case GIMPLE_BINARY_RHS:
530 case GIMPLE_TERNARY_RHS:
531 return VN_NARY;
532 case GIMPLE_SINGLE_RHS:
533 switch (TREE_CODE_CLASS (code))
535 case tcc_reference:
536 /* VOP-less references can go through unary case. */
537 if ((code == REALPART_EXPR
538 || code == IMAGPART_EXPR
539 || code == VIEW_CONVERT_EXPR
540 || code == BIT_FIELD_REF)
541 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
542 return VN_NARY;
544 /* Fallthrough. */
545 case tcc_declaration:
546 return VN_REFERENCE;
548 case tcc_constant:
549 return VN_CONSTANT;
551 default:
552 if (code == ADDR_EXPR)
553 return (is_gimple_min_invariant (rhs1)
554 ? VN_CONSTANT : VN_REFERENCE);
555 else if (code == CONSTRUCTOR)
556 return VN_NARY;
557 return VN_NONE;
559 default:
560 return VN_NONE;
563 default:
564 return VN_NONE;
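/* Illustrative classification examples (hypothetical GIMPLE, not from a
   real dump) for vn_get_stmt_kind above:

       _1 = a_2 + b_3;              VN_NARY   (binary rhs)
       _1 = (int) x_2;              VN_NARY   (unary rhs)
       _1 = REALPART_EXPR <c_2>;    VN_NARY   (VOP-less reference on an SSA name)
       _1 = *p_2;                   VN_REFERENCE
       _1 = foo (a_2);              VN_REFERENCE   (GIMPLE_CALL)
       x_4 = PHI <x_2(2), x_3(3)>   VN_PHI
       _1 = 5;                      VN_CONSTANT
       _1 = &a[0];                  VN_CONSTANT if the address is invariant,
                                    VN_REFERENCE otherwise (e.g. &a[i_2])  */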
568 /* Lookup a value id for CONSTANT and return it. If it does not
569 exist returns 0. */
571 unsigned int
572 get_constant_value_id (tree constant)
574 vn_constant_s **slot;
575 struct vn_constant_s vc;
577 vc.hashcode = vn_hash_constant_with_type (constant);
578 vc.constant = constant;
579 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
580 if (slot)
581 return (*slot)->value_id;
582 return 0;
585 /* Lookup a value id for CONSTANT, and if it does not exist, create a
586 new one and return it. If it does exist, return it. */
588 unsigned int
589 get_or_alloc_constant_value_id (tree constant)
591 vn_constant_s **slot;
592 struct vn_constant_s vc;
593 vn_constant_t vcp;
595 /* If the hashtable isn't initialized we're not running from PRE and thus
596 do not need value-ids. */
597 if (!constant_to_value_id)
598 return 0;
600 vc.hashcode = vn_hash_constant_with_type (constant);
601 vc.constant = constant;
602 slot = constant_to_value_id->find_slot (&vc, INSERT);
603 if (*slot)
604 return (*slot)->value_id;
606 vcp = XNEW (struct vn_constant_s);
607 vcp->hashcode = vc.hashcode;
608 vcp->constant = constant;
609 vcp->value_id = get_next_value_id ();
610 *slot = vcp;
611 bitmap_set_bit (constant_value_ids, vcp->value_id);
612 return vcp->value_id;
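/* Illustrative sketch, not GCC code: the hash-consing scheme above maps
   each constant to a small integer value id, allocating a fresh id the
   first time a constant is seen and remembering which ids denote
   constants (compare value_id_constant_p just below).  Constants are
   represented by strings here purely for the example.

     #include <string>
     #include <unordered_map>
     #include <unordered_set>

     struct constant_value_ids
     {
       std::unordered_map<std::string, unsigned> ids; // constant -> value id
       std::unordered_set<unsigned> constant_ids;     // ids denoting constants
       unsigned next_id = 1;

       // Like get_or_alloc_constant_value_id: allocate on first lookup.
       unsigned get_or_alloc (const std::string &cst)
       {
         auto it = ids.find (cst);
         if (it != ids.end ())
           return it->second;
         unsigned id = next_id++;
         ids.emplace (cst, id);
         constant_ids.insert (id);
         return id;
       }

       // Like value_id_constant_p.
       bool constant_p (unsigned id) const { return constant_ids.count (id) != 0; }
     };  */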
615 /* Return true if V is a value id for a constant. */
617 bool
618 value_id_constant_p (unsigned int v)
620 return bitmap_bit_p (constant_value_ids, v);
623 /* Compute the hash for a reference operand VRO1. */
625 static void
626 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
628 hstate.add_int (vro1->opcode);
629 if (vro1->op0)
630 inchash::add_expr (vro1->op0, hstate);
631 if (vro1->op1)
632 inchash::add_expr (vro1->op1, hstate);
633 if (vro1->op2)
634 inchash::add_expr (vro1->op2, hstate);
637 /* Compute a hash for the reference operation VR1 and return it. */
639 static hashval_t
640 vn_reference_compute_hash (const vn_reference_t vr1)
642 inchash::hash hstate;
643 hashval_t result;
644 int i;
645 vn_reference_op_t vro;
646 poly_int64 off = -1;
647 bool deref = false;
649 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
651 if (vro->opcode == MEM_REF)
652 deref = true;
653 else if (vro->opcode != ADDR_EXPR)
654 deref = false;
655 if (maybe_ne (vro->off, -1))
657 if (known_eq (off, -1))
658 off = 0;
659 off += vro->off;
661 else
663 if (maybe_ne (off, -1)
664 && maybe_ne (off, 0))
665 hstate.add_poly_int (off);
666 off = -1;
667 if (deref
668 && vro->opcode == ADDR_EXPR)
670 if (vro->op0)
672 tree op = TREE_OPERAND (vro->op0, 0);
673 hstate.add_int (TREE_CODE (op));
674 inchash::add_expr (op, hstate);
677 else
678 vn_reference_op_compute_hash (vro, hstate);
681 result = hstate.end ();
682 /* ??? We would ICE later if we hash instead of adding that in. */
683 if (vr1->vuse)
684 result += SSA_NAME_VERSION (vr1->vuse);
686 return result;
689 /* Return true if reference operations VR1 and VR2 are equivalent. This
690 means they have the same set of operands and vuses. */
692 bool
693 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
695 unsigned i, j;
697 /* Early out if this is not a hash collision. */
698 if (vr1->hashcode != vr2->hashcode)
699 return false;
701 /* The VOP needs to be the same. */
702 if (vr1->vuse != vr2->vuse)
703 return false;
705 /* If the operands are the same we are done. */
706 if (vr1->operands == vr2->operands)
707 return true;
709 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
710 return false;
712 if (INTEGRAL_TYPE_P (vr1->type)
713 && INTEGRAL_TYPE_P (vr2->type))
715 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
716 return false;
718 else if (INTEGRAL_TYPE_P (vr1->type)
719 && (TYPE_PRECISION (vr1->type)
720 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
721 return false;
722 else if (INTEGRAL_TYPE_P (vr2->type)
723 && (TYPE_PRECISION (vr2->type)
724 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
725 return false;
727 i = 0;
728 j = 0;
731 poly_int64 off1 = 0, off2 = 0;
732 vn_reference_op_t vro1, vro2;
733 vn_reference_op_s tem1, tem2;
734 bool deref1 = false, deref2 = false;
735 for (; vr1->operands.iterate (i, &vro1); i++)
737 if (vro1->opcode == MEM_REF)
738 deref1 = true;
739 /* Do not look through a storage order barrier. */
740 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
741 return false;
742 if (known_eq (vro1->off, -1))
743 break;
744 off1 += vro1->off;
746 for (; vr2->operands.iterate (j, &vro2); j++)
748 if (vro2->opcode == MEM_REF)
749 deref2 = true;
750 /* Do not look through a storage order barrier. */
751 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
752 return false;
753 if (known_eq (vro2->off, -1))
754 break;
755 off2 += vro2->off;
757 if (maybe_ne (off1, off2))
758 return false;
759 if (deref1 && vro1->opcode == ADDR_EXPR)
761 memset (&tem1, 0, sizeof (tem1));
762 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
763 tem1.type = TREE_TYPE (tem1.op0);
764 tem1.opcode = TREE_CODE (tem1.op0);
765 vro1 = &tem1;
766 deref1 = false;
768 if (deref2 && vro2->opcode == ADDR_EXPR)
770 memset (&tem2, 0, sizeof (tem2));
771 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
772 tem2.type = TREE_TYPE (tem2.op0);
773 tem2.opcode = TREE_CODE (tem2.op0);
774 vro2 = &tem2;
775 deref2 = false;
777 if (deref1 != deref2)
778 return false;
779 if (!vn_reference_op_eq (vro1, vro2))
780 return false;
781 ++j;
782 ++i;
784 while (vr1->operands.length () != i
785 || vr2->operands.length () != j);
787 return true;
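/* Illustrative example (hypothetical): with int a[4], the reference
   a[1] decomposes into { ARRAY_REF, MEM_REF, ADDR_EXPR &a } while
   MEM[&a + 4] decomposes into { MEM_REF, ADDR_EXPR &a }.  The loop
   above accumulates the known constant offsets of each chain (4 bytes
   in both cases) up to the ADDR_EXPR, compares the accumulated
   offsets and then the base operands, and therefore considers the two
   references equal, provided the access types agree as checked
   before.  A VIEW_CONVERT_EXPR with reverse storage order in either
   chain makes the comparison fail immediately.  */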
790 /* Copy the operations present in load/store REF into RESULT, a vector of
791 vn_reference_op_s's. */
793 static void
794 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
796 /* For non-calls, store the information that makes up the address. */
797 tree orig = ref;
798 while (ref)
800 vn_reference_op_s temp;
802 memset (&temp, 0, sizeof (temp));
803 temp.type = TREE_TYPE (ref);
804 temp.opcode = TREE_CODE (ref);
805 temp.off = -1;
807 switch (temp.opcode)
809 case MODIFY_EXPR:
810 temp.op0 = TREE_OPERAND (ref, 1);
811 break;
812 case WITH_SIZE_EXPR:
813 temp.op0 = TREE_OPERAND (ref, 1);
814 temp.off = 0;
815 break;
816 case MEM_REF:
817 /* The base address gets its own vn_reference_op_s structure. */
818 temp.op0 = TREE_OPERAND (ref, 1);
819 if (!mem_ref_offset (ref).to_shwi (&temp.off))
820 temp.off = -1;
821 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
822 temp.base = MR_DEPENDENCE_BASE (ref);
823 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
824 break;
825 case TARGET_MEM_REF:
826 /* The base address gets its own vn_reference_op_s structure. */
827 temp.op0 = TMR_INDEX (ref);
828 temp.op1 = TMR_STEP (ref);
829 temp.op2 = TMR_OFFSET (ref);
830 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
831 temp.base = MR_DEPENDENCE_BASE (ref);
832 result->safe_push (temp);
833 memset (&temp, 0, sizeof (temp));
834 temp.type = NULL_TREE;
835 temp.opcode = ERROR_MARK;
836 temp.op0 = TMR_INDEX2 (ref);
837 temp.off = -1;
838 break;
839 case BIT_FIELD_REF:
840 /* Record bits, position and storage order. */
841 temp.op0 = TREE_OPERAND (ref, 1);
842 temp.op1 = TREE_OPERAND (ref, 2);
843 if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
844 temp.off = -1;
845 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
846 break;
847 case COMPONENT_REF:
848 /* The field decl is enough to unambiguously specify the field;
849 a matching type is not necessary and a mismatching type
850 is always a spurious difference. */
851 temp.type = NULL_TREE;
852 temp.op0 = TREE_OPERAND (ref, 1);
853 temp.op1 = TREE_OPERAND (ref, 2);
855 tree this_offset = component_ref_field_offset (ref);
856 if (this_offset
857 && poly_int_tree_p (this_offset))
859 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
860 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
862 poly_offset_int off
863 = (wi::to_poly_offset (this_offset)
864 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
865 /* Prohibit value-numbering zero offset components
866 of addresses the same before the pass folding
867 __builtin_object_size had a chance to run
868 (checking cfun->after_inlining does the
869 trick here). */
870 if (TREE_CODE (orig) != ADDR_EXPR
871 || maybe_ne (off, 0)
872 || cfun->after_inlining)
873 off.to_shwi (&temp.off);
877 break;
878 case ARRAY_RANGE_REF:
879 case ARRAY_REF:
881 tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
882 /* Record index as operand. */
883 temp.op0 = TREE_OPERAND (ref, 1);
884 /* Always record lower bounds and element size. */
885 temp.op1 = array_ref_low_bound (ref);
886 /* But record element size in units of the type alignment. */
887 temp.op2 = TREE_OPERAND (ref, 3);
888 temp.align = eltype->type_common.align;
889 if (! temp.op2)
890 temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
891 size_int (TYPE_ALIGN_UNIT (eltype)));
892 if (poly_int_tree_p (temp.op0)
893 && poly_int_tree_p (temp.op1)
894 && TREE_CODE (temp.op2) == INTEGER_CST)
896 poly_offset_int off = ((wi::to_poly_offset (temp.op0)
897 - wi::to_poly_offset (temp.op1))
898 * wi::to_offset (temp.op2)
899 * vn_ref_op_align_unit (&temp));
900 off.to_shwi (&temp.off);
903 break;
904 case VAR_DECL:
905 if (DECL_HARD_REGISTER (ref))
907 temp.op0 = ref;
908 break;
910 /* Fallthru. */
911 case PARM_DECL:
912 case CONST_DECL:
913 case RESULT_DECL:
914 /* Canonicalize decls to MEM[&decl] which is what we end up with
915 when valueizing MEM[ptr] with ptr = &decl. */
916 temp.opcode = MEM_REF;
917 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
918 temp.off = 0;
919 result->safe_push (temp);
920 temp.opcode = ADDR_EXPR;
921 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
922 temp.type = TREE_TYPE (temp.op0);
923 temp.off = -1;
924 break;
925 case STRING_CST:
926 case INTEGER_CST:
927 case COMPLEX_CST:
928 case VECTOR_CST:
929 case REAL_CST:
930 case FIXED_CST:
931 case CONSTRUCTOR:
932 case SSA_NAME:
933 temp.op0 = ref;
934 break;
935 case ADDR_EXPR:
936 if (is_gimple_min_invariant (ref))
938 temp.op0 = ref;
939 break;
941 break;
942 /* These are only interesting for their operands, their
943 existence, and their type. They will never be the last
944 ref in the chain of references (i.e. they require an
945 operand), so we don't have to put anything
946 for op* as it will be handled by the iteration. */
947 case REALPART_EXPR:
948 temp.off = 0;
949 break;
950 case VIEW_CONVERT_EXPR:
951 temp.off = 0;
952 temp.reverse = storage_order_barrier_p (ref);
953 break;
954 case IMAGPART_EXPR:
955 /* This is only interesting for its constant offset. */
956 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
957 break;
958 default:
959 gcc_unreachable ();
961 result->safe_push (temp);
963 if (REFERENCE_CLASS_P (ref)
964 || TREE_CODE (ref) == MODIFY_EXPR
965 || TREE_CODE (ref) == WITH_SIZE_EXPR
966 || (TREE_CODE (ref) == ADDR_EXPR
967 && !is_gimple_min_invariant (ref)))
968 ref = TREE_OPERAND (ref, 0);
969 else
970 ref = NULL_TREE;
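/* Illustrative example (hypothetical): for a load from s.a[i_1] with
   struct S { int a[4]; } s; the loop above pushes roughly these
   vn_reference_op_s entries, outermost reference first:

       { ARRAY_REF,     op0 = i_1, op1 = 0 (low bound),
                        op2 = 1 (element size in alignment units), off = -1 }
       { COMPONENT_REF, op0 = FIELD_DECL a, off = 0 }
       { MEM_REF,       op0 = 0,  off = 0 }    the decl canonicalized to MEM[&s]
       { ADDR_EXPR,     op0 = &s, off = -1 }

   The ARRAY_REF entry gets a known off only when the index is a
   constant, as computed just above.  */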
974 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
975 operands in *OPS, the reference alias set SET and the reference type TYPE.
976 Return true if something useful was produced. */
978 bool
979 ao_ref_init_from_vn_reference (ao_ref *ref,
980 alias_set_type set, tree type,
981 vec<vn_reference_op_s> ops)
983 vn_reference_op_t op;
984 unsigned i;
985 tree base = NULL_TREE;
986 tree *op0_p = &base;
987 poly_offset_int offset = 0;
988 poly_offset_int max_size;
989 poly_offset_int size = -1;
990 tree size_tree = NULL_TREE;
991 alias_set_type base_alias_set = -1;
993 /* First get the final access size from just the outermost expression. */
994 op = &ops[0];
995 if (op->opcode == COMPONENT_REF)
996 size_tree = DECL_SIZE (op->op0);
997 else if (op->opcode == BIT_FIELD_REF)
998 size_tree = op->op0;
999 else
1001 machine_mode mode = TYPE_MODE (type);
1002 if (mode == BLKmode)
1003 size_tree = TYPE_SIZE (type);
1004 else
1005 size = GET_MODE_BITSIZE (mode);
1007 if (size_tree != NULL_TREE
1008 && poly_int_tree_p (size_tree))
1009 size = wi::to_poly_offset (size_tree);
1011 /* Initially, maxsize is the same as the accessed element size.
1012 In the following it will only grow (or become -1). */
1013 max_size = size;
1015 /* Compute cumulative bit-offset for nested component-refs and array-refs,
1016 and find the ultimate containing object. */
1017 FOR_EACH_VEC_ELT (ops, i, op)
1019 switch (op->opcode)
1021 /* These may be in the reference ops, but we cannot do anything
1022 sensible with them here. */
1023 case ADDR_EXPR:
1024 /* Apart from ADDR_EXPR arguments to MEM_REF. */
1025 if (base != NULL_TREE
1026 && TREE_CODE (base) == MEM_REF
1027 && op->op0
1028 && DECL_P (TREE_OPERAND (op->op0, 0)))
1030 vn_reference_op_t pop = &ops[i-1];
1031 base = TREE_OPERAND (op->op0, 0);
1032 if (known_eq (pop->off, -1))
1034 max_size = -1;
1035 offset = 0;
1037 else
1038 offset += pop->off * BITS_PER_UNIT;
1039 op0_p = NULL;
1040 break;
1042 /* Fallthru. */
1043 case CALL_EXPR:
1044 return false;
1046 /* Record the base objects. */
1047 case MEM_REF:
1048 base_alias_set = get_deref_alias_set (op->op0);
1049 *op0_p = build2 (MEM_REF, op->type,
1050 NULL_TREE, op->op0);
1051 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
1052 MR_DEPENDENCE_BASE (*op0_p) = op->base;
1053 op0_p = &TREE_OPERAND (*op0_p, 0);
1054 break;
1056 case VAR_DECL:
1057 case PARM_DECL:
1058 case RESULT_DECL:
1059 case SSA_NAME:
1060 *op0_p = op->op0;
1061 op0_p = NULL;
1062 break;
1064 /* And now the usual component-reference style ops. */
1065 case BIT_FIELD_REF:
1066 offset += wi::to_poly_offset (op->op1);
1067 break;
1069 case COMPONENT_REF:
1071 tree field = op->op0;
1072 /* We do not have a complete COMPONENT_REF tree here so we
1073 cannot use component_ref_field_offset. Do the interesting
1074 parts manually. */
1075 tree this_offset = DECL_FIELD_OFFSET (field);
1077 if (op->op1 || !poly_int_tree_p (this_offset))
1078 max_size = -1;
1079 else
1081 poly_offset_int woffset = (wi::to_poly_offset (this_offset)
1082 << LOG2_BITS_PER_UNIT);
1083 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1084 offset += woffset;
1086 break;
1089 case ARRAY_RANGE_REF:
1090 case ARRAY_REF:
1091 /* We recorded the lower bound and the element size. */
1092 if (!poly_int_tree_p (op->op0)
1093 || !poly_int_tree_p (op->op1)
1094 || TREE_CODE (op->op2) != INTEGER_CST)
1095 max_size = -1;
1096 else
1098 poly_offset_int woffset
1099 = wi::sext (wi::to_poly_offset (op->op0)
1100 - wi::to_poly_offset (op->op1),
1101 TYPE_PRECISION (TREE_TYPE (op->op0)));
1102 woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1103 woffset <<= LOG2_BITS_PER_UNIT;
1104 offset += woffset;
1106 break;
1108 case REALPART_EXPR:
1109 break;
1111 case IMAGPART_EXPR:
1112 offset += size;
1113 break;
1115 case VIEW_CONVERT_EXPR:
1116 break;
1118 case STRING_CST:
1119 case INTEGER_CST:
1120 case COMPLEX_CST:
1121 case VECTOR_CST:
1122 case REAL_CST:
1123 case CONSTRUCTOR:
1124 case CONST_DECL:
1125 return false;
1127 default:
1128 return false;
1132 if (base == NULL_TREE)
1133 return false;
1135 ref->ref = NULL_TREE;
1136 ref->base = base;
1137 ref->ref_alias_set = set;
1138 if (base_alias_set != -1)
1139 ref->base_alias_set = base_alias_set;
1140 else
1141 ref->base_alias_set = get_alias_set (base);
1142 /* We discount volatiles from value-numbering elsewhere. */
1143 ref->volatile_p = false;
1145 if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
1147 ref->offset = 0;
1148 ref->size = -1;
1149 ref->max_size = -1;
1150 return true;
1153 if (!offset.to_shwi (&ref->offset))
1155 ref->offset = 0;
1156 ref->max_size = -1;
1157 return true;
1160 if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
1161 ref->max_size = -1;
1163 return true;
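/* Illustrative example (hypothetical), continuing the s.a[i_1]
   decomposition shown earlier but with a constant index, s.a[2]: the
   loop above accumulates 2 * 4 bytes from the ARRAY_REF, 0 from the
   COMPONENT_REF and 0 from the MEM_REF/ADDR_EXPR pair, and the
   ADDR_EXPR arm replaces the MEM_REF base by the decl itself, giving
   ref->base = s, ref->offset = 64 bits and ref->size = ref->max_size
   = 32 bits for an int access.  */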
1166 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1167 vn_reference_op_s's. */
1169 static void
1170 copy_reference_ops_from_call (gcall *call,
1171 vec<vn_reference_op_s> *result)
1173 vn_reference_op_s temp;
1174 unsigned i;
1175 tree lhs = gimple_call_lhs (call);
1176 int lr;
1178 /* If two calls have a different non-SSA LHS, vdef value numbers should be
1179 different. By adding the LHS here to the vector, we ensure that the
1180 hashcode is different, guaranteeing a different value number. */
1181 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1183 memset (&temp, 0, sizeof (temp));
1184 temp.opcode = MODIFY_EXPR;
1185 temp.type = TREE_TYPE (lhs);
1186 temp.op0 = lhs;
1187 temp.off = -1;
1188 result->safe_push (temp);
1191 /* Copy the type, opcode, function, static chain and EH region, if any. */
1192 memset (&temp, 0, sizeof (temp));
1193 temp.type = gimple_call_fntype (call);
1194 temp.opcode = CALL_EXPR;
1195 temp.op0 = gimple_call_fn (call);
1196 temp.op1 = gimple_call_chain (call);
1197 if (stmt_could_throw_p (cfun, call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1198 temp.op2 = size_int (lr);
1199 temp.off = -1;
1200 result->safe_push (temp);
1202 /* Copy the call arguments. As they can be references as well,
1203 just chain them together. */
1204 for (i = 0; i < gimple_call_num_args (call); ++i)
1206 tree callarg = gimple_call_arg (call, i);
1207 copy_reference_ops_from_ref (callarg, result);
1211 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1212 *I_P to point to the last element of the replacement. */
1213 static bool
1214 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1215 unsigned int *i_p)
1217 unsigned int i = *i_p;
1218 vn_reference_op_t op = &(*ops)[i];
1219 vn_reference_op_t mem_op = &(*ops)[i - 1];
1220 tree addr_base;
1221 poly_int64 addr_offset = 0;
1223 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1224 from .foo.bar to the preceding MEM_REF offset and replace the
1225 address with &OBJ. */
1226 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1227 &addr_offset);
1228 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1229 if (addr_base != TREE_OPERAND (op->op0, 0))
1231 poly_offset_int off
1232 = (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
1233 SIGNED)
1234 + addr_offset);
1235 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1236 op->op0 = build_fold_addr_expr (addr_base);
1237 if (tree_fits_shwi_p (mem_op->op0))
1238 mem_op->off = tree_to_shwi (mem_op->op0);
1239 else
1240 mem_op->off = -1;
1241 return true;
1243 return false;
1246 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1247 *I_P to point to the last element of the replacement. */
1248 static bool
1249 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1250 unsigned int *i_p)
1252 bool changed = false;
1253 vn_reference_op_t op;
1257 unsigned int i = *i_p;
1258 op = &(*ops)[i];
1259 vn_reference_op_t mem_op = &(*ops)[i - 1];
1260 gimple *def_stmt;
1261 enum tree_code code;
1262 poly_offset_int off;
1264 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1265 if (!is_gimple_assign (def_stmt))
1266 return changed;
1268 code = gimple_assign_rhs_code (def_stmt);
1269 if (code != ADDR_EXPR
1270 && code != POINTER_PLUS_EXPR)
1271 return changed;
1273 off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);
1275 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1276 from .foo.bar to the preceding MEM_REF offset and replace the
1277 address with &OBJ. */
1278 if (code == ADDR_EXPR)
1280 tree addr, addr_base;
1281 poly_int64 addr_offset;
1283 addr = gimple_assign_rhs1 (def_stmt);
1284 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1285 &addr_offset);
1286 /* If that didn't work because the address isn't invariant, propagate
1287 the reference tree from the address operation in case the current
1288 dereference has no offset. */
1289 if (!addr_base
1290 && *i_p == ops->length () - 1
1291 && known_eq (off, 0)
1292 /* This makes us disable this transform for PRE where the
1293 reference ops might be also used for code insertion which
1294 is invalid. */
1295 && default_vn_walk_kind == VN_WALKREWRITE)
1297 auto_vec<vn_reference_op_s, 32> tem;
1298 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1299 /* Make sure to preserve TBAA info. The only objects not
1300 wrapped in MEM_REFs that can have their address taken are
1301 STRING_CSTs. */
1302 if (tem.length () >= 2
1303 && tem[tem.length () - 2].opcode == MEM_REF)
1305 vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1306 new_mem_op->op0
1307 = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1308 wi::to_poly_wide (new_mem_op->op0));
1310 else
1311 gcc_assert (tem.last ().opcode == STRING_CST);
1312 ops->pop ();
1313 ops->pop ();
1314 ops->safe_splice (tem);
1315 --*i_p;
1316 return true;
1318 if (!addr_base
1319 || TREE_CODE (addr_base) != MEM_REF
1320 || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
1321 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base,
1322 0))))
1323 return changed;
1325 off += addr_offset;
1326 off += mem_ref_offset (addr_base);
1327 op->op0 = TREE_OPERAND (addr_base, 0);
1329 else
1331 tree ptr, ptroff;
1332 ptr = gimple_assign_rhs1 (def_stmt);
1333 ptroff = gimple_assign_rhs2 (def_stmt);
1334 if (TREE_CODE (ptr) != SSA_NAME
1335 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1336 /* Make sure to not endlessly recurse.
1337 See gcc.dg/tree-ssa/20040408-1.c for an example. Can easily
1338 happen when we value-number a PHI to its backedge value. */
1339 || SSA_VAL (ptr) == op->op0
1340 || !poly_int_tree_p (ptroff))
1341 return changed;
1343 off += wi::to_poly_offset (ptroff);
1344 op->op0 = ptr;
1347 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1348 if (tree_fits_shwi_p (mem_op->op0))
1349 mem_op->off = tree_to_shwi (mem_op->op0);
1350 else
1351 mem_op->off = -1;
1352 /* ??? Can end up with endless recursion here!?
1353 gcc.c-torture/execute/strcmp-1.c */
1354 if (TREE_CODE (op->op0) == SSA_NAME)
1355 op->op0 = SSA_VAL (op->op0);
1356 if (TREE_CODE (op->op0) != SSA_NAME)
1357 op->opcode = TREE_CODE (op->op0);
1359 changed = true;
1361 /* Tail-recurse. */
1362 while (TREE_CODE (op->op0) == SSA_NAME);
1364 /* Fold a remaining *&. */
1365 if (TREE_CODE (op->op0) == ADDR_EXPR)
1366 vn_reference_fold_indirect (ops, i_p);
1368 return changed;
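/* Illustrative example (hypothetical SSA names): given

       q_1 = p_2 + 4;
       ... = MEM[q_1 + 8];

   the POINTER_PLUS_EXPR arm above rewrites the reference operands to
   those of MEM[p_2 + 12] and then tail-recurses on the definition of
   p_2.  When instead q_1 = &MEM[p_2 + 4].f, the ADDR_EXPR arm folds
   both the MEM_REF offset and the component offset of f into the
   preceding MEM_REF operand in the same way.  */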
1371 /* Optimize the reference REF to a constant if possible or return
1372 NULL_TREE if not. */
1374 tree
1375 fully_constant_vn_reference_p (vn_reference_t ref)
1377 vec<vn_reference_op_s> operands = ref->operands;
1378 vn_reference_op_t op;
1380 /* Try to simplify the translated expression if it is
1381 a call to a builtin function with at most two arguments. */
1382 op = &operands[0];
1383 if (op->opcode == CALL_EXPR
1384 && TREE_CODE (op->op0) == ADDR_EXPR
1385 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1386 && fndecl_built_in_p (TREE_OPERAND (op->op0, 0))
1387 && operands.length () >= 2
1388 && operands.length () <= 3)
1390 vn_reference_op_t arg0, arg1 = NULL;
1391 bool anyconst = false;
1392 arg0 = &operands[1];
1393 if (operands.length () > 2)
1394 arg1 = &operands[2];
1395 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1396 || (arg0->opcode == ADDR_EXPR
1397 && is_gimple_min_invariant (arg0->op0)))
1398 anyconst = true;
1399 if (arg1
1400 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1401 || (arg1->opcode == ADDR_EXPR
1402 && is_gimple_min_invariant (arg1->op0))))
1403 anyconst = true;
1404 if (anyconst)
1406 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1407 arg1 ? 2 : 1,
1408 arg0->op0,
1409 arg1 ? arg1->op0 : NULL);
1410 if (folded
1411 && TREE_CODE (folded) == NOP_EXPR)
1412 folded = TREE_OPERAND (folded, 0);
1413 if (folded
1414 && is_gimple_min_invariant (folded))
1415 return folded;
1419 /* Simplify reads from constants or constant initializers. */
1420 else if (BITS_PER_UNIT == 8
1421 && COMPLETE_TYPE_P (ref->type)
1422 && is_gimple_reg_type (ref->type))
1424 poly_int64 off = 0;
1425 HOST_WIDE_INT size;
1426 if (INTEGRAL_TYPE_P (ref->type))
1427 size = TYPE_PRECISION (ref->type);
1428 else if (tree_fits_shwi_p (TYPE_SIZE (ref->type)))
1429 size = tree_to_shwi (TYPE_SIZE (ref->type));
1430 else
1431 return NULL_TREE;
1432 if (size % BITS_PER_UNIT != 0
1433 || size > MAX_BITSIZE_MODE_ANY_MODE)
1434 return NULL_TREE;
1435 size /= BITS_PER_UNIT;
1436 unsigned i;
1437 for (i = 0; i < operands.length (); ++i)
1439 if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1441 ++i;
1442 break;
1444 if (known_eq (operands[i].off, -1))
1445 return NULL_TREE;
1446 off += operands[i].off;
1447 if (operands[i].opcode == MEM_REF)
1449 ++i;
1450 break;
1453 vn_reference_op_t base = &operands[--i];
1454 tree ctor = error_mark_node;
1455 tree decl = NULL_TREE;
1456 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1457 ctor = base->op0;
1458 else if (base->opcode == MEM_REF
1459 && base[1].opcode == ADDR_EXPR
1460 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1461 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
1462 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
1464 decl = TREE_OPERAND (base[1].op0, 0);
1465 if (TREE_CODE (decl) == STRING_CST)
1466 ctor = decl;
1467 else
1468 ctor = ctor_for_folding (decl);
1470 if (ctor == NULL_TREE)
1471 return build_zero_cst (ref->type);
1472 else if (ctor != error_mark_node)
1474 HOST_WIDE_INT const_off;
1475 if (decl)
1477 tree res = fold_ctor_reference (ref->type, ctor,
1478 off * BITS_PER_UNIT,
1479 size * BITS_PER_UNIT, decl);
1480 if (res)
1482 STRIP_USELESS_TYPE_CONVERSION (res);
1483 if (is_gimple_min_invariant (res))
1484 return res;
1487 else if (off.is_constant (&const_off))
1489 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1490 int len = native_encode_expr (ctor, buf, size, const_off);
1491 if (len > 0)
1492 return native_interpret_expr (ref->type, buf, len);
1497 return NULL_TREE;
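/* Illustrative examples (hypothetical): a reference describing the call
   __builtin_popcount (8) with a constant argument is folded through
   build_call_expr to 1 by the first arm above.  A load c[2] from
   static const char c[4] = { 1, 2, 3, 4 } reaches the second arm:
   ctor_for_folding returns the initializer and fold_ctor_reference
   extracts the constant 3; when the base operand is itself a constant
   (a VECTOR_CST, say), the bytes are instead extracted with
   native_encode_expr and reinterpreted with native_interpret_expr.  */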
1500 /* Return true if OPS contain a storage order barrier. */
1502 static bool
1503 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1505 vn_reference_op_t op;
1506 unsigned i;
1508 FOR_EACH_VEC_ELT (ops, i, op)
1509 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1510 return true;
1512 return false;
1515 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1516 structures into their value numbers. This is done in-place, and
1517 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1518 whether any operands were valueized. */
1520 static vec<vn_reference_op_s>
1521 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything,
1522 bool with_avail = false)
1524 vn_reference_op_t vro;
1525 unsigned int i;
1527 *valueized_anything = false;
1529 FOR_EACH_VEC_ELT (orig, i, vro)
1531 if (vro->opcode == SSA_NAME
1532 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1534 tree tem = with_avail ? vn_valueize (vro->op0) : SSA_VAL (vro->op0);
1535 if (tem != vro->op0)
1537 *valueized_anything = true;
1538 vro->op0 = tem;
1540 /* If it transforms from an SSA_NAME to a constant, update
1541 the opcode. */
1542 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1543 vro->opcode = TREE_CODE (vro->op0);
1545 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1547 tree tem = with_avail ? vn_valueize (vro->op1) : SSA_VAL (vro->op1);
1548 if (tem != vro->op1)
1550 *valueized_anything = true;
1551 vro->op1 = tem;
1554 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1556 tree tem = with_avail ? vn_valueize (vro->op2) : SSA_VAL (vro->op2);
1557 if (tem != vro->op2)
1559 *valueized_anything = true;
1560 vro->op2 = tem;
1563 /* If it transforms from an SSA_NAME to an address, fold with
1564 a preceding indirect reference. */
1565 if (i > 0
1566 && vro->op0
1567 && TREE_CODE (vro->op0) == ADDR_EXPR
1568 && orig[i - 1].opcode == MEM_REF)
1570 if (vn_reference_fold_indirect (&orig, &i))
1571 *valueized_anything = true;
1573 else if (i > 0
1574 && vro->opcode == SSA_NAME
1575 && orig[i - 1].opcode == MEM_REF)
1577 if (vn_reference_maybe_forwprop_address (&orig, &i))
1578 *valueized_anything = true;
1580 /* If it transforms a non-constant ARRAY_REF into a constant
1581 one, adjust the constant offset. */
1582 else if (vro->opcode == ARRAY_REF
1583 && known_eq (vro->off, -1)
1584 && poly_int_tree_p (vro->op0)
1585 && poly_int_tree_p (vro->op1)
1586 && TREE_CODE (vro->op2) == INTEGER_CST)
1588 poly_offset_int off = ((wi::to_poly_offset (vro->op0)
1589 - wi::to_poly_offset (vro->op1))
1590 * wi::to_offset (vro->op2)
1591 * vn_ref_op_align_unit (vro));
1592 off.to_shwi (&vro->off);
1596 return orig;
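/* Illustrative example (hypothetical): if p_1 has been value numbered
   to &s.f, where field f lives at byte offset 8, then valueizing the
   ops of MEM[p_1 + 4] replaces the SSA operand by that address and
   vn_reference_fold_indirect folds the component offset into the
   preceding MEM_REF, leaving the ops of MEM[&s + 12].  A direct
   access to s at offset 12 and the indirect access through p_1 thus
   get the same operand encoding.  */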
1599 static vec<vn_reference_op_s>
1600 valueize_refs (vec<vn_reference_op_s> orig)
1602 bool tem;
1603 return valueize_refs_1 (orig, &tem);
1606 static vec<vn_reference_op_s> shared_lookup_references;
1608 /* Create a vector of vn_reference_op_s structures from REF, a
1609 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1610 this function. *VALUEIZED_ANYTHING will specify whether any
1611 operands were valueized. */
1613 static vec<vn_reference_op_s>
1614 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1616 if (!ref)
1617 return vNULL;
1618 shared_lookup_references.truncate (0);
1619 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1620 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1621 valueized_anything);
1622 return shared_lookup_references;
1625 /* Create a vector of vn_reference_op_s structures from CALL, a
1626 call statement. The vector is shared among all callers of
1627 this function. */
1629 static vec<vn_reference_op_s>
1630 valueize_shared_reference_ops_from_call (gcall *call)
1632 if (!call)
1633 return vNULL;
1634 shared_lookup_references.truncate (0);
1635 copy_reference_ops_from_call (call, &shared_lookup_references);
1636 shared_lookup_references = valueize_refs (shared_lookup_references);
1637 return shared_lookup_references;
1640 /* Lookup a SCCVN reference operation VR in the current hash table.
1641 Returns the resulting value number if it exists in the hash table,
1642 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1643 vn_reference_t stored in the hashtable if something is found. */
1645 static tree
1646 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1648 vn_reference_s **slot;
1649 hashval_t hash;
1651 hash = vr->hashcode;
1652 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1653 if (slot)
1655 if (vnresult)
1656 *vnresult = (vn_reference_t)*slot;
1657 return ((vn_reference_t)*slot)->result;
1660 return NULL_TREE;
1664 /* Partial definition tracking support. */
1666 struct pd_range
1668 HOST_WIDE_INT offset;
1669 HOST_WIDE_INT size;
1672 struct pd_data
1674 tree rhs;
1675 HOST_WIDE_INT offset;
1676 HOST_WIDE_INT size;
1679 /* Context for alias walking. */
1681 struct vn_walk_cb_data
1683 vn_walk_cb_data (vn_reference_t vr_, tree orig_ref_, tree *last_vuse_ptr_,
1684 vn_lookup_kind vn_walk_kind_, bool tbaa_p_)
1685 : vr (vr_), last_vuse_ptr (last_vuse_ptr_),
1686 vn_walk_kind (vn_walk_kind_), tbaa_p (tbaa_p_), known_ranges (NULL)
1688 ao_ref_init (&orig_ref, orig_ref_);
1690 ~vn_walk_cb_data ();
1691 void *push_partial_def (const pd_data& pd, tree, HOST_WIDE_INT);
1693 vn_reference_t vr;
1694 ao_ref orig_ref;
1695 tree *last_vuse_ptr;
1696 vn_lookup_kind vn_walk_kind;
1697 bool tbaa_p;
1699 /* The VDEFs of partial defs we come across. */
1700 auto_vec<pd_data, 2> partial_defs;
1701 /* The first def's range, to avoid splay tree setup in most cases. */
1702 pd_range first_range;
1703 tree first_vuse;
1704 splay_tree known_ranges;
1705 obstack ranges_obstack;
1708 vn_walk_cb_data::~vn_walk_cb_data ()
1710 if (known_ranges)
1712 splay_tree_delete (known_ranges);
1713 obstack_free (&ranges_obstack, NULL);
1717 /* pd_range splay-tree helpers. */
1719 static int
1720 pd_range_compare (splay_tree_key offset1p, splay_tree_key offset2p)
1722 HOST_WIDE_INT offset1 = *(HOST_WIDE_INT *)offset1p;
1723 HOST_WIDE_INT offset2 = *(HOST_WIDE_INT *)offset2p;
1724 if (offset1 < offset2)
1725 return -1;
1726 else if (offset1 > offset2)
1727 return 1;
1728 return 0;
1731 static void *
1732 pd_tree_alloc (int size, void *data_)
1734 vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
1735 return obstack_alloc (&data->ranges_obstack, size);
1738 static void
1739 pd_tree_dealloc (void *, void *)
1743 /* Push PD to the vector of partial definitions returning a
1744 value when we are ready to combine things with VUSE and MAXSIZEI,
1745 NULL when we want to continue looking for partial defs or -1
1746 on failure. */
1748 void *
1749 vn_walk_cb_data::push_partial_def (const pd_data &pd, tree vuse,
1750 HOST_WIDE_INT maxsizei)
1752 if (partial_defs.is_empty ())
1754 partial_defs.safe_push (pd);
1755 first_range.offset = pd.offset;
1756 first_range.size = pd.size;
1757 first_vuse = vuse;
1758 last_vuse_ptr = NULL;
1759 /* Continue looking for partial defs. */
1760 return NULL;
1763 if (!known_ranges)
1765 /* ??? Optimize the case where the 2nd partial def completes things. */
1766 gcc_obstack_init (&ranges_obstack);
1767 known_ranges = splay_tree_new_with_allocator (pd_range_compare, 0, 0,
1768 pd_tree_alloc,
1769 pd_tree_dealloc, this);
1770 splay_tree_insert (known_ranges,
1771 (splay_tree_key)&first_range.offset,
1772 (splay_tree_value)&first_range);
1775 pd_range newr = { pd.offset, pd.size };
1776 splay_tree_node n;
1777 pd_range *r;
1778 /* Lookup the predecessor of offset + 1 and see if we need to merge. */
1779 HOST_WIDE_INT loffset = newr.offset + 1;
1780 if ((n = splay_tree_predecessor (known_ranges, (splay_tree_key)&loffset))
1781 && ((r = (pd_range *)n->value), true)
1782 && ranges_known_overlap_p (r->offset, r->size + 1,
1783 newr.offset, newr.size))
1785 /* Ignore partial defs already covered. */
1786 if (known_subrange_p (newr.offset, newr.size, r->offset, r->size))
1787 return NULL;
1788 r->size = MAX (r->offset + r->size, newr.offset + newr.size) - r->offset;
1790 else
1792 /* newr.offset wasn't covered yet, insert the range. */
1793 r = XOBNEW (&ranges_obstack, pd_range);
1794 *r = newr;
1795 splay_tree_insert (known_ranges, (splay_tree_key)&r->offset,
1796 (splay_tree_value)r);
1798 /* Merge r which now contains newr and is a member of the splay tree with
1799 adjacent overlapping ranges. */
1800 pd_range *rafter;
1801 while ((n = splay_tree_successor (known_ranges, (splay_tree_key)&r->offset))
1802 && ((rafter = (pd_range *)n->value), true)
1803 && ranges_known_overlap_p (r->offset, r->size + 1,
1804 rafter->offset, rafter->size))
1806 r->size = MAX (r->offset + r->size,
1807 rafter->offset + rafter->size) - r->offset;
1808 splay_tree_remove (known_ranges, (splay_tree_key)&rafter->offset);
1810 partial_defs.safe_push (pd);
1812 /* Now we have merged newr into the range tree. When we have covered
1813 [offseti, sizei] then the tree will contain exactly one node which has
1814 the desired properties and it will be 'r'. */
1815 if (!known_subrange_p (0, maxsizei / BITS_PER_UNIT, r->offset, r->size))
1816 /* Continue looking for partial defs. */
1817 return NULL;
1819 /* Now simply native encode all partial defs in reverse order. */
1820 unsigned ndefs = partial_defs.length ();
1821 /* We support up to 512-bit values (for V8DFmode). */
1822 unsigned char buffer[64];
1823 int len;
1825 while (!partial_defs.is_empty ())
1827 pd_data pd = partial_defs.pop ();
1828 if (TREE_CODE (pd.rhs) == CONSTRUCTOR)
1829 /* Empty CONSTRUCTOR. */
1830 memset (buffer + MAX (0, pd.offset),
1831 0, MIN ((HOST_WIDE_INT)sizeof (buffer) - MAX (0, pd.offset),
1832 pd.size + MIN (0, pd.offset)));
1833 else
1835 unsigned pad = 0;
1836 if (BYTES_BIG_ENDIAN
1837 && is_a <scalar_mode> (TYPE_MODE (TREE_TYPE (pd.rhs))))
1839 /* On big-endian the padding is at the 'front' so just skip
1840 the initial bytes. */
1841 fixed_size_mode mode
1842 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (pd.rhs)));
1843 pad = GET_MODE_SIZE (mode) - pd.size;
1845 len = native_encode_expr (pd.rhs, buffer + MAX (0, pd.offset),
1846 sizeof (buffer) - MAX (0, pd.offset),
1847 MAX (0, -pd.offset) + pad);
1848 if (len <= 0 || len < (pd.size - MAX (0, -pd.offset)))
1850 if (dump_file && (dump_flags & TDF_DETAILS))
1851 fprintf (dump_file, "Failed to encode %u "
1852 "partial definitions\n", ndefs);
1853 return (void *)-1;
1858 tree type = vr->type;
1859 /* Make sure to interpret in a type that has a range covering the whole
1860 access size. */
1861 if (INTEGRAL_TYPE_P (vr->type) && maxsizei != TYPE_PRECISION (vr->type))
1862 type = build_nonstandard_integer_type (maxsizei, TYPE_UNSIGNED (type));
1863 tree val = native_interpret_expr (type, buffer, maxsizei / BITS_PER_UNIT);
1864 /* If we chop off bits because the type's precision doesn't match the memory
1865 access size, this is OK when optimizing reads but not when called from
1866 the DSE code during elimination. */
1867 if (val && type != vr->type)
1869 if (! int_fits_type_p (val, vr->type))
1870 val = NULL_TREE;
1871 else
1872 val = fold_convert (vr->type, val);
1875 if (val)
1877 if (dump_file && (dump_flags & TDF_DETAILS))
1878 fprintf (dump_file,
1879 "Successfully combined %u partial definitions\n", ndefs);
1880 return vn_reference_lookup_or_insert_for_pieces
1881 (first_vuse, vr->set, vr->type, vr->operands, val);
1883 else
1885 if (dump_file && (dump_flags & TDF_DETAILS))
1886 fprintf (dump_file,
1887 "Failed to interpret %u encoded partial definitions\n", ndefs);
1888 return (void *)-1;
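/* Illustrative sketch, not GCC code: the combining step above, reduced
   to byte-granular offsets, a fixed byte order and plain byte vectors
   standing in for the encoded rhs values.  Partial definitions are
   collected until their ranges cover the whole access, then the bytes
   are materialized into a single buffer, mimicking the
   native_encode_expr / native_interpret_expr round trip.

     #include <cstdint>
     #include <optional>
     #include <vector>

     struct partial_def_bytes
     {
       int64_t offset;                  // relative to the read, in bytes
       std::vector<uint8_t> bytes;      // encoded definition bytes
     };

     // Return the SIZE combined bytes at offsets [0, size) once the
     // recorded defs cover that range; std::nullopt to keep looking.
     static std::optional<std::vector<uint8_t>>
     combine_partial_defs (const std::vector<partial_def_bytes> &defs,
                           int64_t size)
     {
       std::vector<bool> covered (size, false);
       for (const partial_def_bytes &pd : defs)
         for (int64_t i = 0; i < (int64_t) pd.bytes.size (); ++i)
           if (pd.offset + i >= 0 && pd.offset + i < size)
             covered[pd.offset + i] = true;
       for (bool b : covered)
         if (!b)
           return std::nullopt;

       // The def closest to the read was recorded first; write it last
       // so its bytes win, like popping partial_defs above does.
       std::vector<uint8_t> buffer (size, 0);
       for (auto it = defs.rbegin (); it != defs.rend (); ++it)
         for (int64_t i = 0; i < (int64_t) it->bytes.size (); ++i)
           if (it->offset + i >= 0 && it->offset + i < size)
             buffer[it->offset + i] = it->bytes[i];
       return buffer;
     }  */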
1892 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1893 with the current VUSE and performs the expression lookup. */
1895 static void *
1896 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *data_)
1898 vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
1899 vn_reference_t vr = data->vr;
1900 vn_reference_s **slot;
1901 hashval_t hash;
1903 /* If we have partial definitions recorded we have to go through
1904 vn_reference_lookup_3. */
1905 if (!data->partial_defs.is_empty ())
1906 return NULL;
1908 if (data->last_vuse_ptr)
1909 *data->last_vuse_ptr = vuse;
1911 /* Fixup vuse and hash. */
1912 if (vr->vuse)
1913 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1914 vr->vuse = vuse_ssa_val (vuse);
1915 if (vr->vuse)
1916 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1918 hash = vr->hashcode;
1919 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1920 if (slot)
1921 return *slot;
1923 return NULL;
1926 /* Lookup an existing or insert a new vn_reference entry into the
1927 value table for the VUSE, SET, TYPE, OPERANDS reference which
1928 has the value VALUE which is either a constant or an SSA name. */
1930 static vn_reference_t
1931 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1932 alias_set_type set,
1933 tree type,
1934 vec<vn_reference_op_s,
1935 va_heap> operands,
1936 tree value)
1938 vn_reference_s vr1;
1939 vn_reference_t result;
1940 unsigned value_id;
1941 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1942 vr1.operands = operands;
1943 vr1.type = type;
1944 vr1.set = set;
1945 vr1.hashcode = vn_reference_compute_hash (&vr1);
1946 if (vn_reference_lookup_1 (&vr1, &result))
1947 return result;
1948 if (TREE_CODE (value) == SSA_NAME)
1949 value_id = VN_INFO (value)->value_id;
1950 else
1951 value_id = get_or_alloc_constant_value_id (value);
1952 return vn_reference_insert_pieces (vuse, set, type,
1953 operands.copy (), value, value_id);
1956 /* Return a value-number for RCODE OPS... either by looking up an existing
1957 value-number for the simplified result or by inserting the operation if
1958 INSERT is true. */
1960 static tree
1961 vn_nary_build_or_lookup_1 (gimple_match_op *res_op, bool insert)
1963 tree result = NULL_TREE;
1964 /* We will be creating a value number for
1965 RCODE (OPS...).
1966 So first simplify and lookup this expression to see if it
1967 is already available. */
1968 /* For simplification valueize. */
1969 unsigned i;
1970 for (i = 0; i < res_op->num_ops; ++i)
1971 if (TREE_CODE (res_op->ops[i]) == SSA_NAME)
1973 tree tem = vn_valueize (res_op->ops[i]);
1974 if (!tem)
1975 break;
1976 res_op->ops[i] = tem;
1978 /* If valueization of an operand fails (it is not available), skip
1979 simplification. */
1980 bool res = false;
1981 if (i == res_op->num_ops)
1983 mprts_hook = vn_lookup_simplify_result;
1984 res = res_op->resimplify (NULL, vn_valueize);
1985 mprts_hook = NULL;
1987 gimple *new_stmt = NULL;
1988 if (res
1989 && gimple_simplified_result_is_gimple_val (res_op))
1991 /* The expression is already available. */
1992 result = res_op->ops[0];
1994 /* Valueize it, simplification returns something in AVAIL only. */
1994 if (TREE_CODE (result) == SSA_NAME)
1995 result = SSA_VAL (result);
1997 else
1999 tree val = vn_lookup_simplify_result (res_op);
2000 if (!val && insert)
2002 gimple_seq stmts = NULL;
2003 result = maybe_push_res_to_seq (res_op, &stmts);
2004 if (result)
2006 gcc_assert (gimple_seq_singleton_p (stmts));
2007 new_stmt = gimple_seq_first_stmt (stmts);
2010 else
2011 /* The expression is already available. */
2012 result = val;
2014 if (new_stmt)
2016 /* The expression is not yet available, value-number lhs to
2017 the new SSA_NAME we created. */
2018 /* Initialize value-number information properly. */
2019 vn_ssa_aux_t result_info = VN_INFO (result);
2020 result_info->valnum = result;
2021 result_info->value_id = get_next_value_id ();
2022 result_info->visited = 1;
2023 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
2024 new_stmt);
2025 result_info->needs_insertion = true;
2026 /* ??? PRE phi-translation inserts NARYs without corresponding
2027 SSA name result. Re-use those but set their result according
2028 to the stmt we just built. */
2029 vn_nary_op_t nary = NULL;
2030 vn_nary_op_lookup_stmt (new_stmt, &nary);
2031 if (nary)
2033 gcc_assert (! nary->predicated_values && nary->u.result == NULL_TREE);
2034 nary->u.result = gimple_assign_lhs (new_stmt);
2036 /* As all "inserted" statements are singleton SCCs, insert
2037 to the valid table. This is strictly needed to
2038 avoid re-generating new value SSA_NAMEs for the same
2039 expression during SCC iteration over and over (the
2040 optimistic table gets cleared after each iteration).
2041 We do not need to insert into the optimistic table, as
2042 lookups there will fall back to the valid table. */
2043 else
2045 unsigned int length = vn_nary_length_from_stmt (new_stmt);
2046 vn_nary_op_t vno1
2047 = alloc_vn_nary_op_noinit (length, &vn_tables_insert_obstack);
2048 vno1->value_id = result_info->value_id;
2049 vno1->length = length;
2050 vno1->predicated_values = 0;
2051 vno1->u.result = result;
2052 init_vn_nary_op_from_stmt (vno1, new_stmt);
2053 vn_nary_op_insert_into (vno1, valid_info->nary, true);
2054 /* Also do not link it into the undo chain. */
2055 last_inserted_nary = vno1->next;
2056 vno1->next = (vn_nary_op_t)(void *)-1;
2058 if (dump_file && (dump_flags & TDF_DETAILS))
2060 fprintf (dump_file, "Inserting name ");
2061 print_generic_expr (dump_file, result);
2062 fprintf (dump_file, " for expression ");
2063 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
2064 fprintf (dump_file, "\n");
2067 return result;
2070 /* Return a value-number for RCODE OPS... either by looking up an existing
2071 value-number for the simplified result or by inserting the operation. */
2073 static tree
2074 vn_nary_build_or_lookup (gimple_match_op *res_op)
2076 return vn_nary_build_or_lookup_1 (res_op, true);
2079 /* Try to simplify the expression represented by the n-ary operation
2080 NARY and return its value if present. */
2082 tree
2083 vn_nary_simplify (vn_nary_op_t nary)
2085 if (nary->length > gimple_match_op::MAX_NUM_OPS)
2086 return NULL_TREE;
2087 gimple_match_op op (gimple_match_cond::UNCOND, nary->opcode,
2088 nary->type, nary->length);
2089 memcpy (op.ops, nary->op, sizeof (tree) * nary->length);
2090 return vn_nary_build_or_lookup_1 (&op, false);
2093 /* Elimination engine. */
2095 class eliminate_dom_walker : public dom_walker
2097 public:
2098 eliminate_dom_walker (cdi_direction, bitmap);
2099 ~eliminate_dom_walker ();
2101 virtual edge before_dom_children (basic_block);
2102 virtual void after_dom_children (basic_block);
2104 virtual tree eliminate_avail (basic_block, tree op);
2105 virtual void eliminate_push_avail (basic_block, tree op);
2106 tree eliminate_insert (basic_block, gimple_stmt_iterator *gsi, tree val);
2108 void eliminate_stmt (basic_block, gimple_stmt_iterator *);
2110 unsigned eliminate_cleanup (bool region_p = false);
2112 bool do_pre;
2113 unsigned int el_todo;
2114 unsigned int eliminations;
2115 unsigned int insertions;
2117 /* SSA names that had their defs inserted by PRE if do_pre. */
2118 bitmap inserted_exprs;
2120 /* Blocks with statements that have had their EH properties changed. */
2121 bitmap need_eh_cleanup;
2123 /* Blocks with statements that have had their AB properties changed. */
2124 bitmap need_ab_cleanup;
2126 /* Local state for the eliminate domwalk. */
2127 auto_vec<gimple *> to_remove;
2128 auto_vec<gimple *> to_fixup;
2129 auto_vec<tree> avail;
2130 auto_vec<tree> avail_stack;
2133 /* Adaptor to the elimination engine using RPO availability. */
2135 class rpo_elim : public eliminate_dom_walker
2137 public:
2138 rpo_elim (basic_block entry_)
2139 : eliminate_dom_walker (CDI_DOMINATORS, NULL), entry (entry_),
2140 m_avail_freelist (NULL) {}
2142 virtual tree eliminate_avail (basic_block, tree op);
2144 virtual void eliminate_push_avail (basic_block, tree);
2146 basic_block entry;
2147 /* Freelist of avail entries which are allocated from the vn_ssa_aux
2148 obstack. */
2149 vn_avail *m_avail_freelist;
2152 /* Global RPO state for access from hooks. */
2153 static rpo_elim *rpo_avail;
2154 basic_block vn_context_bb;
2156 /* Return true if BASE1 and BASE2 can be adjusted so they have the
2157 same address and adjust *OFFSET1 and *OFFSET2 accordingly.
2158 Otherwise return false. */
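/* For example, with 8-bit units, BASE1 = MEM[p_1 + 4] with *OFFSET1 == 0
   and BASE2 = MEM[p_1] with *OFFSET2 == 0 are rewritten to the common
   base p_1 with *OFFSET1 == 32 and *OFFSET2 == 0. */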
2160 static bool
2161 adjust_offsets_for_equal_base_address (tree base1, poly_int64 *offset1,
2162 tree base2, poly_int64 *offset2)
2164 poly_int64 soff;
2165 if (TREE_CODE (base1) == MEM_REF
2166 && TREE_CODE (base2) == MEM_REF)
2168 if (mem_ref_offset (base1).to_shwi (&soff))
2170 base1 = TREE_OPERAND (base1, 0);
2171 *offset1 += soff * BITS_PER_UNIT;
2173 if (mem_ref_offset (base2).to_shwi (&soff))
2175 base2 = TREE_OPERAND (base2, 0);
2176 *offset2 += soff * BITS_PER_UNIT;
2178 return operand_equal_p (base1, base2, 0);
2180 return operand_equal_p (base1, base2, OEP_ADDRESS_OF);
2183 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
2184 from the statement defining VUSE and if not successful tries to
2185 translate *REFP and VR_ through an aggregate copy at the definition
2186 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
2187 of *REF and *VR. If only disambiguation was performed then
2188 *DISAMBIGUATE_ONLY is set to true. */
2190 static void *
2191 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
2192 translate_flags *disambiguate_only)
2194 vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
2195 vn_reference_t vr = data->vr;
2196 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
2197 tree base = ao_ref_base (ref);
2198 HOST_WIDE_INT offseti, maxsizei;
2199 static vec<vn_reference_op_s> lhs_ops;
2200 ao_ref lhs_ref;
2201 bool lhs_ref_ok = false;
2202 poly_int64 copy_size;
2204 /* First try to disambiguate after value-replacing in the definition's LHS. */
2205 if (is_gimple_assign (def_stmt))
2207 tree lhs = gimple_assign_lhs (def_stmt);
2208 bool valueized_anything = false;
2209 /* Avoid re-allocation overhead. */
2210 lhs_ops.truncate (0);
2211 basic_block saved_rpo_bb = vn_context_bb;
2212 vn_context_bb = gimple_bb (def_stmt);
2213 if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE)
2215 copy_reference_ops_from_ref (lhs, &lhs_ops);
2216 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything, true);
2218 vn_context_bb = saved_rpo_bb;
2219 if (valueized_anything)
2221 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
2222 get_alias_set (lhs),
2223 TREE_TYPE (lhs), lhs_ops);
2224 if (lhs_ref_ok
2225 && !refs_may_alias_p_1 (ref, &lhs_ref, data->tbaa_p))
2227 *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
2228 return NULL;
2231 else
2233 ao_ref_init (&lhs_ref, lhs);
2234 lhs_ref_ok = true;
2237 /* Besides valueizing the LHS we can also use access-path based
2238 disambiguation on the original non-valueized ref. */
2239 if (!ref->ref
2240 && lhs_ref_ok
2241 && data->orig_ref.ref)
2243 /* We want to use the non-valueized LHS for this, but avoid redundant
2244 work. */
2245 ao_ref *lref = &lhs_ref;
2246 ao_ref lref_alt;
2247 if (valueized_anything)
2249 ao_ref_init (&lref_alt, lhs);
2250 lref = &lref_alt;
2252 if (!refs_may_alias_p_1 (&data->orig_ref, lref, data->tbaa_p))
2254 *disambiguate_only = (valueized_anything
2255 ? TR_VALUEIZE_AND_DISAMBIGUATE
2256 : TR_DISAMBIGUATE);
2257 return NULL;
2261 /* If we reach a clobbering statement try to skip it and see if
2262 we find a VN result with exactly the same value as the
2263 possible clobber. In this case we can ignore the clobber
2264 and return the found value. */
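/* For example, given
     *p_1 = x_2;   <-- possible clobber of the looked-up ref
     ... = *q_3;
   if looking up *q_3 from just before the store already yields x_2
   (and alignment rules out a partial overlap), the store cannot change
   the loaded value and may be skipped. */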
2265 if (is_gimple_reg_type (TREE_TYPE (lhs))
2266 && types_compatible_p (TREE_TYPE (lhs), vr->type)
2267 && ref->ref)
2269 tree *saved_last_vuse_ptr = data->last_vuse_ptr;
2270 /* Do not update last_vuse_ptr in vn_reference_lookup_2. */
2271 data->last_vuse_ptr = NULL;
2272 tree saved_vuse = vr->vuse;
2273 hashval_t saved_hashcode = vr->hashcode;
2274 void *res = vn_reference_lookup_2 (ref, gimple_vuse (def_stmt), data);
2275 /* Need to restore vr->vuse and vr->hashcode. */
2276 vr->vuse = saved_vuse;
2277 vr->hashcode = saved_hashcode;
2278 data->last_vuse_ptr = saved_last_vuse_ptr;
2279 if (res && res != (void *)-1)
2281 vn_reference_t vnresult = (vn_reference_t) res;
2282 tree rhs = gimple_assign_rhs1 (def_stmt);
2283 if (TREE_CODE (rhs) == SSA_NAME)
2284 rhs = SSA_VAL (rhs);
2285 if (vnresult->result
2286 && operand_equal_p (vnresult->result, rhs, 0)
2287 /* We have to honor our promise about union type punning
2288 and also support arbitrary overlaps with
2289 -fno-strict-aliasing. So simply resort to alignment to
2290 rule out overlaps. Do this check last because it is
2291 quite expensive compared to the hash-lookup above. */
2292 && multiple_p (get_object_alignment (ref->ref), ref->size)
2293 && multiple_p (get_object_alignment (lhs), ref->size))
2294 return res;
2298 else if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE
2299 && gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
2300 && gimple_call_num_args (def_stmt) <= 4)
2302 /* For a builtin call valueize its arguments and call the
2303 alias oracle again. Valueization may improve points-to
2304 info of pointers and constify size and position arguments.
2305 Originally this was motivated by PR61034 which has
2306 conditional calls to free falsely clobbering ref because
2307 of imprecise points-to info of the argument. */
2308 tree oldargs[4];
2309 bool valueized_anything = false;
2310 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2312 oldargs[i] = gimple_call_arg (def_stmt, i);
2313 tree val = vn_valueize (oldargs[i]);
2314 if (val != oldargs[i])
2316 gimple_call_set_arg (def_stmt, i, val);
2317 valueized_anything = true;
2320 if (valueized_anything)
2322 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
2323 ref);
2324 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2325 gimple_call_set_arg (def_stmt, i, oldargs[i]);
2326 if (!res)
2328 *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
2329 return NULL;
2334 /* If we are looking for redundant stores do not create new hashtable
2335 entries from aliasing defs with made up alias-sets. */
2336 if (*disambiguate_only > TR_TRANSLATE || !data->tbaa_p)
2337 return (void *)-1;
2339 /* If we cannot constrain the size of the reference we cannot
2340 test if anything kills it. */
2341 if (!ref->max_size_known_p ())
2342 return (void *)-1;
2344 poly_int64 offset = ref->offset;
2345 poly_int64 maxsize = ref->max_size;
2347 /* We can't deduce anything useful from clobbers. */
2348 if (gimple_clobber_p (def_stmt))
2349 return (void *)-1;
2351 /* def_stmt may-defs *ref. See if we can derive a value for *ref
2352 from that definition.
2353 1) Memset. */
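/* E.g. for
     memset (&a, 0, sizeof (a));
     ... = a.f;
   a read fully covered by the memset destination can be valued to
   zero, or to the repeated fill byte for a non-zero constant value. */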
2354 if (is_gimple_reg_type (vr->type)
2355 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
2356 && (integer_zerop (gimple_call_arg (def_stmt, 1))
2357 || ((TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST
2358 || (INTEGRAL_TYPE_P (vr->type) && known_eq (ref->size, 8)))
2359 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
2360 && offset.is_constant (&offseti)
2361 && offseti % BITS_PER_UNIT == 0))
2362 && poly_int_tree_p (gimple_call_arg (def_stmt, 2))
2363 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2364 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME))
2366 tree base2;
2367 poly_int64 offset2, size2, maxsize2;
2368 bool reverse;
2369 tree ref2 = gimple_call_arg (def_stmt, 0);
2370 if (TREE_CODE (ref2) == SSA_NAME)
2372 ref2 = SSA_VAL (ref2);
2373 if (TREE_CODE (ref2) == SSA_NAME
2374 && (TREE_CODE (base) != MEM_REF
2375 || TREE_OPERAND (base, 0) != ref2))
2377 gimple *def_stmt = SSA_NAME_DEF_STMT (ref2);
2378 if (gimple_assign_single_p (def_stmt)
2379 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2380 ref2 = gimple_assign_rhs1 (def_stmt);
2383 if (TREE_CODE (ref2) == ADDR_EXPR)
2385 ref2 = TREE_OPERAND (ref2, 0);
2386 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
2387 &reverse);
2388 if (!known_size_p (maxsize2)
2389 || !known_eq (maxsize2, size2)
2390 || !operand_equal_p (base, base2, OEP_ADDRESS_OF))
2391 return (void *)-1;
2393 else if (TREE_CODE (ref2) == SSA_NAME)
2395 poly_int64 soff;
2396 if (TREE_CODE (base) != MEM_REF
2397 || !(mem_ref_offset (base) << LOG2_BITS_PER_UNIT).to_shwi (&soff))
2398 return (void *)-1;
2399 offset += soff;
2400 offset2 = 0;
2401 if (TREE_OPERAND (base, 0) != ref2)
2403 gimple *def = SSA_NAME_DEF_STMT (ref2);
2404 if (is_gimple_assign (def)
2405 && gimple_assign_rhs_code (def) == POINTER_PLUS_EXPR
2406 && gimple_assign_rhs1 (def) == TREE_OPERAND (base, 0)
2407 && poly_int_tree_p (gimple_assign_rhs2 (def))
2408 && (wi::to_poly_offset (gimple_assign_rhs2 (def))
2409 << LOG2_BITS_PER_UNIT).to_shwi (&offset2))
2411 ref2 = gimple_assign_rhs1 (def);
2412 if (TREE_CODE (ref2) == SSA_NAME)
2413 ref2 = SSA_VAL (ref2);
2415 else
2416 return (void *)-1;
2419 else
2420 return (void *)-1;
2421 tree len = gimple_call_arg (def_stmt, 2);
2422 HOST_WIDE_INT leni, offset2i, offseti;
2423 if (data->partial_defs.is_empty ()
2424 && known_subrange_p (offset, maxsize, offset2,
2425 wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
2427 tree val;
2428 if (integer_zerop (gimple_call_arg (def_stmt, 1)))
2429 val = build_zero_cst (vr->type);
2430 else if (INTEGRAL_TYPE_P (vr->type)
2431 && known_eq (ref->size, 8))
2433 gimple_match_op res_op (gimple_match_cond::UNCOND, NOP_EXPR,
2434 vr->type, gimple_call_arg (def_stmt, 1));
2435 val = vn_nary_build_or_lookup (&res_op);
2436 if (!val
2437 || (TREE_CODE (val) == SSA_NAME
2438 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2439 return (void *)-1;
2441 else
2443 unsigned len = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr->type));
2444 unsigned char *buf = XALLOCAVEC (unsigned char, len);
2445 memset (buf, TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 1)),
2446 len);
2447 val = native_interpret_expr (vr->type, buf, len);
2448 if (!val)
2449 return (void *)-1;
2451 return vn_reference_lookup_or_insert_for_pieces
2452 (vuse, vr->set, vr->type, vr->operands, val);
2454 /* For now handle clearing memory with partial defs. */
2455 else if (known_eq (ref->size, maxsize)
2456 && integer_zerop (gimple_call_arg (def_stmt, 1))
2457 && tree_to_poly_int64 (len).is_constant (&leni)
2458 && offset.is_constant (&offseti)
2459 && offset2.is_constant (&offset2i)
2460 && maxsize.is_constant (&maxsizei))
2462 pd_data pd;
2463 pd.rhs = build_constructor (NULL_TREE, NULL);
2464 pd.offset = (offset2i - offseti) / BITS_PER_UNIT;
2465 pd.size = leni;
2466 return data->push_partial_def (pd, vuse, maxsizei);
2470 /* 2) Assignment from an empty CONSTRUCTOR. */
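/* E.g. after
     a = {};
   a read fully within A's bounds has value zero; a store that only
   partly overlaps the read is recorded as a partial definition. */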
2471 else if (is_gimple_reg_type (vr->type)
2472 && gimple_assign_single_p (def_stmt)
2473 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
2474 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
2476 tree lhs = gimple_assign_lhs (def_stmt);
2477 tree base2;
2478 poly_int64 offset2, size2, maxsize2;
2479 HOST_WIDE_INT offset2i, size2i;
2480 bool reverse;
2481 if (lhs_ref_ok)
2483 base2 = ao_ref_base (&lhs_ref);
2484 offset2 = lhs_ref.offset;
2485 size2 = lhs_ref.size;
2486 maxsize2 = lhs_ref.max_size;
2487 reverse = reverse_storage_order_for_component_p (lhs);
2489 else
2490 base2 = get_ref_base_and_extent (lhs,
2491 &offset2, &size2, &maxsize2, &reverse);
2492 if (known_size_p (maxsize2)
2493 && known_eq (maxsize2, size2)
2494 && adjust_offsets_for_equal_base_address (base, &offset,
2495 base2, &offset2))
2497 if (data->partial_defs.is_empty ()
2498 && known_subrange_p (offset, maxsize, offset2, size2))
2500 tree val = build_zero_cst (vr->type);
2501 return vn_reference_lookup_or_insert_for_pieces
2502 (vuse, vr->set, vr->type, vr->operands, val);
2504 else if (known_eq (ref->size, maxsize)
2505 && maxsize.is_constant (&maxsizei)
2506 && maxsizei % BITS_PER_UNIT == 0
2507 && offset.is_constant (&offseti)
2508 && offseti % BITS_PER_UNIT == 0
2509 && offset2.is_constant (&offset2i)
2510 && offset2i % BITS_PER_UNIT == 0
2511 && size2.is_constant (&size2i)
2512 && size2i % BITS_PER_UNIT == 0)
2514 pd_data pd;
2515 pd.rhs = gimple_assign_rhs1 (def_stmt);
2516 pd.offset = (offset2i - offseti) / BITS_PER_UNIT;
2517 pd.size = size2i / BITS_PER_UNIT;
2518 return data->push_partial_def (pd, vuse, maxsizei);
2523 /* 3) Assignment from a constant. We can use fold's native encode/interpret
2524 routines to extract the assigned bits. */
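/* E.g. for
     a.i = 0x11223344;
     ... = a.c[1];
   the constant is encoded to its byte image with native_encode_expr
   and the accessed bytes are re-interpreted in the type of the read. */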
2525 else if (known_eq (ref->size, maxsize)
2526 && is_gimple_reg_type (vr->type)
2527 && !contains_storage_order_barrier_p (vr->operands)
2528 && gimple_assign_single_p (def_stmt)
2529 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
2530 /* native_encode and native_decode operate on arrays of bytes
2531 and so fundamentally need a compile-time size and offset. */
2532 && maxsize.is_constant (&maxsizei)
2533 && maxsizei % BITS_PER_UNIT == 0
2534 && offset.is_constant (&offseti)
2535 && offseti % BITS_PER_UNIT == 0
2536 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
2537 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2538 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
2540 tree lhs = gimple_assign_lhs (def_stmt);
2541 tree base2;
2542 poly_int64 offset2, size2, maxsize2;
2543 HOST_WIDE_INT offset2i, size2i;
2544 bool reverse;
2545 if (lhs_ref_ok)
2547 base2 = ao_ref_base (&lhs_ref);
2548 offset2 = lhs_ref.offset;
2549 size2 = lhs_ref.size;
2550 maxsize2 = lhs_ref.max_size;
2551 reverse = reverse_storage_order_for_component_p (lhs);
2553 else
2554 base2 = get_ref_base_and_extent (lhs,
2555 &offset2, &size2, &maxsize2, &reverse);
2556 if (base2
2557 && !reverse
2558 && known_eq (maxsize2, size2)
2559 && multiple_p (size2, BITS_PER_UNIT)
2560 && multiple_p (offset2, BITS_PER_UNIT)
2561 && adjust_offsets_for_equal_base_address (base, &offset,
2562 base2, &offset2)
2563 && offset.is_constant (&offseti)
2564 && offset2.is_constant (&offset2i)
2565 && size2.is_constant (&size2i))
2567 if (data->partial_defs.is_empty ()
2568 && known_subrange_p (offseti, maxsizei, offset2, size2))
2570 /* We support up to 512-bit values (for V8DFmode). */
2571 unsigned char buffer[64];
2572 int len;
2574 tree rhs = gimple_assign_rhs1 (def_stmt);
2575 if (TREE_CODE (rhs) == SSA_NAME)
2576 rhs = SSA_VAL (rhs);
2577 unsigned pad = 0;
2578 if (BYTES_BIG_ENDIAN
2579 && is_a <scalar_mode> (TYPE_MODE (TREE_TYPE (rhs))))
2581 /* On big-endian the padding is at the 'front' so
2582 just skip the initial bytes. */
2583 fixed_size_mode mode
2584 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (rhs)));
2585 pad = GET_MODE_SIZE (mode) - size2i / BITS_PER_UNIT;
2587 len = native_encode_expr (rhs,
2588 buffer, sizeof (buffer),
2589 ((offseti - offset2i) / BITS_PER_UNIT
2590 + pad));
2591 if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
2593 tree type = vr->type;
2594 /* Make sure to interpret in a type that has a range
2595 covering the whole access size. */
2596 if (INTEGRAL_TYPE_P (vr->type)
2597 && maxsizei != TYPE_PRECISION (vr->type))
2598 type = build_nonstandard_integer_type (maxsizei,
2599 TYPE_UNSIGNED (type));
2600 tree val = native_interpret_expr (type, buffer,
2601 maxsizei / BITS_PER_UNIT);
2602 /* If we chop off bits because the type's precision doesn't
2603 match the memory access size, this is OK when optimizing
2604 reads but not when called from the DSE code during
2605 elimination. */
2606 if (val
2607 && type != vr->type)
2609 if (! int_fits_type_p (val, vr->type))
2610 val = NULL_TREE;
2611 else
2612 val = fold_convert (vr->type, val);
2615 if (val)
2616 return vn_reference_lookup_or_insert_for_pieces
2617 (vuse, vr->set, vr->type, vr->operands, val);
2620 else if (ranges_known_overlap_p (offseti, maxsizei, offset2i, size2i))
2622 pd_data pd;
2623 tree rhs = gimple_assign_rhs1 (def_stmt);
2624 if (TREE_CODE (rhs) == SSA_NAME)
2625 rhs = SSA_VAL (rhs);
2626 pd.rhs = rhs;
2627 pd.offset = (offset2i - offseti) / BITS_PER_UNIT;
2628 pd.size = size2i / BITS_PER_UNIT;
2629 return data->push_partial_def (pd, vuse, maxsizei);
2634 /* 4) Assignment from an SSA name whose definition we may be able
2635 to access pieces from. */
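/* E.g. for
     a = x_1;
     ... = a.f;
   the read can be expressed as BIT_FIELD_REF <x_1, size, offset> and
   value-numbered via vn_nary_build_or_lookup. */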
2636 else if (known_eq (ref->size, maxsize)
2637 && is_gimple_reg_type (vr->type)
2638 && !contains_storage_order_barrier_p (vr->operands)
2639 && gimple_assign_single_p (def_stmt)
2640 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2641 /* A subset of partial defs from non-constants can be handled
2642 by for example inserting a CONSTRUCTOR, a COMPLEX_EXPR or
2643 even a (series of) BIT_INSERT_EXPR hoping for simplifications
2644 downstream, not so much for actually doing the insertion. */
2645 && data->partial_defs.is_empty ())
2647 tree lhs = gimple_assign_lhs (def_stmt);
2648 tree base2;
2649 poly_int64 offset2, size2, maxsize2;
2650 bool reverse;
2651 if (lhs_ref_ok)
2653 base2 = ao_ref_base (&lhs_ref);
2654 offset2 = lhs_ref.offset;
2655 size2 = lhs_ref.size;
2656 maxsize2 = lhs_ref.max_size;
2657 reverse = reverse_storage_order_for_component_p (lhs);
2659 else
2660 base2 = get_ref_base_and_extent (lhs,
2661 &offset2, &size2, &maxsize2, &reverse);
2662 tree def_rhs = gimple_assign_rhs1 (def_stmt);
2663 if (!reverse
2664 && known_size_p (maxsize2)
2665 && known_eq (maxsize2, size2)
2666 && adjust_offsets_for_equal_base_address (base, &offset,
2667 base2, &offset2)
2668 && known_subrange_p (offset, maxsize, offset2, size2)
2669 /* ??? We can't handle bitfield precision extracts without
2670 either using an alternate type for the BIT_FIELD_REF and
2671 then doing a conversion or possibly adjusting the offset
2672 according to endianness. */
2673 && (! INTEGRAL_TYPE_P (vr->type)
2674 || known_eq (ref->size, TYPE_PRECISION (vr->type)))
2675 && multiple_p (ref->size, BITS_PER_UNIT)
2676 && (! INTEGRAL_TYPE_P (TREE_TYPE (def_rhs))
2677 || type_has_mode_precision_p (TREE_TYPE (def_rhs))))
2679 gimple_match_op op (gimple_match_cond::UNCOND,
2680 BIT_FIELD_REF, vr->type,
2681 vn_valueize (def_rhs),
2682 bitsize_int (ref->size),
2683 bitsize_int (offset - offset2));
2684 tree val = vn_nary_build_or_lookup (&op);
2685 if (val
2686 && (TREE_CODE (val) != SSA_NAME
2687 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2689 vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
2690 (vuse, vr->set, vr->type, vr->operands, val);
2691 return res;
2696 /* 5) For aggregate copies translate the reference through them if
2697 the copy kills ref. */
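/* E.g. for
     b = a;
     ... = b.f;
   the lookup is re-written in terms of a.f and continues from the
   copy's VUSE. */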
2698 else if (data->vn_walk_kind == VN_WALKREWRITE
2699 && gimple_assign_single_p (def_stmt)
2700 && (DECL_P (gimple_assign_rhs1 (def_stmt))
2701 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
2702 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
2704 tree base2;
2705 int i, j, k;
2706 auto_vec<vn_reference_op_s> rhs;
2707 vn_reference_op_t vro;
2708 ao_ref r;
2710 if (!lhs_ref_ok)
2711 return (void *)-1;
2713 /* See if the assignment kills REF. */
2714 base2 = ao_ref_base (&lhs_ref);
2715 if (!lhs_ref.max_size_known_p ()
2716 || (base != base2
2717 && (TREE_CODE (base) != MEM_REF
2718 || TREE_CODE (base2) != MEM_REF
2719 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
2720 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
2721 TREE_OPERAND (base2, 1))))
2722 || !stmt_kills_ref_p (def_stmt, ref))
2723 return (void *)-1;
2725 /* Find the common base of ref and the lhs. lhs_ops already
2726 contains valueized operands for the lhs. */
2727 i = vr->operands.length () - 1;
2728 j = lhs_ops.length () - 1;
2729 while (j >= 0 && i >= 0
2730 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
2732 i--;
2733 j--;
2736 /* ??? The innermost op should always be a MEM_REF and we already
2737 checked that the assignment to the lhs kills vr. Thus for
2738 aggregate copies using char[] types the vn_reference_op_eq
2739 may fail when comparing types for compatibility. But we really
2740 don't care here - further lookups with the rewritten operands
2741 will simply fail if we messed up types too badly. */
2742 poly_int64 extra_off = 0;
2743 if (j == 0 && i >= 0
2744 && lhs_ops[0].opcode == MEM_REF
2745 && maybe_ne (lhs_ops[0].off, -1))
2747 if (known_eq (lhs_ops[0].off, vr->operands[i].off))
2748 i--, j--;
2749 else if (vr->operands[i].opcode == MEM_REF
2750 && maybe_ne (vr->operands[i].off, -1))
2752 extra_off = vr->operands[i].off - lhs_ops[0].off;
2753 i--, j--;
2757 /* i now points to the first additional op.
2758 ??? LHS may not be completely contained in VR, one or more
2759 VIEW_CONVERT_EXPRs could be in its way. We could at least
2760 try handling outermost VIEW_CONVERT_EXPRs. */
2761 if (j != -1)
2762 return (void *)-1;
2764 /* Punt if the additional ops contain a storage order barrier. */
2765 for (k = i; k >= 0; k--)
2767 vro = &vr->operands[k];
2768 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
2769 return (void *)-1;
2772 /* Now re-write REF to be based on the rhs of the assignment. */
2773 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
2775 /* Apply an extra offset to the inner MEM_REF of the RHS. */
2776 if (maybe_ne (extra_off, 0))
2778 if (rhs.length () < 2)
2779 return (void *)-1;
2780 int ix = rhs.length () - 2;
2781 if (rhs[ix].opcode != MEM_REF
2782 || known_eq (rhs[ix].off, -1))
2783 return (void *)-1;
2784 rhs[ix].off += extra_off;
2785 rhs[ix].op0 = int_const_binop (PLUS_EXPR, rhs[ix].op0,
2786 build_int_cst (TREE_TYPE (rhs[ix].op0),
2787 extra_off));
2790 /* We need to prepend vr->operands[0..i] to rhs. */
2791 vec<vn_reference_op_s> old = vr->operands;
2792 if (i + 1 + rhs.length () > vr->operands.length ())
2793 vr->operands.safe_grow (i + 1 + rhs.length ());
2794 else
2795 vr->operands.truncate (i + 1 + rhs.length ());
2796 FOR_EACH_VEC_ELT (rhs, j, vro)
2797 vr->operands[i + 1 + j] = *vro;
2798 vr->operands = valueize_refs (vr->operands);
2799 if (old == shared_lookup_references)
2800 shared_lookup_references = vr->operands;
2801 vr->hashcode = vn_reference_compute_hash (vr);
2803 /* Try folding the new reference to a constant. */
2804 tree val = fully_constant_vn_reference_p (vr);
2805 if (val)
2807 if (data->partial_defs.is_empty ())
2808 return vn_reference_lookup_or_insert_for_pieces
2809 (vuse, vr->set, vr->type, vr->operands, val);
2810 /* This is the only interesting case for partial-def handling
2811 coming from targets that like to gimplify init-ctors as
2812 aggregate copies from constant data like aarch64 for
2813 PR83518. */
2814 if (maxsize.is_constant (&maxsizei)
2815 && known_eq (ref->size, maxsize))
2817 pd_data pd;
2818 pd.rhs = val;
2819 pd.offset = 0;
2820 pd.size = maxsizei / BITS_PER_UNIT;
2821 return data->push_partial_def (pd, vuse, maxsizei);
2825 /* Continuing with partial defs isn't easily possible here; we
2826 would have to find a full def from further lookups from here. Probably
2827 not worth special-casing this everywhere. */
2828 if (!data->partial_defs.is_empty ())
2829 return (void *)-1;
2831 /* Adjust *ref from the new operands. */
2832 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2833 return (void *)-1;
2834 /* This can happen with bitfields. */
2835 if (maybe_ne (ref->size, r.size))
2836 return (void *)-1;
2837 *ref = r;
2839 /* Do not update last seen VUSE after translating. */
2840 data->last_vuse_ptr = NULL;
2841 /* Invalidate the original access path since it now contains
2842 the wrong base. */
2843 data->orig_ref.ref = NULL_TREE;
2845 /* Keep looking for the adjusted *REF / VR pair. */
2846 return NULL;
2849 /* 6) For memcpy copies translate the reference through them if
2850 the copy kills ref. */
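/* E.g. for
     memcpy (&b, &a, sizeof (b));
     ... = b.f;
   the lookup continues through a MEM_REF based on &a at the
   corresponding offset. */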
2851 else if (data->vn_walk_kind == VN_WALKREWRITE
2852 && is_gimple_reg_type (vr->type)
2853 /* ??? Handle BCOPY as well. */
2854 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2855 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2856 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2857 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2858 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2859 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2860 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2861 && poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size)
2862 /* Handling this is more complicated, give up for now. */
2863 && data->partial_defs.is_empty ())
2865 tree lhs, rhs;
2866 ao_ref r;
2867 poly_int64 rhs_offset, lhs_offset;
2868 vn_reference_op_s op;
2869 poly_uint64 mem_offset;
2870 poly_int64 at, byte_maxsize;
2872 /* Only handle non-variable, addressable refs. */
2873 if (maybe_ne (ref->size, maxsize)
2874 || !multiple_p (offset, BITS_PER_UNIT, &at)
2875 || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
2876 return (void *)-1;
2878 /* Extract a pointer base and an offset for the destination. */
2879 lhs = gimple_call_arg (def_stmt, 0);
2880 lhs_offset = 0;
2881 if (TREE_CODE (lhs) == SSA_NAME)
2883 lhs = vn_valueize (lhs);
2884 if (TREE_CODE (lhs) == SSA_NAME)
2886 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2887 if (gimple_assign_single_p (def_stmt)
2888 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2889 lhs = gimple_assign_rhs1 (def_stmt);
2892 if (TREE_CODE (lhs) == ADDR_EXPR)
2894 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2895 &lhs_offset);
2896 if (!tem)
2897 return (void *)-1;
2898 if (TREE_CODE (tem) == MEM_REF
2899 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2901 lhs = TREE_OPERAND (tem, 0);
2902 if (TREE_CODE (lhs) == SSA_NAME)
2903 lhs = vn_valueize (lhs);
2904 lhs_offset += mem_offset;
2906 else if (DECL_P (tem))
2907 lhs = build_fold_addr_expr (tem);
2908 else
2909 return (void *)-1;
2911 if (TREE_CODE (lhs) != SSA_NAME
2912 && TREE_CODE (lhs) != ADDR_EXPR)
2913 return (void *)-1;
2915 /* Extract a pointer base and an offset for the source. */
2916 rhs = gimple_call_arg (def_stmt, 1);
2917 rhs_offset = 0;
2918 if (TREE_CODE (rhs) == SSA_NAME)
2919 rhs = vn_valueize (rhs);
2920 if (TREE_CODE (rhs) == ADDR_EXPR)
2922 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2923 &rhs_offset);
2924 if (!tem)
2925 return (void *)-1;
2926 if (TREE_CODE (tem) == MEM_REF
2927 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2929 rhs = TREE_OPERAND (tem, 0);
2930 rhs_offset += mem_offset;
2932 else if (DECL_P (tem)
2933 || TREE_CODE (tem) == STRING_CST)
2934 rhs = build_fold_addr_expr (tem);
2935 else
2936 return (void *)-1;
2938 if (TREE_CODE (rhs) != SSA_NAME
2939 && TREE_CODE (rhs) != ADDR_EXPR)
2940 return (void *)-1;
2942 /* The bases of the destination and the reference have to agree. */
2943 if (TREE_CODE (base) == MEM_REF)
2945 if (TREE_OPERAND (base, 0) != lhs
2946 || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
2947 return (void *) -1;
2948 at += mem_offset;
2950 else if (!DECL_P (base)
2951 || TREE_CODE (lhs) != ADDR_EXPR
2952 || TREE_OPERAND (lhs, 0) != base)
2953 return (void *)-1;
2955 /* If the access is completely outside of the memcpy destination
2956 area there is no aliasing. */
2957 if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
2958 return NULL;
2959 /* And the access has to be contained within the memcpy destination. */
2960 if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
2961 return (void *)-1;
2963 /* Make room for 2 operands in the new reference. */
2964 if (vr->operands.length () < 2)
2966 vec<vn_reference_op_s> old = vr->operands;
2967 vr->operands.safe_grow_cleared (2);
2968 if (old == shared_lookup_references)
2969 shared_lookup_references = vr->operands;
2971 else
2972 vr->operands.truncate (2);
2974 /* The looked-through reference is a simple MEM_REF. */
2975 memset (&op, 0, sizeof (op));
2976 op.type = vr->type;
2977 op.opcode = MEM_REF;
2978 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
2979 op.off = at - lhs_offset + rhs_offset;
2980 vr->operands[0] = op;
2981 op.type = TREE_TYPE (rhs);
2982 op.opcode = TREE_CODE (rhs);
2983 op.op0 = rhs;
2984 op.off = -1;
2985 vr->operands[1] = op;
2986 vr->hashcode = vn_reference_compute_hash (vr);
2988 /* Try folding the new reference to a constant. */
2989 tree val = fully_constant_vn_reference_p (vr);
2990 if (val)
2991 return vn_reference_lookup_or_insert_for_pieces
2992 (vuse, vr->set, vr->type, vr->operands, val);
2994 /* Adjust *ref from the new operands. */
2995 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2996 return (void *)-1;
2997 /* This can happen with bitfields. */
2998 if (maybe_ne (ref->size, r.size))
2999 return (void *)-1;
3000 *ref = r;
3002 /* Do not update last seen VUSE after translating. */
3003 data->last_vuse_ptr = NULL;
3004 /* Invalidate the original access path since it now contains
3005 the wrong base. */
3006 data->orig_ref.ref = NULL_TREE;
3008 /* Keep looking for the adjusted *REF / VR pair. */
3009 return NULL;
3012 /* Bail out and stop walking. */
3013 return (void *)-1;
3016 /* Return a reference op vector from OP that can be used for
3017 vn_reference_lookup_pieces. The caller is responsible for releasing
3018 the vector. */
3020 vec<vn_reference_op_s>
3021 vn_reference_operands_for_lookup (tree op)
3023 bool valueized;
3024 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
3027 /* Lookup a reference operation by its parts in the current hash table.
3028 Returns the resulting value number if it exists in the hash table,
3029 NULL_TREE otherwise. VNRESULT will be filled in with the actual
3030 vn_reference_t stored in the hashtable if something is found. */
3032 tree
3033 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
3034 vec<vn_reference_op_s> operands,
3035 vn_reference_t *vnresult, vn_lookup_kind kind)
3037 struct vn_reference_s vr1;
3038 vn_reference_t tmp;
3039 tree cst;
3041 if (!vnresult)
3042 vnresult = &tmp;
3043 *vnresult = NULL;
3045 vr1.vuse = vuse_ssa_val (vuse);
3046 shared_lookup_references.truncate (0);
3047 shared_lookup_references.safe_grow (operands.length ());
3048 memcpy (shared_lookup_references.address (),
3049 operands.address (),
3050 sizeof (vn_reference_op_s)
3051 * operands.length ());
3052 vr1.operands = operands = shared_lookup_references
3053 = valueize_refs (shared_lookup_references);
3054 vr1.type = type;
3055 vr1.set = set;
3056 vr1.hashcode = vn_reference_compute_hash (&vr1);
3057 if ((cst = fully_constant_vn_reference_p (&vr1)))
3058 return cst;
3060 vn_reference_lookup_1 (&vr1, vnresult);
3061 if (!*vnresult
3062 && kind != VN_NOWALK
3063 && vr1.vuse)
3065 ao_ref r;
3066 unsigned limit = PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS);
3067 vn_walk_cb_data data (&vr1, NULL_TREE, NULL, kind, true);
3068 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
3069 *vnresult =
3070 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse, true,
3071 vn_reference_lookup_2,
3072 vn_reference_lookup_3,
3073 vuse_valueize, limit, &data);
3074 gcc_checking_assert (vr1.operands == shared_lookup_references);
3077 if (*vnresult)
3078 return (*vnresult)->result;
3080 return NULL_TREE;
3083 /* Lookup OP in the current hash table, and return the resulting value
3084 number if it exists in the hash table. Return NULL_TREE if it does
3085 not exist in the hash table or if the result field of the structure
3086 was NULL. VNRESULT will be filled in with the vn_reference_t
3087 stored in the hashtable if one exists. When TBAA_P is false assume
3088 we are looking up a store and treat it as having alias-set zero.
3089 *LAST_VUSE_PTR will be updated with the VUSE with which the value lookup succeeded. */
3091 tree
3092 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
3093 vn_reference_t *vnresult, bool tbaa_p, tree *last_vuse_ptr)
3095 vec<vn_reference_op_s> operands;
3096 struct vn_reference_s vr1;
3097 tree cst;
3098 bool valueized_anything;
3100 if (vnresult)
3101 *vnresult = NULL;
3103 vr1.vuse = vuse_ssa_val (vuse);
3104 vr1.operands = operands
3105 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
3106 vr1.type = TREE_TYPE (op);
3107 vr1.set = get_alias_set (op);
3108 vr1.hashcode = vn_reference_compute_hash (&vr1);
3109 if ((cst = fully_constant_vn_reference_p (&vr1)))
3110 return cst;
3112 if (kind != VN_NOWALK
3113 && vr1.vuse)
3115 vn_reference_t wvnresult;
3116 ao_ref r;
3117 unsigned limit = PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS);
3118 /* Make sure to use a valueized reference if we valueized anything.
3119 Otherwise preserve the full reference for advanced TBAA. */
3120 if (!valueized_anything
3121 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
3122 vr1.operands))
3123 ao_ref_init (&r, op);
3124 vn_walk_cb_data data (&vr1, r.ref ? NULL_TREE : op,
3125 last_vuse_ptr, kind, tbaa_p);
3126 wvnresult =
3127 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse, tbaa_p,
3128 vn_reference_lookup_2,
3129 vn_reference_lookup_3,
3130 vuse_valueize, limit, &data);
3131 gcc_checking_assert (vr1.operands == shared_lookup_references);
3132 if (wvnresult)
3134 if (vnresult)
3135 *vnresult = wvnresult;
3136 return wvnresult->result;
3139 return NULL_TREE;
3142 return vn_reference_lookup_1 (&vr1, vnresult);
3145 /* Lookup CALL in the current hash table and return the entry in
3146 *VNRESULT if found. Populates *VR for the hashtable lookup. */
3148 void
3149 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
3150 vn_reference_t vr)
3152 if (vnresult)
3153 *vnresult = NULL;
3155 tree vuse = gimple_vuse (call);
3157 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
3158 vr->operands = valueize_shared_reference_ops_from_call (call);
3159 vr->type = gimple_expr_type (call);
3160 vr->set = 0;
3161 vr->hashcode = vn_reference_compute_hash (vr);
3162 vn_reference_lookup_1 (vr, vnresult);
3165 /* Insert OP into the current hash table with a value number of RESULT. */
3167 static void
3168 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
3170 vn_reference_s **slot;
3171 vn_reference_t vr1;
3172 bool tem;
3174 vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
3175 if (TREE_CODE (result) == SSA_NAME)
3176 vr1->value_id = VN_INFO (result)->value_id;
3177 else
3178 vr1->value_id = get_or_alloc_constant_value_id (result);
3179 vr1->vuse = vuse_ssa_val (vuse);
3180 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
3181 vr1->type = TREE_TYPE (op);
3182 vr1->set = get_alias_set (op);
3183 vr1->hashcode = vn_reference_compute_hash (vr1);
3184 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
3185 vr1->result_vdef = vdef;
3187 slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
3188 INSERT);
3190 /* Because IL walking on reference lookup can end up visiting
3191 a def that is only to be visited later in iteration order
3192 when we are about to make an irreducible region reducible,
3193 the def can be effectively processed and its ref already
3194 inserted by vn_reference_lookup_3. So we cannot assert (!*slot)
3195 but instead save a lookup if we deal with already inserted refs here. */
3196 if (*slot)
3198 /* We cannot assert that we have the same value either because
3199 when disentangling an irreducible region we may end up visiting
3200 a use before the corresponding def. That's a missed optimization
3201 only though. See gcc.dg/tree-ssa/pr87126.c for example. */
3202 if (dump_file && (dump_flags & TDF_DETAILS)
3203 && !operand_equal_p ((*slot)->result, vr1->result, 0))
3205 fprintf (dump_file, "Keeping old value ");
3206 print_generic_expr (dump_file, (*slot)->result);
3207 fprintf (dump_file, " because of collision\n");
3209 free_reference (vr1);
3210 obstack_free (&vn_tables_obstack, vr1);
3211 return;
3214 *slot = vr1;
3215 vr1->next = last_inserted_ref;
3216 last_inserted_ref = vr1;
3219 /* Insert a reference by its pieces into the current hash table with
3220 a value number of RESULT. Return the resulting reference
3221 structure we created. */
3223 vn_reference_t
3224 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
3225 vec<vn_reference_op_s> operands,
3226 tree result, unsigned int value_id)
3229 vn_reference_s **slot;
3230 vn_reference_t vr1;
3232 vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
3233 vr1->value_id = value_id;
3234 vr1->vuse = vuse_ssa_val (vuse);
3235 vr1->operands = valueize_refs (operands);
3236 vr1->type = type;
3237 vr1->set = set;
3238 vr1->hashcode = vn_reference_compute_hash (vr1);
3239 if (result && TREE_CODE (result) == SSA_NAME)
3240 result = SSA_VAL (result);
3241 vr1->result = result;
3243 slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
3244 INSERT);
3246 /* At this point we should have all the things inserted that we have
3247 seen before, and we should never try inserting something that
3248 already exists. */
3249 gcc_assert (!*slot);
3251 *slot = vr1;
3252 vr1->next = last_inserted_ref;
3253 last_inserted_ref = vr1;
3254 return vr1;
3257 /* Compute and return the hash value for nary operation VNO1. */
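/* Operands of commutative (and swappable comparison) operations are
   put into a canonical order before hashing so that e.g. a_1 + b_2
   and b_2 + a_1 receive the same hash and compare equal. */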
3259 static hashval_t
3260 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
3262 inchash::hash hstate;
3263 unsigned i;
3265 for (i = 0; i < vno1->length; ++i)
3266 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
3267 vno1->op[i] = SSA_VAL (vno1->op[i]);
3269 if (((vno1->length == 2
3270 && commutative_tree_code (vno1->opcode))
3271 || (vno1->length == 3
3272 && commutative_ternary_tree_code (vno1->opcode)))
3273 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
3274 std::swap (vno1->op[0], vno1->op[1]);
3275 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
3276 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
3278 std::swap (vno1->op[0], vno1->op[1]);
3279 vno1->opcode = swap_tree_comparison (vno1->opcode);
3282 hstate.add_int (vno1->opcode);
3283 for (i = 0; i < vno1->length; ++i)
3284 inchash::add_expr (vno1->op[i], hstate);
3286 return hstate.end ();
3289 /* Compare nary operations VNO1 and VNO2 and return true if they are
3290 equivalent. */
3292 bool
3293 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
3295 unsigned i;
3297 if (vno1->hashcode != vno2->hashcode)
3298 return false;
3300 if (vno1->length != vno2->length)
3301 return false;
3303 if (vno1->opcode != vno2->opcode
3304 || !types_compatible_p (vno1->type, vno2->type))
3305 return false;
3307 for (i = 0; i < vno1->length; ++i)
3308 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
3309 return false;
3311 /* BIT_INSERT_EXPR has an implicit operand as the type precision
3312 of op1. Need to check to make sure they are the same. */
3313 if (vno1->opcode == BIT_INSERT_EXPR
3314 && TREE_CODE (vno1->op[1]) == INTEGER_CST
3315 && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
3316 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
3317 return false;
3319 return true;
3322 /* Initialize VNO from the pieces provided. */
3324 static void
3325 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
3326 enum tree_code code, tree type, tree *ops)
3328 vno->opcode = code;
3329 vno->length = length;
3330 vno->type = type;
3331 memcpy (&vno->op[0], ops, sizeof (tree) * length);
3334 /* Return the number of operands for a vn_nary ops structure from STMT. */
3336 static unsigned int
3337 vn_nary_length_from_stmt (gimple *stmt)
3339 switch (gimple_assign_rhs_code (stmt))
3341 case REALPART_EXPR:
3342 case IMAGPART_EXPR:
3343 case VIEW_CONVERT_EXPR:
3344 return 1;
3346 case BIT_FIELD_REF:
3347 return 3;
3349 case CONSTRUCTOR:
3350 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
3352 default:
3353 return gimple_num_ops (stmt) - 1;
3357 /* Initialize VNO from STMT. */
3359 static void
3360 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
3362 unsigned i;
3364 vno->opcode = gimple_assign_rhs_code (stmt);
3365 vno->type = gimple_expr_type (stmt);
3366 switch (vno->opcode)
3368 case REALPART_EXPR:
3369 case IMAGPART_EXPR:
3370 case VIEW_CONVERT_EXPR:
3371 vno->length = 1;
3372 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
3373 break;
3375 case BIT_FIELD_REF:
3376 vno->length = 3;
3377 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
3378 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
3379 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
3380 break;
3382 case CONSTRUCTOR:
3383 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
3384 for (i = 0; i < vno->length; ++i)
3385 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
3386 break;
3388 default:
3389 gcc_checking_assert (!gimple_assign_single_p (stmt));
3390 vno->length = gimple_num_ops (stmt) - 1;
3391 for (i = 0; i < vno->length; ++i)
3392 vno->op[i] = gimple_op (stmt, i + 1);
3396 /* Compute the hashcode for VNO and look for it in the hash table;
3397 return the resulting value number if it exists in the hash table.
3398 Return NULL_TREE if it does not exist in the hash table or if the
3399 result field of the operation is NULL. VNRESULT will contain the
3400 vn_nary_op_t from the hashtable if it exists. */
3402 static tree
3403 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
3405 vn_nary_op_s **slot;
3407 if (vnresult)
3408 *vnresult = NULL;
3410 vno->hashcode = vn_nary_op_compute_hash (vno);
3411 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
3412 if (!slot)
3413 return NULL_TREE;
3414 if (vnresult)
3415 *vnresult = *slot;
3416 return (*slot)->predicated_values ? NULL_TREE : (*slot)->u.result;
3419 /* Lookup an n-ary operation by its pieces and return the resulting value
3420 number if it exists in the hash table. Return NULL_TREE if it does
3421 not exist in the hash table or if the result field of the operation
3422 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
3423 if it exists. */
3425 tree
3426 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
3427 tree type, tree *ops, vn_nary_op_t *vnresult)
3429 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
3430 sizeof_vn_nary_op (length));
3431 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
3432 return vn_nary_op_lookup_1 (vno1, vnresult);
3435 /* Lookup the rhs of STMT in the current hash table, and return the resulting
3436 value number if it exists in the hash table. Return NULL_TREE if
3437 it does not exist in the hash table. VNRESULT will contain the
3438 vn_nary_op_t from the hashtable if it exists. */
3440 tree
3441 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
3443 vn_nary_op_t vno1
3444 = XALLOCAVAR (struct vn_nary_op_s,
3445 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
3446 init_vn_nary_op_from_stmt (vno1, stmt);
3447 return vn_nary_op_lookup_1 (vno1, vnresult);
3450 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
3452 static vn_nary_op_t
3453 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
3455 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
3458 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
3459 obstack. */
3461 static vn_nary_op_t
3462 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
3464 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length, &vn_tables_obstack);
3466 vno1->value_id = value_id;
3467 vno1->length = length;
3468 vno1->predicated_values = 0;
3469 vno1->u.result = result;
3471 return vno1;
3474 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
3475 VNO->HASHCODE first. */
3477 static vn_nary_op_t
3478 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
3479 bool compute_hash)
3481 vn_nary_op_s **slot;
3483 if (compute_hash)
3485 vno->hashcode = vn_nary_op_compute_hash (vno);
3486 gcc_assert (! vno->predicated_values
3487 || (! vno->u.values->next
3488 && vno->u.values->n == 1));
3491 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
3492 vno->unwind_to = *slot;
3493 if (*slot)
3495 /* Prefer non-predicated values.
3496 ??? Only if those are constant, otherwise, with constant predicated
3497 value, turn them into predicated values with entry-block validity
3498 (??? but we always find the first valid result currently). */
3499 if ((*slot)->predicated_values
3500 && ! vno->predicated_values)
3502 /* ??? We cannot remove *slot from the unwind stack list.
3503 For the moment we deal with this by skipping not found
3504 entries but this isn't ideal ... */
3505 *slot = vno;
3506 /* ??? Maintain a stack of states we can unwind in
3507 vn_nary_op_s? But how far do we unwind? In reality
3508 we need to push change records somewhere... Or not
3509 unwind vn_nary_op_s and linking them but instead
3510 unwind the results "list", linking that, which also
3511 doesn't move on hashtable resize. */
3512 /* We can also have a ->unwind_to recording *slot there.
3513 That way we can make u.values a fixed size array with
3514 recording the number of entries but of course we then
3515 have always N copies for each unwind_to-state. Or we
3516 make sure to only ever append and each unwinding will
3517 pop off one entry (but how to deal with predicated
3518 replaced with non-predicated here?) */
3519 vno->next = last_inserted_nary;
3520 last_inserted_nary = vno;
3521 return vno;
3523 else if (vno->predicated_values
3524 && ! (*slot)->predicated_values)
3525 return *slot;
3526 else if (vno->predicated_values
3527 && (*slot)->predicated_values)
3529 /* ??? Factor this all into a insert_single_predicated_value
3530 routine. */
3531 gcc_assert (!vno->u.values->next && vno->u.values->n == 1);
3532 basic_block vno_bb
3533 = BASIC_BLOCK_FOR_FN (cfun, vno->u.values->valid_dominated_by_p[0]);
3534 vn_pval *nval = vno->u.values;
3535 vn_pval **next = &vno->u.values;
3536 bool found = false;
3537 for (vn_pval *val = (*slot)->u.values; val; val = val->next)
3539 if (expressions_equal_p (val->result, vno->u.values->result))
3541 found = true;
3542 for (unsigned i = 0; i < val->n; ++i)
3544 basic_block val_bb
3545 = BASIC_BLOCK_FOR_FN (cfun,
3546 val->valid_dominated_by_p[i]);
3547 if (dominated_by_p (CDI_DOMINATORS, vno_bb, val_bb))
3548 /* Value registered with more generic predicate. */
3549 return *slot;
3550 else if (dominated_by_p (CDI_DOMINATORS, val_bb, vno_bb))
3551 /* Shouldn't happen, we insert in RPO order. */
3552 gcc_unreachable ();
3554 /* Append value. */
3555 *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
3556 sizeof (vn_pval)
3557 + val->n * sizeof (int));
3558 (*next)->next = NULL;
3559 (*next)->result = val->result;
3560 (*next)->n = val->n + 1;
3561 memcpy ((*next)->valid_dominated_by_p,
3562 val->valid_dominated_by_p,
3563 val->n * sizeof (int));
3564 (*next)->valid_dominated_by_p[val->n] = vno_bb->index;
3565 next = &(*next)->next;
3566 if (dump_file && (dump_flags & TDF_DETAILS))
3567 fprintf (dump_file, "Appending predicate to value.\n");
3568 continue;
3570 /* Copy other predicated values. */
3571 *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
3572 sizeof (vn_pval)
3573 + (val->n-1) * sizeof (int));
3574 memcpy (*next, val, sizeof (vn_pval) + (val->n-1) * sizeof (int));
3575 (*next)->next = NULL;
3576 next = &(*next)->next;
3578 if (!found)
3579 *next = nval;
3581 *slot = vno;
3582 vno->next = last_inserted_nary;
3583 last_inserted_nary = vno;
3584 return vno;
3587 /* While we do not want to insert things twice it's awkward to
3588 avoid it in the case where visit_nary_op pattern-matches stuff
3589 and ends up simplifying the replacement to itself. We then
3590 get two inserts, one from visit_nary_op and one from
3591 vn_nary_build_or_lookup.
3592 So allow inserts with the same value number. */
3593 if ((*slot)->u.result == vno->u.result)
3594 return *slot;
3597 /* ??? There's also optimistic vs. previously committed state merging
3598 that is problematic for the case of unwinding. */
3600 /* ??? We should return NULL if we do not use 'vno' and have the
3601 caller release it. */
3602 gcc_assert (!*slot);
3604 *slot = vno;
3605 vno->next = last_inserted_nary;
3606 last_inserted_nary = vno;
3607 return vno;
3610 /* Insert an n-ary operation into the current hash table using its
3611 pieces. Return the vn_nary_op_t structure we created and put in
3612 the hashtable. */
3614 vn_nary_op_t
3615 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
3616 tree type, tree *ops,
3617 tree result, unsigned int value_id)
3619 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
3620 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
3621 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
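/* As vn_nary_op_insert_pieces, but record RESULT as a predicated value
   of the operation, valid only in blocks dominated by the destination
   of edge PRED_E.  It does not record (returns NULL) for backedges or
   when the destination has multiple non-backedge predecessors. */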
3624 static vn_nary_op_t
3625 vn_nary_op_insert_pieces_predicated (unsigned int length, enum tree_code code,
3626 tree type, tree *ops,
3627 tree result, unsigned int value_id,
3628 edge pred_e)
3630 /* ??? Currently tracking BBs. */
3631 if (! single_pred_p (pred_e->dest))
3633 /* Never record for backedges. */
3634 if (pred_e->flags & EDGE_DFS_BACK)
3635 return NULL;
3636 edge_iterator ei;
3637 edge e;
3638 int cnt = 0;
3639 /* Ignore backedges. */
3640 FOR_EACH_EDGE (e, ei, pred_e->dest->preds)
3641 if (! dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
3642 cnt++;
3643 if (cnt != 1)
3644 return NULL;
3646 if (dump_file && (dump_flags & TDF_DETAILS)
3647 /* ??? Fix dumping, but currently we only get comparisons. */
3648 && TREE_CODE_CLASS (code) == tcc_comparison)
3650 fprintf (dump_file, "Recording on edge %d->%d ", pred_e->src->index,
3651 pred_e->dest->index);
3652 print_generic_expr (dump_file, ops[0], TDF_SLIM);
3653 fprintf (dump_file, " %s ", get_tree_code_name (code));
3654 print_generic_expr (dump_file, ops[1], TDF_SLIM);
3655 fprintf (dump_file, " == %s\n",
3656 integer_zerop (result) ? "false" : "true");
3658 vn_nary_op_t vno1 = alloc_vn_nary_op (length, NULL_TREE, value_id);
3659 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
3660 vno1->predicated_values = 1;
3661 vno1->u.values = (vn_pval *) obstack_alloc (&vn_tables_obstack,
3662 sizeof (vn_pval));
3663 vno1->u.values->next = NULL;
3664 vno1->u.values->result = result;
3665 vno1->u.values->n = 1;
3666 vno1->u.values->valid_dominated_by_p[0] = pred_e->dest->index;
3667 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
3670 static bool
3671 dominated_by_p_w_unex (basic_block bb1, basic_block bb2);
3673 static tree
3674 vn_nary_op_get_predicated_value (vn_nary_op_t vno, basic_block bb)
3676 if (! vno->predicated_values)
3677 return vno->u.result;
3678 for (vn_pval *val = vno->u.values; val; val = val->next)
3679 for (unsigned i = 0; i < val->n; ++i)
3680 if (dominated_by_p_w_unex (bb,
3681 BASIC_BLOCK_FOR_FN
3682 (cfun, val->valid_dominated_by_p[i])))
3683 return val->result;
3684 return NULL_TREE;
3687 /* Insert the rhs of STMT into the current hash table with a value number of
3688 RESULT. */
3690 static vn_nary_op_t
3691 vn_nary_op_insert_stmt (gimple *stmt, tree result)
3693 vn_nary_op_t vno1
3694 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
3695 result, VN_INFO (result)->value_id);
3696 init_vn_nary_op_from_stmt (vno1, stmt);
3697 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
3700 /* Compute a hashcode for PHI operation VP1 and return it. */
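/* PHIs with more than two arguments are only ever matched within
   their own block (see vn_phi_eq), so the block index seeds the hash;
   one- and two-argument PHIs may also be matched across blocks and
   are seeded with the argument count instead. */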
3702 static inline hashval_t
3703 vn_phi_compute_hash (vn_phi_t vp1)
3705 inchash::hash hstate (EDGE_COUNT (vp1->block->preds) > 2
3706 ? vp1->block->index : EDGE_COUNT (vp1->block->preds));
3707 tree phi1op;
3708 tree type;
3709 edge e;
3710 edge_iterator ei;
3712 /* If all PHI arguments are constants we need to distinguish
3713 the PHI node via its type. */
3714 type = vp1->type;
3715 hstate.merge_hash (vn_hash_type (type));
3717 FOR_EACH_EDGE (e, ei, vp1->block->preds)
3719 /* Don't hash backedge values; they need to be handled as VN_TOP
3720 for optimistic value-numbering. */
3721 if (e->flags & EDGE_DFS_BACK)
3722 continue;
3724 phi1op = vp1->phiargs[e->dest_idx];
3725 if (phi1op == VN_TOP)
3726 continue;
3727 inchash::add_expr (phi1op, hstate);
3730 return hstate.end ();
3734 /* Return true if COND1 and COND2 represent the same condition; set
3735 *INVERTED_P if one needs to be inverted to make it the same as
3736 the other. */
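/* For example, a < b and b > a denote the same condition once the
   operands of the second are swapped, while a < b and a >= b are the
   same up to inversion (for floating-point operands only in the
   absence of NaNs).  */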
3738 static bool
3739 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
3740 gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
3742 enum tree_code code1 = gimple_cond_code (cond1);
3743 enum tree_code code2 = gimple_cond_code (cond2);
3745 *inverted_p = false;
3746 if (code1 == code2)
3748 else if (code1 == swap_tree_comparison (code2))
3749 std::swap (lhs2, rhs2);
3750 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
3751 *inverted_p = true;
3752 else if (code1 == invert_tree_comparison
3753 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
3755 std::swap (lhs2, rhs2);
3756 *inverted_p = true;
3758 else
3759 return false;
3761 return ((expressions_equal_p (lhs1, lhs2)
3762 && expressions_equal_p (rhs1, rhs2))
3763 || (commutative_tree_code (code1)
3764 && expressions_equal_p (lhs1, rhs2)
3765 && expressions_equal_p (rhs1, lhs2)));
3768 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
3770 static int
3771 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
3773 if (vp1->hashcode != vp2->hashcode)
3774 return false;
3776 if (vp1->block != vp2->block)
3778 if (EDGE_COUNT (vp1->block->preds) != EDGE_COUNT (vp2->block->preds))
3779 return false;
3781 switch (EDGE_COUNT (vp1->block->preds))
3783 case 1:
3784 /* Single-arg PHIs are just copies. */
3785 break;
3787 case 2:
3789 /* Rule out backedges into the PHI. */
3790 if (vp1->block->loop_father->header == vp1->block
3791 || vp2->block->loop_father->header == vp2->block)
3792 return false;
3794 /* If the PHI nodes do not have compatible types
3795 they are not the same. */
3796 if (!types_compatible_p (vp1->type, vp2->type))
3797 return false;
3799 basic_block idom1
3800 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3801 basic_block idom2
3802 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
3803 /* If the immediate dominators end in switch stmts, multiple
3804 values may end up in the same PHI arg via intermediate
3805 CFG merges. */
3806 if (EDGE_COUNT (idom1->succs) != 2
3807 || EDGE_COUNT (idom2->succs) != 2)
3808 return false;
3810 /* Verify the controlling stmt is the same. */
3811 gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
3812 gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
3813 if (! last1 || ! last2)
3814 return false;
3815 bool inverted_p;
3816 if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
3817 last2, vp2->cclhs, vp2->ccrhs,
3818 &inverted_p))
3819 return false;
3821 /* Get at true/false controlled edges into the PHI. */
3822 edge te1, te2, fe1, fe2;
3823 if (! extract_true_false_controlled_edges (idom1, vp1->block,
3824 &te1, &fe1)
3825 || ! extract_true_false_controlled_edges (idom2, vp2->block,
3826 &te2, &fe2))
3827 return false;
3829 /* Swap edges if the second condition is the inverse of the
3830 first. */
3831 if (inverted_p)
3832 std::swap (te2, fe2);
3834 /* ??? Handle VN_TOP specially. */
3835 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
3836 vp2->phiargs[te2->dest_idx])
3837 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
3838 vp2->phiargs[fe2->dest_idx]))
3839 return false;
3841 return true;
3844 default:
3845 return false;
3849 /* If the PHI nodes do not have compatible types
3850 they are not the same. */
3851 if (!types_compatible_p (vp1->type, vp2->type))
3852 return false;
3854 /* Any phi in the same block will have its arguments in the
3855 same edge order, because of how we store phi nodes. */
3856 for (unsigned i = 0; i < EDGE_COUNT (vp1->block->preds); ++i)
3858 tree phi1op = vp1->phiargs[i];
3859 tree phi2op = vp2->phiargs[i];
3860 if (phi1op == VN_TOP || phi2op == VN_TOP)
3861 continue;
3862 if (!expressions_equal_p (phi1op, phi2op))
3863 return false;
3866 return true;
3869 /* Lookup PHI in the current hash table, and return the resulting
3870 value number if it exists in the hash table. Return NULL_TREE if
3871 it does not exist in the hash table. */
3873 static tree
3874 vn_phi_lookup (gimple *phi, bool backedges_varying_p)
3876 vn_phi_s **slot;
3877 struct vn_phi_s *vp1;
3878 edge e;
3879 edge_iterator ei;
3881 vp1 = XALLOCAVAR (struct vn_phi_s,
3882 sizeof (struct vn_phi_s)
3883 + (gimple_phi_num_args (phi) - 1) * sizeof (tree));
3885 /* Canonicalize the SSA_NAME's to their value number. */
3886 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3888 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3889 if (TREE_CODE (def) == SSA_NAME
3890 && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
3891 def = SSA_VAL (def);
3892 vp1->phiargs[e->dest_idx] = def;
3894 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3895 vp1->block = gimple_bb (phi);
3896 /* Extract values of the controlling condition. */
3897 vp1->cclhs = NULL_TREE;
3898 vp1->ccrhs = NULL_TREE;
3899 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3900 if (EDGE_COUNT (idom1->succs) == 2)
3901 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3903 /* ??? We want to use SSA_VAL here. But possibly not
3904 allow VN_TOP. */
3905 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3906 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3908 vp1->hashcode = vn_phi_compute_hash (vp1);
3909 slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, NO_INSERT);
3910 if (!slot)
3911 return NULL_TREE;
3912 return (*slot)->result;
3915 /* Insert PHI into the current hash table with a value number of
3916 RESULT. */
3918 static vn_phi_t
3919 vn_phi_insert (gimple *phi, tree result, bool backedges_varying_p)
3921 vn_phi_s **slot;
3922 vn_phi_t vp1 = (vn_phi_t) obstack_alloc (&vn_tables_obstack,
3923 sizeof (vn_phi_s)
3924 + ((gimple_phi_num_args (phi) - 1)
3925 * sizeof (tree)));
3926 edge e;
3927 edge_iterator ei;
3929 /* Canonicalize the SSA_NAME's to their value number. */
3930 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3932 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3933 if (TREE_CODE (def) == SSA_NAME
3934 && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
3935 def = SSA_VAL (def);
3936 vp1->phiargs[e->dest_idx] = def;
3938 vp1->value_id = VN_INFO (result)->value_id;
3939 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3940 vp1->block = gimple_bb (phi);
3941 /* Extract values of the controlling condition. */
3942 vp1->cclhs = NULL_TREE;
3943 vp1->ccrhs = NULL_TREE;
3944 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3945 if (EDGE_COUNT (idom1->succs) == 2)
3946 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3948 /* ??? We want to use SSA_VAL here. But possibly not
3949 allow VN_TOP. */
3950 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3951 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3953 vp1->result = result;
3954 vp1->hashcode = vn_phi_compute_hash (vp1);
3956 slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
3957 gcc_assert (!*slot);
3959 *slot = vp1;
3960 vp1->next = last_inserted_phi;
3961 last_inserted_phi = vp1;
3962 return vp1;
3966 /* Return true if BB1 is dominated by BB2 taking into account edges
3967 that are not executable. */
3969 static bool
3970 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
3972 edge_iterator ei;
3973 edge e;
3975 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3976 return true;
3978 /* Before iterating we'd like to know if there exists an
3979 (executable) path from bb2 to bb1 at all; if not we can
3980 directly return false. For now simply iterate once. */
3982 /* Iterate to the single executable bb1 predecessor. */
3983 if (EDGE_COUNT (bb1->preds) > 1)
3985 edge prede = NULL;
3986 FOR_EACH_EDGE (e, ei, bb1->preds)
3987 if (e->flags & EDGE_EXECUTABLE)
3989 if (prede)
3991 prede = NULL;
3992 break;
3994 prede = e;
3996 if (prede)
3998 bb1 = prede->src;
4000 /* Re-do the dominance check with changed bb1. */
4001 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4002 return true;
4006 /* Iterate to the single executable bb2 successor. */
4007 edge succe = NULL;
4008 FOR_EACH_EDGE (e, ei, bb2->succs)
4009 if (e->flags & EDGE_EXECUTABLE)
4011 if (succe)
4013 succe = NULL;
4014 break;
4016 succe = e;
4018 if (succe)
4020 /* Verify the reached block is only reached through succe.
4021 If there is only one edge we can spare us the dominator
4022 check and iterate directly. */
4023 if (EDGE_COUNT (succe->dest->preds) > 1)
4025 FOR_EACH_EDGE (e, ei, succe->dest->preds)
4026 if (e != succe
4027 && (e->flags & EDGE_EXECUTABLE))
4029 succe = NULL;
4030 break;
4033 if (succe)
4035 bb2 = succe->dest;
4037 /* Re-do the dominance check with changed bb2. */
4038 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4039 return true;
4043 /* We could now iterate updating bb1 / bb2. */
4044 return false;
4047 /* Set the value number of FROM to TO, return true if it has changed
4048 as a result. */
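/* To avoid oscillation during iteration the checks below refuse to
   change an already defined non-constant value number to a constant,
   or a defined value number to an undefined SSA name; such changes
   are forced to VARYING instead.  */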
4050 static inline bool
4051 set_ssa_val_to (tree from, tree to)
4053 vn_ssa_aux_t from_info = VN_INFO (from);
4054 tree currval = from_info->valnum; // SSA_VAL (from)
4055 poly_int64 toff, coff;
4057 /* The only thing we allow as value numbers are ssa_names
4058 and invariants. So assert that here. We don't allow VN_TOP
4059 as visiting a stmt should produce a value-number other than
4060 that.
4061 ??? Still VN_TOP can happen for unreachable code, so force
4062 it to varying in that case. Not all code is prepared to
4063 get VN_TOP on valueization. */
4064 if (to == VN_TOP)
4066 /* ??? When iterating and visiting PHI <undef, backedge-value>
4067 for the first time we rightfully get VN_TOP and we need to
4068 preserve that to optimize for example gcc.dg/tree-ssa/ssa-sccvn-2.c.
4069 With SCCVN we were simply lucky we iterated the other PHI
4070 cycles first and thus visited the backedge-value DEF. */
4071 if (currval == VN_TOP)
4072 goto set_and_exit;
4073 if (dump_file && (dump_flags & TDF_DETAILS))
4074 fprintf (dump_file, "Forcing value number to varying on "
4075 "receiving VN_TOP\n");
4076 to = from;
4079 gcc_checking_assert (to != NULL_TREE
4080 && ((TREE_CODE (to) == SSA_NAME
4081 && (to == from || SSA_VAL (to) == to))
4082 || is_gimple_min_invariant (to)));
4084 if (from != to)
4086 if (currval == from)
4088 if (dump_file && (dump_flags & TDF_DETAILS))
4090 fprintf (dump_file, "Not changing value number of ");
4091 print_generic_expr (dump_file, from);
4092 fprintf (dump_file, " from VARYING to ");
4093 print_generic_expr (dump_file, to);
4094 fprintf (dump_file, "\n");
4096 return false;
4098 bool curr_invariant = is_gimple_min_invariant (currval);
4099 bool curr_undefined = (TREE_CODE (currval) == SSA_NAME
4100 && ssa_undefined_value_p (currval, false));
4101 if (currval != VN_TOP
4102 && !curr_invariant
4103 && !curr_undefined
4104 && is_gimple_min_invariant (to))
4106 if (dump_file && (dump_flags & TDF_DETAILS))
4108 fprintf (dump_file, "Forcing VARYING instead of changing "
4109 "value number of ");
4110 print_generic_expr (dump_file, from);
4111 fprintf (dump_file, " from ");
4112 print_generic_expr (dump_file, currval);
4113 fprintf (dump_file, " (non-constant) to ");
4114 print_generic_expr (dump_file, to);
4115 fprintf (dump_file, " (constant)\n");
4117 to = from;
4119 else if (currval != VN_TOP
4120 && !curr_undefined
4121 && TREE_CODE (to) == SSA_NAME
4122 && ssa_undefined_value_p (to, false))
4124 if (dump_file && (dump_flags & TDF_DETAILS))
4126 fprintf (dump_file, "Forcing VARYING instead of changing "
4127 "value number of ");
4128 print_generic_expr (dump_file, from);
4129 fprintf (dump_file, " from ");
4130 print_generic_expr (dump_file, currval);
4131 fprintf (dump_file, " (non-undefined) to ");
4132 print_generic_expr (dump_file, to);
4133 fprintf (dump_file, " (undefined)\n");
4135 to = from;
4137 else if (TREE_CODE (to) == SSA_NAME
4138 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
4139 to = from;
4142 set_and_exit:
4143 if (dump_file && (dump_flags & TDF_DETAILS))
4145 fprintf (dump_file, "Setting value number of ");
4146 print_generic_expr (dump_file, from);
4147 fprintf (dump_file, " to ");
4148 print_generic_expr (dump_file, to);
4151 if (currval != to
4152 && !operand_equal_p (currval, to, 0)
4153 /* Different undefined SSA names are not actually different. See
4154 PR82320 for a testcase where we'd otherwise not terminate iteration. */
4155 && !(TREE_CODE (currval) == SSA_NAME
4156 && TREE_CODE (to) == SSA_NAME
4157 && ssa_undefined_value_p (currval, false)
4158 && ssa_undefined_value_p (to, false))
4159 /* ??? For addresses involving volatile objects or types operand_equal_p
4160 does not reliably detect ADDR_EXPRs as equal. We know we are only
4161 getting invariant gimple addresses here, so we can use
4162 get_addr_base_and_unit_offset to do this comparison. */
4163 && !(TREE_CODE (currval) == ADDR_EXPR
4164 && TREE_CODE (to) == ADDR_EXPR
4165 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
4166 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
4167 && known_eq (coff, toff)))
4169 if (dump_file && (dump_flags & TDF_DETAILS))
4170 fprintf (dump_file, " (changed)\n");
4171 from_info->valnum = to;
4172 return true;
4174 if (dump_file && (dump_flags & TDF_DETAILS))
4175 fprintf (dump_file, "\n");
4176 return false;
4179 /* Value number all definitions in STMT to themselves.
4180 Return true if a value number changed. */
4182 static bool
4183 defs_to_varying (gimple *stmt)
4185 bool changed = false;
4186 ssa_op_iter iter;
4187 def_operand_p defp;
4189 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
4191 tree def = DEF_FROM_PTR (defp);
4192 changed |= set_ssa_val_to (def, def);
4194 return changed;
4197 /* Visit a copy between LHS and RHS, return true if the value number
4198 changed. */
4200 static bool
4201 visit_copy (tree lhs, tree rhs)
4203 /* Valueize. */
4204 rhs = SSA_VAL (rhs);
4206 return set_ssa_val_to (lhs, rhs);
4209 /* Look up a value for OP in type WIDE_TYPE whose value in the type
4210 of OP is the same as OP. */
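/* For OP of type int and WIDE_TYPE long this returns, for example, an
   already value-numbered (long) OP if one exists, the wide source of a
   truncating conversion that defines OP, or OP extended to long when
   it is a constant; NULL_TREE otherwise.  */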
4212 static tree
4213 valueized_wider_op (tree wide_type, tree op)
4215 if (TREE_CODE (op) == SSA_NAME)
4216 op = vn_valueize (op);
4218 /* Either we have the op widened available. */
4219 tree ops[3] = {};
4220 ops[0] = op;
4221 tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
4222 wide_type, ops, NULL);
4223 if (tem)
4224 return tem;
4226 /* Or the op is truncated from some existing value. */
4227 if (TREE_CODE (op) == SSA_NAME)
4229 gimple *def = SSA_NAME_DEF_STMT (op);
4230 if (is_gimple_assign (def)
4231 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
4233 tem = gimple_assign_rhs1 (def);
4234 if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
4236 if (TREE_CODE (tem) == SSA_NAME)
4237 tem = vn_valueize (tem);
4238 return tem;
4243 /* For constants simply extend it. */
4244 if (TREE_CODE (op) == INTEGER_CST)
4245 return wide_int_to_tree (wide_type, wi::to_wide (op));
4247 return NULL_TREE;
4250 /* Visit a nary operator RHS, value number it, and return true if the
4251 value number of LHS has changed as a result. */
4253 static bool
4254 visit_nary_op (tree lhs, gassign *stmt)
4256 vn_nary_op_t vnresult;
4257 tree result = vn_nary_op_lookup_stmt (stmt, &vnresult);
4258 if (! result && vnresult)
4259 result = vn_nary_op_get_predicated_value (vnresult, gimple_bb (stmt));
4260 if (result)
4261 return set_ssa_val_to (lhs, result);
4263 /* Do some special pattern matching for redundancies of operations
4264 in different types. */
4265 enum tree_code code = gimple_assign_rhs_code (stmt);
4266 tree type = TREE_TYPE (lhs);
4267 tree rhs1 = gimple_assign_rhs1 (stmt);
4268 switch (code)
4270 CASE_CONVERT:
4271 /* Match arithmetic done in a different type where we can easily
4272 substitute the result from some earlier sign-changed or widened
4273 operation. */
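/* For example, given a widening conversion of a narrower operation

       int  _1 = a_2 + b_3;
       long _4 = (long) _1;

   look up an existing  (long) a_2 + (long) b_3.  If the precisions
   match or overflow in the narrow operation is undefined, _4 can be
   valued to that result directly; for a zero-extension it is instead
   valued to that result masked with a suitable BIT_AND_EXPR.  */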
4274 if (INTEGRAL_TYPE_P (type)
4275 && TREE_CODE (rhs1) == SSA_NAME
4276 /* We only handle sign-changes, zero-extension -> & mask or
4277 sign-extension if we know the inner operation doesn't
4278 overflow. */
4279 && (((TYPE_UNSIGNED (TREE_TYPE (rhs1))
4280 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
4281 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
4282 && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
4283 || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
4285 gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
4286 if (def
4287 && (gimple_assign_rhs_code (def) == PLUS_EXPR
4288 || gimple_assign_rhs_code (def) == MINUS_EXPR
4289 || gimple_assign_rhs_code (def) == MULT_EXPR))
4291 tree ops[3] = {};
4292 /* Either we have the op widened available. */
4293 ops[0] = valueized_wider_op (type,
4294 gimple_assign_rhs1 (def));
4295 if (ops[0])
4296 ops[1] = valueized_wider_op (type,
4297 gimple_assign_rhs2 (def));
4298 if (ops[0] && ops[1])
4300 ops[0] = vn_nary_op_lookup_pieces
4301 (2, gimple_assign_rhs_code (def), type, ops, NULL);
4302 /* We have the wider operation available. */
4303 if (ops[0]
4304 /* If the leader is a wrapping operation we can
4305 insert it for code hoisting w/o introducing
4306 undefined overflow. If it is not it has to
4307 be available. See PR86554. */
4308 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (ops[0]))
4309 || (rpo_avail && vn_context_bb
4310 && rpo_avail->eliminate_avail (vn_context_bb,
4311 ops[0]))))
4313 unsigned lhs_prec = TYPE_PRECISION (type);
4314 unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
4315 if (lhs_prec == rhs_prec
4316 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
4317 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
4319 gimple_match_op match_op (gimple_match_cond::UNCOND,
4320 NOP_EXPR, type, ops[0]);
4321 result = vn_nary_build_or_lookup (&match_op);
4322 if (result)
4324 bool changed = set_ssa_val_to (lhs, result);
4325 vn_nary_op_insert_stmt (stmt, result);
4326 return changed;
4329 else
4331 tree mask = wide_int_to_tree
4332 (type, wi::mask (rhs_prec, false, lhs_prec));
4333 gimple_match_op match_op (gimple_match_cond::UNCOND,
4334 BIT_AND_EXPR,
4335 TREE_TYPE (lhs),
4336 ops[0], mask);
4337 result = vn_nary_build_or_lookup (&match_op);
4338 if (result)
4340 bool changed = set_ssa_val_to (lhs, result);
4341 vn_nary_op_insert_stmt (stmt, result);
4342 return changed;
4349 default:;
4352 bool changed = set_ssa_val_to (lhs, lhs);
4353 vn_nary_op_insert_stmt (stmt, lhs);
4354 return changed;
4357 /* Visit a call STMT storing into LHS. Return true if the value number
4358 of the LHS has changed as a result. */
4360 static bool
4361 visit_reference_op_call (tree lhs, gcall *stmt)
4363 bool changed = false;
4364 struct vn_reference_s vr1;
4365 vn_reference_t vnresult = NULL;
4366 tree vdef = gimple_vdef (stmt);
4368 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
4369 if (lhs && TREE_CODE (lhs) != SSA_NAME)
4370 lhs = NULL_TREE;
4372 vn_reference_lookup_call (stmt, &vnresult, &vr1);
4373 if (vnresult)
4375 if (vnresult->result_vdef && vdef)
4376 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
4377 else if (vdef)
4378 /* If the call was discovered to be pure or const reflect
4379 that as far as possible. */
4380 changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
4382 if (!vnresult->result && lhs)
4383 vnresult->result = lhs;
4385 if (vnresult->result && lhs)
4386 changed |= set_ssa_val_to (lhs, vnresult->result);
4388 else
4390 vn_reference_t vr2;
4391 vn_reference_s **slot;
4392 tree vdef_val = vdef;
4393 if (vdef)
4395 /* If we value-numbered the function of an indirect call to
4396 one not clobbering memory, value-number its VDEF to its
4397 VUSE. */
4398 tree fn = gimple_call_fn (stmt);
4399 if (fn && TREE_CODE (fn) == SSA_NAME)
4401 fn = SSA_VAL (fn);
4402 if (TREE_CODE (fn) == ADDR_EXPR
4403 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
4404 && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
4405 & (ECF_CONST | ECF_PURE)))
4406 vdef_val = vuse_ssa_val (gimple_vuse (stmt));
4408 changed |= set_ssa_val_to (vdef, vdef_val);
4410 if (lhs)
4411 changed |= set_ssa_val_to (lhs, lhs);
4412 vr2 = XOBNEW (&vn_tables_obstack, vn_reference_s);
4413 vr2->vuse = vr1.vuse;
4414 /* As we are not walking the virtual operand chain we know the
4415 shared_lookup_references are still original so we can re-use
4416 them here. */
4417 vr2->operands = vr1.operands.copy ();
4418 vr2->type = vr1.type;
4419 vr2->set = vr1.set;
4420 vr2->hashcode = vr1.hashcode;
4421 vr2->result = lhs;
4422 vr2->result_vdef = vdef_val;
4423 vr2->value_id = 0;
4424 slot = valid_info->references->find_slot_with_hash (vr2, vr2->hashcode,
4425 INSERT);
4426 gcc_assert (!*slot);
4427 *slot = vr2;
4428 vr2->next = last_inserted_ref;
4429 last_inserted_ref = vr2;
4432 return changed;
4435 /* Visit a load from a reference operator RHS, part of STMT, value number it,
4436 and return true if the value number of the LHS has changed as a result. */
4438 static bool
4439 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
4441 bool changed = false;
4442 tree last_vuse;
4443 tree result;
4445 last_vuse = gimple_vuse (stmt);
4446 result = vn_reference_lookup (op, gimple_vuse (stmt),
4447 default_vn_walk_kind, NULL, true, &last_vuse);
4449 /* We handle type-punning through unions by value-numbering based
4450 on offset and size of the access. Be prepared to handle a
4451 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
4452 if (result
4453 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
4455 /* We will be setting the value number of lhs to the value number
4456 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
4457 So first simplify and lookup this expression to see if it
4458 is already available. */
4459 gimple_match_op res_op (gimple_match_cond::UNCOND,
4460 VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
4461 result = vn_nary_build_or_lookup (&res_op);
4462 /* When building the conversion fails, avoid inserting the reference
4463 again. */
4464 if (!result)
4465 return set_ssa_val_to (lhs, lhs);
4468 if (result)
4469 changed = set_ssa_val_to (lhs, result);
4470 else
4472 changed = set_ssa_val_to (lhs, lhs);
4473 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
4476 return changed;
4480 /* Visit a store to a reference operator LHS, part of STMT, value number it,
4481 and return true if the value number of the LHS has changed as a result. */
4483 static bool
4484 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
4486 bool changed = false;
4487 vn_reference_t vnresult = NULL;
4488 tree assign;
4489 bool resultsame = false;
4490 tree vuse = gimple_vuse (stmt);
4491 tree vdef = gimple_vdef (stmt);
4493 if (TREE_CODE (op) == SSA_NAME)
4494 op = SSA_VAL (op);
4496 /* First we want to lookup using the *vuses* from the store and see
4497 if the last store to this location with the same address
4498 had the same value.
4500 The vuses represent the memory state before the store. If the
4501 memory state, address, and value of the store are the same as the
4502 last store to this location, then this store will produce the
4503 same memory state as that store.
4505 In this case the vdef versions for this store are value numbered to those
4506 vuse versions, since they represent the same memory state after
4507 this store.
4509 Otherwise, the vdefs for the store are used when inserting into
4510 the table, since the store generates a new memory state. */
4512 vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
4513 if (vnresult
4514 && vnresult->result)
4516 tree result = vnresult->result;
4517 gcc_checking_assert (TREE_CODE (result) != SSA_NAME
4518 || result == SSA_VAL (result));
4519 resultsame = expressions_equal_p (result, op);
4520 if (resultsame)
4522 /* If the TBAA state isn't compatible for downstream reads
4523 we cannot value-number the VDEFs the same. */
4524 alias_set_type set = get_alias_set (lhs);
4525 if (vnresult->set != set
4526 && ! alias_set_subset_of (set, vnresult->set))
4527 resultsame = false;
4531 if (!resultsame)
4533 /* Only perform the following when being called from PRE
4534 which embeds tail merging. */
4535 if (default_vn_walk_kind == VN_WALK)
4537 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
4538 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
4539 if (vnresult)
4541 VN_INFO (vdef)->visited = true;
4542 return set_ssa_val_to (vdef, vnresult->result_vdef);
4546 if (dump_file && (dump_flags & TDF_DETAILS))
4548 fprintf (dump_file, "No store match\n");
4549 fprintf (dump_file, "Value numbering store ");
4550 print_generic_expr (dump_file, lhs);
4551 fprintf (dump_file, " to ");
4552 print_generic_expr (dump_file, op);
4553 fprintf (dump_file, "\n");
4555 /* Have to set value numbers before insert, since insert is
4556 going to valueize the references in-place. */
4557 if (vdef)
4558 changed |= set_ssa_val_to (vdef, vdef);
4560 /* Do not insert structure copies into the tables. */
4561 if (is_gimple_min_invariant (op)
4562 || is_gimple_reg (op))
4563 vn_reference_insert (lhs, op, vdef, NULL);
4565 /* Only perform the following when being called from PRE
4566 which embeds tail merging. */
4567 if (default_vn_walk_kind == VN_WALK)
4569 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
4570 vn_reference_insert (assign, lhs, vuse, vdef);
4573 else
4575 /* We had a match, so value number the vdef to have the value
4576 number of the vuse it came from. */
4578 if (dump_file && (dump_flags & TDF_DETAILS))
4579 fprintf (dump_file, "Store matched earlier value, "
4580 "value numbering store vdefs to matching vuses.\n");
4582 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
4585 return changed;
4588 /* Visit and value number PHI, return true if the value number
4589 changed. When BACKEDGES_VARYING_P is true then assume all
4590 backedge values are varying. When INSERTED is not NULL then
4591 this is just an ahead query for a possible iteration; set INSERTED
4592 to true if we'd insert into the hashtable. */
4594 static bool
4595 visit_phi (gimple *phi, bool *inserted, bool backedges_varying_p)
4597 tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
4598 tree backedge_val = NULL_TREE;
4599 bool seen_non_backedge = false;
4600 tree sameval_base = NULL_TREE;
4601 poly_int64 soff, doff;
4602 unsigned n_executable = 0;
4603 edge_iterator ei;
4604 edge e;
4606 /* TODO: We could check for this in initialization, and replace this
4607 with a gcc_assert. */
4608 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
4609 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
4611 /* We track whether a PHI was CSEd to, to avoid excessive iterations
4612 that would be necessary only because the PHI changed arguments
4613 but not value. */
4614 if (!inserted)
4615 gimple_set_plf (phi, GF_PLF_1, false);
4617 /* See if all non-TOP arguments have the same value. TOP is
4618 equivalent to everything, so we can ignore it. */
4619 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
4620 if (e->flags & EDGE_EXECUTABLE)
4622 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4624 ++n_executable;
4625 if (TREE_CODE (def) == SSA_NAME)
4627 if (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK))
4628 def = SSA_VAL (def);
4629 if (e->flags & EDGE_DFS_BACK)
4630 backedge_val = def;
4632 if (!(e->flags & EDGE_DFS_BACK))
4633 seen_non_backedge = true;
4634 if (def == VN_TOP)
4636 /* Ignore undefined defs for sameval but record one. */
4637 else if (TREE_CODE (def) == SSA_NAME
4638 && ! virtual_operand_p (def)
4639 && ssa_undefined_value_p (def, false))
4640 seen_undef = def;
4641 else if (sameval == VN_TOP)
4642 sameval = def;
4643 else if (!expressions_equal_p (def, sameval))
4645 /* We know we're arriving only with invariant addresses here;
4646 try harder comparing them. We can do some caching here
4647 which we cannot do in expressions_equal_p. */
4648 if (TREE_CODE (def) == ADDR_EXPR
4649 && TREE_CODE (sameval) == ADDR_EXPR
4650 && sameval_base != (void *)-1)
4652 if (!sameval_base)
4653 sameval_base = get_addr_base_and_unit_offset
4654 (TREE_OPERAND (sameval, 0), &soff);
4655 if (!sameval_base)
4656 sameval_base = (tree)(void *)-1;
4657 else if ((get_addr_base_and_unit_offset
4658 (TREE_OPERAND (def, 0), &doff) == sameval_base)
4659 && known_eq (soff, doff))
4660 continue;
4662 sameval = NULL_TREE;
4663 break;
4667 /* If the value we want to use is flowing over the backedge and we
4668 should take it as VARYING but it has a non-VARYING value, drop to
4669 VARYING.
4670 If we value-number a virtual operand never value-number to the
4671 value from the backedge as that confuses the alias-walking code.
4672 See gcc.dg/torture/pr87176.c. If the value is the same on a
4673 non-backedge everything is OK though. */
4674 bool visited_p;
4675 if ((backedge_val
4676 && !seen_non_backedge
4677 && TREE_CODE (backedge_val) == SSA_NAME
4678 && sameval == backedge_val
4679 && (SSA_NAME_IS_VIRTUAL_OPERAND (backedge_val)
4680 || SSA_VAL (backedge_val) != backedge_val))
4681 /* Do not value-number a virtual operand to something not visited,
4682 though, as that would allow us to escape a region in alias walking. */
4683 || (sameval
4684 && TREE_CODE (sameval) == SSA_NAME
4685 && !SSA_NAME_IS_DEFAULT_DEF (sameval)
4686 && SSA_NAME_IS_VIRTUAL_OPERAND (sameval)
4687 && (SSA_VAL (sameval, &visited_p), !visited_p)))
4688 /* Note this just drops to VARYING without inserting the PHI into
4689 the hashes. */
4690 result = PHI_RESULT (phi);
4691 /* If none of the edges was executable keep the value-number at VN_TOP;
4692 if only a single edge is executable use its value. */
4693 else if (n_executable <= 1)
4694 result = seen_undef ? seen_undef : sameval;
4695 /* If we saw only undefined values and VN_TOP use one of the
4696 undefined values. */
4697 else if (sameval == VN_TOP)
4698 result = seen_undef ? seen_undef : sameval;
4699 /* First see if it is equivalent to a phi node in this block. We prefer
4700 this as it allows IV elimination - see PRs 66502 and 67167. */
4701 else if ((result = vn_phi_lookup (phi, backedges_varying_p)))
4703 if (!inserted
4704 && TREE_CODE (result) == SSA_NAME
4705 && gimple_code (SSA_NAME_DEF_STMT (result)) == GIMPLE_PHI)
4707 gimple_set_plf (SSA_NAME_DEF_STMT (result), GF_PLF_1, true);
4708 if (dump_file && (dump_flags & TDF_DETAILS))
4710 fprintf (dump_file, "Marking CSEd to PHI node ");
4711 print_gimple_expr (dump_file, SSA_NAME_DEF_STMT (result),
4712 0, TDF_SLIM);
4713 fprintf (dump_file, "\n");
4717 /* If all values are the same use that, unless we've seen undefined
4718 values as well and the value isn't constant.
4719 CCP/copyprop have the same restriction to not remove uninit warnings. */
4720 else if (sameval
4721 && (! seen_undef || is_gimple_min_invariant (sameval)))
4722 result = sameval;
4723 else
4725 result = PHI_RESULT (phi);
4726 /* Only insert PHIs that are varying; for constant value numbers
4727 we mess up equivalences otherwise as we are only comparing
4728 the immediate controlling predicates. */
4729 vn_phi_insert (phi, result, backedges_varying_p);
4730 if (inserted)
4731 *inserted = true;
4734 return set_ssa_val_to (PHI_RESULT (phi), result);
4737 /* Try to simplify RHS using equivalences and constant folding. */
4739 static tree
4740 try_to_simplify (gassign *stmt)
4742 enum tree_code code = gimple_assign_rhs_code (stmt);
4743 tree tem;
4745 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
4746 in this case; there is no point in doing extra work. */
4747 if (code == SSA_NAME)
4748 return NULL_TREE;
4750 /* First try constant folding based on our current lattice. */
4751 mprts_hook = vn_lookup_simplify_result;
4752 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
4753 mprts_hook = NULL;
4754 if (tem
4755 && (TREE_CODE (tem) == SSA_NAME
4756 || is_gimple_min_invariant (tem)))
4757 return tem;
4759 return NULL_TREE;
4762 /* Visit and value number STMT, return true if the value number
4763 changed. */
4765 static bool
4766 visit_stmt (gimple *stmt, bool backedges_varying_p = false)
4768 bool changed = false;
4770 if (dump_file && (dump_flags & TDF_DETAILS))
4772 fprintf (dump_file, "Value numbering stmt = ");
4773 print_gimple_stmt (dump_file, stmt, 0);
4776 if (gimple_code (stmt) == GIMPLE_PHI)
4777 changed = visit_phi (stmt, NULL, backedges_varying_p);
4778 else if (gimple_has_volatile_ops (stmt))
4779 changed = defs_to_varying (stmt);
4780 else if (gassign *ass = dyn_cast <gassign *> (stmt))
4782 enum tree_code code = gimple_assign_rhs_code (ass);
4783 tree lhs = gimple_assign_lhs (ass);
4784 tree rhs1 = gimple_assign_rhs1 (ass);
4785 tree simplified;
4787 /* Shortcut for copies. Simplifying copies is pointless,
4788 since we copy the expression and value they represent. */
4789 if (code == SSA_NAME
4790 && TREE_CODE (lhs) == SSA_NAME)
4792 changed = visit_copy (lhs, rhs1);
4793 goto done;
4795 simplified = try_to_simplify (ass);
4796 if (simplified)
4798 if (dump_file && (dump_flags & TDF_DETAILS))
4800 fprintf (dump_file, "RHS ");
4801 print_gimple_expr (dump_file, ass, 0);
4802 fprintf (dump_file, " simplified to ");
4803 print_generic_expr (dump_file, simplified);
4804 fprintf (dump_file, "\n");
4807 /* Setting value numbers to constants will occasionally
4808 screw up phi congruence because constants are not
4809 uniquely associated with a single ssa name that can be
4810 looked up. */
4811 if (simplified
4812 && is_gimple_min_invariant (simplified)
4813 && TREE_CODE (lhs) == SSA_NAME)
4815 changed = set_ssa_val_to (lhs, simplified);
4816 goto done;
4818 else if (simplified
4819 && TREE_CODE (simplified) == SSA_NAME
4820 && TREE_CODE (lhs) == SSA_NAME)
4822 changed = visit_copy (lhs, simplified);
4823 goto done;
4826 if ((TREE_CODE (lhs) == SSA_NAME
4827 /* We can substitute SSA_NAMEs that are live over
4828 abnormal edges with their constant value. */
4829 && !(gimple_assign_copy_p (ass)
4830 && is_gimple_min_invariant (rhs1))
4831 && !(simplified
4832 && is_gimple_min_invariant (simplified))
4833 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4834 /* Stores or copies from SSA_NAMEs that are live over
4835 abnormal edges are a problem. */
4836 || (code == SSA_NAME
4837 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
4838 changed = defs_to_varying (ass);
4839 else if (REFERENCE_CLASS_P (lhs)
4840 || DECL_P (lhs))
4841 changed = visit_reference_op_store (lhs, rhs1, ass);
4842 else if (TREE_CODE (lhs) == SSA_NAME)
4844 if ((gimple_assign_copy_p (ass)
4845 && is_gimple_min_invariant (rhs1))
4846 || (simplified
4847 && is_gimple_min_invariant (simplified)))
4849 if (simplified)
4850 changed = set_ssa_val_to (lhs, simplified);
4851 else
4852 changed = set_ssa_val_to (lhs, rhs1);
4854 else
4856 /* Visit the original statement. */
4857 switch (vn_get_stmt_kind (ass))
4859 case VN_NARY:
4860 changed = visit_nary_op (lhs, ass);
4861 break;
4862 case VN_REFERENCE:
4863 changed = visit_reference_op_load (lhs, rhs1, ass);
4864 break;
4865 default:
4866 changed = defs_to_varying (ass);
4867 break;
4871 else
4872 changed = defs_to_varying (ass);
4874 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
4876 tree lhs = gimple_call_lhs (call_stmt);
4877 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4879 /* Try constant folding based on our current lattice. */
4880 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
4881 vn_valueize);
4882 if (simplified)
4884 if (dump_file && (dump_flags & TDF_DETAILS))
4886 fprintf (dump_file, "call ");
4887 print_gimple_expr (dump_file, call_stmt, 0);
4888 fprintf (dump_file, " simplified to ");
4889 print_generic_expr (dump_file, simplified);
4890 fprintf (dump_file, "\n");
4893 /* Setting value numbers to constants will occasionally
4894 screw up phi congruence because constants are not
4895 uniquely associated with a single ssa name that can be
4896 looked up. */
4897 if (simplified
4898 && is_gimple_min_invariant (simplified))
4900 changed = set_ssa_val_to (lhs, simplified);
4901 if (gimple_vdef (call_stmt))
4902 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4903 SSA_VAL (gimple_vuse (call_stmt)));
4904 goto done;
4906 else if (simplified
4907 && TREE_CODE (simplified) == SSA_NAME)
4909 changed = visit_copy (lhs, simplified);
4910 if (gimple_vdef (call_stmt))
4911 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4912 SSA_VAL (gimple_vuse (call_stmt)));
4913 goto done;
4915 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4917 changed = defs_to_varying (call_stmt);
4918 goto done;
4922 /* Pick up flags from a devirtualization target. */
4923 tree fn = gimple_call_fn (stmt);
4924 int extra_fnflags = 0;
4925 if (fn && TREE_CODE (fn) == SSA_NAME)
4927 fn = SSA_VAL (fn);
4928 if (TREE_CODE (fn) == ADDR_EXPR
4929 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4930 extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
4932 if (!gimple_call_internal_p (call_stmt)
4933 && (/* Calls to the same function with the same vuse
4934 and the same operands do not necessarily return the same
4935 value, unless they're pure or const. */
4936 ((gimple_call_flags (call_stmt) | extra_fnflags)
4937 & (ECF_PURE | ECF_CONST))
4938 /* If calls have a vdef, subsequent calls won't have
4939 the same incoming vuse. So, if 2 calls with vdef have the
4940 same vuse, we know they're not subsequent.
4941 We can value number 2 calls to the same function with the
4942 same vuse and the same operands which are not subsequent
4943 the same, because there is no code in the program that can
4944 compare the 2 values... */
4945 || (gimple_vdef (call_stmt)
4946 /* ... unless the call returns a pointer which does
4947 not alias with anything else, in which case the
4948 information that the values are distinct is encoded
4949 in the IL. */
4950 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
4951 /* Only perform the following when being called from PRE
4952 which embeds tail merging. */
4953 && default_vn_walk_kind == VN_WALK)))
4954 changed = visit_reference_op_call (lhs, call_stmt);
4955 else
4956 changed = defs_to_varying (call_stmt);
4958 else
4959 changed = defs_to_varying (stmt);
4960 done:
4961 return changed;
4965 /* Allocate a value number table. */
4967 static void
4968 allocate_vn_table (vn_tables_t table, unsigned size)
4970 table->phis = new vn_phi_table_type (size);
4971 table->nary = new vn_nary_op_table_type (size);
4972 table->references = new vn_reference_table_type (size);
4975 /* Free a value number table. */
4977 static void
4978 free_vn_table (vn_tables_t table)
4980 /* Walk over elements and release vectors. */
4981 vn_reference_iterator_type hir;
4982 vn_reference_t vr;
4983 FOR_EACH_HASH_TABLE_ELEMENT (*table->references, vr, vn_reference_t, hir)
4984 vr->operands.release ();
4985 delete table->phis;
4986 table->phis = NULL;
4987 delete table->nary;
4988 table->nary = NULL;
4989 delete table->references;
4990 table->references = NULL;
4993 /* Set *ID according to RESULT. */
4995 static void
4996 set_value_id_for_result (tree result, unsigned int *id)
4998 if (result && TREE_CODE (result) == SSA_NAME)
4999 *id = VN_INFO (result)->value_id;
5000 else if (result && is_gimple_min_invariant (result))
5001 *id = get_or_alloc_constant_value_id (result);
5002 else
5003 *id = get_next_value_id ();
5006 /* Set the value ids in the valid hash tables. */
5008 static void
5009 set_hashtable_value_ids (void)
5011 vn_nary_op_iterator_type hin;
5012 vn_phi_iterator_type hip;
5013 vn_reference_iterator_type hir;
5014 vn_nary_op_t vno;
5015 vn_reference_t vr;
5016 vn_phi_t vp;
5018 /* Now set the value ids of the things we had put in the hash
5019 table. */
5021 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
5022 if (! vno->predicated_values)
5023 set_value_id_for_result (vno->u.result, &vno->value_id);
5025 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
5026 set_value_id_for_result (vp->result, &vp->value_id);
5028 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
5029 hir)
5030 set_value_id_for_result (vr->result, &vr->value_id);
5033 /* Return the maximum value id we have ever seen. */
5035 unsigned int
5036 get_max_value_id (void)
5038 return next_value_id;
5041 /* Return the next unique value id. */
5043 unsigned int
5044 get_next_value_id (void)
5046 return next_value_id++;
5050 /* Compare two expressions E1 and E2 and return true if they are equal. */
5052 bool
5053 expressions_equal_p (tree e1, tree e2)
5055 /* The obvious case. */
5056 if (e1 == e2)
5057 return true;
5059 /* If either one is VN_TOP consider them equal. */
5060 if (e1 == VN_TOP || e2 == VN_TOP)
5061 return true;
5063 /* If only one of them is null, they cannot be equal. */
5064 if (!e1 || !e2)
5065 return false;
5067 /* Now perform the actual comparison. */
5068 if (TREE_CODE (e1) == TREE_CODE (e2)
5069 && operand_equal_p (e1, e2, OEP_PURE_SAME))
5070 return true;
5072 return false;
5076 /* Return true if the nary operation NARY may trap. This is a copy
5077 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
5079 bool
5080 vn_nary_may_trap (vn_nary_op_t nary)
5082 tree type;
5083 tree rhs2 = NULL_TREE;
5084 bool honor_nans = false;
5085 bool honor_snans = false;
5086 bool fp_operation = false;
5087 bool honor_trapv = false;
5088 bool handled, ret;
5089 unsigned i;
5091 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
5092 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
5093 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
5095 type = nary->type;
5096 fp_operation = FLOAT_TYPE_P (type);
5097 if (fp_operation)
5099 honor_nans = flag_trapping_math && !flag_finite_math_only;
5100 honor_snans = flag_signaling_nans != 0;
5102 else if (INTEGRAL_TYPE_P (type)
5103 && TYPE_OVERFLOW_TRAPS (type))
5104 honor_trapv = true;
5106 if (nary->length >= 2)
5107 rhs2 = nary->op[1];
5108 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
5109 honor_trapv,
5110 honor_nans, honor_snans, rhs2,
5111 &handled);
5112 if (handled
5113 && ret)
5114 return true;
5116 for (i = 0; i < nary->length; ++i)
5117 if (tree_could_trap_p (nary->op[i]))
5118 return true;
5120 return false;
5123 /* Return true if the reference operation REF may trap. */
5125 bool
5126 vn_reference_may_trap (vn_reference_t ref)
5128 switch (ref->operands[0].opcode)
5130 case MODIFY_EXPR:
5131 case CALL_EXPR:
5132 /* We do not handle calls. */
5133 case ADDR_EXPR:
5134 /* And toplevel address computations never trap. */
5135 return false;
5136 default:;
5139 vn_reference_op_t op;
5140 unsigned i;
5141 FOR_EACH_VEC_ELT (ref->operands, i, op)
5143 switch (op->opcode)
5145 case WITH_SIZE_EXPR:
5146 case TARGET_MEM_REF:
5147 /* Always variable. */
5148 return true;
5149 case COMPONENT_REF:
5150 if (op->op1 && TREE_CODE (op->op1) == SSA_NAME)
5151 return true;
5152 break;
5153 case ARRAY_RANGE_REF:
5154 case ARRAY_REF:
5155 if (TREE_CODE (op->op0) == SSA_NAME)
5156 return true;
5157 break;
5158 case MEM_REF:
5159 /* Nothing interesting in itself, the base is separate. */
5160 break;
5161 /* The following are the address bases. */
5162 case SSA_NAME:
5163 return true;
5164 case ADDR_EXPR:
5165 if (op->op0)
5166 return tree_could_trap_p (TREE_OPERAND (op->op0, 0));
5167 return false;
5168 default:;
5171 return false;
5174 eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
5175 bitmap inserted_exprs_)
5176 : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
5177 el_todo (0), eliminations (0), insertions (0),
5178 inserted_exprs (inserted_exprs_)
5180 need_eh_cleanup = BITMAP_ALLOC (NULL);
5181 need_ab_cleanup = BITMAP_ALLOC (NULL);
5184 eliminate_dom_walker::~eliminate_dom_walker ()
5186 BITMAP_FREE (need_eh_cleanup);
5187 BITMAP_FREE (need_ab_cleanup);
5190 /* Return a leader for OP that is available at the current point of the
5191 eliminate domwalk. */
5193 tree
5194 eliminate_dom_walker::eliminate_avail (basic_block, tree op)
5196 tree valnum = VN_INFO (op)->valnum;
5197 if (TREE_CODE (valnum) == SSA_NAME)
5199 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
5200 return valnum;
5201 if (avail.length () > SSA_NAME_VERSION (valnum))
5202 return avail[SSA_NAME_VERSION (valnum)];
5204 else if (is_gimple_min_invariant (valnum))
5205 return valnum;
5206 return NULL_TREE;
5209 /* At the current point of the eliminate domwalk make OP available. */
5211 void
5212 eliminate_dom_walker::eliminate_push_avail (basic_block, tree op)
5214 tree valnum = VN_INFO (op)->valnum;
5215 if (TREE_CODE (valnum) == SSA_NAME)
5217 if (avail.length () <= SSA_NAME_VERSION (valnum))
5218 avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
5219 tree pushop = op;
5220 if (avail[SSA_NAME_VERSION (valnum)])
5221 pushop = avail[SSA_NAME_VERSION (valnum)];
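/* Push the previous leader (or OP itself if there was none) so the
   old mapping can be restored when the avail stack is unwound.  */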
5222 avail_stack.safe_push (pushop);
5223 avail[SSA_NAME_VERSION (valnum)] = op;
5227 /* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
5228 the leader for the expression if insertion was successful. */
5230 tree
5231 eliminate_dom_walker::eliminate_insert (basic_block bb,
5232 gimple_stmt_iterator *gsi, tree val)
5234 /* We can insert a sequence with a single assignment only. */
5235 gimple_seq stmts = VN_INFO (val)->expr;
5236 if (!gimple_seq_singleton_p (stmts))
5237 return NULL_TREE;
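/* Only conversions, VIEW_CONVERT_EXPRs, BIT_FIELD_REFs and
   BIT_AND_EXPRs with a constant second operand are re-materialized
   here.  */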
5238 gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
5239 if (!stmt
5240 || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
5241 && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
5242 && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
5243 && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
5244 || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
5245 return NULL_TREE;
5247 tree op = gimple_assign_rhs1 (stmt);
5248 if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
5249 || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5250 op = TREE_OPERAND (op, 0);
5251 tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (bb, op) : op;
5252 if (!leader)
5253 return NULL_TREE;
5255 tree res;
5256 stmts = NULL;
5257 if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5258 res = gimple_build (&stmts, BIT_FIELD_REF,
5259 TREE_TYPE (val), leader,
5260 TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
5261 TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
5262 else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
5263 res = gimple_build (&stmts, BIT_AND_EXPR,
5264 TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
5265 else
5266 res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
5267 TREE_TYPE (val), leader);
5268 if (TREE_CODE (res) != SSA_NAME
5269 || SSA_NAME_IS_DEFAULT_DEF (res)
5270 || gimple_bb (SSA_NAME_DEF_STMT (res)))
5272 gimple_seq_discard (stmts);
5274 /* During propagation we have to treat SSA info conservatively
5275 and thus we can end up simplifying the inserted expression
5276 at elimination time to sth not defined in stmts. */
5277 /* But then this is a redundancy we failed to detect, which means
5278 res now has two values. That doesn't play well with how
5279 we track availability here, so give up. */
5280 if (dump_file && (dump_flags & TDF_DETAILS))
5282 if (TREE_CODE (res) == SSA_NAME)
5283 res = eliminate_avail (bb, res);
5284 if (res)
5286 fprintf (dump_file, "Failed to insert expression for value ");
5287 print_generic_expr (dump_file, val);
5288 fprintf (dump_file, " which is really fully redundant to ");
5289 print_generic_expr (dump_file, res);
5290 fprintf (dump_file, "\n");
5294 return NULL_TREE;
5296 else
5298 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
5299 VN_INFO (res)->valnum = val;
5300 VN_INFO (res)->visited = true;
5303 insertions++;
5304 if (dump_file && (dump_flags & TDF_DETAILS))
5306 fprintf (dump_file, "Inserted ");
5307 print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
5310 return res;
5313 void
5314 eliminate_dom_walker::eliminate_stmt (basic_block b, gimple_stmt_iterator *gsi)
5316 tree sprime = NULL_TREE;
5317 gimple *stmt = gsi_stmt (*gsi);
5318 tree lhs = gimple_get_lhs (stmt);
5319 if (lhs && TREE_CODE (lhs) == SSA_NAME
5320 && !gimple_has_volatile_ops (stmt)
5321 /* See PR43491. Do not replace a global register variable when
5322 it is the RHS of an assignment. Do replace local register
5323 variables since gcc does not guarantee a local variable will
5324 be allocated in a register.
5325 ??? The fix isn't effective here. This should instead
5326 be ensured by not value-numbering them the same but treating
5327 them like volatiles? */
5328 && !(gimple_assign_single_p (stmt)
5329 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
5330 && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
5331 && is_global_var (gimple_assign_rhs1 (stmt)))))
5333 sprime = eliminate_avail (b, lhs);
5334 if (!sprime)
5336 /* If there is no existing usable leader but SCCVN thinks
5337 it has an expression it wants to use as replacement,
5338 insert that. */
5339 tree val = VN_INFO (lhs)->valnum;
5340 if (val != VN_TOP
5341 && TREE_CODE (val) == SSA_NAME
5342 && VN_INFO (val)->needs_insertion
5343 && VN_INFO (val)->expr != NULL
5344 && (sprime = eliminate_insert (b, gsi, val)) != NULL_TREE)
5345 eliminate_push_avail (b, sprime);
5348 /* If this now constitutes a copy, duplicate points-to
5349 and range info appropriately. This is especially
5350 important for inserted code. See tree-ssa-copy.c
5351 for similar code. */
5352 if (sprime
5353 && TREE_CODE (sprime) == SSA_NAME)
5355 basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
5356 if (POINTER_TYPE_P (TREE_TYPE (lhs))
5357 && SSA_NAME_PTR_INFO (lhs)
5358 && ! SSA_NAME_PTR_INFO (sprime))
5360 duplicate_ssa_name_ptr_info (sprime,
5361 SSA_NAME_PTR_INFO (lhs));
5362 if (b != sprime_b)
5363 mark_ptr_info_alignment_unknown
5364 (SSA_NAME_PTR_INFO (sprime));
5366 else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
5367 && SSA_NAME_RANGE_INFO (lhs)
5368 && ! SSA_NAME_RANGE_INFO (sprime)
5369 && b == sprime_b)
5370 duplicate_ssa_name_range_info (sprime,
5371 SSA_NAME_RANGE_TYPE (lhs),
5372 SSA_NAME_RANGE_INFO (lhs));
5375 /* Inhibit the use of an inserted PHI on a loop header when
5376 the address of the memory reference is a simple induction
5377 variable. In other cases the vectorizer won't do anything
5378 anyway (either it's loop invariant or a complicated
5379 expression). */
5380 if (sprime
5381 && TREE_CODE (sprime) == SSA_NAME
5382 && do_pre
5383 && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
5384 && loop_outer (b->loop_father)
5385 && has_zero_uses (sprime)
5386 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
5387 && gimple_assign_load_p (stmt))
5389 gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
5390 basic_block def_bb = gimple_bb (def_stmt);
5391 if (gimple_code (def_stmt) == GIMPLE_PHI
5392 && def_bb->loop_father->header == def_bb)
5394 loop_p loop = def_bb->loop_father;
5395 ssa_op_iter iter;
5396 tree op;
5397 bool found = false;
5398 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5400 affine_iv iv;
5401 def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
5402 if (def_bb
5403 && flow_bb_inside_loop_p (loop, def_bb)
5404 && simple_iv (loop, loop, op, &iv, true))
5406 found = true;
5407 break;
5410 if (found)
5412 if (dump_file && (dump_flags & TDF_DETAILS))
5414 fprintf (dump_file, "Not replacing ");
5415 print_gimple_expr (dump_file, stmt, 0);
5416 fprintf (dump_file, " with ");
5417 print_generic_expr (dump_file, sprime);
5418 fprintf (dump_file, " which would add a loop"
5419 " carried dependence to loop %d\n",
5420 loop->num);
5422 /* Don't keep sprime available. */
5423 sprime = NULL_TREE;
5428 if (sprime)
5430 /* If we can propagate the value computed for LHS into
5431 all uses don't bother doing anything with this stmt. */
5432 if (may_propagate_copy (lhs, sprime))
5434 /* Mark it for removal. */
5435 to_remove.safe_push (stmt);
5437 /* ??? Don't count copy/constant propagations. */
5438 if (gimple_assign_single_p (stmt)
5439 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5440 || gimple_assign_rhs1 (stmt) == sprime))
5441 return;
5443 if (dump_file && (dump_flags & TDF_DETAILS))
5445 fprintf (dump_file, "Replaced ");
5446 print_gimple_expr (dump_file, stmt, 0);
5447 fprintf (dump_file, " with ");
5448 print_generic_expr (dump_file, sprime);
5449 fprintf (dump_file, " in all uses of ");
5450 print_gimple_stmt (dump_file, stmt, 0);
5453 eliminations++;
5454 return;
5457 /* If this is an assignment from our leader (which
5458 happens in the case the value-number is a constant)
5459 then there is nothing to do. */
5460 if (gimple_assign_single_p (stmt)
5461 && sprime == gimple_assign_rhs1 (stmt))
5462 return;
5464 /* Else replace its RHS. */
5465 if (dump_file && (dump_flags & TDF_DETAILS))
5467 fprintf (dump_file, "Replaced ");
5468 print_gimple_expr (dump_file, stmt, 0);
5469 fprintf (dump_file, " with ");
5470 print_generic_expr (dump_file, sprime);
5471 fprintf (dump_file, " in ");
5472 print_gimple_stmt (dump_file, stmt, 0);
5474 eliminations++;
5476 bool can_make_abnormal_goto = (is_gimple_call (stmt)
5477 && stmt_can_make_abnormal_goto (stmt));
5478 gimple *orig_stmt = stmt;
5479 if (!useless_type_conversion_p (TREE_TYPE (lhs),
5480 TREE_TYPE (sprime)))
5482 /* We preserve conversions to but not from function or method
5483 types. This asymmetry makes it necessary to re-instantiate
5484 conversions here. */
5485 if (POINTER_TYPE_P (TREE_TYPE (lhs))
5486 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (lhs))))
5487 sprime = fold_convert (TREE_TYPE (lhs), sprime);
5488 else
5489 gcc_unreachable ();
5491 tree vdef = gimple_vdef (stmt);
5492 tree vuse = gimple_vuse (stmt);
5493 propagate_tree_value_into_stmt (gsi, sprime);
5494 stmt = gsi_stmt (*gsi);
5495 update_stmt (stmt);
5496 /* In case the VDEF on the original stmt was released, value-number
5497 it to the VUSE. This is to make vuse_ssa_val able to skip
5498 released virtual operands. */
5499 if (vdef != gimple_vdef (stmt))
5501 gcc_assert (SSA_NAME_IN_FREE_LIST (vdef));
5502 VN_INFO (vdef)->valnum = vuse;
5505 /* If we removed EH side-effects from the statement, clean
5506 its EH information. */
5507 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
5509 bitmap_set_bit (need_eh_cleanup,
5510 gimple_bb (stmt)->index);
5511 if (dump_file && (dump_flags & TDF_DETAILS))
5512 fprintf (dump_file, " Removed EH side-effects.\n");
5515 /* Likewise for AB side-effects. */
5516 if (can_make_abnormal_goto
5517 && !stmt_can_make_abnormal_goto (stmt))
5519 bitmap_set_bit (need_ab_cleanup,
5520 gimple_bb (stmt)->index);
5521 if (dump_file && (dump_flags & TDF_DETAILS))
5522 fprintf (dump_file, " Removed AB side-effects.\n");
5525 return;
5529 /* If the statement is a scalar store, see if the expression
5530 has the same value number as its rhs. If so, the store is
5531 dead. */
5532 if (gimple_assign_single_p (stmt)
5533 && !gimple_has_volatile_ops (stmt)
5534 && !is_gimple_reg (gimple_assign_lhs (stmt))
5535 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5536 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
5538 tree val;
5539 tree rhs = gimple_assign_rhs1 (stmt);
5540 vn_reference_t vnresult;
5541 val = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_WALKREWRITE,
5542 &vnresult, false);
5543 if (TREE_CODE (rhs) == SSA_NAME)
5544 rhs = VN_INFO (rhs)->valnum;
5545 if (val
5546 && operand_equal_p (val, rhs, 0))
5548 /* We can only remove the later store if the former aliases
5549 at least all accesses the later one does or if the store
5550 was to readonly memory storing the same value. */
5551 alias_set_type set = get_alias_set (lhs);
5552 if (! vnresult
5553 || vnresult->set == set
5554 || alias_set_subset_of (set, vnresult->set))
5556 if (dump_file && (dump_flags & TDF_DETAILS))
5558 fprintf (dump_file, "Deleted redundant store ");
5559 print_gimple_stmt (dump_file, stmt, 0);
5562 /* Queue stmt for removal. */
5563 to_remove.safe_push (stmt);
5564 return;
5569 /* If this is a control statement on which value numbering left edges
5570 unexecuted, force the condition in a way consistent with
5571 that. */
5572 if (gcond *cond = dyn_cast <gcond *> (stmt))
5574 if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
5575 ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
5577 if (dump_file && (dump_flags & TDF_DETAILS))
5579 fprintf (dump_file, "Removing unexecutable edge from ");
5580 print_gimple_stmt (dump_file, stmt, 0);
5582 if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
5583 == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
5584 gimple_cond_make_true (cond);
5585 else
5586 gimple_cond_make_false (cond);
5587 update_stmt (cond);
5588 el_todo |= TODO_cleanup_cfg;
5589 return;
5593 bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
5594 bool was_noreturn = (is_gimple_call (stmt)
5595 && gimple_call_noreturn_p (stmt));
5596 tree vdef = gimple_vdef (stmt);
5597 tree vuse = gimple_vuse (stmt);
5599 /* If we didn't replace the whole stmt (or propagate the result
5600 into all uses), replace all uses on this stmt with their
5601 leaders. */
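/* E.g. a use of x_3 whose available leader at x_3's definition block is
   x_1 is rewritten to x_1 here (hypothetical SSA names).  */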
5602 bool modified = false;
5603 use_operand_p use_p;
5604 ssa_op_iter iter;
5605 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5607 tree use = USE_FROM_PTR (use_p);
5608 /* ??? The call code above leaves stmt operands un-updated. */
5609 if (TREE_CODE (use) != SSA_NAME)
5610 continue;
5611 tree sprime;
5612 if (SSA_NAME_IS_DEFAULT_DEF (use))
5613 /* ??? For default defs BB shouldn't matter, but we have to
5614 solve the inconsistency between rpo eliminate and
5615 dom eliminate avail valueization first. */
5616 sprime = eliminate_avail (b, use);
5617 else
5618 /* Look for something available at the definition block of the argument.
5619 This avoids inconsistencies between availability there which
5620 decides if the stmt can be removed and availability at the
5621 use site. The SSA property ensures that things available
5622 at the definition are also available at uses. */
5623 sprime = eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (use)), use);
5624 if (sprime && sprime != use
5625 && may_propagate_copy (use, sprime)
5626 /* We substitute into debug stmts to avoid excessive
5627 debug temporaries created by removed stmts, but we need
5628 to avoid doing so for inserted sprimes as we never want
5629 to create debug temporaries for them. */
5630 && (!inserted_exprs
5631 || TREE_CODE (sprime) != SSA_NAME
5632 || !is_gimple_debug (stmt)
5633 || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
5635 propagate_value (use_p, sprime);
5636 modified = true;
5640 /* Fold the stmt if modified; this canonicalizes MEM_REFs we propagated
5641 into, which is a requirement for the IPA devirt machinery. */
5642 gimple *old_stmt = stmt;
5643 if (modified)
5645 /* If a formerly non-invariant ADDR_EXPR is turned into an
5646 invariant one it was on a separate stmt. */
5647 if (gimple_assign_single_p (stmt)
5648 && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
5649 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
5650 gimple_stmt_iterator prev = *gsi;
5651 gsi_prev (&prev);
5652 if (fold_stmt (gsi))
5654 /* fold_stmt may have created new stmts in between
5655 the previous stmt and the folded stmt. Mark
5656 all defs created there as varying to not confuse
5657 the SCCVN machinery as we're using that even during
5658 elimination. */
5659 if (gsi_end_p (prev))
5660 prev = gsi_start_bb (b);
5661 else
5662 gsi_next (&prev);
5663 if (gsi_stmt (prev) != gsi_stmt (*gsi))
5666 tree def;
5667 ssa_op_iter dit;
5668 FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
5669 dit, SSA_OP_ALL_DEFS)
5670 /* As existing DEFs may move between stmts
5671 only process new ones. */
5672 if (! has_VN_INFO (def))
5674 VN_INFO (def)->valnum = def;
5675 VN_INFO (def)->visited = true;
5677 if (gsi_stmt (prev) == gsi_stmt (*gsi))
5678 break;
5679 gsi_next (&prev);
5681 while (1);
5683 stmt = gsi_stmt (*gsi);
5684 /* In case we folded the stmt away schedule the NOP for removal. */
5685 if (gimple_nop_p (stmt))
5686 to_remove.safe_push (stmt);
5689 /* Visit indirect calls and turn them into direct calls if
5690 possible using the devirtualization machinery. Do this before
5691 checking for required EH/abnormal/noreturn cleanup as devirtualization
5692 may expose more of those. */
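/* Illustrative example: an indirect call through an OBJ_TYPE_REF whose
   dynamic type analysis yields a single possible target becomes a direct
   call to that target's decl; with no possible target at all it is turned
   into a call to __builtin_unreachable, as coded below.  */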
5693 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
5695 tree fn = gimple_call_fn (call_stmt);
5696 if (fn
5697 && flag_devirtualize
5698 && virtual_method_call_p (fn))
5700 tree otr_type = obj_type_ref_class (fn);
5701 unsigned HOST_WIDE_INT otr_tok
5702 = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
5703 tree instance;
5704 ipa_polymorphic_call_context context (current_function_decl,
5705 fn, stmt, &instance);
5706 context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
5707 otr_type, stmt, NULL);
5708 bool final;
5709 vec <cgraph_node *> targets
5710 = possible_polymorphic_call_targets (obj_type_ref_class (fn),
5711 otr_tok, context, &final);
5712 if (dump_file)
5713 dump_possible_polymorphic_call_targets (dump_file,
5714 obj_type_ref_class (fn),
5715 otr_tok, context);
5716 if (final && targets.length () <= 1 && dbg_cnt (devirt))
5718 tree fn;
5719 if (targets.length () == 1)
5720 fn = targets[0]->decl;
5721 else
5722 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5723 if (dump_enabled_p ())
5725 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
5726 "converting indirect call to "
5727 "function %s\n",
5728 lang_hooks.decl_printable_name (fn, 2));
5730 gimple_call_set_fndecl (call_stmt, fn);
5731 /* If changing the call to __builtin_unreachable
5732 or similar noreturn function, adjust gimple_call_fntype
5733 too. */
5734 if (gimple_call_noreturn_p (call_stmt)
5735 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
5736 && TYPE_ARG_TYPES (TREE_TYPE (fn))
5737 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
5738 == void_type_node))
5739 gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
5740 maybe_remove_unused_call_args (cfun, call_stmt);
5741 modified = true;
5746 if (modified)
5748 /* When changing a call into a noreturn call, cfg cleanup
5749 is needed to fix up the noreturn call. */
5750 if (!was_noreturn
5751 && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
5752 to_fixup.safe_push (stmt);
5753 /* When changing a condition or switch into one where we know which
5754 edge will be executed, schedule a cfg cleanup. */
5755 if ((gimple_code (stmt) == GIMPLE_COND
5756 && (gimple_cond_true_p (as_a <gcond *> (stmt))
5757 || gimple_cond_false_p (as_a <gcond *> (stmt))))
5758 || (gimple_code (stmt) == GIMPLE_SWITCH
5759 && TREE_CODE (gimple_switch_index
5760 (as_a <gswitch *> (stmt))) == INTEGER_CST))
5761 el_todo |= TODO_cleanup_cfg;
5762 /* If we removed EH side-effects from the statement, clean
5763 its EH information. */
5764 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
5766 bitmap_set_bit (need_eh_cleanup,
5767 gimple_bb (stmt)->index);
5768 if (dump_file && (dump_flags & TDF_DETAILS))
5769 fprintf (dump_file, " Removed EH side-effects.\n");
5771 /* Likewise for AB side-effects. */
5772 if (can_make_abnormal_goto
5773 && !stmt_can_make_abnormal_goto (stmt))
5775 bitmap_set_bit (need_ab_cleanup,
5776 gimple_bb (stmt)->index);
5777 if (dump_file && (dump_flags & TDF_DETAILS))
5778 fprintf (dump_file, " Removed AB side-effects.\n");
5780 update_stmt (stmt);
5781 /* In case the VDEF on the original stmt was released, value-number
5782 it to the VUSE. This is to make vuse_ssa_val able to skip
5783 released virtual operands. */
5784 if (vdef && SSA_NAME_IN_FREE_LIST (vdef))
5785 VN_INFO (vdef)->valnum = vuse;
5788 /* Make new values available - for fully redundant LHS we
5789 continue with the next stmt above and skip this. */
5790 def_operand_p defp;
5791 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
5792 eliminate_push_avail (b, DEF_FROM_PTR (defp));
5795 /* Perform elimination for the basic-block B during the domwalk. */
5797 edge
5798 eliminate_dom_walker::before_dom_children (basic_block b)
5800 /* Mark new bb. */
5801 avail_stack.safe_push (NULL_TREE);
5803 /* Skip unreachable blocks marked unreachable during the SCCVN domwalk. */
5804 if (!(b->flags & BB_EXECUTABLE))
5805 return NULL;
5807 vn_context_bb = b;
5809 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
5811 gphi *phi = gsi.phi ();
5812 tree res = PHI_RESULT (phi);
5814 if (virtual_operand_p (res))
5816 gsi_next (&gsi);
5817 continue;
5820 tree sprime = eliminate_avail (b, res);
5821 if (sprime
5822 && sprime != res)
5824 if (dump_file && (dump_flags & TDF_DETAILS))
5826 fprintf (dump_file, "Replaced redundant PHI node defining ");
5827 print_generic_expr (dump_file, res);
5828 fprintf (dump_file, " with ");
5829 print_generic_expr (dump_file, sprime);
5830 fprintf (dump_file, "\n");
5833 /* If we inserted this PHI node ourselves, it's not an elimination. */
5834 if (! inserted_exprs
5835 || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
5836 eliminations++;
5838 /* If we will propagate into all uses don't bother to do
5839 anything. */
5840 if (may_propagate_copy (res, sprime))
5842 /* Mark the PHI for removal. */
5843 to_remove.safe_push (phi);
5844 gsi_next (&gsi);
5845 continue;
5848 remove_phi_node (&gsi, false);
5850 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
5851 sprime = fold_convert (TREE_TYPE (res), sprime);
5852 gimple *stmt = gimple_build_assign (res, sprime);
5853 gimple_stmt_iterator gsi2 = gsi_after_labels (b);
5854 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
5855 continue;
5858 eliminate_push_avail (b, res);
5859 gsi_next (&gsi);
5862 for (gimple_stmt_iterator gsi = gsi_start_bb (b);
5863 !gsi_end_p (gsi);
5864 gsi_next (&gsi))
5865 eliminate_stmt (b, &gsi);
5867 /* Replace destination PHI arguments. */
5868 edge_iterator ei;
5869 edge e;
5870 FOR_EACH_EDGE (e, ei, b->succs)
5871 if (e->flags & EDGE_EXECUTABLE)
5872 for (gphi_iterator gsi = gsi_start_phis (e->dest);
5873 !gsi_end_p (gsi);
5874 gsi_next (&gsi))
5876 gphi *phi = gsi.phi ();
5877 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
5878 tree arg = USE_FROM_PTR (use_p);
5879 if (TREE_CODE (arg) != SSA_NAME
5880 || virtual_operand_p (arg))
5881 continue;
5882 tree sprime = eliminate_avail (b, arg);
5883 if (sprime && may_propagate_copy (arg, sprime))
5884 propagate_value (use_p, sprime);
5887 vn_context_bb = NULL;
5889 return NULL;
5892 /* Make no longer available leaders no longer available. */
5894 void
5895 eliminate_dom_walker::after_dom_children (basic_block)
5897 tree entry;
5898 while ((entry = avail_stack.pop ()) != NULL_TREE)
5900 tree valnum = VN_INFO (entry)->valnum;
5901 tree old = avail[SSA_NAME_VERSION (valnum)];
5902 if (old == entry)
5903 avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
5904 else
5905 avail[SSA_NAME_VERSION (valnum)] = entry;
5909 /* Remove queued stmts and perform delayed cleanups. */
5911 unsigned
5912 eliminate_dom_walker::eliminate_cleanup (bool region_p)
5914 statistics_counter_event (cfun, "Eliminated", eliminations);
5915 statistics_counter_event (cfun, "Insertions", insertions);
5917 /* We cannot remove stmts during BB walk, especially not release SSA
5918 names there as this confuses the VN machinery. The stmts ending
5919 up in to_remove are either stores or simple copies.
5920 Remove stmts in reverse order to make debug stmt creation possible. */
5921 while (!to_remove.is_empty ())
5923 bool do_release_defs = true;
5924 gimple *stmt = to_remove.pop ();
5926 /* When we are value-numbering a region we do not require exit PHIs to
5927 be present, so we have to make sure to deal with out-of-region uses
5928 of stmts that we thought were eliminated.
5929 ??? Note we may be confused by uses in dead regions we didn't run
5930 elimination on. Rather than checking individual uses we accept
5931 dead copies to be generated here (gcc.c-torture/execute/20060905-1.c
5932 contains such an example). */
5933 if (region_p)
5935 if (gphi *phi = dyn_cast <gphi *> (stmt))
5937 tree lhs = gimple_phi_result (phi);
5938 if (!has_zero_uses (lhs))
5940 if (dump_file && (dump_flags & TDF_DETAILS))
5941 fprintf (dump_file, "Keeping eliminated stmt live "
5942 "as copy because of out-of-region uses\n");
5943 tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
5944 gimple *copy = gimple_build_assign (lhs, sprime);
5945 gimple_stmt_iterator gsi
5946 = gsi_after_labels (gimple_bb (stmt));
5947 gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
5948 do_release_defs = false;
5951 else if (tree lhs = gimple_get_lhs (stmt))
5952 if (TREE_CODE (lhs) == SSA_NAME
5953 && !has_zero_uses (lhs))
5955 if (dump_file && (dump_flags & TDF_DETAILS))
5956 fprintf (dump_file, "Keeping eliminated stmt live "
5957 "as copy because of out-of-region uses\n");
5958 tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
5959 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5960 if (is_gimple_assign (stmt))
5962 gimple_assign_set_rhs_from_tree (&gsi, sprime);
5963 stmt = gsi_stmt (gsi);
5964 update_stmt (stmt);
5965 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
5966 bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
5967 continue;
5969 else
5971 gimple *copy = gimple_build_assign (lhs, sprime);
5972 gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
5973 do_release_defs = false;
5978 if (dump_file && (dump_flags & TDF_DETAILS))
5980 fprintf (dump_file, "Removing dead stmt ");
5981 print_gimple_stmt (dump_file, stmt, 0, TDF_NONE);
5984 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5985 if (gimple_code (stmt) == GIMPLE_PHI)
5986 remove_phi_node (&gsi, do_release_defs);
5987 else
5989 basic_block bb = gimple_bb (stmt);
5990 unlink_stmt_vdef (stmt);
5991 if (gsi_remove (&gsi, true))
5992 bitmap_set_bit (need_eh_cleanup, bb->index);
5993 if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
5994 bitmap_set_bit (need_ab_cleanup, bb->index);
5995 if (do_release_defs)
5996 release_defs (stmt);
5999 /* Removing a stmt may expose a forwarder block. */
6000 el_todo |= TODO_cleanup_cfg;
6003 /* Fixup stmts that became noreturn calls. This may require splitting
6004 blocks and thus isn't possible during the dominator walk. Do this
6005 in reverse order so we don't inadvertently remove a stmt we want to
6006 fix up by visiting a dominating now-noreturn call first. */
6007 while (!to_fixup.is_empty ())
6009 gimple *stmt = to_fixup.pop ();
6011 if (dump_file && (dump_flags & TDF_DETAILS))
6013 fprintf (dump_file, "Fixing up noreturn call ");
6014 print_gimple_stmt (dump_file, stmt, 0);
6017 if (fixup_noreturn_call (stmt))
6018 el_todo |= TODO_cleanup_cfg;
6021 bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
6022 bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);
6024 if (do_eh_cleanup)
6025 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
6027 if (do_ab_cleanup)
6028 gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
6030 if (do_eh_cleanup || do_ab_cleanup)
6031 el_todo |= TODO_cleanup_cfg;
6033 return el_todo;
6036 /* Eliminate fully redundant computations. */
6038 unsigned
6039 eliminate_with_rpo_vn (bitmap inserted_exprs)
6041 eliminate_dom_walker walker (CDI_DOMINATORS, inserted_exprs);
6043 walker.walk (cfun->cfg->x_entry_block_ptr);
6044 return walker.eliminate_cleanup ();
6047 static unsigned
6048 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
6049 bool iterate, bool eliminate);
6051 void
6052 run_rpo_vn (vn_lookup_kind kind)
6054 default_vn_walk_kind = kind;
6055 do_rpo_vn (cfun, NULL, NULL, true, false);
6057 /* ??? Prune requirement of these. */
6058 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
6059 constant_value_ids = BITMAP_ALLOC (NULL);
6061 /* Initialize the value ids and prune out remaining VN_TOPs
6062 from dead code. */
6063 tree name;
6064 unsigned i;
6065 FOR_EACH_SSA_NAME (i, name, cfun)
6067 vn_ssa_aux_t info = VN_INFO (name);
6068 if (!info->visited
6069 || info->valnum == VN_TOP)
6070 info->valnum = name;
6071 if (info->valnum == name)
6072 info->value_id = get_next_value_id ();
6073 else if (is_gimple_min_invariant (info->valnum))
6074 info->value_id = get_or_alloc_constant_value_id (info->valnum);
6077 /* Propagate. */
6078 FOR_EACH_SSA_NAME (i, name, cfun)
6080 vn_ssa_aux_t info = VN_INFO (name);
6081 if (TREE_CODE (info->valnum) == SSA_NAME
6082 && info->valnum != name
6083 && info->value_id != VN_INFO (info->valnum)->value_id)
6084 info->value_id = VN_INFO (info->valnum)->value_id;
6087 set_hashtable_value_ids ();
6089 if (dump_file && (dump_flags & TDF_DETAILS))
6091 fprintf (dump_file, "Value numbers:\n");
6092 FOR_EACH_SSA_NAME (i, name, cfun)
6094 if (VN_INFO (name)->visited
6095 && SSA_VAL (name) != name)
6097 print_generic_expr (dump_file, name);
6098 fprintf (dump_file, " = ");
6099 print_generic_expr (dump_file, SSA_VAL (name));
6100 fprintf (dump_file, " (%04d)\n", VN_INFO (name)->value_id);
6106 /* Free VN associated data structures. */
6108 void
6109 free_rpo_vn (void)
6111 free_vn_table (valid_info);
6112 XDELETE (valid_info);
6113 obstack_free (&vn_tables_obstack, NULL);
6114 obstack_free (&vn_tables_insert_obstack, NULL);
6116 vn_ssa_aux_iterator_type it;
6117 vn_ssa_aux_t info;
6118 FOR_EACH_HASH_TABLE_ELEMENT (*vn_ssa_aux_hash, info, vn_ssa_aux_t, it)
6119 if (info->needs_insertion)
6120 release_ssa_name (info->name);
6121 obstack_free (&vn_ssa_aux_obstack, NULL);
6122 delete vn_ssa_aux_hash;
6124 delete constant_to_value_id;
6125 constant_to_value_id = NULL;
6126 BITMAP_FREE (constant_value_ids);
6129 /* Hook for maybe_push_res_to_seq, look up the expression in the VN tables. */
6131 static tree
6132 vn_lookup_simplify_result (gimple_match_op *res_op)
6134 if (!res_op->code.is_tree_code ())
6135 return NULL_TREE;
6136 tree *ops = res_op->ops;
6137 unsigned int length = res_op->num_ops;
6138 if (res_op->code == CONSTRUCTOR
6139 /* ??? We're arriving here with SCCVN's view, a decomposed CONSTRUCTOR,
6140 while GIMPLE / match-and-simplify uses CONSTRUCTOR as a GENERIC tree. */
6141 && TREE_CODE (res_op->ops[0]) == CONSTRUCTOR)
6143 length = CONSTRUCTOR_NELTS (res_op->ops[0]);
6144 ops = XALLOCAVEC (tree, length);
6145 for (unsigned i = 0; i < length; ++i)
6146 ops[i] = CONSTRUCTOR_ELT (res_op->ops[0], i)->value;
6148 vn_nary_op_t vnresult = NULL;
6149 tree res = vn_nary_op_lookup_pieces (length, (tree_code) res_op->code,
6150 res_op->type, ops, &vnresult);
6151 /* If this is used from expression simplification make sure to
6152 return an available expression. */
6153 if (res && TREE_CODE (res) == SSA_NAME && mprts_hook && rpo_avail)
6154 res = rpo_avail->eliminate_avail (vn_context_bb, res);
6155 return res;
6158 /* Return a leader for OP's value that is valid at BB. */
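/* For example (hypothetical SSA names): if x_9 has value x_5 and one of
   the recorded availability entries for x_5 lives in BB itself or in a
   block dominating BB, that entry's leader is returned; otherwise
   NULL_TREE.  */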
6160 tree
6161 rpo_elim::eliminate_avail (basic_block bb, tree op)
6163 bool visited;
6164 tree valnum = SSA_VAL (op, &visited);
6165 /* If we didn't visit OP then it must be defined outside of the
6166 region we process and also dominate it. So it is available. */
6167 if (!visited)
6168 return op;
6169 if (TREE_CODE (valnum) == SSA_NAME)
6171 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
6172 return valnum;
6173 vn_avail *av = VN_INFO (valnum)->avail;
6174 if (!av)
6175 return NULL_TREE;
6176 if (av->location == bb->index)
6177 /* On tramp3d 90% of the cases are here. */
6178 return ssa_name (av->leader);
6181 basic_block abb = BASIC_BLOCK_FOR_FN (cfun, av->location);
6182 /* ??? During elimination we have to use availability at the
6183 definition site of a use we try to replace. This
6184 is required to not run into inconsistencies because
6185 of dominated_by_p_w_unex behavior and removing a definition
6186 while not replacing all uses.
6187 ??? We could try to consistently walk dominators
6188 ignoring non-executable regions. The nearest common
6189 dominator of bb and abb is where we can stop walking. We
6190 may also be able to "pre-compute" (bits of) the next immediate
6191 (non-)dominator during the RPO walk when marking edges as
6192 executable. */
6193 if (dominated_by_p_w_unex (bb, abb))
6195 tree leader = ssa_name (av->leader);
6196 /* Prevent eliminations that break loop-closed SSA. */
6197 if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
6198 && ! SSA_NAME_IS_DEFAULT_DEF (leader)
6199 && ! flow_bb_inside_loop_p (gimple_bb (SSA_NAME_DEF_STMT
6200 (leader))->loop_father,
6201 bb))
6202 return NULL_TREE;
6203 if (dump_file && (dump_flags & TDF_DETAILS))
6205 print_generic_expr (dump_file, leader);
6206 fprintf (dump_file, " is available for ");
6207 print_generic_expr (dump_file, valnum);
6208 fprintf (dump_file, "\n");
6210 /* On tramp3d 99% of the _remaining_ cases succeed at
6211 the first entry. */
6212 return leader;
6214 /* ??? Can we somehow skip to the immediate dominator
6215 RPO index (bb_to_rpo)? Again, maybe not worth it; on
6216 tramp3d the worst number of elements in the vector is 9. */
6217 av = av->next;
6219 while (av);
6221 else if (valnum != VN_TOP)
6222 /* valnum is is_gimple_min_invariant. */
6223 return valnum;
6224 return NULL_TREE;
6227 /* Make LEADER a leader for its value at BB. */
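/* Availability is recorded as a chain of (block index, leader SSA
   version) entries on the value's vn_ssa_aux, newest entry first, with
   entries recycled via m_avail_freelist.  */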
6229 void
6230 rpo_elim::eliminate_push_avail (basic_block bb, tree leader)
6232 tree valnum = VN_INFO (leader)->valnum;
6233 if (valnum == VN_TOP
6234 || is_gimple_min_invariant (valnum))
6235 return;
6236 if (dump_file && (dump_flags & TDF_DETAILS))
6238 fprintf (dump_file, "Making available beyond BB%d ", bb->index);
6239 print_generic_expr (dump_file, leader);
6240 fprintf (dump_file, " for value ");
6241 print_generic_expr (dump_file, valnum);
6242 fprintf (dump_file, "\n");
6244 vn_ssa_aux_t value = VN_INFO (valnum);
6245 vn_avail *av;
6246 if (m_avail_freelist)
6248 av = m_avail_freelist;
6249 m_avail_freelist = m_avail_freelist->next;
6251 else
6252 av = XOBNEW (&vn_ssa_aux_obstack, vn_avail);
6253 av->location = bb->index;
6254 av->leader = SSA_NAME_VERSION (leader);
6255 av->next = value->avail;
6256 value->avail = av;
6259 /* Valueization hook for RPO VN plus required state. */
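/* It maps NAME to its value number and, for SSA-name values, to an
   available leader at the current vn_context_bb, falling back to NAME
   itself.  */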
6261 tree
6262 rpo_vn_valueize (tree name)
6264 if (TREE_CODE (name) == SSA_NAME)
6266 vn_ssa_aux_t val = VN_INFO (name);
6267 if (val)
6269 tree tem = val->valnum;
6270 if (tem != VN_TOP && tem != name)
6272 if (TREE_CODE (tem) != SSA_NAME)
6273 return tem;
6274 /* For all values we only valueize to an available leader
6275 which means we can use SSA name info without restriction. */
6276 tem = rpo_avail->eliminate_avail (vn_context_bb, tem);
6277 if (tem)
6278 return tem;
6282 return name;
6285 /* Insert on PRED_E predicates derived from CODE applied to OPS being
6286 true, besides the inverted condition itself. */
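/* For example (illustrative), on the true edge of "if (a_1 < b_2)" we
   additionally record a_1 != b_2 and a_1 <= b_2 as true and a_1 > b_2
   and a_1 == b_2 as false, matching the LT_EXPR case below.  */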
6288 static void
6289 insert_related_predicates_on_edge (enum tree_code code, tree *ops, edge pred_e)
6291 switch (code)
6293 case LT_EXPR:
6294 /* a < b -> a {!,<}= b */
6295 vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
6296 ops, boolean_true_node, 0, pred_e);
6297 vn_nary_op_insert_pieces_predicated (2, LE_EXPR, boolean_type_node,
6298 ops, boolean_true_node, 0, pred_e);
6299 /* a < b -> ! a {>,=} b */
6300 vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
6301 ops, boolean_false_node, 0, pred_e);
6302 vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
6303 ops, boolean_false_node, 0, pred_e);
6304 break;
6305 case GT_EXPR:
6306 /* a > b -> a {!,>}= b */
6307 vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
6308 ops, boolean_true_node, 0, pred_e);
6309 vn_nary_op_insert_pieces_predicated (2, GE_EXPR, boolean_type_node,
6310 ops, boolean_true_node, 0, pred_e);
6311 /* a > b -> ! a {<,=} b */
6312 vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
6313 ops, boolean_false_node, 0, pred_e);
6314 vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
6315 ops, boolean_false_node, 0, pred_e);
6316 break;
6317 case EQ_EXPR:
6318 /* a == b -> ! a {<,>} b */
6319 vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
6320 ops, boolean_false_node, 0, pred_e);
6321 vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
6322 ops, boolean_false_node, 0, pred_e);
6323 break;
6324 case LE_EXPR:
6325 case GE_EXPR:
6326 case NE_EXPR:
6327 /* Nothing besides inverted condition. */
6328 break;
6329 default:;
6333 /* Main stmt worker for RPO VN, process BB. */
6335 static unsigned
6336 process_bb (rpo_elim &avail, basic_block bb,
6337 bool bb_visited, bool iterate_phis, bool iterate, bool eliminate,
6338 bool do_region, bitmap exit_bbs, bool skip_phis)
6340 unsigned todo = 0;
6341 edge_iterator ei;
6342 edge e;
6344 vn_context_bb = bb;
6346 /* If we are in loop-closed SSA preserve this state. This is
6347 relevant when called on regions from outside of FRE/PRE. */
6348 bool lc_phi_nodes = false;
6349 if (!skip_phis
6350 && loops_state_satisfies_p (LOOP_CLOSED_SSA))
6351 FOR_EACH_EDGE (e, ei, bb->preds)
6352 if (e->src->loop_father != e->dest->loop_father
6353 && flow_loop_nested_p (e->dest->loop_father,
6354 e->src->loop_father))
6356 lc_phi_nodes = true;
6357 break;
6360 /* When we visit a loop header substitute into loop info. */
6361 if (!iterate && eliminate && bb->loop_father->header == bb)
6363 /* Keep fields in sync with substitute_in_loop_info. */
6364 if (bb->loop_father->nb_iterations)
6365 bb->loop_father->nb_iterations
6366 = simplify_replace_tree (bb->loop_father->nb_iterations,
6367 NULL_TREE, NULL_TREE, vn_valueize);
6370 /* Value-number all defs in the basic-block. */
6371 if (!skip_phis)
6372 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
6373 gsi_next (&gsi))
6375 gphi *phi = gsi.phi ();
6376 tree res = PHI_RESULT (phi);
6377 vn_ssa_aux_t res_info = VN_INFO (res);
6378 if (!bb_visited)
6380 gcc_assert (!res_info->visited);
6381 res_info->valnum = VN_TOP;
6382 res_info->visited = true;
6385 /* When not iterating force backedge values to varying. */
6386 visit_stmt (phi, !iterate_phis);
6387 if (virtual_operand_p (res))
6388 continue;
6390 /* Eliminate */
6391 /* The interesting case for the correctness of how we handle backedges
6392 and availability is gcc.dg/tree-ssa/pr22230.c,
6393 and gcc.dg/tree-ssa/ssa-sccvn-2.c for optimization. */
6394 tree val = res_info->valnum;
6395 if (res != val && !iterate && eliminate)
6397 if (tree leader = avail.eliminate_avail (bb, res))
6399 if (leader != res
6400 /* Preserve loop-closed SSA form. */
6401 && (! lc_phi_nodes
6402 || is_gimple_min_invariant (leader)))
6404 if (dump_file && (dump_flags & TDF_DETAILS))
6406 fprintf (dump_file, "Replaced redundant PHI node "
6407 "defining ");
6408 print_generic_expr (dump_file, res);
6409 fprintf (dump_file, " with ");
6410 print_generic_expr (dump_file, leader);
6411 fprintf (dump_file, "\n");
6413 avail.eliminations++;
6415 if (may_propagate_copy (res, leader))
6417 /* Schedule for removal. */
6418 avail.to_remove.safe_push (phi);
6419 continue;
6421 /* ??? Else generate a copy stmt. */
6425 /* Only make defs available that are not already. But make
6426 sure loop-closed SSA PHI node defs are picked up for
6427 downstream uses. */
6428 if (lc_phi_nodes
6429 || res == val
6430 || ! avail.eliminate_avail (bb, res))
6431 avail.eliminate_push_avail (bb, res);
6434 /* For empty BBs mark outgoing edges executable. For non-empty BBs
6435 we do this when processing the last stmt, since it has to happen
6436 before elimination, which otherwise forces GIMPLE_CONDs into
6437 "if (1 != 0)" style when it sees non-executable edges. */
6438 if (gsi_end_p (gsi_start_bb (bb)))
6440 FOR_EACH_EDGE (e, ei, bb->succs)
6442 if (!(e->flags & EDGE_EXECUTABLE))
6444 if (dump_file && (dump_flags & TDF_DETAILS))
6445 fprintf (dump_file,
6446 "marking outgoing edge %d -> %d executable\n",
6447 e->src->index, e->dest->index);
6448 e->flags |= EDGE_EXECUTABLE;
6449 e->dest->flags |= BB_EXECUTABLE;
6451 else if (!(e->dest->flags & BB_EXECUTABLE))
6453 if (dump_file && (dump_flags & TDF_DETAILS))
6454 fprintf (dump_file,
6455 "marking destination block %d reachable\n",
6456 e->dest->index);
6457 e->dest->flags |= BB_EXECUTABLE;
6461 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6462 !gsi_end_p (gsi); gsi_next (&gsi))
6464 ssa_op_iter i;
6465 tree op;
6466 if (!bb_visited)
6468 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
6470 vn_ssa_aux_t op_info = VN_INFO (op);
6471 gcc_assert (!op_info->visited);
6472 op_info->valnum = VN_TOP;
6473 op_info->visited = true;
6476 /* We somehow have to deal with uses that are not defined
6477 in the processed region. Forcing unvisited uses to
6478 varying here doesn't play well with def-use following during
6479 expression simplification, so we deal with this by checking
6480 the visited flag in SSA_VAL. */
6483 visit_stmt (gsi_stmt (gsi));
6485 gimple *last = gsi_stmt (gsi);
6486 e = NULL;
6487 switch (gimple_code (last))
6489 case GIMPLE_SWITCH:
6490 e = find_taken_edge (bb, vn_valueize (gimple_switch_index
6491 (as_a <gswitch *> (last))));
6492 break;
6493 case GIMPLE_COND:
6495 tree lhs = vn_valueize (gimple_cond_lhs (last));
6496 tree rhs = vn_valueize (gimple_cond_rhs (last));
6497 tree val = gimple_simplify (gimple_cond_code (last),
6498 boolean_type_node, lhs, rhs,
6499 NULL, vn_valueize);
6500 /* If the condition didn't simplify, see if we have recorded
6501 an expression from edges taken so far. */
6502 if (! val || TREE_CODE (val) != INTEGER_CST)
6504 vn_nary_op_t vnresult;
6505 tree ops[2];
6506 ops[0] = lhs;
6507 ops[1] = rhs;
6508 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (last),
6509 boolean_type_node, ops,
6510 &vnresult);
6511 /* Did we get a predicated value? */
6512 if (! val && vnresult && vnresult->predicated_values)
6514 val = vn_nary_op_get_predicated_value (vnresult, bb);
6515 if (val && dump_file && (dump_flags & TDF_DETAILS))
6517 fprintf (dump_file, "Got predicated value ");
6518 print_generic_expr (dump_file, val, TDF_NONE);
6519 fprintf (dump_file, " for ");
6520 print_gimple_stmt (dump_file, last, TDF_SLIM);
6524 if (val)
6525 e = find_taken_edge (bb, val);
6526 if (! e)
6528 /* If we didn't manage to compute the taken edge then
6529 push predicated expressions for the condition itself
6530 and related conditions to the hashtables. This allows
6531 simplification of redundant conditions which is
6532 important as early cleanup. */
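/* E.g. after recording "a_1 < b_2" as true on the true edge here, a
   dominated "if (a_1 >= b_2)" on that path can later be simplified to
   false using the recorded predicated values (illustrative example).  */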
6533 edge true_e, false_e;
6534 extract_true_false_edges_from_block (bb, &true_e, &false_e);
6535 enum tree_code code = gimple_cond_code (last);
6536 enum tree_code icode
6537 = invert_tree_comparison (code, HONOR_NANS (lhs));
6538 tree ops[2];
6539 ops[0] = lhs;
6540 ops[1] = rhs;
6541 if (do_region
6542 && bitmap_bit_p (exit_bbs, true_e->dest->index))
6543 true_e = NULL;
6544 if (do_region
6545 && bitmap_bit_p (exit_bbs, false_e->dest->index))
6546 false_e = NULL;
6547 if (true_e)
6548 vn_nary_op_insert_pieces_predicated
6549 (2, code, boolean_type_node, ops,
6550 boolean_true_node, 0, true_e);
6551 if (false_e)
6552 vn_nary_op_insert_pieces_predicated
6553 (2, code, boolean_type_node, ops,
6554 boolean_false_node, 0, false_e);
6555 if (icode != ERROR_MARK)
6557 if (true_e)
6558 vn_nary_op_insert_pieces_predicated
6559 (2, icode, boolean_type_node, ops,
6560 boolean_false_node, 0, true_e);
6561 if (false_e)
6562 vn_nary_op_insert_pieces_predicated
6563 (2, icode, boolean_type_node, ops,
6564 boolean_true_node, 0, false_e);
6566 /* Relax for non-integers, inverted condition handled
6567 above. */
6568 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
6570 if (true_e)
6571 insert_related_predicates_on_edge (code, ops, true_e);
6572 if (false_e)
6573 insert_related_predicates_on_edge (icode, ops, false_e);
6576 break;
6578 case GIMPLE_GOTO:
6579 e = find_taken_edge (bb, vn_valueize (gimple_goto_dest (last)));
6580 break;
6581 default:
6582 e = NULL;
6584 if (e)
6586 todo = TODO_cleanup_cfg;
6587 if (!(e->flags & EDGE_EXECUTABLE))
6589 if (dump_file && (dump_flags & TDF_DETAILS))
6590 fprintf (dump_file,
6591 "marking known outgoing %sedge %d -> %d executable\n",
6592 e->flags & EDGE_DFS_BACK ? "back-" : "",
6593 e->src->index, e->dest->index);
6594 e->flags |= EDGE_EXECUTABLE;
6595 e->dest->flags |= BB_EXECUTABLE;
6597 else if (!(e->dest->flags & BB_EXECUTABLE))
6599 if (dump_file && (dump_flags & TDF_DETAILS))
6600 fprintf (dump_file,
6601 "marking destination block %d reachable\n",
6602 e->dest->index);
6603 e->dest->flags |= BB_EXECUTABLE;
6606 else if (gsi_one_before_end_p (gsi))
6608 FOR_EACH_EDGE (e, ei, bb->succs)
6610 if (!(e->flags & EDGE_EXECUTABLE))
6612 if (dump_file && (dump_flags & TDF_DETAILS))
6613 fprintf (dump_file,
6614 "marking outgoing edge %d -> %d executable\n",
6615 e->src->index, e->dest->index);
6616 e->flags |= EDGE_EXECUTABLE;
6617 e->dest->flags |= BB_EXECUTABLE;
6619 else if (!(e->dest->flags & BB_EXECUTABLE))
6621 if (dump_file && (dump_flags & TDF_DETAILS))
6622 fprintf (dump_file,
6623 "marking destination block %d reachable\n",
6624 e->dest->index);
6625 e->dest->flags |= BB_EXECUTABLE;
6630 /* Eliminate. That also pushes to avail. */
6631 if (eliminate && ! iterate)
6632 avail.eliminate_stmt (bb, &gsi);
6633 else
6634 /* If not eliminating, make all not already available defs
6635 available. */
6636 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_DEF)
6637 if (! avail.eliminate_avail (bb, op))
6638 avail.eliminate_push_avail (bb, op);
6641 /* Eliminate in destination PHI arguments. Always substitute in dest
6642 PHIs, even for non-executable edges. This handles region
6643 exit PHIs. */
6644 if (!iterate && eliminate)
6645 FOR_EACH_EDGE (e, ei, bb->succs)
6646 for (gphi_iterator gsi = gsi_start_phis (e->dest);
6647 !gsi_end_p (gsi); gsi_next (&gsi))
6649 gphi *phi = gsi.phi ();
6650 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
6651 tree arg = USE_FROM_PTR (use_p);
6652 if (TREE_CODE (arg) != SSA_NAME
6653 || virtual_operand_p (arg))
6654 continue;
6655 tree sprime;
6656 if (SSA_NAME_IS_DEFAULT_DEF (arg))
6658 sprime = SSA_VAL (arg);
6659 gcc_assert (TREE_CODE (sprime) != SSA_NAME
6660 || SSA_NAME_IS_DEFAULT_DEF (sprime));
6662 else
6663 /* Look for something available at the definition block of the argument.
6664 This avoids inconsistencies between availability there which
6665 decides if the stmt can be removed and availability at the
6666 use site. The SSA property ensures that things available
6667 at the definition are also available at uses. */
6668 sprime = avail.eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (arg)),
6669 arg);
6670 if (sprime
6671 && sprime != arg
6672 && may_propagate_copy (arg, sprime))
6673 propagate_value (use_p, sprime);
6676 vn_context_bb = NULL;
6677 return todo;
6680 /* Unwind state per basic-block. */
6682 struct unwind_state
6684 /* Times this block has been visited. */
6685 unsigned visited;
6686 /* Whether to handle this block as an iteration point or whether to
6687 treat incoming backedge PHI values as varying. */
6688 bool iterate;
6689 /* Maximum RPO index this block is reachable from. */
6690 int max_rpo;
6691 /* Unwind state. */
6692 void *ob_top;
6693 vn_reference_t ref_top;
6694 vn_phi_t phi_top;
6695 vn_nary_op_t nary_top;
6698 /* Unwind the RPO VN state for iteration. */
6700 static void
6701 do_unwind (unwind_state *to, int rpo_idx, rpo_elim &avail, int *bb_to_rpo)
6703 gcc_assert (to->iterate);
6704 for (; last_inserted_nary != to->nary_top;
6705 last_inserted_nary = last_inserted_nary->next)
6707 vn_nary_op_t *slot;
6708 slot = valid_info->nary->find_slot_with_hash
6709 (last_inserted_nary, last_inserted_nary->hashcode, NO_INSERT);
6710 /* Predication causes the need to restore previous state. */
6711 if ((*slot)->unwind_to)
6712 *slot = (*slot)->unwind_to;
6713 else
6714 valid_info->nary->clear_slot (slot);
6716 for (; last_inserted_phi != to->phi_top;
6717 last_inserted_phi = last_inserted_phi->next)
6719 vn_phi_t *slot;
6720 slot = valid_info->phis->find_slot_with_hash
6721 (last_inserted_phi, last_inserted_phi->hashcode, NO_INSERT);
6722 valid_info->phis->clear_slot (slot);
6724 for (; last_inserted_ref != to->ref_top;
6725 last_inserted_ref = last_inserted_ref->next)
6727 vn_reference_t *slot;
6728 slot = valid_info->references->find_slot_with_hash
6729 (last_inserted_ref, last_inserted_ref->hashcode, NO_INSERT);
6730 (*slot)->operands.release ();
6731 valid_info->references->clear_slot (slot);
6733 obstack_free (&vn_tables_obstack, to->ob_top);
6735 /* Prune [rpo_idx, ] from avail. */
6736 /* ??? This is O(number-of-values-in-region) which is
6737 O(region-size) rather than O(iteration-piece). */
6738 for (hash_table<vn_ssa_aux_hasher>::iterator i = vn_ssa_aux_hash->begin ();
6739 i != vn_ssa_aux_hash->end (); ++i)
6741 while ((*i)->avail)
6743 if (bb_to_rpo[(*i)->avail->location] < rpo_idx)
6744 break;
6745 vn_avail *av = (*i)->avail;
6746 (*i)->avail = (*i)->avail->next;
6747 av->next = avail.m_avail_freelist;
6748 avail.m_avail_freelist = av;
6753 /* Do VN on a SEME region specified by ENTRY and EXIT_BBS in FN.
6754 If ITERATE is true then treat backedges optimistically as not
6755 executed and iterate. If ELIMINATE is true then perform
6756 elimination, otherwise leave that to the caller. */
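/* Within this file, run_rpo_vn calls this with iterate=true and
   eliminate=false, the region entry point below with iterate=false and
   eliminate=true, and pass_fre with iterate depending on the optimization
   level and eliminate=true.  */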
6758 static unsigned
6759 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
6760 bool iterate, bool eliminate)
6762 unsigned todo = 0;
6764 /* We currently do not support region-based iteration when
6765 elimination is requested. */
6766 gcc_assert (!entry || !iterate || !eliminate);
6767 /* When iterating we need loop info up-to-date. */
6768 gcc_assert (!iterate || !loops_state_satisfies_p (LOOPS_NEED_FIXUP));
6770 bool do_region = entry != NULL;
6771 if (!do_region)
6773 entry = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fn));
6774 exit_bbs = BITMAP_ALLOC (NULL);
6775 bitmap_set_bit (exit_bbs, EXIT_BLOCK);
6778 /* Clear EDGE_DFS_BACK on "all" entry edges, RPO order compute will
6779 re-mark those that are contained in the region. */
6780 edge_iterator ei;
6781 edge e;
6782 FOR_EACH_EDGE (e, ei, entry->dest->preds)
6783 e->flags &= ~EDGE_DFS_BACK;
6785 int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS);
6786 int n = rev_post_order_and_mark_dfs_back_seme
6787 (fn, entry, exit_bbs, !loops_state_satisfies_p (LOOPS_NEED_FIXUP), rpo);
6788 /* rev_post_order_and_mark_dfs_back_seme fills RPO in reverse order. */
6789 for (int i = 0; i < n / 2; ++i)
6790 std::swap (rpo[i], rpo[n-i-1]);
6792 if (!do_region)
6793 BITMAP_FREE (exit_bbs);
6795 /* If there are any non-DFS_BACK edges into entry->dest skip
6796 processing PHI nodes for that block. This supports
6797 value-numbering loop bodies w/o the actual loop. */
6798 FOR_EACH_EDGE (e, ei, entry->dest->preds)
6799 if (e != entry
6800 && !(e->flags & EDGE_DFS_BACK))
6801 break;
6802 bool skip_entry_phis = e != NULL;
6803 if (skip_entry_phis && dump_file && (dump_flags & TDF_DETAILS))
6804 fprintf (dump_file, "Region does not contain all edges into "
6805 "the entry block, skipping its PHIs.\n");
6807 int *bb_to_rpo = XNEWVEC (int, last_basic_block_for_fn (fn));
6808 for (int i = 0; i < n; ++i)
6809 bb_to_rpo[rpo[i]] = i;
6811 unwind_state *rpo_state = XNEWVEC (unwind_state, n);
6813 rpo_elim avail (entry->dest);
6814 rpo_avail = &avail;
6816 /* Verify we have no extra entries into the region. */
6817 if (flag_checking && do_region)
6819 auto_bb_flag bb_in_region (fn);
6820 for (int i = 0; i < n; ++i)
6822 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6823 bb->flags |= bb_in_region;
6825 /* We can't merge the first two loops because we cannot rely
6826 on EDGE_DFS_BACK for edges not within the region. But if
6827 we decide to always have the bb_in_region flag we can
6828 do the checking during the RPO walk itself (but then it's
6829 also easy to handle MEME conservatively). */
6830 for (int i = 0; i < n; ++i)
6832 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6833 edge e;
6834 edge_iterator ei;
6835 FOR_EACH_EDGE (e, ei, bb->preds)
6836 gcc_assert (e == entry
6837 || (skip_entry_phis && bb == entry->dest)
6838 || (e->src->flags & bb_in_region));
6840 for (int i = 0; i < n; ++i)
6842 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6843 bb->flags &= ~bb_in_region;
6847 /* Create the VN state. For the initial size of the various hashtables
6848 use a heuristic based on region size and number of SSA names. */
6849 unsigned region_size = (((unsigned HOST_WIDE_INT)n * num_ssa_names)
6850 / (n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS));
6851 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
6852 next_value_id = 1;
6854 vn_ssa_aux_hash = new hash_table <vn_ssa_aux_hasher> (region_size * 2);
6855 gcc_obstack_init (&vn_ssa_aux_obstack);
6857 gcc_obstack_init (&vn_tables_obstack);
6858 gcc_obstack_init (&vn_tables_insert_obstack);
6859 valid_info = XCNEW (struct vn_tables_s);
6860 allocate_vn_table (valid_info, region_size);
6861 last_inserted_ref = NULL;
6862 last_inserted_phi = NULL;
6863 last_inserted_nary = NULL;
6865 vn_valueize = rpo_vn_valueize;
6867 /* Initialize the unwind state and edge/BB executable state. */
6868 bool need_max_rpo_iterate = false;
6869 for (int i = 0; i < n; ++i)
6871 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6872 rpo_state[i].visited = 0;
6873 rpo_state[i].max_rpo = i;
6874 bb->flags &= ~BB_EXECUTABLE;
6875 bool has_backedges = false;
6876 edge e;
6877 edge_iterator ei;
6878 FOR_EACH_EDGE (e, ei, bb->preds)
6880 if (e->flags & EDGE_DFS_BACK)
6881 has_backedges = true;
6882 e->flags &= ~EDGE_EXECUTABLE;
6883 if (iterate || e == entry || (skip_entry_phis && bb == entry->dest))
6884 continue;
6885 if (bb_to_rpo[e->src->index] > i)
6887 rpo_state[i].max_rpo = MAX (rpo_state[i].max_rpo,
6888 bb_to_rpo[e->src->index]);
6889 need_max_rpo_iterate = true;
6891 else
6892 rpo_state[i].max_rpo
6893 = MAX (rpo_state[i].max_rpo,
6894 rpo_state[bb_to_rpo[e->src->index]].max_rpo);
6896 rpo_state[i].iterate = iterate && has_backedges;
6898 entry->flags |= EDGE_EXECUTABLE;
6899 entry->dest->flags |= BB_EXECUTABLE;
6901 /* When there are irreducible regions the simplistic max_rpo computation
6902 above for the case of backedges doesn't work and we need to iterate
6903 until there are no more changes. */
6904 unsigned nit = 0;
6905 while (need_max_rpo_iterate)
6907 nit++;
6908 need_max_rpo_iterate = false;
6909 for (int i = 0; i < n; ++i)
6911 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6912 edge e;
6913 edge_iterator ei;
6914 FOR_EACH_EDGE (e, ei, bb->preds)
6916 if (e == entry || (skip_entry_phis && bb == entry->dest))
6917 continue;
6918 int max_rpo = MAX (rpo_state[i].max_rpo,
6919 rpo_state[bb_to_rpo[e->src->index]].max_rpo);
6920 if (rpo_state[i].max_rpo != max_rpo)
6922 rpo_state[i].max_rpo = max_rpo;
6923 need_max_rpo_iterate = true;
6928 statistics_histogram_event (cfun, "RPO max_rpo iterations", nit);
6930 /* As a heuristic to improve compile-time we handle only the N innermost
6931 loops and the outermost one optimistically. */
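/* Concretely, for loops nested deeper than PARAM_RPO_VN_MAX_LOOP_DEPTH
   the headers of the skipped outer loops get their latch backedges
   marked executable up front and are not used as iteration points
   (unless a non-latch backedge forces iteration), as coded below.  */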
6932 if (iterate)
6934 loop_p loop;
6935 unsigned max_depth = PARAM_VALUE (PARAM_RPO_VN_MAX_LOOP_DEPTH);
6936 FOR_EACH_LOOP (loop, LI_ONLY_INNERMOST)
6937 if (loop_depth (loop) > max_depth)
6938 for (unsigned i = 2;
6939 i < loop_depth (loop) - max_depth; ++i)
6941 basic_block header = superloop_at_depth (loop, i)->header;
6942 bool non_latch_backedge = false;
6943 edge e;
6944 edge_iterator ei;
6945 FOR_EACH_EDGE (e, ei, header->preds)
6946 if (e->flags & EDGE_DFS_BACK)
6948 /* There can be a non-latch backedge into the header
6949 which is part of an outer irreducible region. We
6950 cannot avoid iterating this block then. */
6951 if (!dominated_by_p (CDI_DOMINATORS,
6952 e->src, e->dest))
6954 if (dump_file && (dump_flags & TDF_DETAILS))
6955 fprintf (dump_file, "non-latch backedge %d -> %d "
6956 "forces iteration of loop %d\n",
6957 e->src->index, e->dest->index, loop->num);
6958 non_latch_backedge = true;
6960 else
6961 e->flags |= EDGE_EXECUTABLE;
6963 rpo_state[bb_to_rpo[header->index]].iterate = non_latch_backedge;
6967 uint64_t nblk = 0;
6968 int idx = 0;
6969 if (iterate)
6970 /* Go and process all blocks, iterating as necessary. */
6973 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
6975 /* If the block has incoming backedges remember unwind state. This
6976 is required even for non-executable blocks since in irreducible
6977 regions we might reach them via the backedge and re-start iterating
6978 from there.
6979 Note we can individually mark blocks with incoming backedges to
6980 not iterate where we then handle PHIs conservatively. We do that
6981 heuristically to reduce compile-time for degenerate cases. */
6982 if (rpo_state[idx].iterate)
6984 rpo_state[idx].ob_top = obstack_alloc (&vn_tables_obstack, 0);
6985 rpo_state[idx].ref_top = last_inserted_ref;
6986 rpo_state[idx].phi_top = last_inserted_phi;
6987 rpo_state[idx].nary_top = last_inserted_nary;
6990 if (!(bb->flags & BB_EXECUTABLE))
6992 if (dump_file && (dump_flags & TDF_DETAILS))
6993 fprintf (dump_file, "Block %d: BB%d found not executable\n",
6994 idx, bb->index);
6995 idx++;
6996 continue;
6999 if (dump_file && (dump_flags & TDF_DETAILS))
7000 fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
7001 nblk++;
7002 todo |= process_bb (avail, bb,
7003 rpo_state[idx].visited != 0,
7004 rpo_state[idx].iterate,
7005 iterate, eliminate, do_region, exit_bbs, false);
7006 rpo_state[idx].visited++;
7008 /* Verify if changed values flow over executable outgoing backedges
7009 and those change destination PHI values (that's the thing we
7010 can easily verify). Reduce over all such edges to the farthest
7011 away PHI. */
7012 int iterate_to = -1;
7013 edge_iterator ei;
7014 edge e;
7015 FOR_EACH_EDGE (e, ei, bb->succs)
7016 if ((e->flags & (EDGE_DFS_BACK|EDGE_EXECUTABLE))
7017 == (EDGE_DFS_BACK|EDGE_EXECUTABLE)
7018 && rpo_state[bb_to_rpo[e->dest->index]].iterate)
7020 int destidx = bb_to_rpo[e->dest->index];
7021 if (!rpo_state[destidx].visited)
7023 if (dump_file && (dump_flags & TDF_DETAILS))
7024 fprintf (dump_file, "Unvisited destination %d\n",
7025 e->dest->index);
7026 if (iterate_to == -1 || destidx < iterate_to)
7027 iterate_to = destidx;
7028 continue;
7030 if (dump_file && (dump_flags & TDF_DETAILS))
7031 fprintf (dump_file, "Looking for changed values of backedge"
7032 " %d->%d destination PHIs\n",
7033 e->src->index, e->dest->index);
7034 vn_context_bb = e->dest;
7035 gphi_iterator gsi;
7036 for (gsi = gsi_start_phis (e->dest);
7037 !gsi_end_p (gsi); gsi_next (&gsi))
7039 bool inserted = false;
7040 /* While we'd ideally just iterate on value changes,
7041 we CSE PHIs and do that even across basic-block
7042 boundaries. So even hashtable state changes can
7043 be important (they are roughly equivalent to
7044 PHI argument value changes). To not iterate
7045 excessively because of that we track whether a
7046 PHI was CSEd to with GF_PLF_1. */
7047 bool phival_changed;
7048 if ((phival_changed = visit_phi (gsi.phi (),
7049 &inserted, false))
7050 || (inserted && gimple_plf (gsi.phi (), GF_PLF_1)))
7052 if (!phival_changed
7053 && dump_file && (dump_flags & TDF_DETAILS))
7054 fprintf (dump_file, "PHI was CSEd and hashtable "
7055 "state (changed)\n");
7056 if (iterate_to == -1 || destidx < iterate_to)
7057 iterate_to = destidx;
7058 break;
7061 vn_context_bb = NULL;
7063 if (iterate_to != -1)
7065 do_unwind (&rpo_state[iterate_to], iterate_to, avail, bb_to_rpo);
7066 idx = iterate_to;
7067 if (dump_file && (dump_flags & TDF_DETAILS))
7068 fprintf (dump_file, "Iterating to %d BB%d\n",
7069 iterate_to, rpo[iterate_to]);
7070 continue;
7073 idx++;
7075 while (idx < n);
7077 else /* !iterate */
7079 /* Process all blocks greedily with a worklist that enforces RPO
7080 processing of reachable blocks. */
7081 auto_bitmap worklist;
7082 bitmap_set_bit (worklist, 0);
7083 while (!bitmap_empty_p (worklist))
7085 int idx = bitmap_first_set_bit (worklist);
7086 bitmap_clear_bit (worklist, idx);
7087 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
7088 gcc_assert ((bb->flags & BB_EXECUTABLE)
7089 && !rpo_state[idx].visited);
7091 if (dump_file && (dump_flags & TDF_DETAILS))
7092 fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
7094 /* When we run into predecessor edges whose executable state we
7095 cannot trust, mark them executable so PHI processing will
7096 be conservative.
7097 ??? Do we need to force arguments flowing over that edge
7098 to be varying or will they even always be? */
7099 edge_iterator ei;
7100 edge e;
7101 FOR_EACH_EDGE (e, ei, bb->preds)
7102 if (!(e->flags & EDGE_EXECUTABLE)
7103 && (bb == entry->dest
7104 || (!rpo_state[bb_to_rpo[e->src->index]].visited
7105 && (rpo_state[bb_to_rpo[e->src->index]].max_rpo
7106 >= (int)idx))))
7108 if (dump_file && (dump_flags & TDF_DETAILS))
7109 fprintf (dump_file, "Cannot trust state of predecessor "
7110 "edge %d -> %d, marking executable\n",
7111 e->src->index, e->dest->index);
7112 e->flags |= EDGE_EXECUTABLE;
7115 nblk++;
7116 todo |= process_bb (avail, bb, false, false, false, eliminate,
7117 do_region, exit_bbs,
7118 skip_entry_phis && bb == entry->dest);
7119 rpo_state[idx].visited++;
7121 FOR_EACH_EDGE (e, ei, bb->succs)
7122 if ((e->flags & EDGE_EXECUTABLE)
7123 && e->dest->index != EXIT_BLOCK
7124 && (!do_region || !bitmap_bit_p (exit_bbs, e->dest->index))
7125 && !rpo_state[bb_to_rpo[e->dest->index]].visited)
7126 bitmap_set_bit (worklist, bb_to_rpo[e->dest->index]);
7130 /* Gather statistics, reported if statistics counters or a dump file are active. */
7131 int nex = 0;
7132 unsigned max_visited = 1;
7133 for (int i = 0; i < n; ++i)
7135 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7136 if (bb->flags & BB_EXECUTABLE)
7137 nex++;
7138 statistics_histogram_event (cfun, "RPO block visited times",
7139 rpo_state[i].visited);
7140 if (rpo_state[i].visited > max_visited)
7141 max_visited = rpo_state[i].visited;
7143 unsigned nvalues = 0, navail = 0;
7144 for (hash_table<vn_ssa_aux_hasher>::iterator i = vn_ssa_aux_hash->begin ();
7145 i != vn_ssa_aux_hash->end (); ++i)
7147 nvalues++;
7148 vn_avail *av = (*i)->avail;
7149 while (av)
7151 navail++;
7152 av = av->next;
7155 statistics_counter_event (cfun, "RPO blocks", n);
7156 statistics_counter_event (cfun, "RPO blocks visited", nblk);
7157 statistics_counter_event (cfun, "RPO blocks executable", nex);
7158 statistics_histogram_event (cfun, "RPO iterations", 10*nblk / nex);
7159 statistics_histogram_event (cfun, "RPO num values", nvalues);
7160 statistics_histogram_event (cfun, "RPO num avail", navail);
7161 statistics_histogram_event (cfun, "RPO num lattice",
7162 vn_ssa_aux_hash->elements ());
7163 if (dump_file && (dump_flags & (TDF_DETAILS|TDF_STATS)))
7165 fprintf (dump_file, "RPO iteration over %d blocks visited %" PRIu64
7166 " blocks in total discovering %d executable blocks iterating "
7167 "%d.%d times, a block was visited max. %u times\n",
7168 n, nblk, nex,
7169 (int)((10*nblk / nex)/10), (int)((10*nblk / nex)%10),
7170 max_visited);
7171 fprintf (dump_file, "RPO tracked %d values available at %d locations "
7172 "and %" PRIu64 " lattice elements\n",
7173 nvalues, navail, (uint64_t) vn_ssa_aux_hash->elements ());
7176 if (eliminate)
7178 /* When !iterate we already performed elimination during the RPO
7179 walk. */
7180 if (iterate)
7182 /* Elimination for region-based VN needs to be done within the
7183 RPO walk. */
7184 gcc_assert (! do_region);
7185 /* Note we can't use avail.walk here because that gets confused
7186 by the existing availability and it will be less efficient
7187 as well. */
7188 todo |= eliminate_with_rpo_vn (NULL);
7190 else
7191 todo |= avail.eliminate_cleanup (do_region);
7194 vn_valueize = NULL;
7195 rpo_avail = NULL;
7197 XDELETEVEC (bb_to_rpo);
7198 XDELETEVEC (rpo);
7199 XDELETEVEC (rpo_state);
7201 return todo;
7204 /* Region-based entry for RPO VN. Performs value-numbering and elimination
7205 on the SEME region specified by ENTRY and EXIT_BBS. If ENTRY is not
7206 the only edge into the region at ENTRY->dest PHI nodes in ENTRY->dest
7207 are not considered. */
7209 unsigned
7210 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs)
7212 default_vn_walk_kind = VN_WALKREWRITE;
7213 unsigned todo = do_rpo_vn (fn, entry, exit_bbs, false, true);
7214 free_rpo_vn ();
7215 return todo;
7219 namespace {
7221 const pass_data pass_data_fre =
7223 GIMPLE_PASS, /* type */
7224 "fre", /* name */
7225 OPTGROUP_NONE, /* optinfo_flags */
7226 TV_TREE_FRE, /* tv_id */
7227 ( PROP_cfg | PROP_ssa ), /* properties_required */
7228 0, /* properties_provided */
7229 0, /* properties_destroyed */
7230 0, /* todo_flags_start */
7231 0, /* todo_flags_finish */
7234 class pass_fre : public gimple_opt_pass
7236 public:
7237 pass_fre (gcc::context *ctxt)
7238 : gimple_opt_pass (pass_data_fre, ctxt), may_iterate (true)
7241 /* opt_pass methods: */
7242 opt_pass * clone () { return new pass_fre (m_ctxt); }
7243 void set_pass_param (unsigned int n, bool param)
7245 gcc_assert (n == 0);
7246 may_iterate = param;
7248 virtual bool gate (function *)
7250 return flag_tree_fre != 0 && (may_iterate || optimize > 1);
7252 virtual unsigned int execute (function *);
7254 private:
7255 bool may_iterate;
7256 }; // class pass_fre
7258 unsigned int
7259 pass_fre::execute (function *fun)
7261 unsigned todo = 0;
7263 /* At -O[1g] use the cheap non-iterating mode. */
7264 bool iterate_p = may_iterate && (optimize > 1);
7265 calculate_dominance_info (CDI_DOMINATORS);
7266 if (iterate_p)
7267 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
7269 default_vn_walk_kind = VN_WALKREWRITE;
7270 todo = do_rpo_vn (fun, NULL, NULL, iterate_p, true);
7271 free_rpo_vn ();
7273 if (iterate_p)
7274 loop_optimizer_finalize ();
7276 /* For late FRE after IVOPTs and unrolling, see if we can
7277 remove some TREE_ADDRESSABLE and rewrite stuff into SSA. */
7278 if (!may_iterate)
7279 todo |= TODO_update_address_taken;
7281 return todo;
7284 } // anon namespace
7286 gimple_opt_pass *
7287 make_pass_fre (gcc::context *ctxt)
7289 return new pass_fre (ctxt);
7292 #undef BB_EXECUTABLE