re PR tree-optimization/91126 (Incorrect constant propagation of BIT_FIELD_REF)
[official-gcc.git] / gcc / tree-ssa-sccvn.c
blob 73c77d1df4b24e92f528f088e8fa4a62a6300082
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2019 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "splay-tree.h"
25 #include "backend.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "memmodel.h"
33 #include "emit-rtl.h"
34 #include "cgraph.h"
35 #include "gimple-pretty-print.h"
36 #include "alias.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "cfganal.h"
40 #include "tree-inline.h"
41 #include "internal-fn.h"
42 #include "gimple-fold.h"
43 #include "tree-eh.h"
44 #include "gimplify.h"
45 #include "flags.h"
46 #include "dojump.h"
47 #include "explow.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "stmt.h"
51 #include "expr.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "dumpfile.h"
55 #include "cfgloop.h"
56 #include "params.h"
57 #include "tree-ssa-propagate.h"
58 #include "tree-cfg.h"
59 #include "domwalk.h"
60 #include "gimple-iterator.h"
61 #include "gimple-match.h"
62 #include "stringpool.h"
63 #include "attribs.h"
64 #include "tree-pass.h"
65 #include "statistics.h"
66 #include "langhooks.h"
67 #include "ipa-utils.h"
68 #include "dbgcnt.h"
69 #include "tree-cfgcleanup.h"
70 #include "tree-ssa-loop.h"
71 #include "tree-scalar-evolution.h"
72 #include "tree-ssa-loop-niter.h"
73 #include "builtins.h"
74 #include "tree-ssa-sccvn.h"
76 /* This algorithm is based on the SCC algorithm presented by Keith
77 Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
78 (http://citeseer.ist.psu.edu/41805.html). In
79 straight-line code, it is equivalent to a regular hash-based value
80 numbering that is performed in reverse postorder.
82 For code with cycles, there are two alternatives, both of which
83 require keeping the hashtables separate from the actual list of
84 value numbers for SSA names.
86 1. Iterate value numbering in an RPO walk of the blocks, removing
87 all the entries from the hashtable after each iteration (but
88 keeping the SSA name->value number mapping between iterations).
89 Iterate until it does not change.
91 2. Perform value numbering as part of an SCC walk on the SSA graph,
92 iterating only the cycles in the SSA graph until they do not change
93 (using a separate, optimistic hashtable for value numbering the SCC
94 operands).
96 The second is not just faster in practice (because most SSA graph
97 cycles do not involve all the variables in the graph), it also has
98 some nice properties.
100 One of these nice properties is that when we pop an SCC off the
101 stack, we are guaranteed to have processed all the operands coming from
102 *outside of that SCC*, so we do not need to do anything special to
103 ensure they have value numbers.
105 Another nice property is that the SCC walk is done as part of a DFS
106 of the SSA graph, which makes it easy to perform combining and
107 simplifying operations at the same time.
109 The code below is deliberately written in a way that makes it easy
110 to separate the SCC walk from the other work it does.
112 In order to propagate constants through the code, we track which
113 expressions contain constants, and use those while folding. In
114 theory, we could also track expressions whose value numbers are
115 replaced, in case we end up folding based on expression
116 identities.
118 In order to value number memory, we assign value numbers to vuses.
119 This enables us to note that, for example, stores to the same
120 address of the same value from the same starting memory states are
121 equivalent.
122 TODO:
124 1. We can iterate only the changing portions of the SCCs, but
125 I have not seen an SCC big enough for this to be a win.
126 2. If you differentiate between phi nodes for loops and phi nodes
127 for if-then-else, you can properly consider phi nodes in different
128 blocks for equivalence.
129 3. We could value number vuses in more cases, particularly whole
130 structure copies.
131 */
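/* Illustrative sketch, not part of the original sources: in straight-line
   code the hash-based scheme gives syntactically equal, valueized
   right-hand sides the same value number, e.g.

     a_1 = x_5 + 1;
     b_2 = x_5 + 1;     lookup of (PLUS_EXPR, x_5, 1) hits, so VN (b_2) = a_1
     c_3 = a_1 * 2;
     d_4 = b_2 * 2;     b_2 valueizes to a_1, so VN (d_4) = c_3

   For a cycle such as

     i_1 = PHI <0 (preheader), i_2 (latch)>
     i_2 = i_1 + 1;

   the SCC {i_1, i_2} is iterated (using the optimistic table) until the
   value numbers stop changing.  All SSA names and constants above are
   made up purely for illustration.  */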
133 /* There's no BB_EXECUTABLE but we can use BB_VISITED. */
134 #define BB_EXECUTABLE BB_VISITED
136 static vn_lookup_kind default_vn_walk_kind;
138 /* vn_nary_op hashtable helpers. */
140 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
142 typedef vn_nary_op_s *compare_type;
143 static inline hashval_t hash (const vn_nary_op_s *);
144 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
147 /* Return the computed hashcode for nary operation P1. */
149 inline hashval_t
150 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
152 return vno1->hashcode;
155 /* Compare nary operations P1 and P2 and return true if they are
156 equivalent. */
158 inline bool
159 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
161 return vno1 == vno2 || vn_nary_op_eq (vno1, vno2);
164 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
165 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
168 /* vn_phi hashtable helpers. */
170 static int
171 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
173 struct vn_phi_hasher : nofree_ptr_hash <vn_phi_s>
175 static inline hashval_t hash (const vn_phi_s *);
176 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
179 /* Return the computed hashcode for phi operation P1. */
181 inline hashval_t
182 vn_phi_hasher::hash (const vn_phi_s *vp1)
184 return vp1->hashcode;
187 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
189 inline bool
190 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
192 return vp1 == vp2 || vn_phi_eq (vp1, vp2);
195 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
196 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
199 /* Compare two reference operands P1 and P2 for equality. Return true if
200 they are equal, and false otherwise. */
202 static int
203 vn_reference_op_eq (const void *p1, const void *p2)
205 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
206 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
208 return (vro1->opcode == vro2->opcode
209 /* We do not care for differences in type qualification. */
210 && (vro1->type == vro2->type
211 || (vro1->type && vro2->type
212 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
213 TYPE_MAIN_VARIANT (vro2->type))))
214 && expressions_equal_p (vro1->op0, vro2->op0)
215 && expressions_equal_p (vro1->op1, vro2->op1)
216 && expressions_equal_p (vro1->op2, vro2->op2));
219 /* Free a reference operation structure VP. */
221 static inline void
222 free_reference (vn_reference_s *vr)
224 vr->operands.release ();
228 /* vn_reference hashtable helpers. */
230 struct vn_reference_hasher : nofree_ptr_hash <vn_reference_s>
232 static inline hashval_t hash (const vn_reference_s *);
233 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
236 /* Return the hashcode for a given reference operation P1. */
238 inline hashval_t
239 vn_reference_hasher::hash (const vn_reference_s *vr1)
241 return vr1->hashcode;
244 inline bool
245 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
247 return v == c || vn_reference_eq (v, c);
250 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
251 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
254 /* The set of VN hashtables. */
256 typedef struct vn_tables_s
258 vn_nary_op_table_type *nary;
259 vn_phi_table_type *phis;
260 vn_reference_table_type *references;
261 } *vn_tables_t;
264 /* vn_constant hashtable helpers. */
266 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
268 static inline hashval_t hash (const vn_constant_s *);
269 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
272 /* Hash table hash function for vn_constant_t. */
274 inline hashval_t
275 vn_constant_hasher::hash (const vn_constant_s *vc1)
277 return vc1->hashcode;
280 /* Hash table equality function for vn_constant_t. */
282 inline bool
283 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
285 if (vc1->hashcode != vc2->hashcode)
286 return false;
288 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
291 static hash_table<vn_constant_hasher> *constant_to_value_id;
292 static bitmap constant_value_ids;
295 /* Obstack we allocate the vn-tables elements from. */
296 static obstack vn_tables_obstack;
297 /* Special obstack we never unwind. */
298 static obstack vn_tables_insert_obstack;
300 static vn_reference_t last_inserted_ref;
301 static vn_phi_t last_inserted_phi;
302 static vn_nary_op_t last_inserted_nary;
304 /* Valid hashtables storing information we have proven to be
305 correct. */
306 static vn_tables_t valid_info;
309 /* Valueization hook. Valueize NAME if it is an SSA name, otherwise
310 just return it. */
311 tree (*vn_valueize) (tree);
314 /* This represents the top of the VN lattice, which is the universal
315 value. */
317 tree VN_TOP;
319 /* Unique counter for our value ids. */
321 static unsigned int next_value_id;
324 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
325 are allocated on an obstack for locality reasons, and to free them
326 without looping over the vec. */
328 struct vn_ssa_aux_hasher : typed_noop_remove <vn_ssa_aux_t>
330 typedef vn_ssa_aux_t value_type;
331 typedef tree compare_type;
332 static inline hashval_t hash (const value_type &);
333 static inline bool equal (const value_type &, const compare_type &);
334 static inline void mark_deleted (value_type &) {}
335 static inline void mark_empty (value_type &e) { e = NULL; }
336 static inline bool is_deleted (value_type &) { return false; }
337 static inline bool is_empty (value_type &e) { return e == NULL; }
340 hashval_t
341 vn_ssa_aux_hasher::hash (const value_type &entry)
343 return SSA_NAME_VERSION (entry->name);
346 bool
347 vn_ssa_aux_hasher::equal (const value_type &entry, const compare_type &name)
349 return name == entry->name;
352 static hash_table<vn_ssa_aux_hasher> *vn_ssa_aux_hash;
353 typedef hash_table<vn_ssa_aux_hasher>::iterator vn_ssa_aux_iterator_type;
354 static struct obstack vn_ssa_aux_obstack;
356 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *, tree);
357 static unsigned int vn_nary_length_from_stmt (gimple *);
358 static vn_nary_op_t alloc_vn_nary_op_noinit (unsigned int, obstack *);
359 static vn_nary_op_t vn_nary_op_insert_into (vn_nary_op_t,
360 vn_nary_op_table_type *, bool);
361 static void init_vn_nary_op_from_stmt (vn_nary_op_t, gimple *);
362 static void init_vn_nary_op_from_pieces (vn_nary_op_t, unsigned int,
363 enum tree_code, tree, tree *);
364 static tree vn_lookup_simplify_result (gimple_match_op *);
365 static vn_reference_t vn_reference_lookup_or_insert_for_pieces
366 (tree, alias_set_type, tree, vec<vn_reference_op_s, va_heap>, tree);
368 /* Return whether there is value numbering information for a given SSA name. */
370 bool
371 has_VN_INFO (tree name)
373 return vn_ssa_aux_hash->find_with_hash (name, SSA_NAME_VERSION (name));
376 vn_ssa_aux_t
377 VN_INFO (tree name)
379 vn_ssa_aux_t *res
380 = vn_ssa_aux_hash->find_slot_with_hash (name, SSA_NAME_VERSION (name),
381 INSERT);
382 if (*res != NULL)
383 return *res;
385 vn_ssa_aux_t newinfo = *res = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
386 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
387 newinfo->name = name;
388 newinfo->valnum = VN_TOP;
389 /* We are using the visited flag to handle uses with defs not within the
390 region being value-numbered. */
391 newinfo->visited = false;
393 /* Given we create the VN_INFOs on-demand now, we have to do initialization
394 different from VN_TOP here. */
395 if (SSA_NAME_IS_DEFAULT_DEF (name))
396 switch (TREE_CODE (SSA_NAME_VAR (name)))
398 case VAR_DECL:
399 /* All undefined vars are VARYING. */
400 newinfo->valnum = name;
401 newinfo->visited = true;
402 break;
404 case PARM_DECL:
405 /* Parameters are VARYING but we can record a condition
406 if we know it is a non-NULL pointer. */
407 newinfo->visited = true;
408 newinfo->valnum = name;
409 if (POINTER_TYPE_P (TREE_TYPE (name))
410 && nonnull_arg_p (SSA_NAME_VAR (name)))
412 tree ops[2];
413 ops[0] = name;
414 ops[1] = build_int_cst (TREE_TYPE (name), 0);
415 vn_nary_op_t nary;
416 /* Allocate from non-unwinding stack. */
417 nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
418 init_vn_nary_op_from_pieces (nary, 2, NE_EXPR,
419 boolean_type_node, ops);
420 nary->predicated_values = 0;
421 nary->u.result = boolean_true_node;
422 vn_nary_op_insert_into (nary, valid_info->nary, true);
423 gcc_assert (nary->unwind_to == NULL);
424 /* Also do not link it into the undo chain. */
425 last_inserted_nary = nary->next;
426 nary->next = (vn_nary_op_t)(void *)-1;
427 nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
428 init_vn_nary_op_from_pieces (nary, 2, EQ_EXPR,
429 boolean_type_node, ops);
430 nary->predicated_values = 0;
431 nary->u.result = boolean_false_node;
432 vn_nary_op_insert_into (nary, valid_info->nary, true);
433 gcc_assert (nary->unwind_to == NULL);
434 last_inserted_nary = nary->next;
435 nary->next = (vn_nary_op_t)(void *)-1;
436 if (dump_file && (dump_flags & TDF_DETAILS))
438 fprintf (dump_file, "Recording ");
439 print_generic_expr (dump_file, name, TDF_SLIM);
440 fprintf (dump_file, " != 0\n");
443 break;
445 case RESULT_DECL:
446 /* If the result is passed by invisible reference the default
447 def is initialized, otherwise it's uninitialized. Still,
448 undefined is varying. */
449 newinfo->visited = true;
450 newinfo->valnum = name;
451 break;
453 default:
454 gcc_unreachable ();
456 return newinfo;
459 /* Return the SSA value of X. */
461 inline tree
462 SSA_VAL (tree x, bool *visited = NULL)
464 vn_ssa_aux_t tem = vn_ssa_aux_hash->find_with_hash (x, SSA_NAME_VERSION (x));
465 if (visited)
466 *visited = tem && tem->visited;
467 return tem && tem->visited ? tem->valnum : x;
470 /* Return the SSA value of the VUSE x, supporting released VDEFs
471 during elimination which will value-number the VDEF to the
472 associated VUSE (but not substitute in the whole lattice). */
474 static inline tree
475 vuse_ssa_val (tree x)
477 if (!x)
478 return NULL_TREE;
482 x = SSA_VAL (x);
483 gcc_assert (x != VN_TOP);
485 while (SSA_NAME_IN_FREE_LIST (x));
487 return x;
490 /* Similar to the above but used as callback for walk_non_aliased_vuses
491 and thus should stop at unvisited VUSE to not walk across region
492 boundaries. */
494 static tree
495 vuse_valueize (tree vuse)
499 bool visited;
500 vuse = SSA_VAL (vuse, &visited);
501 if (!visited)
502 return NULL_TREE;
503 gcc_assert (vuse != VN_TOP);
505 while (SSA_NAME_IN_FREE_LIST (vuse));
506 return vuse;
510 /* Return the vn_kind the expression computed by the stmt should be
511 associated with. */
513 enum vn_kind
514 vn_get_stmt_kind (gimple *stmt)
516 switch (gimple_code (stmt))
518 case GIMPLE_CALL:
519 return VN_REFERENCE;
520 case GIMPLE_PHI:
521 return VN_PHI;
522 case GIMPLE_ASSIGN:
524 enum tree_code code = gimple_assign_rhs_code (stmt);
525 tree rhs1 = gimple_assign_rhs1 (stmt);
526 switch (get_gimple_rhs_class (code))
528 case GIMPLE_UNARY_RHS:
529 case GIMPLE_BINARY_RHS:
530 case GIMPLE_TERNARY_RHS:
531 return VN_NARY;
532 case GIMPLE_SINGLE_RHS:
533 switch (TREE_CODE_CLASS (code))
535 case tcc_reference:
536 /* VOP-less references can go through unary case. */
537 if ((code == REALPART_EXPR
538 || code == IMAGPART_EXPR
539 || code == VIEW_CONVERT_EXPR
540 || code == BIT_FIELD_REF)
541 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
542 return VN_NARY;
544 /* Fallthrough. */
545 case tcc_declaration:
546 return VN_REFERENCE;
548 case tcc_constant:
549 return VN_CONSTANT;
551 default:
552 if (code == ADDR_EXPR)
553 return (is_gimple_min_invariant (rhs1)
554 ? VN_CONSTANT : VN_REFERENCE);
555 else if (code == CONSTRUCTOR)
556 return VN_NARY;
557 return VN_NONE;
559 default:
560 return VN_NONE;
563 default:
564 return VN_NONE;
568 /* Lookup a value id for CONSTANT and return it. If it does not
569 exist returns 0. */
571 unsigned int
572 get_constant_value_id (tree constant)
574 vn_constant_s **slot;
575 struct vn_constant_s vc;
577 vc.hashcode = vn_hash_constant_with_type (constant);
578 vc.constant = constant;
579 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
580 if (slot)
581 return (*slot)->value_id;
582 return 0;
585 /* Lookup a value id for CONSTANT, and if it does not exist, create a
586 new one and return it. If it does exist, return it. */
588 unsigned int
589 get_or_alloc_constant_value_id (tree constant)
591 vn_constant_s **slot;
592 struct vn_constant_s vc;
593 vn_constant_t vcp;
595 /* If the hashtable isn't initialized we're not running from PRE and thus
596 do not need value-ids. */
597 if (!constant_to_value_id)
598 return 0;
600 vc.hashcode = vn_hash_constant_with_type (constant);
601 vc.constant = constant;
602 slot = constant_to_value_id->find_slot (&vc, INSERT);
603 if (*slot)
604 return (*slot)->value_id;
606 vcp = XNEW (struct vn_constant_s);
607 vcp->hashcode = vc.hashcode;
608 vcp->constant = constant;
609 vcp->value_id = get_next_value_id ();
610 *slot = vcp;
611 bitmap_set_bit (constant_value_ids, vcp->value_id);
612 return vcp->value_id;
615 /* Return true if V is a value id for a constant. */
617 bool
618 value_id_constant_p (unsigned int v)
620 return bitmap_bit_p (constant_value_ids, v);
623 /* Compute the hash for a reference operand VRO1. */
625 static void
626 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
628 hstate.add_int (vro1->opcode);
629 if (vro1->op0)
630 inchash::add_expr (vro1->op0, hstate);
631 if (vro1->op1)
632 inchash::add_expr (vro1->op1, hstate);
633 if (vro1->op2)
634 inchash::add_expr (vro1->op2, hstate);
637 /* Compute a hash for the reference operation VR1 and return it. */
639 static hashval_t
640 vn_reference_compute_hash (const vn_reference_t vr1)
642 inchash::hash hstate;
643 hashval_t result;
644 int i;
645 vn_reference_op_t vro;
646 poly_int64 off = -1;
647 bool deref = false;
649 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
651 if (vro->opcode == MEM_REF)
652 deref = true;
653 else if (vro->opcode != ADDR_EXPR)
654 deref = false;
655 if (maybe_ne (vro->off, -1))
657 if (known_eq (off, -1))
658 off = 0;
659 off += vro->off;
661 else
663 if (maybe_ne (off, -1)
664 && maybe_ne (off, 0))
665 hstate.add_poly_int (off);
666 off = -1;
667 if (deref
668 && vro->opcode == ADDR_EXPR)
670 if (vro->op0)
672 tree op = TREE_OPERAND (vro->op0, 0);
673 hstate.add_int (TREE_CODE (op));
674 inchash::add_expr (op, hstate);
677 else
678 vn_reference_op_compute_hash (vro, hstate);
681 result = hstate.end ();
682 /* ??? We would ICE later if we hash instead of adding that in. */
683 if (vr1->vuse)
684 result += SSA_NAME_VERSION (vr1->vuse);
686 return result;
689 /* Return true if reference operations VR1 and VR2 are equivalent. This
690 means they have the same set of operands and vuses. */
692 bool
693 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
695 unsigned i, j;
697 /* Early out if this is not a hash collision. */
698 if (vr1->hashcode != vr2->hashcode)
699 return false;
701 /* The VOP needs to be the same. */
702 if (vr1->vuse != vr2->vuse)
703 return false;
705 /* If the operands are the same we are done. */
706 if (vr1->operands == vr2->operands)
707 return true;
709 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
710 return false;
712 if (INTEGRAL_TYPE_P (vr1->type)
713 && INTEGRAL_TYPE_P (vr2->type))
715 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
716 return false;
718 else if (INTEGRAL_TYPE_P (vr1->type)
719 && (TYPE_PRECISION (vr1->type)
720 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
721 return false;
722 else if (INTEGRAL_TYPE_P (vr2->type)
723 && (TYPE_PRECISION (vr2->type)
724 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
725 return false;
727 i = 0;
728 j = 0;
731 poly_int64 off1 = 0, off2 = 0;
732 vn_reference_op_t vro1, vro2;
733 vn_reference_op_s tem1, tem2;
734 bool deref1 = false, deref2 = false;
735 for (; vr1->operands.iterate (i, &vro1); i++)
737 if (vro1->opcode == MEM_REF)
738 deref1 = true;
739 /* Do not look through a storage order barrier. */
740 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
741 return false;
742 if (known_eq (vro1->off, -1))
743 break;
744 off1 += vro1->off;
746 for (; vr2->operands.iterate (j, &vro2); j++)
748 if (vro2->opcode == MEM_REF)
749 deref2 = true;
750 /* Do not look through a storage order barrier. */
751 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
752 return false;
753 if (known_eq (vro2->off, -1))
754 break;
755 off2 += vro2->off;
757 if (maybe_ne (off1, off2))
758 return false;
759 if (deref1 && vro1->opcode == ADDR_EXPR)
761 memset (&tem1, 0, sizeof (tem1));
762 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
763 tem1.type = TREE_TYPE (tem1.op0);
764 tem1.opcode = TREE_CODE (tem1.op0);
765 vro1 = &tem1;
766 deref1 = false;
768 if (deref2 && vro2->opcode == ADDR_EXPR)
770 memset (&tem2, 0, sizeof (tem2));
771 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
772 tem2.type = TREE_TYPE (tem2.op0);
773 tem2.opcode = TREE_CODE (tem2.op0);
774 vro2 = &tem2;
775 deref2 = false;
777 if (deref1 != deref2)
778 return false;
779 if (!vn_reference_op_eq (vro1, vro2))
780 return false;
781 ++j;
782 ++i;
784 while (vr1->operands.length () != i
785 || vr2->operands.length () != j);
787 return true;
790 /* Copy the operations present in load/store REF into RESULT, a vector of
791 vn_reference_op_s's. */
793 static void
794 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
796 /* For non-calls, store the information that makes up the address. */
797 tree orig = ref;
798 while (ref)
800 vn_reference_op_s temp;
802 memset (&temp, 0, sizeof (temp));
803 temp.type = TREE_TYPE (ref);
804 temp.opcode = TREE_CODE (ref);
805 temp.off = -1;
807 switch (temp.opcode)
809 case MODIFY_EXPR:
810 temp.op0 = TREE_OPERAND (ref, 1);
811 break;
812 case WITH_SIZE_EXPR:
813 temp.op0 = TREE_OPERAND (ref, 1);
814 temp.off = 0;
815 break;
816 case MEM_REF:
817 /* The base address gets its own vn_reference_op_s structure. */
818 temp.op0 = TREE_OPERAND (ref, 1);
819 if (!mem_ref_offset (ref).to_shwi (&temp.off))
820 temp.off = -1;
821 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
822 temp.base = MR_DEPENDENCE_BASE (ref);
823 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
824 break;
825 case TARGET_MEM_REF:
826 /* The base address gets its own vn_reference_op_s structure. */
827 temp.op0 = TMR_INDEX (ref);
828 temp.op1 = TMR_STEP (ref);
829 temp.op2 = TMR_OFFSET (ref);
830 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
831 temp.base = MR_DEPENDENCE_BASE (ref);
832 result->safe_push (temp);
833 memset (&temp, 0, sizeof (temp));
834 temp.type = NULL_TREE;
835 temp.opcode = ERROR_MARK;
836 temp.op0 = TMR_INDEX2 (ref);
837 temp.off = -1;
838 break;
839 case BIT_FIELD_REF:
840 /* Record bits, position and storage order. */
841 temp.op0 = TREE_OPERAND (ref, 1);
842 temp.op1 = TREE_OPERAND (ref, 2);
843 if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
844 temp.off = -1;
845 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
846 break;
847 case COMPONENT_REF:
848 /* The field decl is enough to unambiguously specify the field,
849 a matching type is not necessary and a mismatching type
850 is always a spurious difference. */
851 temp.type = NULL_TREE;
852 temp.op0 = TREE_OPERAND (ref, 1);
853 temp.op1 = TREE_OPERAND (ref, 2);
855 tree this_offset = component_ref_field_offset (ref);
856 if (this_offset
857 && poly_int_tree_p (this_offset))
859 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
860 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
862 poly_offset_int off
863 = (wi::to_poly_offset (this_offset)
864 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
865 /* Prohibit value-numbering zero offset components
866 of addresses the same before the pass folding
867 __builtin_object_size had a chance to run
868 (checking cfun->after_inlining does the
869 trick here). */
870 if (TREE_CODE (orig) != ADDR_EXPR
871 || maybe_ne (off, 0)
872 || cfun->after_inlining)
873 off.to_shwi (&temp.off);
877 break;
878 case ARRAY_RANGE_REF:
879 case ARRAY_REF:
881 tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
882 /* Record index as operand. */
883 temp.op0 = TREE_OPERAND (ref, 1);
884 /* Always record lower bounds and element size. */
885 temp.op1 = array_ref_low_bound (ref);
886 /* But record element size in units of the type alignment. */
887 temp.op2 = TREE_OPERAND (ref, 3);
888 temp.align = eltype->type_common.align;
889 if (! temp.op2)
890 temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
891 size_int (TYPE_ALIGN_UNIT (eltype)));
892 if (poly_int_tree_p (temp.op0)
893 && poly_int_tree_p (temp.op1)
894 && TREE_CODE (temp.op2) == INTEGER_CST)
896 poly_offset_int off = ((wi::to_poly_offset (temp.op0)
897 - wi::to_poly_offset (temp.op1))
898 * wi::to_offset (temp.op2)
899 * vn_ref_op_align_unit (&temp));
900 off.to_shwi (&temp.off);
903 break;
904 case VAR_DECL:
905 if (DECL_HARD_REGISTER (ref))
907 temp.op0 = ref;
908 break;
910 /* Fallthru. */
911 case PARM_DECL:
912 case CONST_DECL:
913 case RESULT_DECL:
914 /* Canonicalize decls to MEM[&decl] which is what we end up with
915 when valueizing MEM[ptr] with ptr = &decl. */
916 temp.opcode = MEM_REF;
917 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
918 temp.off = 0;
919 result->safe_push (temp);
920 temp.opcode = ADDR_EXPR;
921 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
922 temp.type = TREE_TYPE (temp.op0);
923 temp.off = -1;
924 break;
925 case STRING_CST:
926 case INTEGER_CST:
927 case COMPLEX_CST:
928 case VECTOR_CST:
929 case REAL_CST:
930 case FIXED_CST:
931 case CONSTRUCTOR:
932 case SSA_NAME:
933 temp.op0 = ref;
934 break;
935 case ADDR_EXPR:
936 if (is_gimple_min_invariant (ref))
938 temp.op0 = ref;
939 break;
941 break;
942 /* These are only interesting for their operands, their
943 existence, and their type. They will never be the last
944 ref in the chain of references (i.e. they require an
945 operand), so we don't have to put anything
946 for op* as it will be handled by the iteration. */
947 case REALPART_EXPR:
948 temp.off = 0;
949 break;
950 case VIEW_CONVERT_EXPR:
951 temp.off = 0;
952 temp.reverse = storage_order_barrier_p (ref);
953 break;
954 case IMAGPART_EXPR:
955 /* This is only interesting for its constant offset. */
956 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
957 break;
958 default:
959 gcc_unreachable ();
961 result->safe_push (temp);
963 if (REFERENCE_CLASS_P (ref)
964 || TREE_CODE (ref) == MODIFY_EXPR
965 || TREE_CODE (ref) == WITH_SIZE_EXPR
966 || (TREE_CODE (ref) == ADDR_EXPR
967 && !is_gimple_min_invariant (ref)))
968 ref = TREE_OPERAND (ref, 0);
969 else
970 ref = NULL_TREE;
974 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
975 operands in *OPS, the reference alias set SET and the reference type TYPE.
976 Return true if something useful was produced. */
978 bool
979 ao_ref_init_from_vn_reference (ao_ref *ref,
980 alias_set_type set, tree type,
981 vec<vn_reference_op_s> ops)
983 vn_reference_op_t op;
984 unsigned i;
985 tree base = NULL_TREE;
986 tree *op0_p = &base;
987 poly_offset_int offset = 0;
988 poly_offset_int max_size;
989 poly_offset_int size = -1;
990 tree size_tree = NULL_TREE;
991 alias_set_type base_alias_set = -1;
993 /* First get the final access size from just the outermost expression. */
994 op = &ops[0];
995 if (op->opcode == COMPONENT_REF)
996 size_tree = DECL_SIZE (op->op0);
997 else if (op->opcode == BIT_FIELD_REF)
998 size_tree = op->op0;
999 else
1001 machine_mode mode = TYPE_MODE (type);
1002 if (mode == BLKmode)
1003 size_tree = TYPE_SIZE (type);
1004 else
1005 size = GET_MODE_BITSIZE (mode);
1007 if (size_tree != NULL_TREE
1008 && poly_int_tree_p (size_tree))
1009 size = wi::to_poly_offset (size_tree);
1011 /* Initially, maxsize is the same as the accessed element size.
1012 In the following it will only grow (or become -1). */
1013 max_size = size;
1015 /* Compute cumulative bit-offset for nested component-refs and array-refs,
1016 and find the ultimate containing object. */
1017 FOR_EACH_VEC_ELT (ops, i, op)
1019 switch (op->opcode)
1021 /* These may be in the reference ops, but we cannot do anything
1022 sensible with them here. */
1023 case ADDR_EXPR:
1024 /* Apart from ADDR_EXPR arguments to MEM_REF. */
1025 if (base != NULL_TREE
1026 && TREE_CODE (base) == MEM_REF
1027 && op->op0
1028 && DECL_P (TREE_OPERAND (op->op0, 0)))
1030 vn_reference_op_t pop = &ops[i-1];
1031 base = TREE_OPERAND (op->op0, 0);
1032 if (known_eq (pop->off, -1))
1034 max_size = -1;
1035 offset = 0;
1037 else
1038 offset += pop->off * BITS_PER_UNIT;
1039 op0_p = NULL;
1040 break;
1042 /* Fallthru. */
1043 case CALL_EXPR:
1044 return false;
1046 /* Record the base objects. */
1047 case MEM_REF:
1048 base_alias_set = get_deref_alias_set (op->op0);
1049 *op0_p = build2 (MEM_REF, op->type,
1050 NULL_TREE, op->op0);
1051 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
1052 MR_DEPENDENCE_BASE (*op0_p) = op->base;
1053 op0_p = &TREE_OPERAND (*op0_p, 0);
1054 break;
1056 case VAR_DECL:
1057 case PARM_DECL:
1058 case RESULT_DECL:
1059 case SSA_NAME:
1060 *op0_p = op->op0;
1061 op0_p = NULL;
1062 break;
1064 /* And now the usual component-reference style ops. */
1065 case BIT_FIELD_REF:
1066 offset += wi::to_poly_offset (op->op1);
1067 break;
1069 case COMPONENT_REF:
1071 tree field = op->op0;
1072 /* We do not have a complete COMPONENT_REF tree here so we
1073 cannot use component_ref_field_offset. Do the interesting
1074 parts manually. */
1075 tree this_offset = DECL_FIELD_OFFSET (field);
1077 if (op->op1 || !poly_int_tree_p (this_offset))
1078 max_size = -1;
1079 else
1081 poly_offset_int woffset = (wi::to_poly_offset (this_offset)
1082 << LOG2_BITS_PER_UNIT);
1083 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1084 offset += woffset;
1086 break;
1089 case ARRAY_RANGE_REF:
1090 case ARRAY_REF:
1091 /* We recorded the lower bound and the element size. */
1092 if (!poly_int_tree_p (op->op0)
1093 || !poly_int_tree_p (op->op1)
1094 || TREE_CODE (op->op2) != INTEGER_CST)
1095 max_size = -1;
1096 else
1098 poly_offset_int woffset
1099 = wi::sext (wi::to_poly_offset (op->op0)
1100 - wi::to_poly_offset (op->op1),
1101 TYPE_PRECISION (TREE_TYPE (op->op0)));
1102 woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1103 woffset <<= LOG2_BITS_PER_UNIT;
1104 offset += woffset;
1106 break;
1108 case REALPART_EXPR:
1109 break;
1111 case IMAGPART_EXPR:
1112 offset += size;
1113 break;
1115 case VIEW_CONVERT_EXPR:
1116 break;
1118 case STRING_CST:
1119 case INTEGER_CST:
1120 case COMPLEX_CST:
1121 case VECTOR_CST:
1122 case REAL_CST:
1123 case CONSTRUCTOR:
1124 case CONST_DECL:
1125 return false;
1127 default:
1128 return false;
1132 if (base == NULL_TREE)
1133 return false;
1135 ref->ref = NULL_TREE;
1136 ref->base = base;
1137 ref->ref_alias_set = set;
1138 if (base_alias_set != -1)
1139 ref->base_alias_set = base_alias_set;
1140 else
1141 ref->base_alias_set = get_alias_set (base);
1142 /* We discount volatiles from value-numbering elsewhere. */
1143 ref->volatile_p = false;
1145 if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
1147 ref->offset = 0;
1148 ref->size = -1;
1149 ref->max_size = -1;
1150 return true;
1153 if (!offset.to_shwi (&ref->offset))
1155 ref->offset = 0;
1156 ref->max_size = -1;
1157 return true;
1160 if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
1161 ref->max_size = -1;
1163 return true;
1166 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1167 vn_reference_op_s's. */
1169 static void
1170 copy_reference_ops_from_call (gcall *call,
1171 vec<vn_reference_op_s> *result)
1173 vn_reference_op_s temp;
1174 unsigned i;
1175 tree lhs = gimple_call_lhs (call);
1176 int lr;
1178 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1179 different. By adding the lhs here in the vector, we ensure that the
1180 hashcode is different, guaranteeing a different value number. */
1181 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1183 memset (&temp, 0, sizeof (temp));
1184 temp.opcode = MODIFY_EXPR;
1185 temp.type = TREE_TYPE (lhs);
1186 temp.op0 = lhs;
1187 temp.off = -1;
1188 result->safe_push (temp);
1191 /* Copy the type, opcode, function, static chain and EH region, if any. */
1192 memset (&temp, 0, sizeof (temp));
1193 temp.type = gimple_call_fntype (call);
1194 temp.opcode = CALL_EXPR;
1195 temp.op0 = gimple_call_fn (call);
1196 temp.op1 = gimple_call_chain (call);
1197 if (stmt_could_throw_p (cfun, call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1198 temp.op2 = size_int (lr);
1199 temp.off = -1;
1200 result->safe_push (temp);
1202 /* Copy the call arguments. As they can be references as well,
1203 just chain them together. */
1204 for (i = 0; i < gimple_call_num_args (call); ++i)
1206 tree callarg = gimple_call_arg (call, i);
1207 copy_reference_ops_from_ref (callarg, result);
1211 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1212 *I_P to point to the last element of the replacement. */
1213 static bool
1214 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1215 unsigned int *i_p)
1217 unsigned int i = *i_p;
1218 vn_reference_op_t op = &(*ops)[i];
1219 vn_reference_op_t mem_op = &(*ops)[i - 1];
1220 tree addr_base;
1221 poly_int64 addr_offset = 0;
1223 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1224 from .foo.bar to the preceding MEM_REF offset and replace the
1225 address with &OBJ. */
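/* Illustrative example (added comment, offsets are hypothetical): the
   operand pair for MEM[(int *)&obj.foo.bar] with foo.bar at byte offset 8
   within OBJ is rewritten into the pair for MEM[(int *)&obj + 8], folding
   the field path into the constant MEM_REF offset.  */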
1226 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1227 &addr_offset);
1228 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1229 if (addr_base != TREE_OPERAND (op->op0, 0))
1231 poly_offset_int off
1232 = (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
1233 SIGNED)
1234 + addr_offset);
1235 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1236 op->op0 = build_fold_addr_expr (addr_base);
1237 if (tree_fits_shwi_p (mem_op->op0))
1238 mem_op->off = tree_to_shwi (mem_op->op0);
1239 else
1240 mem_op->off = -1;
1241 return true;
1243 return false;
1246 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1247 *I_P to point to the last element of the replacement. */
1248 static bool
1249 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1250 unsigned int *i_p)
1252 unsigned int i = *i_p;
1253 vn_reference_op_t op = &(*ops)[i];
1254 vn_reference_op_t mem_op = &(*ops)[i - 1];
1255 gimple *def_stmt;
1256 enum tree_code code;
1257 poly_offset_int off;
1259 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1260 if (!is_gimple_assign (def_stmt))
1261 return false;
1263 code = gimple_assign_rhs_code (def_stmt);
1264 if (code != ADDR_EXPR
1265 && code != POINTER_PLUS_EXPR)
1266 return false;
1268 off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);
1270 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1271 from .foo.bar to the preceding MEM_REF offset and replace the
1272 address with &OBJ. */
1273 if (code == ADDR_EXPR)
1275 tree addr, addr_base;
1276 poly_int64 addr_offset;
1278 addr = gimple_assign_rhs1 (def_stmt);
1279 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1280 &addr_offset);
1281 /* If that didn't work because the address isn't invariant, propagate
1282 the reference tree from the address operation in case the current
1283 dereference has a zero offset. */
1284 if (!addr_base
1285 && *i_p == ops->length () - 1
1286 && known_eq (off, 0)
1287 /* This makes us disable this transform for PRE where the
1288 reference ops might also be used for code insertion, which
1289 is invalid. */
1290 && default_vn_walk_kind == VN_WALKREWRITE)
1292 auto_vec<vn_reference_op_s, 32> tem;
1293 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1294 /* Make sure to preserve TBAA info. The only objects not
1295 wrapped in MEM_REFs that can have their address taken are
1296 STRING_CSTs. */
1297 if (tem.length () >= 2
1298 && tem[tem.length () - 2].opcode == MEM_REF)
1300 vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1301 new_mem_op->op0
1302 = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1303 wi::to_poly_wide (new_mem_op->op0));
1305 else
1306 gcc_assert (tem.last ().opcode == STRING_CST);
1307 ops->pop ();
1308 ops->pop ();
1309 ops->safe_splice (tem);
1310 --*i_p;
1311 return true;
1313 if (!addr_base
1314 || TREE_CODE (addr_base) != MEM_REF
1315 || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
1316 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base, 0))))
1317 return false;
1319 off += addr_offset;
1320 off += mem_ref_offset (addr_base);
1321 op->op0 = TREE_OPERAND (addr_base, 0);
1323 else
1325 tree ptr, ptroff;
1326 ptr = gimple_assign_rhs1 (def_stmt);
1327 ptroff = gimple_assign_rhs2 (def_stmt);
1328 if (TREE_CODE (ptr) != SSA_NAME
1329 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1330 /* Make sure to not endlessly recurse.
1331 See gcc.dg/tree-ssa/20040408-1.c for an example. Can easily
1332 happen when we value-number a PHI to its backedge value. */
1333 || SSA_VAL (ptr) == op->op0
1334 || !poly_int_tree_p (ptroff))
1335 return false;
1337 off += wi::to_poly_offset (ptroff);
1338 op->op0 = ptr;
1341 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1342 if (tree_fits_shwi_p (mem_op->op0))
1343 mem_op->off = tree_to_shwi (mem_op->op0);
1344 else
1345 mem_op->off = -1;
1346 /* ??? Can end up with endless recursion here!?
1347 gcc.c-torture/execute/strcmp-1.c */
1348 if (TREE_CODE (op->op0) == SSA_NAME)
1349 op->op0 = SSA_VAL (op->op0);
1350 if (TREE_CODE (op->op0) != SSA_NAME)
1351 op->opcode = TREE_CODE (op->op0);
1353 /* And recurse. */
1354 if (TREE_CODE (op->op0) == SSA_NAME)
1355 vn_reference_maybe_forwprop_address (ops, i_p);
1356 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1357 vn_reference_fold_indirect (ops, i_p);
1358 return true;
1361 /* Optimize the reference REF to a constant if possible or return
1362 NULL_TREE if not. */
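/* For instance (an illustrative, hypothetical example): given

     static const int tab[2] = { 40, 2 };

   a reference representing tab[1] can be folded to the constant 2 via the
   constant initializer, using ctor_for_folding or the native_encode_expr /
   native_interpret_expr path below.  */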
1364 tree
1365 fully_constant_vn_reference_p (vn_reference_t ref)
1367 vec<vn_reference_op_s> operands = ref->operands;
1368 vn_reference_op_t op;
1370 /* Try to simplify the translated expression if it is
1371 a call to a builtin function with at most two arguments. */
1372 op = &operands[0];
1373 if (op->opcode == CALL_EXPR
1374 && TREE_CODE (op->op0) == ADDR_EXPR
1375 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1376 && fndecl_built_in_p (TREE_OPERAND (op->op0, 0))
1377 && operands.length () >= 2
1378 && operands.length () <= 3)
1380 vn_reference_op_t arg0, arg1 = NULL;
1381 bool anyconst = false;
1382 arg0 = &operands[1];
1383 if (operands.length () > 2)
1384 arg1 = &operands[2];
1385 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1386 || (arg0->opcode == ADDR_EXPR
1387 && is_gimple_min_invariant (arg0->op0)))
1388 anyconst = true;
1389 if (arg1
1390 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1391 || (arg1->opcode == ADDR_EXPR
1392 && is_gimple_min_invariant (arg1->op0))))
1393 anyconst = true;
1394 if (anyconst)
1396 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1397 arg1 ? 2 : 1,
1398 arg0->op0,
1399 arg1 ? arg1->op0 : NULL);
1400 if (folded
1401 && TREE_CODE (folded) == NOP_EXPR)
1402 folded = TREE_OPERAND (folded, 0);
1403 if (folded
1404 && is_gimple_min_invariant (folded))
1405 return folded;
1409 /* Simplify reads from constants or constant initializers. */
1410 else if (BITS_PER_UNIT == 8
1411 && COMPLETE_TYPE_P (ref->type)
1412 && is_gimple_reg_type (ref->type))
1414 poly_int64 off = 0;
1415 HOST_WIDE_INT size;
1416 if (INTEGRAL_TYPE_P (ref->type))
1417 size = TYPE_PRECISION (ref->type);
1418 else if (tree_fits_shwi_p (TYPE_SIZE (ref->type)))
1419 size = tree_to_shwi (TYPE_SIZE (ref->type));
1420 else
1421 return NULL_TREE;
1422 if (size % BITS_PER_UNIT != 0
1423 || size > MAX_BITSIZE_MODE_ANY_MODE)
1424 return NULL_TREE;
1425 size /= BITS_PER_UNIT;
1426 unsigned i;
1427 for (i = 0; i < operands.length (); ++i)
1429 if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1431 ++i;
1432 break;
1434 if (known_eq (operands[i].off, -1))
1435 return NULL_TREE;
1436 off += operands[i].off;
1437 if (operands[i].opcode == MEM_REF)
1439 ++i;
1440 break;
1443 vn_reference_op_t base = &operands[--i];
1444 tree ctor = error_mark_node;
1445 tree decl = NULL_TREE;
1446 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1447 ctor = base->op0;
1448 else if (base->opcode == MEM_REF
1449 && base[1].opcode == ADDR_EXPR
1450 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1451 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
1452 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
1454 decl = TREE_OPERAND (base[1].op0, 0);
1455 if (TREE_CODE (decl) == STRING_CST)
1456 ctor = decl;
1457 else
1458 ctor = ctor_for_folding (decl);
1460 if (ctor == NULL_TREE)
1461 return build_zero_cst (ref->type);
1462 else if (ctor != error_mark_node)
1464 HOST_WIDE_INT const_off;
1465 if (decl)
1467 tree res = fold_ctor_reference (ref->type, ctor,
1468 off * BITS_PER_UNIT,
1469 size * BITS_PER_UNIT, decl);
1470 if (res)
1472 STRIP_USELESS_TYPE_CONVERSION (res);
1473 if (is_gimple_min_invariant (res))
1474 return res;
1477 else if (off.is_constant (&const_off))
1479 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1480 int len = native_encode_expr (ctor, buf, size, const_off);
1481 if (len > 0)
1482 return native_interpret_expr (ref->type, buf, len);
1487 return NULL_TREE;
1490 /* Return true if OPS contain a storage order barrier. */
1492 static bool
1493 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1495 vn_reference_op_t op;
1496 unsigned i;
1498 FOR_EACH_VEC_ELT (ops, i, op)
1499 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1500 return true;
1502 return false;
1505 /* Transform any SSA_NAMEs in a vector of vn_reference_op_s
1506 structures into their value numbers. This is done in-place, and
1507 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1508 whether any operands were valueized. */
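/* For example (illustrative only): if i_2 has been value-numbered to the
   constant 1, the operand vector for a[i_2] is rewritten in place into the
   one for a[1], and the now-constant byte offset of the ARRAY_REF operand
   is filled in.  */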
1510 static vec<vn_reference_op_s>
1511 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything,
1512 bool with_avail = false)
1514 vn_reference_op_t vro;
1515 unsigned int i;
1517 *valueized_anything = false;
1519 FOR_EACH_VEC_ELT (orig, i, vro)
1521 if (vro->opcode == SSA_NAME
1522 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1524 tree tem = with_avail ? vn_valueize (vro->op0) : SSA_VAL (vro->op0);
1525 if (tem != vro->op0)
1527 *valueized_anything = true;
1528 vro->op0 = tem;
1530 /* If it transforms from an SSA_NAME to a constant, update
1531 the opcode. */
1532 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1533 vro->opcode = TREE_CODE (vro->op0);
1535 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1537 tree tem = with_avail ? vn_valueize (vro->op1) : SSA_VAL (vro->op1);
1538 if (tem != vro->op1)
1540 *valueized_anything = true;
1541 vro->op1 = tem;
1544 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1546 tree tem = with_avail ? vn_valueize (vro->op2) : SSA_VAL (vro->op2);
1547 if (tem != vro->op2)
1549 *valueized_anything = true;
1550 vro->op2 = tem;
1553 /* If it transforms from an SSA_NAME to an address, fold with
1554 a preceding indirect reference. */
1555 if (i > 0
1556 && vro->op0
1557 && TREE_CODE (vro->op0) == ADDR_EXPR
1558 && orig[i - 1].opcode == MEM_REF)
1560 if (vn_reference_fold_indirect (&orig, &i))
1561 *valueized_anything = true;
1563 else if (i > 0
1564 && vro->opcode == SSA_NAME
1565 && orig[i - 1].opcode == MEM_REF)
1567 if (vn_reference_maybe_forwprop_address (&orig, &i))
1568 *valueized_anything = true;
1570 /* If it transforms a non-constant ARRAY_REF into a constant
1571 one, adjust the constant offset. */
1572 else if (vro->opcode == ARRAY_REF
1573 && known_eq (vro->off, -1)
1574 && poly_int_tree_p (vro->op0)
1575 && poly_int_tree_p (vro->op1)
1576 && TREE_CODE (vro->op2) == INTEGER_CST)
1578 poly_offset_int off = ((wi::to_poly_offset (vro->op0)
1579 - wi::to_poly_offset (vro->op1))
1580 * wi::to_offset (vro->op2)
1581 * vn_ref_op_align_unit (vro));
1582 off.to_shwi (&vro->off);
1586 return orig;
1589 static vec<vn_reference_op_s>
1590 valueize_refs (vec<vn_reference_op_s> orig)
1592 bool tem;
1593 return valueize_refs_1 (orig, &tem);
1596 static vec<vn_reference_op_s> shared_lookup_references;
1598 /* Create a vector of vn_reference_op_s structures from REF, a
1599 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1600 this function. *VALUEIZED_ANYTHING will specify whether any
1601 operands were valueized. */
1603 static vec<vn_reference_op_s>
1604 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1606 if (!ref)
1607 return vNULL;
1608 shared_lookup_references.truncate (0);
1609 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1610 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1611 valueized_anything);
1612 return shared_lookup_references;
1615 /* Create a vector of vn_reference_op_s structures from CALL, a
1616 call statement. The vector is shared among all callers of
1617 this function. */
1619 static vec<vn_reference_op_s>
1620 valueize_shared_reference_ops_from_call (gcall *call)
1622 if (!call)
1623 return vNULL;
1624 shared_lookup_references.truncate (0);
1625 copy_reference_ops_from_call (call, &shared_lookup_references);
1626 shared_lookup_references = valueize_refs (shared_lookup_references);
1627 return shared_lookup_references;
1630 /* Lookup a SCCVN reference operation VR in the current hash table.
1631 Returns the resulting value number if it exists in the hash table,
1632 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1633 vn_reference_t stored in the hashtable if something is found. */
1635 static tree
1636 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1638 vn_reference_s **slot;
1639 hashval_t hash;
1641 hash = vr->hashcode;
1642 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1643 if (slot)
1645 if (vnresult)
1646 *vnresult = (vn_reference_t)*slot;
1647 return ((vn_reference_t)*slot)->result;
1650 return NULL_TREE;
1654 /* Partial definition tracking support. */
1656 struct pd_range
1658 HOST_WIDE_INT offset;
1659 HOST_WIDE_INT size;
1662 struct pd_data
1664 tree rhs;
1665 HOST_WIDE_INT offset;
1666 HOST_WIDE_INT size;
1669 /* Context for alias walking. */
1671 struct vn_walk_cb_data
1673 vn_walk_cb_data (vn_reference_t vr_, tree orig_ref_, tree *last_vuse_ptr_,
1674 vn_lookup_kind vn_walk_kind_, bool tbaa_p_)
1675 : vr (vr_), last_vuse_ptr (last_vuse_ptr_),
1676 vn_walk_kind (vn_walk_kind_), tbaa_p (tbaa_p_), known_ranges (NULL)
1678 ao_ref_init (&orig_ref, orig_ref_);
1680 ~vn_walk_cb_data ();
1681 void *push_partial_def (const pd_data& pd, tree, HOST_WIDE_INT);
1683 vn_reference_t vr;
1684 ao_ref orig_ref;
1685 tree *last_vuse_ptr;
1686 vn_lookup_kind vn_walk_kind;
1687 bool tbaa_p;
1689 /* The VDEFs of partial defs we encounter. */
1690 auto_vec<pd_data, 2> partial_defs;
1691 /* The first def's range, to avoid splay tree setup in most cases. */
1692 pd_range first_range;
1693 tree first_vuse;
1694 splay_tree known_ranges;
1695 obstack ranges_obstack;
1698 vn_walk_cb_data::~vn_walk_cb_data ()
1700 if (known_ranges)
1702 splay_tree_delete (known_ranges);
1703 obstack_free (&ranges_obstack, NULL);
1707 /* pd_range splay-tree helpers. */
1709 static int
1710 pd_range_compare (splay_tree_key offset1p, splay_tree_key offset2p)
1712 HOST_WIDE_INT offset1 = *(HOST_WIDE_INT *)offset1p;
1713 HOST_WIDE_INT offset2 = *(HOST_WIDE_INT *)offset2p;
1714 if (offset1 < offset2)
1715 return -1;
1716 else if (offset1 > offset2)
1717 return 1;
1718 return 0;
1721 static void *
1722 pd_tree_alloc (int size, void *data_)
1724 vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
1725 return obstack_alloc (&data->ranges_obstack, size);
1728 static void
1729 pd_tree_dealloc (void *, void *)
1733 /* Push PD to the vector of partial definitions, returning a
1734 value when we are ready to combine things with VUSE and MAXSIZEI,
1735 NULL when we want to continue looking for partial defs, or -1
1736 on failure. */
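/* Illustrative sketch (added comment, offsets hypothetical): for an
   8-byte read at offset 0 whose walk finds the two partial stores

     s.lo = 1;   covering bytes [0, 4)
     s.hi = 2;   covering bytes [4, 8)

   the two pd_data entries together cover the whole access; each RHS is
   native-encoded into the local byte buffer and the result is
   re-interpreted in the type of the original reference, yielding a
   single combined constant.  */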
1738 void *
1739 vn_walk_cb_data::push_partial_def (const pd_data &pd, tree vuse,
1740 HOST_WIDE_INT maxsizei)
1742 if (partial_defs.is_empty ())
1744 partial_defs.safe_push (pd);
1745 first_range.offset = pd.offset;
1746 first_range.size = pd.size;
1747 first_vuse = vuse;
1748 last_vuse_ptr = NULL;
1750 else
1752 if (!known_ranges)
1754 /* ??? Optimize the case where the second partial def
1755 completes things. */
1756 gcc_obstack_init (&ranges_obstack);
1757 known_ranges
1758 = splay_tree_new_with_allocator (pd_range_compare, 0, 0,
1759 pd_tree_alloc,
1760 pd_tree_dealloc, this);
1761 splay_tree_insert (known_ranges,
1762 (splay_tree_key)&first_range.offset,
1763 (splay_tree_value)&first_range);
1765 if (known_ranges)
1767 pd_range newr = { pd.offset, pd.size };
1768 splay_tree_node n;
1769 pd_range *r;
1770 /* Lookup the predecessor of offset + 1 and see if
1771 we need to merge with it. */
1772 HOST_WIDE_INT loffset = newr.offset + 1;
1773 if ((n = splay_tree_predecessor (known_ranges,
1774 (splay_tree_key)&loffset))
1775 && ((r = (pd_range *)n->value), true)
1776 && ranges_known_overlap_p (r->offset, r->size + 1,
1777 newr.offset, newr.size))
1779 /* Ignore partial defs already covered. */
1780 if (known_subrange_p (newr.offset, newr.size,
1781 r->offset, r->size))
1782 return NULL;
1783 r->size = MAX (r->offset + r->size,
1784 newr.offset + newr.size) - r->offset;
1786 else
1788 /* newr.offset wasn't covered yet, insert the
1789 range. */
1790 r = XOBNEW (&ranges_obstack, pd_range);
1791 *r = newr;
1792 splay_tree_insert (known_ranges,
1793 (splay_tree_key)&r->offset,
1794 (splay_tree_value)r);
1796 /* Merge r which now contains newr and is a member
1797 of the splay tree with adjacent overlapping ranges. */
1798 pd_range *rafter;
1799 while ((n = splay_tree_successor (known_ranges,
1800 (splay_tree_key)&r->offset))
1801 && ((rafter = (pd_range *)n->value), true)
1802 && ranges_known_overlap_p (r->offset, r->size + 1,
1803 rafter->offset, rafter->size))
1805 r->size = MAX (r->offset + r->size,
1806 rafter->offset + rafter->size) - r->offset;
1807 splay_tree_remove (known_ranges,
1808 (splay_tree_key)&rafter->offset);
1810 partial_defs.safe_push (pd);
1812 /* Now we have merged newr into the range tree.
1813 When we have covered [offseti, sizei] then the
1814 tree will contain exactly one node which has
1815 the desired properties and it will be 'r'. */
1816 if (known_subrange_p (0, maxsizei / BITS_PER_UNIT,
1817 r->offset, r->size))
1819 /* Now simply native encode all partial defs
1820 in reverse order. */
1821 unsigned ndefs = partial_defs.length ();
1822 /* We support up to 512-bit values (for V8DFmode). */
1823 unsigned char buffer[64];
1824 int len;
1826 while (!partial_defs.is_empty ())
1828 pd_data pd = partial_defs.pop ();
1829 if (TREE_CODE (pd.rhs) == CONSTRUCTOR)
1830 /* Empty CONSTRUCTOR. */
1831 memset (buffer + MAX (0, pd.offset),
1832 0, MIN ((HOST_WIDE_INT)sizeof (buffer), pd.size));
1833 else
1835 unsigned pad = 0;
1836 if (BYTES_BIG_ENDIAN
1837 && is_a <scalar_mode> (TYPE_MODE (TREE_TYPE (pd.rhs))))
1839 /* On big-endian the padding is at the 'front' so
1840 just skip the initial bytes. */
1841 fixed_size_mode mode = as_a <fixed_size_mode>
1842 (TYPE_MODE (TREE_TYPE (pd.rhs)));
1843 pad = GET_MODE_SIZE (mode) - pd.size;
1845 len = native_encode_expr (pd.rhs,
1846 buffer + MAX (0, pd.offset),
1847 sizeof (buffer) - MAX (0, pd.offset),
1848 MAX (0, -pd.offset) + pad);
1849 if (len <= 0
1850 || len < (pd.size - MAX (0, -pd.offset)))
1852 if (dump_file && (dump_flags & TDF_DETAILS))
1853 fprintf (dump_file, "Failed to encode %u "
1854 "partial definitions\n", ndefs);
1855 return (void *)-1;
1860 tree type = vr->type;
1861 /* Make sure to interpret in a type that has a range
1862 covering the whole access size. */
1863 if (INTEGRAL_TYPE_P (vr->type)
1864 && maxsizei != TYPE_PRECISION (vr->type))
1865 type = build_nonstandard_integer_type (maxsizei,
1866 TYPE_UNSIGNED (type));
1867 tree val = native_interpret_expr (type, buffer,
1868 maxsizei / BITS_PER_UNIT);
1869 /* If we chop off bits because the type's precision doesn't
1870 match the memory access size, this is ok when optimizing
1871 reads but not when called from the DSE code during
1872 elimination. */
1873 if (val
1874 && type != vr->type)
1876 if (! int_fits_type_p (val, vr->type))
1877 val = NULL_TREE;
1878 else
1879 val = fold_convert (vr->type, val);
1882 if (val)
1884 if (dump_file && (dump_flags & TDF_DETAILS))
1885 fprintf (dump_file, "Successfully combined %u "
1886 "partial definitions\n", ndefs);
1887 return vn_reference_lookup_or_insert_for_pieces
1888 (first_vuse,
1889 vr->set, vr->type, vr->operands, val);
1891 else
1893 if (dump_file && (dump_flags & TDF_DETAILS))
1894 fprintf (dump_file, "Failed to interpret %u "
1895 "encoded partial definitions\n", ndefs);
1896 return (void *)-1;
1901 /* Continue looking for partial defs. */
1902 return NULL;
1905 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1906 with the current VUSE and performs the expression lookup. */
1908 static void *
1909 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *data_)
1911 vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
1912 vn_reference_t vr = data->vr;
1913 vn_reference_s **slot;
1914 hashval_t hash;
1916 /* If we have partial definitions recorded we have to go through
1917 vn_reference_lookup_3. */
1918 if (!data->partial_defs.is_empty ())
1919 return NULL;
1921 if (data->last_vuse_ptr)
1922 *data->last_vuse_ptr = vuse;
1924 /* Fixup vuse and hash. */
1925 if (vr->vuse)
1926 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1927 vr->vuse = vuse_ssa_val (vuse);
1928 if (vr->vuse)
1929 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1931 hash = vr->hashcode;
1932 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1933 if (slot)
1934 return *slot;
1936 return NULL;
1939 /* Look up an existing or insert a new vn_reference entry into the
1940 value table for the VUSE, SET, TYPE, OPERANDS reference that
1941 has the value VALUE, which is either a constant or an SSA name. */
1943 static vn_reference_t
1944 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1945 alias_set_type set,
1946 tree type,
1947 vec<vn_reference_op_s,
1948 va_heap> operands,
1949 tree value)
1951 vn_reference_s vr1;
1952 vn_reference_t result;
1953 unsigned value_id;
1954 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1955 vr1.operands = operands;
1956 vr1.type = type;
1957 vr1.set = set;
1958 vr1.hashcode = vn_reference_compute_hash (&vr1);
1959 if (vn_reference_lookup_1 (&vr1, &result))
1960 return result;
1961 if (TREE_CODE (value) == SSA_NAME)
1962 value_id = VN_INFO (value)->value_id;
1963 else
1964 value_id = get_or_alloc_constant_value_id (value);
1965 return vn_reference_insert_pieces (vuse, set, type,
1966 operands.copy (), value, value_id);
1969 /* Return a value-number for RCODE OPS... either by looking up an existing
1970 value-number for the simplified result or by inserting the operation if
1971 INSERT is true. */
1973 static tree
1974 vn_nary_build_or_lookup_1 (gimple_match_op *res_op, bool insert)
1976 tree result = NULL_TREE;
1977 /* We will be creating a value number for
1978 RCODE (OPS...).
1979 So first simplify and lookup this expression to see if it
1980 is already available. */
1981 /* For simplification valueize. */
1982 unsigned i;
1983 for (i = 0; i < res_op->num_ops; ++i)
1984 if (TREE_CODE (res_op->ops[i]) == SSA_NAME)
1986 tree tem = vn_valueize (res_op->ops[i]);
1987 if (!tem)
1988 break;
1989 res_op->ops[i] = tem;
1991 /* If valueization of an operand fails (it is not available), skip
1992 simplification. */
1993 bool res = false;
1994 if (i == res_op->num_ops)
1996 mprts_hook = vn_lookup_simplify_result;
1997 res = res_op->resimplify (NULL, vn_valueize);
1998 mprts_hook = NULL;
2000 gimple *new_stmt = NULL;
2001 if (res
2002 && gimple_simplified_result_is_gimple_val (res_op))
2004 /* The expression is already available. */
2005 result = res_op->ops[0];
2006 /* Valueize it, simplification returns something in AVAIL only. */
2007 if (TREE_CODE (result) == SSA_NAME)
2008 result = SSA_VAL (result);
2010 else
2012 tree val = vn_lookup_simplify_result (res_op);
2013 if (!val && insert)
2015 gimple_seq stmts = NULL;
2016 result = maybe_push_res_to_seq (res_op, &stmts);
2017 if (result)
2019 gcc_assert (gimple_seq_singleton_p (stmts));
2020 new_stmt = gimple_seq_first_stmt (stmts);
2023 else
2024 /* The expression is already available. */
2025 result = val;
2027 if (new_stmt)
2029 /* The expression is not yet available, value-number lhs to
2030 the new SSA_NAME we created. */
2031 /* Initialize value-number information properly. */
2032 vn_ssa_aux_t result_info = VN_INFO (result);
2033 result_info->valnum = result;
2034 result_info->value_id = get_next_value_id ();
2035 result_info->visited = 1;
2036 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
2037 new_stmt);
2038 result_info->needs_insertion = true;
2039 /* ??? PRE phi-translation inserts NARYs without corresponding
2040 SSA name result. Re-use those but set their result according
2041 to the stmt we just built. */
2042 vn_nary_op_t nary = NULL;
2043 vn_nary_op_lookup_stmt (new_stmt, &nary);
2044 if (nary)
2046 gcc_assert (! nary->predicated_values && nary->u.result == NULL_TREE);
2047 nary->u.result = gimple_assign_lhs (new_stmt);
2049 /* As all "inserted" statements are singleton SCCs, insert
2050 to the valid table. This is strictly needed to
2051 avoid re-generating new value SSA_NAMEs for the same
2052 expression during SCC iteration over and over (the
2053 optimistic table gets cleared after each iteration).
2054 We do not need to insert into the optimistic table, as
2055 lookups there will fall back to the valid table. */
2056 else
2058 unsigned int length = vn_nary_length_from_stmt (new_stmt);
2059 vn_nary_op_t vno1
2060 = alloc_vn_nary_op_noinit (length, &vn_tables_insert_obstack);
2061 vno1->value_id = result_info->value_id;
2062 vno1->length = length;
2063 vno1->predicated_values = 0;
2064 vno1->u.result = result;
2065 init_vn_nary_op_from_stmt (vno1, new_stmt);
2066 vn_nary_op_insert_into (vno1, valid_info->nary, true);
2067 /* Also do not link it into the undo chain. */
2068 last_inserted_nary = vno1->next;
2069 vno1->next = (vn_nary_op_t)(void *)-1;
2071 if (dump_file && (dump_flags & TDF_DETAILS))
2073 fprintf (dump_file, "Inserting name ");
2074 print_generic_expr (dump_file, result);
2075 fprintf (dump_file, " for expression ");
2076 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
2077 fprintf (dump_file, "\n");
2080 return result;
2083 /* Return a value-number for RCODE OPS... either by looking up an existing
2084 value-number for the simplified result or by inserting the operation. */
2086 static tree
2087 vn_nary_build_or_lookup (gimple_match_op *res_op)
2089 return vn_nary_build_or_lookup_1 (res_op, true);
2092 /* Try to simplify the expression RCODE OPS... of type TYPE and return
2093 its value if present. */
2095 tree
2096 vn_nary_simplify (vn_nary_op_t nary)
2098 if (nary->length > gimple_match_op::MAX_NUM_OPS)
2099 return NULL_TREE;
2100 gimple_match_op op (gimple_match_cond::UNCOND, nary->opcode,
2101 nary->type, nary->length);
2102 memcpy (op.ops, nary->op, sizeof (tree) * nary->length);
2103 return vn_nary_build_or_lookup_1 (&op, false);
2106 /* Elimination engine. */
2108 class eliminate_dom_walker : public dom_walker
2110 public:
2111 eliminate_dom_walker (cdi_direction, bitmap);
2112 ~eliminate_dom_walker ();
2114 virtual edge before_dom_children (basic_block);
2115 virtual void after_dom_children (basic_block);
2117 virtual tree eliminate_avail (basic_block, tree op);
2118 virtual void eliminate_push_avail (basic_block, tree op);
2119 tree eliminate_insert (basic_block, gimple_stmt_iterator *gsi, tree val);
2121 void eliminate_stmt (basic_block, gimple_stmt_iterator *);
2123 unsigned eliminate_cleanup (bool region_p = false);
2125 bool do_pre;
2126 unsigned int el_todo;
2127 unsigned int eliminations;
2128 unsigned int insertions;
2130 /* SSA names that had their defs inserted by PRE if do_pre. */
2131 bitmap inserted_exprs;
2133 /* Blocks with statements that have had their EH properties changed. */
2134 bitmap need_eh_cleanup;
2136 /* Blocks with statements that have had their AB properties changed. */
2137 bitmap need_ab_cleanup;
2139 /* Local state for the eliminate domwalk. */
2140 auto_vec<gimple *> to_remove;
2141 auto_vec<gimple *> to_fixup;
2142 auto_vec<tree> avail;
2143 auto_vec<tree> avail_stack;
2146 /* Adaptor to the elimination engine using RPO availability. */
2148 class rpo_elim : public eliminate_dom_walker
2150 public:
2151 rpo_elim(basic_block entry_)
2152 : eliminate_dom_walker (CDI_DOMINATORS, NULL), entry (entry_) {}
2153 ~rpo_elim();
2155 virtual tree eliminate_avail (basic_block, tree op);
2157 virtual void eliminate_push_avail (basic_block, tree);
2159 basic_block entry;
2160 /* Instead of having a local availability lattice for each
2161 basic-block and availability at X defined as union of
2162 the local availabilities at X and its dominators, we're
2163 turning this upside down and tracking availability per
2164 value, given values are usually made available at very
2165 few points (at least one).
2166 So we have a value -> vec<location, leader> map where
2167 LOCATION specifies the basic-block in which LEADER is made
2168 available for VALUE. We push to this vector in RPO
2169 order thus for iteration we can simply pop the last
2170 entries.
2171 LOCATION is the basic-block index and LEADER is its
2172 SSA name version. */
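/* Illustrative example (hypothetical numbers): if x_3 defined in basic
   block 4 and y_7 defined in basic block 9 have the same value, the
   entry for that value is the vector [(4, 3), (9, 7)], pushed in RPO
   order.  */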
2173 /* ??? We'd like to use auto_vec here with embedded storage
2174 but that doesn't play well until we can provide move
2175 constructors and use std::move on hash-table expansion.
2176 So for now this is a bit more expensive than necessary.
2177 We eventually want to switch to a chaining scheme like
2178 for hashtable entries for unwinding, which would make it
2179 possible to make the vector part of the vn_ssa_aux structure. */
2180 typedef hash_map<tree, vec<std::pair<int, int> > > rpo_avail_t;
2181 rpo_avail_t m_rpo_avail;
2184 /* Global RPO state for access from hooks. */
2185 static rpo_elim *rpo_avail;
2186 basic_block vn_context_bb;
2188 /* Return true if BASE1 and BASE2 can be adjusted so they have the
2189 same address and adjust *OFFSET1 and *OFFSET2 accordingly.
2190 Otherwise return false. */
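/* Illustrative example (assuming 8-bit units): for BASE1 = MEM_REF[p_1, 4]
   and BASE2 = MEM_REF[p_1, 12] both bases are reduced to p_1, *OFFSET1 is
   increased by 32 bits, *OFFSET2 by 96 bits, and true is returned.  */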
2192 static bool
2193 adjust_offsets_for_equal_base_address (tree base1, poly_int64 *offset1,
2194 tree base2, poly_int64 *offset2)
2196 poly_int64 soff;
2197 if (TREE_CODE (base1) == MEM_REF
2198 && TREE_CODE (base2) == MEM_REF)
2200 if (mem_ref_offset (base1).to_shwi (&soff))
2202 base1 = TREE_OPERAND (base1, 0);
2203 *offset1 += soff * BITS_PER_UNIT;
2205 if (mem_ref_offset (base2).to_shwi (&soff))
2207 base2 = TREE_OPERAND (base2, 0);
2208 *offset2 += soff * BITS_PER_UNIT;
2210 return operand_equal_p (base1, base2, 0);
2212 return operand_equal_p (base1, base2, OEP_ADDRESS_OF);
2215 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
2216 from the statement defining VUSE and if not successful tries to
2217 translate *REF and VR through an aggregate copy at the definition
2218 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
2219 of *REF and *VR. If only disambiguation was performed then
2220 *DISAMBIGUATE_ONLY is set to true. */
2222 static void *
2223 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
2224 bool *disambiguate_only)
2226 vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
2227 vn_reference_t vr = data->vr;
2228 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
2229 tree base = ao_ref_base (ref);
2230 HOST_WIDE_INT offseti, maxsizei;
2231 static vec<vn_reference_op_s> lhs_ops;
2232 ao_ref lhs_ref;
2233 bool lhs_ref_ok = false;
2234 poly_int64 copy_size;
2236 /* First try to disambiguate after value-replacing in the definitions LHS. */
2237 if (is_gimple_assign (def_stmt))
2239 tree lhs = gimple_assign_lhs (def_stmt);
2240 bool valueized_anything = false;
2241 /* Avoid re-allocation overhead. */
2242 lhs_ops.truncate (0);
2243 basic_block saved_rpo_bb = vn_context_bb;
2244 vn_context_bb = gimple_bb (def_stmt);
2245 copy_reference_ops_from_ref (lhs, &lhs_ops);
2246 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything, true);
2247 vn_context_bb = saved_rpo_bb;
2248 if (valueized_anything)
2250 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
2251 get_alias_set (lhs),
2252 TREE_TYPE (lhs), lhs_ops);
2253 if (lhs_ref_ok
2254 && !refs_may_alias_p_1 (ref, &lhs_ref, data->tbaa_p))
2256 *disambiguate_only = true;
2257 return NULL;
2260 else
2262 ao_ref_init (&lhs_ref, lhs);
2263 lhs_ref_ok = true;
2266 /* Besides valueizing the LHS we can also use access-path based
2267 disambiguation on the original non-valueized ref. */
2268 if (!ref->ref
2269 && lhs_ref_ok
2270 && data->orig_ref.ref)
2272 /* We want to use the non-valueized LHS for this, but avoid redundant
2273 work. */
2274 ao_ref *lref = &lhs_ref;
2275 ao_ref lref_alt;
2276 if (valueized_anything)
2278 ao_ref_init (&lref_alt, lhs);
2279 lref = &lref_alt;
2281 if (!refs_may_alias_p_1 (&data->orig_ref, lref, data->tbaa_p))
2283 *disambiguate_only = true;
2284 return NULL;
2288 /* If we reach a clobbering statement try to skip it and see if
2289 we find a VN result with exactly the same value as the
2290 possible clobber. In this case we can ignore the clobber
2291 and return the found value. */
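/* A minimal sketch of that situation (hypothetical SSA names):
     x_1 = *p_2;
     *p_2 = x_1;    <-- may-clobbering store of the value already present
     ... = *p_2;    <-- can reuse the VN result found before the store
   provided the alignment checks below rule out partial overlaps.  */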
2292 if (is_gimple_reg_type (TREE_TYPE (lhs))
2293 && types_compatible_p (TREE_TYPE (lhs), vr->type)
2294 && ref->ref)
2296 tree *saved_last_vuse_ptr = data->last_vuse_ptr;
2297 /* Do not update last_vuse_ptr in vn_reference_lookup_2. */
2298 data->last_vuse_ptr = NULL;
2299 tree saved_vuse = vr->vuse;
2300 hashval_t saved_hashcode = vr->hashcode;
2301 void *res = vn_reference_lookup_2 (ref, gimple_vuse (def_stmt), data);
2302 /* Need to restore vr->vuse and vr->hashcode. */
2303 vr->vuse = saved_vuse;
2304 vr->hashcode = saved_hashcode;
2305 data->last_vuse_ptr = saved_last_vuse_ptr;
2306 if (res && res != (void *)-1)
2308 vn_reference_t vnresult = (vn_reference_t) res;
2309 tree rhs = gimple_assign_rhs1 (def_stmt);
2310 if (TREE_CODE (rhs) == SSA_NAME)
2311 rhs = SSA_VAL (rhs);
2312 if (vnresult->result
2313 && operand_equal_p (vnresult->result, rhs, 0)
2314 /* We have to honor our promise about union type punning
2315 and also support arbitrary overlaps with
2316 -fno-strict-aliasing. So simply resort to alignment to
2317 rule out overlaps. Do this check last because it is
2318 quite expensive compared to the hash-lookup above. */
2319 && multiple_p (get_object_alignment (ref->ref), ref->size)
2320 && multiple_p (get_object_alignment (lhs), ref->size))
2321 return res;
2325 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
2326 && gimple_call_num_args (def_stmt) <= 4)
2328 /* For builtin calls valueize the arguments and call the
2329 alias oracle again. Valueization may improve points-to
2330 info of pointers and constify size and position arguments.
2331 Originally this was motivated by PR61034 which has
2332 conditional calls to free falsely clobbering ref because
2333 of imprecise points-to info of the argument. */
2334 tree oldargs[4];
2335 bool valueized_anything = false;
2336 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2338 oldargs[i] = gimple_call_arg (def_stmt, i);
2339 tree val = vn_valueize (oldargs[i]);
2340 if (val != oldargs[i])
2342 gimple_call_set_arg (def_stmt, i, val);
2343 valueized_anything = true;
2346 if (valueized_anything)
2348 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
2349 ref);
2350 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2351 gimple_call_set_arg (def_stmt, i, oldargs[i]);
2352 if (!res)
2354 *disambiguate_only = true;
2355 return NULL;
2360 /* If we are looking for redundant stores do not create new hashtable
2361 entries from aliasing defs with made up alias-sets. */
2362 if (*disambiguate_only || !data->tbaa_p)
2363 return (void *)-1;
2365 /* If we cannot constrain the size of the reference we cannot
2366 test if anything kills it. */
2367 if (!ref->max_size_known_p ())
2368 return (void *)-1;
2370 poly_int64 offset = ref->offset;
2371 poly_int64 maxsize = ref->max_size;
2373 /* We can't deduce anything useful from clobbers. */
2374 if (gimple_clobber_p (def_stmt))
2375 return (void *)-1;
2377 /* def_stmt may-defs *ref. See if we can derive a value for *ref
2378 from that definition.
2379 1) Memset. */
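/* Illustrative example for the memset case:
     memset (&a, 0, sizeof (a));
     ... = a.i;
   lets the read of a.i be value-numbered to zero; a non-zero constant
   byte is handled by interpreting a buffer filled with that byte in the
   type of the read.  */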
2380 if (is_gimple_reg_type (vr->type)
2381 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
2382 && (integer_zerop (gimple_call_arg (def_stmt, 1))
2383 || ((TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST
2384 || (INTEGRAL_TYPE_P (vr->type) && known_eq (ref->size, 8)))
2385 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
2386 && offset.is_constant (&offseti)
2387 && offseti % BITS_PER_UNIT == 0))
2388 && poly_int_tree_p (gimple_call_arg (def_stmt, 2))
2389 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2390 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME))
2392 tree base2;
2393 poly_int64 offset2, size2, maxsize2;
2394 bool reverse;
2395 tree ref2 = gimple_call_arg (def_stmt, 0);
2396 if (TREE_CODE (ref2) == SSA_NAME)
2398 ref2 = SSA_VAL (ref2);
2399 if (TREE_CODE (ref2) == SSA_NAME
2400 && (TREE_CODE (base) != MEM_REF
2401 || TREE_OPERAND (base, 0) != ref2))
2403 gimple *def_stmt = SSA_NAME_DEF_STMT (ref2);
2404 if (gimple_assign_single_p (def_stmt)
2405 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2406 ref2 = gimple_assign_rhs1 (def_stmt);
2409 if (TREE_CODE (ref2) == ADDR_EXPR)
2411 ref2 = TREE_OPERAND (ref2, 0);
2412 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
2413 &reverse);
2414 if (!known_size_p (maxsize2)
2415 || !known_eq (maxsize2, size2)
2416 || !operand_equal_p (base, base2, OEP_ADDRESS_OF))
2417 return (void *)-1;
2419 else if (TREE_CODE (ref2) == SSA_NAME)
2421 poly_int64 soff;
2422 if (TREE_CODE (base) != MEM_REF
2423 || !(mem_ref_offset (base) << LOG2_BITS_PER_UNIT).to_shwi (&soff))
2424 return (void *)-1;
2425 offset += soff;
2426 offset2 = 0;
2427 if (TREE_OPERAND (base, 0) != ref2)
2429 gimple *def = SSA_NAME_DEF_STMT (ref2);
2430 if (is_gimple_assign (def)
2431 && gimple_assign_rhs_code (def) == POINTER_PLUS_EXPR
2432 && gimple_assign_rhs1 (def) == TREE_OPERAND (base, 0)
2433 && poly_int_tree_p (gimple_assign_rhs2 (def))
2434 && (wi::to_poly_offset (gimple_assign_rhs2 (def))
2435 << LOG2_BITS_PER_UNIT).to_shwi (&offset2))
2437 ref2 = gimple_assign_rhs1 (def);
2438 if (TREE_CODE (ref2) == SSA_NAME)
2439 ref2 = SSA_VAL (ref2);
2441 else
2442 return (void *)-1;
2445 else
2446 return (void *)-1;
2447 tree len = gimple_call_arg (def_stmt, 2);
2448 HOST_WIDE_INT leni, offset2i, offseti;
2449 if (data->partial_defs.is_empty ()
2450 && known_subrange_p (offset, maxsize, offset2,
2451 wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
2453 tree val;
2454 if (integer_zerop (gimple_call_arg (def_stmt, 1)))
2455 val = build_zero_cst (vr->type);
2456 else if (INTEGRAL_TYPE_P (vr->type)
2457 && known_eq (ref->size, 8))
2459 gimple_match_op res_op (gimple_match_cond::UNCOND, NOP_EXPR,
2460 vr->type, gimple_call_arg (def_stmt, 1));
2461 val = vn_nary_build_or_lookup (&res_op);
2462 if (!val
2463 || (TREE_CODE (val) == SSA_NAME
2464 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2465 return (void *)-1;
2467 else
2469 unsigned len = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr->type));
2470 unsigned char *buf = XALLOCAVEC (unsigned char, len);
2471 memset (buf, TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 1)),
2472 len);
2473 val = native_interpret_expr (vr->type, buf, len);
2474 if (!val)
2475 return (void *)-1;
2477 return vn_reference_lookup_or_insert_for_pieces
2478 (vuse, vr->set, vr->type, vr->operands, val);
2480 /* For now handle clearing memory with partial defs. */
2481 else if (integer_zerop (gimple_call_arg (def_stmt, 1))
2482 && tree_to_poly_int64 (len).is_constant (&leni)
2483 && offset.is_constant (&offseti)
2484 && offset2.is_constant (&offset2i)
2485 && maxsize.is_constant (&maxsizei))
2487 pd_data pd;
2488 pd.rhs = build_constructor (NULL_TREE, NULL);
2489 pd.offset = offset2i - offseti;
2490 pd.size = leni;
2491 return data->push_partial_def (pd, vuse, maxsizei);
2495 /* 2) Assignment from an empty CONSTRUCTOR. */
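/* Illustrative example for case 2): after the assignment a = {} a covered
   read like ... = a.f value-numbers to a zero constant of the read type,
   while a partial overlap is recorded as a zero partial definition.  */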
2496 else if (is_gimple_reg_type (vr->type)
2497 && gimple_assign_single_p (def_stmt)
2498 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
2499 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
2501 tree lhs = gimple_assign_lhs (def_stmt);
2502 tree base2;
2503 poly_int64 offset2, size2, maxsize2;
2504 HOST_WIDE_INT offset2i, size2i;
2505 bool reverse;
2506 if (lhs_ref_ok)
2508 base2 = ao_ref_base (&lhs_ref);
2509 offset2 = lhs_ref.offset;
2510 size2 = lhs_ref.size;
2511 maxsize2 = lhs_ref.max_size;
2512 reverse = reverse_storage_order_for_component_p (lhs);
2514 else
2515 base2 = get_ref_base_and_extent (lhs,
2516 &offset2, &size2, &maxsize2, &reverse);
2517 if (known_size_p (maxsize2)
2518 && known_eq (maxsize2, size2)
2519 && adjust_offsets_for_equal_base_address (base, &offset,
2520 base2, &offset2))
2522 if (data->partial_defs.is_empty ()
2523 && known_subrange_p (offset, maxsize, offset2, size2))
2525 tree val = build_zero_cst (vr->type);
2526 return vn_reference_lookup_or_insert_for_pieces
2527 (vuse, vr->set, vr->type, vr->operands, val);
2529 else if (maxsize.is_constant (&maxsizei)
2530 && maxsizei % BITS_PER_UNIT == 0
2531 && offset.is_constant (&offseti)
2532 && offseti % BITS_PER_UNIT == 0
2533 && offset2.is_constant (&offset2i)
2534 && offset2i % BITS_PER_UNIT == 0
2535 && size2.is_constant (&size2i)
2536 && size2i % BITS_PER_UNIT == 0)
2538 pd_data pd;
2539 pd.rhs = gimple_assign_rhs1 (def_stmt);
2540 pd.offset = (offset2i - offseti) / BITS_PER_UNIT;
2541 pd.size = size2i / BITS_PER_UNIT;
2542 return data->push_partial_def (pd, vuse, maxsizei);
2547 /* 3) Assignment from a constant. We can use fold's native encode/interpret
2548 routines to extract the assigned bits. */
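/* Illustrative example for case 3) (byte-order dependent):
     MEM[&u] = 0x11223344;   <-- 4-byte constant store
     ... = MEM[&u + 2];      <-- 2-byte read
   the stored constant is native-encoded into a byte buffer and the two
   requested bytes are re-interpreted in the type of the read.  */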
2549 else if (known_eq (ref->size, maxsize)
2550 && is_gimple_reg_type (vr->type)
2551 && !contains_storage_order_barrier_p (vr->operands)
2552 && gimple_assign_single_p (def_stmt)
2553 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
2554 /* native_encode and native_decode operate on arrays of bytes
2555 and so fundamentally need a compile-time size and offset. */
2556 && maxsize.is_constant (&maxsizei)
2557 && maxsizei % BITS_PER_UNIT == 0
2558 && offset.is_constant (&offseti)
2559 && offseti % BITS_PER_UNIT == 0
2560 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
2561 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2562 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
2564 tree lhs = gimple_assign_lhs (def_stmt);
2565 tree base2;
2566 poly_int64 offset2, size2, maxsize2;
2567 HOST_WIDE_INT offset2i, size2i;
2568 bool reverse;
2569 if (lhs_ref_ok)
2571 base2 = ao_ref_base (&lhs_ref);
2572 offset2 = lhs_ref.offset;
2573 size2 = lhs_ref.size;
2574 maxsize2 = lhs_ref.max_size;
2575 reverse = reverse_storage_order_for_component_p (lhs);
2577 else
2578 base2 = get_ref_base_and_extent (lhs,
2579 &offset2, &size2, &maxsize2, &reverse);
2580 if (base2
2581 && !reverse
2582 && known_eq (maxsize2, size2)
2583 && multiple_p (size2, BITS_PER_UNIT)
2584 && multiple_p (offset2, BITS_PER_UNIT)
2585 && adjust_offsets_for_equal_base_address (base, &offset,
2586 base2, &offset2)
2587 && offset.is_constant (&offseti)
2588 && offset2.is_constant (&offset2i)
2589 && size2.is_constant (&size2i))
2591 if (data->partial_defs.is_empty ()
2592 && known_subrange_p (offseti, maxsizei, offset2, size2))
2594 /* We support up to 512-bit values (for V8DFmode). */
2595 unsigned char buffer[64];
2596 int len;
2598 tree rhs = gimple_assign_rhs1 (def_stmt);
2599 if (TREE_CODE (rhs) == SSA_NAME)
2600 rhs = SSA_VAL (rhs);
2601 unsigned pad = 0;
2602 if (BYTES_BIG_ENDIAN
2603 && is_a <scalar_mode> (TYPE_MODE (TREE_TYPE (rhs))))
2605 /* On big-endian the padding is at the 'front' so
2606 just skip the initial bytes. */
2607 fixed_size_mode mode
2608 = as_a <fixed_size_mode> (TYPE_MODE (TREE_TYPE (rhs)));
2609 pad = GET_MODE_SIZE (mode) - size2i / BITS_PER_UNIT;
2611 len = native_encode_expr (rhs,
2612 buffer, sizeof (buffer),
2613 ((offseti - offset2i) / BITS_PER_UNIT
2614 + pad));
2615 if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
2617 tree type = vr->type;
2618 /* Make sure to interpret in a type that has a range
2619 covering the whole access size. */
2620 if (INTEGRAL_TYPE_P (vr->type)
2621 && maxsizei != TYPE_PRECISION (vr->type))
2622 type = build_nonstandard_integer_type (maxsizei,
2623 TYPE_UNSIGNED (type));
2624 tree val = native_interpret_expr (type, buffer,
2625 maxsizei / BITS_PER_UNIT);
2626 /* If we chop off bits because the type's precision doesn't
2627 match the memory access size, this is ok when optimizing
2628 reads but not when called from the DSE code during
2629 elimination. */
2630 if (val
2631 && type != vr->type)
2633 if (! int_fits_type_p (val, vr->type))
2634 val = NULL_TREE;
2635 else
2636 val = fold_convert (vr->type, val);
2639 if (val)
2640 return vn_reference_lookup_or_insert_for_pieces
2641 (vuse, vr->set, vr->type, vr->operands, val);
2644 else if (ranges_known_overlap_p (offseti, maxsizei, offset2i, size2i))
2646 pd_data pd;
2647 tree rhs = gimple_assign_rhs1 (def_stmt);
2648 if (TREE_CODE (rhs) == SSA_NAME)
2649 rhs = SSA_VAL (rhs);
2650 pd.rhs = rhs;
2651 pd.offset = (offset2i - offseti) / BITS_PER_UNIT;
2652 pd.size = size2i / BITS_PER_UNIT;
2653 return data->push_partial_def (pd, vuse, maxsizei);
2658 /* 4) Assignment from an SSA name whose definition we may be able
2659 to access pieces from. */
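/* Illustrative example for case 4):
     a = v_1;     <-- register-type copy from an SSA name
     ... = a.f;   <-- covered piece of a
   the read can be value-numbered to BIT_FIELD_REF <v_1, size, position>,
   subject to the precision and byte-alignment checks below.  */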
2660 else if (known_eq (ref->size, maxsize)
2661 && is_gimple_reg_type (vr->type)
2662 && !contains_storage_order_barrier_p (vr->operands)
2663 && gimple_assign_single_p (def_stmt)
2664 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2665 /* A subset of partial defs from non-constants can be handled
2666 by for example inserting a CONSTRUCTOR, a COMPLEX_EXPR or
2667 even a (series of) BIT_INSERT_EXPR hoping for simplifications
2668 downstream, not so much for actually doing the insertion. */
2669 && data->partial_defs.is_empty ())
2671 tree lhs = gimple_assign_lhs (def_stmt);
2672 tree base2;
2673 poly_int64 offset2, size2, maxsize2;
2674 bool reverse;
2675 if (lhs_ref_ok)
2677 base2 = ao_ref_base (&lhs_ref);
2678 offset2 = lhs_ref.offset;
2679 size2 = lhs_ref.size;
2680 maxsize2 = lhs_ref.max_size;
2681 reverse = reverse_storage_order_for_component_p (lhs);
2683 else
2684 base2 = get_ref_base_and_extent (lhs,
2685 &offset2, &size2, &maxsize2, &reverse);
2686 tree def_rhs = gimple_assign_rhs1 (def_stmt);
2687 if (!reverse
2688 && known_size_p (maxsize2)
2689 && known_eq (maxsize2, size2)
2690 && adjust_offsets_for_equal_base_address (base, &offset,
2691 base2, &offset2)
2692 && known_subrange_p (offset, maxsize, offset2, size2)
2693 /* ??? We can't handle bitfield precision extracts without
2694 either using an alternate type for the BIT_FIELD_REF and
2695 then doing a conversion or possibly adjusting the offset
2696 according to endianness. */
2697 && (! INTEGRAL_TYPE_P (vr->type)
2698 || known_eq (ref->size, TYPE_PRECISION (vr->type)))
2699 && multiple_p (ref->size, BITS_PER_UNIT)
2700 && (! INTEGRAL_TYPE_P (TREE_TYPE (def_rhs))
2701 || type_has_mode_precision_p (TREE_TYPE (def_rhs))))
2703 gimple_match_op op (gimple_match_cond::UNCOND,
2704 BIT_FIELD_REF, vr->type,
2705 vn_valueize (def_rhs),
2706 bitsize_int (ref->size),
2707 bitsize_int (offset - offset2));
2708 tree val = vn_nary_build_or_lookup (&op);
2709 if (val
2710 && (TREE_CODE (val) != SSA_NAME
2711 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2713 vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
2714 (vuse, vr->set, vr->type, vr->operands, val);
2715 return res;
2720 /* 5) For aggregate copies translate the reference through them if
2721 the copy kills ref. */
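/* Illustrative example for case 5):
     b = a;       <-- aggregate copy killing the looked-up reference
     ... = b.f;
   the operands of b.f are rewritten in terms of a and the lookup
   continues as if it were a.f, possibly folding to a constant.  */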
2722 else if (data->vn_walk_kind == VN_WALKREWRITE
2723 && gimple_assign_single_p (def_stmt)
2724 && (DECL_P (gimple_assign_rhs1 (def_stmt))
2725 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
2726 || handled_component_p (gimple_assign_rhs1 (def_stmt)))
2727 /* Handling this is more complicated, give up for now. */
2728 && data->partial_defs.is_empty ())
2730 tree base2;
2731 int i, j, k;
2732 auto_vec<vn_reference_op_s> rhs;
2733 vn_reference_op_t vro;
2734 ao_ref r;
2736 if (!lhs_ref_ok)
2737 return (void *)-1;
2739 /* See if the assignment kills REF. */
2740 base2 = ao_ref_base (&lhs_ref);
2741 if (!lhs_ref.max_size_known_p ()
2742 || (base != base2
2743 && (TREE_CODE (base) != MEM_REF
2744 || TREE_CODE (base2) != MEM_REF
2745 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
2746 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
2747 TREE_OPERAND (base2, 1))))
2748 || !stmt_kills_ref_p (def_stmt, ref))
2749 return (void *)-1;
2751 /* Find the common base of ref and the lhs. lhs_ops already
2752 contains valueized operands for the lhs. */
2753 i = vr->operands.length () - 1;
2754 j = lhs_ops.length () - 1;
2755 while (j >= 0 && i >= 0
2756 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
2758 i--;
2759 j--;
2762 /* ??? The innermost op should always be a MEM_REF and we already
2763 checked that the assignment to the lhs kills vr. Thus for
2764 aggregate copies using char[] types the vn_reference_op_eq
2765 may fail when comparing types for compatibility. But we really
2766 don't care here - further lookups with the rewritten operands
2767 will simply fail if we messed up types too badly. */
2768 poly_int64 extra_off = 0;
2769 if (j == 0 && i >= 0
2770 && lhs_ops[0].opcode == MEM_REF
2771 && maybe_ne (lhs_ops[0].off, -1))
2773 if (known_eq (lhs_ops[0].off, vr->operands[i].off))
2774 i--, j--;
2775 else if (vr->operands[i].opcode == MEM_REF
2776 && maybe_ne (vr->operands[i].off, -1))
2778 extra_off = vr->operands[i].off - lhs_ops[0].off;
2779 i--, j--;
2783 /* i now points to the first additional op.
2784 ??? LHS may not be completely contained in VR, one or more
2785 VIEW_CONVERT_EXPRs could be in its way. We could at least
2786 try handling outermost VIEW_CONVERT_EXPRs. */
2787 if (j != -1)
2788 return (void *)-1;
2790 /* Punt if the additional ops contain a storage order barrier. */
2791 for (k = i; k >= 0; k--)
2793 vro = &vr->operands[k];
2794 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
2795 return (void *)-1;
2798 /* Now re-write REF to be based on the rhs of the assignment. */
2799 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
2801 /* Apply an extra offset to the inner MEM_REF of the RHS. */
2802 if (maybe_ne (extra_off, 0))
2804 if (rhs.length () < 2)
2805 return (void *)-1;
2806 int ix = rhs.length () - 2;
2807 if (rhs[ix].opcode != MEM_REF
2808 || known_eq (rhs[ix].off, -1))
2809 return (void *)-1;
2810 rhs[ix].off += extra_off;
2811 rhs[ix].op0 = int_const_binop (PLUS_EXPR, rhs[ix].op0,
2812 build_int_cst (TREE_TYPE (rhs[ix].op0),
2813 extra_off));
2816 /* We need to pre-pend vr->operands[0..i] to rhs. */
2817 vec<vn_reference_op_s> old = vr->operands;
2818 if (i + 1 + rhs.length () > vr->operands.length ())
2819 vr->operands.safe_grow (i + 1 + rhs.length ());
2820 else
2821 vr->operands.truncate (i + 1 + rhs.length ());
2822 FOR_EACH_VEC_ELT (rhs, j, vro)
2823 vr->operands[i + 1 + j] = *vro;
2824 vr->operands = valueize_refs (vr->operands);
2825 if (old == shared_lookup_references)
2826 shared_lookup_references = vr->operands;
2827 vr->hashcode = vn_reference_compute_hash (vr);
2829 /* Try folding the new reference to a constant. */
2830 tree val = fully_constant_vn_reference_p (vr);
2831 if (val)
2832 return vn_reference_lookup_or_insert_for_pieces
2833 (vuse, vr->set, vr->type, vr->operands, val);
2835 /* Adjust *ref from the new operands. */
2836 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2837 return (void *)-1;
2838 /* This can happen with bitfields. */
2839 if (maybe_ne (ref->size, r.size))
2840 return (void *)-1;
2841 *ref = r;
2843 /* Do not update last seen VUSE after translating. */
2844 data->last_vuse_ptr = NULL;
2845 /* Invalidate the original access path since it now contains
2846 the wrong base. */
2847 data->orig_ref.ref = NULL_TREE;
2849 /* Keep looking for the adjusted *REF / VR pair. */
2850 return NULL;
2853 /* 6) For memcpy copies translate the reference through them if
2854 the copy kills ref. */
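/* Illustrative example for case 6):
     memcpy (&b, &a, sizeof (b));
     ... = b.i;
   the reference is rewritten as a MEM_REF based on &a at the
   corresponding offset and the lookup continues from there.  */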
2855 else if (data->vn_walk_kind == VN_WALKREWRITE
2856 && is_gimple_reg_type (vr->type)
2857 /* ??? Handle BCOPY as well. */
2858 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2859 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2860 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2861 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2862 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2863 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2864 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2865 && poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size)
2866 /* Handling this is more complicated, give up for now. */
2867 && data->partial_defs.is_empty ())
2869 tree lhs, rhs;
2870 ao_ref r;
2871 poly_int64 rhs_offset, lhs_offset;
2872 vn_reference_op_s op;
2873 poly_uint64 mem_offset;
2874 poly_int64 at, byte_maxsize;
2876 /* Only handle non-variable, addressable refs. */
2877 if (maybe_ne (ref->size, maxsize)
2878 || !multiple_p (offset, BITS_PER_UNIT, &at)
2879 || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
2880 return (void *)-1;
2882 /* Extract a pointer base and an offset for the destination. */
2883 lhs = gimple_call_arg (def_stmt, 0);
2884 lhs_offset = 0;
2885 if (TREE_CODE (lhs) == SSA_NAME)
2887 lhs = vn_valueize (lhs);
2888 if (TREE_CODE (lhs) == SSA_NAME)
2890 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2891 if (gimple_assign_single_p (def_stmt)
2892 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2893 lhs = gimple_assign_rhs1 (def_stmt);
2896 if (TREE_CODE (lhs) == ADDR_EXPR)
2898 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2899 &lhs_offset);
2900 if (!tem)
2901 return (void *)-1;
2902 if (TREE_CODE (tem) == MEM_REF
2903 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2905 lhs = TREE_OPERAND (tem, 0);
2906 if (TREE_CODE (lhs) == SSA_NAME)
2907 lhs = vn_valueize (lhs);
2908 lhs_offset += mem_offset;
2910 else if (DECL_P (tem))
2911 lhs = build_fold_addr_expr (tem);
2912 else
2913 return (void *)-1;
2915 if (TREE_CODE (lhs) != SSA_NAME
2916 && TREE_CODE (lhs) != ADDR_EXPR)
2917 return (void *)-1;
2919 /* Extract a pointer base and an offset for the source. */
2920 rhs = gimple_call_arg (def_stmt, 1);
2921 rhs_offset = 0;
2922 if (TREE_CODE (rhs) == SSA_NAME)
2923 rhs = vn_valueize (rhs);
2924 if (TREE_CODE (rhs) == ADDR_EXPR)
2926 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2927 &rhs_offset);
2928 if (!tem)
2929 return (void *)-1;
2930 if (TREE_CODE (tem) == MEM_REF
2931 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2933 rhs = TREE_OPERAND (tem, 0);
2934 rhs_offset += mem_offset;
2936 else if (DECL_P (tem)
2937 || TREE_CODE (tem) == STRING_CST)
2938 rhs = build_fold_addr_expr (tem);
2939 else
2940 return (void *)-1;
2942 if (TREE_CODE (rhs) != SSA_NAME
2943 && TREE_CODE (rhs) != ADDR_EXPR)
2944 return (void *)-1;
2946 /* The bases of the destination and the references have to agree. */
2947 if (TREE_CODE (base) == MEM_REF)
2949 if (TREE_OPERAND (base, 0) != lhs
2950 || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
2951 return (void *) -1;
2952 at += mem_offset;
2954 else if (!DECL_P (base)
2955 || TREE_CODE (lhs) != ADDR_EXPR
2956 || TREE_OPERAND (lhs, 0) != base)
2957 return (void *)-1;
2959 /* If the access is completely outside of the memcpy destination
2960 area there is no aliasing. */
2961 if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
2962 return NULL;
2963 /* And the access has to be contained within the memcpy destination. */
2964 if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
2965 return (void *)-1;
2967 /* Make room for 2 operands in the new reference. */
2968 if (vr->operands.length () < 2)
2970 vec<vn_reference_op_s> old = vr->operands;
2971 vr->operands.safe_grow_cleared (2);
2972 if (old == shared_lookup_references)
2973 shared_lookup_references = vr->operands;
2975 else
2976 vr->operands.truncate (2);
2978 /* The looked-through reference is a simple MEM_REF. */
2979 memset (&op, 0, sizeof (op));
2980 op.type = vr->type;
2981 op.opcode = MEM_REF;
2982 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
2983 op.off = at - lhs_offset + rhs_offset;
2984 vr->operands[0] = op;
2985 op.type = TREE_TYPE (rhs);
2986 op.opcode = TREE_CODE (rhs);
2987 op.op0 = rhs;
2988 op.off = -1;
2989 vr->operands[1] = op;
2990 vr->hashcode = vn_reference_compute_hash (vr);
2992 /* Try folding the new reference to a constant. */
2993 tree val = fully_constant_vn_reference_p (vr);
2994 if (val)
2995 return vn_reference_lookup_or_insert_for_pieces
2996 (vuse, vr->set, vr->type, vr->operands, val);
2998 /* Adjust *ref from the new operands. */
2999 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
3000 return (void *)-1;
3001 /* This can happen with bitfields. */
3002 if (maybe_ne (ref->size, r.size))
3003 return (void *)-1;
3004 *ref = r;
3006 /* Do not update last seen VUSE after translating. */
3007 data->last_vuse_ptr = NULL;
3008 /* Invalidate the original access path since it now contains
3009 the wrong base. */
3010 data->orig_ref.ref = NULL_TREE;
3012 /* Keep looking for the adjusted *REF / VR pair. */
3013 return NULL;
3016 /* Bail out and stop walking. */
3017 return (void *)-1;
3020 /* Return a reference op vector from OP that can be used for
3021 vn_reference_lookup_pieces. The caller is responsible for releasing
3022 the vector. */
3024 vec<vn_reference_op_s>
3025 vn_reference_operands_for_lookup (tree op)
3027 bool valueized;
3028 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
3031 /* Lookup a reference operation by its parts, in the current hash table.
3032 Returns the resulting value number if it exists in the hash table,
3033 NULL_TREE otherwise. VNRESULT will be filled in with the actual
3034 vn_reference_t stored in the hashtable if something is found. */
3036 tree
3037 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
3038 vec<vn_reference_op_s> operands,
3039 vn_reference_t *vnresult, vn_lookup_kind kind)
3041 struct vn_reference_s vr1;
3042 vn_reference_t tmp;
3043 tree cst;
3045 if (!vnresult)
3046 vnresult = &tmp;
3047 *vnresult = NULL;
3049 vr1.vuse = vuse_ssa_val (vuse);
3050 shared_lookup_references.truncate (0);
3051 shared_lookup_references.safe_grow (operands.length ());
3052 memcpy (shared_lookup_references.address (),
3053 operands.address (),
3054 sizeof (vn_reference_op_s)
3055 * operands.length ());
3056 vr1.operands = operands = shared_lookup_references
3057 = valueize_refs (shared_lookup_references);
3058 vr1.type = type;
3059 vr1.set = set;
3060 vr1.hashcode = vn_reference_compute_hash (&vr1);
3061 if ((cst = fully_constant_vn_reference_p (&vr1)))
3062 return cst;
3064 vn_reference_lookup_1 (&vr1, vnresult);
3065 if (!*vnresult
3066 && kind != VN_NOWALK
3067 && vr1.vuse)
3069 ao_ref r;
3070 unsigned limit = PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS);
3071 vn_walk_cb_data data (&vr1, NULL_TREE, NULL, kind, true);
3072 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
3073 *vnresult =
3074 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse, true,
3075 vn_reference_lookup_2,
3076 vn_reference_lookup_3,
3077 vuse_valueize, limit, &data);
3078 gcc_checking_assert (vr1.operands == shared_lookup_references);
3081 if (*vnresult)
3082 return (*vnresult)->result;
3084 return NULL_TREE;
3087 /* Lookup OP in the current hash table, and return the resulting value
3088 number if it exists in the hash table. Return NULL_TREE if it does
3089 not exist in the hash table or if the result field of the structure
3090 was NULL. VNRESULT will be filled in with the vn_reference_t
3091 stored in the hashtable if one exists. When TBAA_P is false assume
3092 we are looking up a store and treat it as having alias-set zero.
3093 *LAST_VUSE_PTR will be updated with the VUSE with which the value lookup succeeded. */
3095 tree
3096 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
3097 vn_reference_t *vnresult, bool tbaa_p, tree *last_vuse_ptr)
3099 vec<vn_reference_op_s> operands;
3100 struct vn_reference_s vr1;
3101 tree cst;
3102 bool valueized_anything;
3104 if (vnresult)
3105 *vnresult = NULL;
3107 vr1.vuse = vuse_ssa_val (vuse);
3108 vr1.operands = operands
3109 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
3110 vr1.type = TREE_TYPE (op);
3111 vr1.set = get_alias_set (op);
3112 vr1.hashcode = vn_reference_compute_hash (&vr1);
3113 if ((cst = fully_constant_vn_reference_p (&vr1)))
3114 return cst;
3116 if (kind != VN_NOWALK
3117 && vr1.vuse)
3119 vn_reference_t wvnresult;
3120 ao_ref r;
3121 unsigned limit = PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS);
3122 /* Make sure to use a valueized reference if we valueized anything.
3123 Otherwise preserve the full reference for advanced TBAA. */
3124 if (!valueized_anything
3125 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
3126 vr1.operands))
3127 ao_ref_init (&r, op);
3128 vn_walk_cb_data data (&vr1, r.ref ? NULL_TREE : op,
3129 last_vuse_ptr, kind, tbaa_p);
3130 wvnresult =
3131 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse, tbaa_p,
3132 vn_reference_lookup_2,
3133 vn_reference_lookup_3,
3134 vuse_valueize, limit, &data);
3135 gcc_checking_assert (vr1.operands == shared_lookup_references);
3136 if (wvnresult)
3138 if (vnresult)
3139 *vnresult = wvnresult;
3140 return wvnresult->result;
3143 return NULL_TREE;
3146 return vn_reference_lookup_1 (&vr1, vnresult);
3149 /* Lookup CALL in the current hash table and return the entry in
3150 *VNRESULT if found. Populates *VR for the hashtable lookup. */
3152 void
3153 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
3154 vn_reference_t vr)
3156 if (vnresult)
3157 *vnresult = NULL;
3159 tree vuse = gimple_vuse (call);
3161 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
3162 vr->operands = valueize_shared_reference_ops_from_call (call);
3163 vr->type = gimple_expr_type (call);
3164 vr->set = 0;
3165 vr->hashcode = vn_reference_compute_hash (vr);
3166 vn_reference_lookup_1 (vr, vnresult);
3169 /* Insert OP into the current hash table with a value number of RESULT. */
3171 static void
3172 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
3174 vn_reference_s **slot;
3175 vn_reference_t vr1;
3176 bool tem;
3178 vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
3179 if (TREE_CODE (result) == SSA_NAME)
3180 vr1->value_id = VN_INFO (result)->value_id;
3181 else
3182 vr1->value_id = get_or_alloc_constant_value_id (result);
3183 vr1->vuse = vuse_ssa_val (vuse);
3184 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
3185 vr1->type = TREE_TYPE (op);
3186 vr1->set = get_alias_set (op);
3187 vr1->hashcode = vn_reference_compute_hash (vr1);
3188 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
3189 vr1->result_vdef = vdef;
3191 slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
3192 INSERT);
3194 /* Because IL walking on reference lookup can end up visiting
3195 a def that is only to be visited later in iteration order
3196 when we are about to make an irreducible region reducible,
3197 the def can effectively be processed and its ref already
3198 inserted by vn_reference_lookup_3. So we cannot assert (!*slot)
3199 but instead save a lookup if we deal with already inserted refs here. */
3200 if (*slot)
3202 /* We cannot assert that we have the same value either because
3203 when disentangling an irreducible region we may end up visiting
3204 a use before the corresponding def. That's a missed optimization
3205 only though. See gcc.dg/tree-ssa/pr87126.c for example. */
3206 if (dump_file && (dump_flags & TDF_DETAILS)
3207 && !operand_equal_p ((*slot)->result, vr1->result, 0))
3209 fprintf (dump_file, "Keeping old value ");
3210 print_generic_expr (dump_file, (*slot)->result);
3211 fprintf (dump_file, " because of collision\n");
3213 free_reference (vr1);
3214 obstack_free (&vn_tables_obstack, vr1);
3215 return;
3218 *slot = vr1;
3219 vr1->next = last_inserted_ref;
3220 last_inserted_ref = vr1;
3223 /* Insert a reference by its pieces into the current hash table with
3224 a value number of RESULT. Return the resulting reference
3225 structure we created. */
3227 vn_reference_t
3228 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
3229 vec<vn_reference_op_s> operands,
3230 tree result, unsigned int value_id)
3233 vn_reference_s **slot;
3234 vn_reference_t vr1;
3236 vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
3237 vr1->value_id = value_id;
3238 vr1->vuse = vuse_ssa_val (vuse);
3239 vr1->operands = valueize_refs (operands);
3240 vr1->type = type;
3241 vr1->set = set;
3242 vr1->hashcode = vn_reference_compute_hash (vr1);
3243 if (result && TREE_CODE (result) == SSA_NAME)
3244 result = SSA_VAL (result);
3245 vr1->result = result;
3247 slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
3248 INSERT);
3250 /* At this point we should have all the things inserted that we have
3251 seen before, and we should never try inserting something that
3252 already exists. */
3253 gcc_assert (!*slot);
3255 *slot = vr1;
3256 vr1->next = last_inserted_ref;
3257 last_inserted_ref = vr1;
3258 return vr1;
3261 /* Compute and return the hash value for nary operation VNO1. */
3263 static hashval_t
3264 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
3266 inchash::hash hstate;
3267 unsigned i;
3269 for (i = 0; i < vno1->length; ++i)
3270 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
3271 vno1->op[i] = SSA_VAL (vno1->op[i]);
3273 if (((vno1->length == 2
3274 && commutative_tree_code (vno1->opcode))
3275 || (vno1->length == 3
3276 && commutative_ternary_tree_code (vno1->opcode)))
3277 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
3278 std::swap (vno1->op[0], vno1->op[1]);
3279 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
3280 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
3282 std::swap (vno1->op[0], vno1->op[1]);
3283 vno1->opcode = swap_tree_comparison (vno1->opcode);
3286 hstate.add_int (vno1->opcode);
3287 for (i = 0; i < vno1->length; ++i)
3288 inchash::add_expr (vno1->op[i], hstate);
3290 return hstate.end ();
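/* The canonicalization above means that, for example, the commutative
   a_1 + b_2 and b_2 + a_1 hash (and later compare) the same, and a
   comparison like b_2 > a_1 may be canonicalized to a_1 < b_2 via
   swap_tree_comparison (illustrative SSA names).  */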
3293 /* Compare nary operations VNO1 and VNO2 and return true if they are
3294 equivalent. */
3296 bool
3297 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
3299 unsigned i;
3301 if (vno1->hashcode != vno2->hashcode)
3302 return false;
3304 if (vno1->length != vno2->length)
3305 return false;
3307 if (vno1->opcode != vno2->opcode
3308 || !types_compatible_p (vno1->type, vno2->type))
3309 return false;
3311 for (i = 0; i < vno1->length; ++i)
3312 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
3313 return false;
3315 /* BIT_INSERT_EXPR has an implicit operand as the type precision
3316 of op1. Need to check to make sure they are the same. */
3317 if (vno1->opcode == BIT_INSERT_EXPR
3318 && TREE_CODE (vno1->op[1]) == INTEGER_CST
3319 && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
3320 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
3321 return false;
3323 return true;
3326 /* Initialize VNO from the pieces provided. */
3328 static void
3329 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
3330 enum tree_code code, tree type, tree *ops)
3332 vno->opcode = code;
3333 vno->length = length;
3334 vno->type = type;
3335 memcpy (&vno->op[0], ops, sizeof (tree) * length);
3338 /* Initialize VNO from OP. */
3340 static void
3341 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
3343 unsigned i;
3345 vno->opcode = TREE_CODE (op);
3346 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
3347 vno->type = TREE_TYPE (op);
3348 for (i = 0; i < vno->length; ++i)
3349 vno->op[i] = TREE_OPERAND (op, i);
3352 /* Return the number of operands for a vn_nary ops structure from STMT. */
3354 static unsigned int
3355 vn_nary_length_from_stmt (gimple *stmt)
3357 switch (gimple_assign_rhs_code (stmt))
3359 case REALPART_EXPR:
3360 case IMAGPART_EXPR:
3361 case VIEW_CONVERT_EXPR:
3362 return 1;
3364 case BIT_FIELD_REF:
3365 return 3;
3367 case CONSTRUCTOR:
3368 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
3370 default:
3371 return gimple_num_ops (stmt) - 1;
3375 /* Initialize VNO from STMT. */
3377 static void
3378 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
3380 unsigned i;
3382 vno->opcode = gimple_assign_rhs_code (stmt);
3383 vno->type = gimple_expr_type (stmt);
3384 switch (vno->opcode)
3386 case REALPART_EXPR:
3387 case IMAGPART_EXPR:
3388 case VIEW_CONVERT_EXPR:
3389 vno->length = 1;
3390 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
3391 break;
3393 case BIT_FIELD_REF:
3394 vno->length = 3;
3395 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
3396 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
3397 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
3398 break;
3400 case CONSTRUCTOR:
3401 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
3402 for (i = 0; i < vno->length; ++i)
3403 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
3404 break;
3406 default:
3407 gcc_checking_assert (!gimple_assign_single_p (stmt));
3408 vno->length = gimple_num_ops (stmt) - 1;
3409 for (i = 0; i < vno->length; ++i)
3410 vno->op[i] = gimple_op (stmt, i + 1);
3414 /* Compute the hashcode for VNO and look for it in the hash table;
3415 return the resulting value number if it exists in the hash table.
3416 Return NULL_TREE if it does not exist in the hash table or if the
3417 result field of the operation is NULL. VNRESULT will contain the
3418 vn_nary_op_t from the hashtable if it exists. */
3420 static tree
3421 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
3423 vn_nary_op_s **slot;
3425 if (vnresult)
3426 *vnresult = NULL;
3428 vno->hashcode = vn_nary_op_compute_hash (vno);
3429 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
3430 if (!slot)
3431 return NULL_TREE;
3432 if (vnresult)
3433 *vnresult = *slot;
3434 return (*slot)->predicated_values ? NULL_TREE : (*slot)->u.result;
3437 /* Lookup an n-ary operation by its pieces and return the resulting value
3438 number if it exists in the hash table. Return NULL_TREE if it does
3439 not exist in the hash table or if the result field of the operation
3440 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
3441 if it exists. */
3443 tree
3444 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
3445 tree type, tree *ops, vn_nary_op_t *vnresult)
3447 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
3448 sizeof_vn_nary_op (length));
3449 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
3450 return vn_nary_op_lookup_1 (vno1, vnresult);
3453 /* Lookup OP in the current hash table, and return the resulting value
3454 number if it exists in the hash table. Return NULL_TREE if it does
3455 not exist in the hash table or if the result field of the operation
3456 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
3457 if it exists. */
3459 tree
3460 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
3462 vn_nary_op_t vno1
3463 = XALLOCAVAR (struct vn_nary_op_s,
3464 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
3465 init_vn_nary_op_from_op (vno1, op);
3466 return vn_nary_op_lookup_1 (vno1, vnresult);
3469 /* Lookup the rhs of STMT in the current hash table, and return the resulting
3470 value number if it exists in the hash table. Return NULL_TREE if
3471 it does not exist in the hash table. VNRESULT will contain the
3472 vn_nary_op_t from the hashtable if it exists. */
3474 tree
3475 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
3477 vn_nary_op_t vno1
3478 = XALLOCAVAR (struct vn_nary_op_s,
3479 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
3480 init_vn_nary_op_from_stmt (vno1, stmt);
3481 return vn_nary_op_lookup_1 (vno1, vnresult);
3484 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
3486 static vn_nary_op_t
3487 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
3489 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
3492 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
3493 obstack. */
3495 static vn_nary_op_t
3496 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
3498 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length, &vn_tables_obstack);
3500 vno1->value_id = value_id;
3501 vno1->length = length;
3502 vno1->predicated_values = 0;
3503 vno1->u.result = result;
3505 return vno1;
3508 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
3509 VNO->HASHCODE first. */
3511 static vn_nary_op_t
3512 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
3513 bool compute_hash)
3515 vn_nary_op_s **slot;
3517 if (compute_hash)
3519 vno->hashcode = vn_nary_op_compute_hash (vno);
3520 gcc_assert (! vno->predicated_values
3521 || (! vno->u.values->next
3522 && vno->u.values->n == 1));
3525 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
3526 vno->unwind_to = *slot;
3527 if (*slot)
3529 /* Prefer non-predicated values.
3530 ??? Only if those are constant, otherwise, with constant predicated
3531 value, turn them into predicated values with entry-block validity
3532 (??? but we always find the first valid result currently). */
3533 if ((*slot)->predicated_values
3534 && ! vno->predicated_values)
3536 /* ??? We cannot remove *slot from the unwind stack list.
3537 For the moment we deal with this by skipping not found
3538 entries but this isn't ideal ... */
3539 *slot = vno;
3540 /* ??? Maintain a stack of states we can unwind in
3541 vn_nary_op_s? But how far do we unwind? In reality
3542 we need to push change records somewhere... Or not
3543 unwind vn_nary_op_s and linking them but instead
3544 unwind the results "list", linking that, which also
3545 doesn't move on hashtable resize. */
3546 /* We can also have a ->unwind_to recording *slot there.
3547 That way we can make u.values a fixed size array with
3548 recording the number of entries but of course we then
3549 have always N copies for each unwind_to-state. Or we
3550 make sure to only ever append and each unwinding will
3551 pop off one entry (but how to deal with predicated
3552 replaced with non-predicated here?) */
3553 vno->next = last_inserted_nary;
3554 last_inserted_nary = vno;
3555 return vno;
3557 else if (vno->predicated_values
3558 && ! (*slot)->predicated_values)
3559 return *slot;
3560 else if (vno->predicated_values
3561 && (*slot)->predicated_values)
3563 /* ??? Factor this all into an insert_single_predicated_value
3564 routine. */
3565 gcc_assert (!vno->u.values->next && vno->u.values->n == 1);
3566 basic_block vno_bb
3567 = BASIC_BLOCK_FOR_FN (cfun, vno->u.values->valid_dominated_by_p[0]);
3568 vn_pval *nval = vno->u.values;
3569 vn_pval **next = &vno->u.values;
3570 bool found = false;
3571 for (vn_pval *val = (*slot)->u.values; val; val = val->next)
3573 if (expressions_equal_p (val->result, vno->u.values->result))
3575 found = true;
3576 for (unsigned i = 0; i < val->n; ++i)
3578 basic_block val_bb
3579 = BASIC_BLOCK_FOR_FN (cfun,
3580 val->valid_dominated_by_p[i]);
3581 if (dominated_by_p (CDI_DOMINATORS, vno_bb, val_bb))
3582 /* Value registered with more generic predicate. */
3583 return *slot;
3584 else if (dominated_by_p (CDI_DOMINATORS, val_bb, vno_bb))
3585 /* Shouldn't happen, we insert in RPO order. */
3586 gcc_unreachable ();
3588 /* Append value. */
3589 *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
3590 sizeof (vn_pval)
3591 + val->n * sizeof (int));
3592 (*next)->next = NULL;
3593 (*next)->result = val->result;
3594 (*next)->n = val->n + 1;
3595 memcpy ((*next)->valid_dominated_by_p,
3596 val->valid_dominated_by_p,
3597 val->n * sizeof (int));
3598 (*next)->valid_dominated_by_p[val->n] = vno_bb->index;
3599 next = &(*next)->next;
3600 if (dump_file && (dump_flags & TDF_DETAILS))
3601 fprintf (dump_file, "Appending predicate to value.\n");
3602 continue;
3604 /* Copy other predicated values. */
3605 *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
3606 sizeof (vn_pval)
3607 + (val->n-1) * sizeof (int));
3608 memcpy (*next, val, sizeof (vn_pval) + (val->n-1) * sizeof (int));
3609 (*next)->next = NULL;
3610 next = &(*next)->next;
3612 if (!found)
3613 *next = nval;
3615 *slot = vno;
3616 vno->next = last_inserted_nary;
3617 last_inserted_nary = vno;
3618 return vno;
3621 /* While we do not want to insert things twice it's awkward to
3622 avoid it in the case where visit_nary_op pattern-matches stuff
3623 and ends up simplifying the replacement to itself. We then
3624 get two inserts, one from visit_nary_op and one from
3625 vn_nary_build_or_lookup.
3626 So allow inserts with the same value number. */
3627 if ((*slot)->u.result == vno->u.result)
3628 return *slot;
3631 /* ??? There's also optimistic vs. previous committed state merging
3632 that is problematic for the case of unwinding. */
3634 /* ??? We should return NULL if we do not use 'vno' and have the
3635 caller release it. */
3636 gcc_assert (!*slot);
3638 *slot = vno;
3639 vno->next = last_inserted_nary;
3640 last_inserted_nary = vno;
3641 return vno;
3644 /* Insert an n-ary operation into the current hash table using its
3645 pieces. Return the vn_nary_op_t structure we created and put in
3646 the hashtable. */
3648 vn_nary_op_t
3649 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
3650 tree type, tree *ops,
3651 tree result, unsigned int value_id)
3653 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
3654 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
3655 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
3658 static vn_nary_op_t
3659 vn_nary_op_insert_pieces_predicated (unsigned int length, enum tree_code code,
3660 tree type, tree *ops,
3661 tree result, unsigned int value_id,
3662 edge pred_e)
3664 /* ??? Currently tracking BBs. */
3665 if (! single_pred_p (pred_e->dest))
3667 /* Never record for backedges. */
3668 if (pred_e->flags & EDGE_DFS_BACK)
3669 return NULL;
3670 edge_iterator ei;
3671 edge e;
3672 int cnt = 0;
3673 /* Ignore backedges. */
3674 FOR_EACH_EDGE (e, ei, pred_e->dest->preds)
3675 if (! dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
3676 cnt++;
3677 if (cnt != 1)
3678 return NULL;
3680 if (dump_file && (dump_flags & TDF_DETAILS)
3681 /* ??? Fix dumping, but currently we only get comparisons. */
3682 && TREE_CODE_CLASS (code) == tcc_comparison)
3684 fprintf (dump_file, "Recording on edge %d->%d ", pred_e->src->index,
3685 pred_e->dest->index);
3686 print_generic_expr (dump_file, ops[0], TDF_SLIM);
3687 fprintf (dump_file, " %s ", get_tree_code_name (code));
3688 print_generic_expr (dump_file, ops[1], TDF_SLIM);
3689 fprintf (dump_file, " == %s\n",
3690 integer_zerop (result) ? "false" : "true");
3692 vn_nary_op_t vno1 = alloc_vn_nary_op (length, NULL_TREE, value_id);
3693 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
3694 vno1->predicated_values = 1;
3695 vno1->u.values = (vn_pval *) obstack_alloc (&vn_tables_obstack,
3696 sizeof (vn_pval));
3697 vno1->u.values->next = NULL;
3698 vno1->u.values->result = result;
3699 vno1->u.values->n = 1;
3700 vno1->u.values->valid_dominated_by_p[0] = pred_e->dest->index;
3701 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
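/* For instance (illustrative), when value-numbering the true edge of
     if (a_1 != 0)
   the caller can record the nary a_1 != 0 with predicated result true,
   valid in blocks dominated by the destination of that edge.  */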
3704 static bool
3705 dominated_by_p_w_unex (basic_block bb1, basic_block bb2);
3707 static tree
3708 vn_nary_op_get_predicated_value (vn_nary_op_t vno, basic_block bb)
3710 if (! vno->predicated_values)
3711 return vno->u.result;
3712 for (vn_pval *val = vno->u.values; val; val = val->next)
3713 for (unsigned i = 0; i < val->n; ++i)
3714 if (dominated_by_p_w_unex (bb,
3715 BASIC_BLOCK_FOR_FN
3716 (cfun, val->valid_dominated_by_p[i])))
3717 return val->result;
3718 return NULL_TREE;
3721 /* Insert OP into the current hash table with a value number of
3722 RESULT. Return the vn_nary_op_t structure we created and put in
3723 the hashtable. */
3725 vn_nary_op_t
3726 vn_nary_op_insert (tree op, tree result)
3728 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
3729 vn_nary_op_t vno1;
3731 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
3732 init_vn_nary_op_from_op (vno1, op);
3733 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
3736 /* Insert the rhs of STMT into the current hash table with a value number of
3737 RESULT. */
3739 static vn_nary_op_t
3740 vn_nary_op_insert_stmt (gimple *stmt, tree result)
3742 vn_nary_op_t vno1
3743 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
3744 result, VN_INFO (result)->value_id);
3745 init_vn_nary_op_from_stmt (vno1, stmt);
3746 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
3749 /* Compute a hashcode for PHI operation VP1 and return it. */
3751 static inline hashval_t
3752 vn_phi_compute_hash (vn_phi_t vp1)
3754 inchash::hash hstate (EDGE_COUNT (vp1->block->preds) > 2
3755 ? vp1->block->index : EDGE_COUNT (vp1->block->preds));
3756 tree phi1op;
3757 tree type;
3758 edge e;
3759 edge_iterator ei;
3761 /* If all PHI arguments are constants we need to distinguish
3762 the PHI node via its type. */
3763 type = vp1->type;
3764 hstate.merge_hash (vn_hash_type (type));
3766 FOR_EACH_EDGE (e, ei, vp1->block->preds)
3768 /* Don't hash backedge values, they need to be handled as VN_TOP
3769 for optimistic value-numbering. */
3770 if (e->flags & EDGE_DFS_BACK)
3771 continue;
3773 phi1op = vp1->phiargs[e->dest_idx];
3774 if (phi1op == VN_TOP)
3775 continue;
3776 inchash::add_expr (phi1op, hstate);
3779 return hstate.end ();
3783 /* Return true if COND1 and COND2 represent the same condition, set
3784 *INVERTED_P if one needs to be inverted to make it the same as
3785 the other. */
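   /* As an illustration (hypothetical names): a_1 < b_2 and b_2 > a_1
      compare equal with *INVERTED_P false since the second is just the
      swapped form, while a_1 < b_2 and a_1 >= b_2 compare equal with
      *INVERTED_P set, provided NaNs need not be honored.  */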
3787 static bool
3788 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
3789 gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
3791 enum tree_code code1 = gimple_cond_code (cond1);
3792 enum tree_code code2 = gimple_cond_code (cond2);
3794 *inverted_p = false;
3795 if (code1 == code2)
3797 else if (code1 == swap_tree_comparison (code2))
3798 std::swap (lhs2, rhs2);
3799 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
3800 *inverted_p = true;
3801 else if (code1 == invert_tree_comparison
3802 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
3804 std::swap (lhs2, rhs2);
3805 *inverted_p = true;
3807 else
3808 return false;
3810 return ((expressions_equal_p (lhs1, lhs2)
3811 && expressions_equal_p (rhs1, rhs2))
3812 || (commutative_tree_code (code1)
3813 && expressions_equal_p (lhs1, rhs2)
3814 && expressions_equal_p (rhs1, lhs2)));
3817 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
3819 static int
3820 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
3822 if (vp1->hashcode != vp2->hashcode)
3823 return false;
3825 if (vp1->block != vp2->block)
3827 if (EDGE_COUNT (vp1->block->preds) != EDGE_COUNT (vp2->block->preds))
3828 return false;
3830 switch (EDGE_COUNT (vp1->block->preds))
3832 case 1:
3833 /* Single-arg PHIs are just copies. */
3834 break;
3836 case 2:
3838 /* Rule out backedges into the PHI. */
3839 if (vp1->block->loop_father->header == vp1->block
3840 || vp2->block->loop_father->header == vp2->block)
3841 return false;
3843 /* If the PHI nodes do not have compatible types
3844 they are not the same. */
3845 if (!types_compatible_p (vp1->type, vp2->type))
3846 return false;
3848 basic_block idom1
3849 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3850 basic_block idom2
3851 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
3852 /* If the immediate dominators end in switch stmts, multiple
3853 values may end up in the same PHI arg via intermediate
3854 CFG merges. */
3855 if (EDGE_COUNT (idom1->succs) != 2
3856 || EDGE_COUNT (idom2->succs) != 2)
3857 return false;
3859 /* Verify the controlling stmt is the same. */
3860 gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
3861 gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
3862 if (! last1 || ! last2)
3863 return false;
3864 bool inverted_p;
3865 if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
3866 last2, vp2->cclhs, vp2->ccrhs,
3867 &inverted_p))
3868 return false;
3870 /* Get at true/false controlled edges into the PHI. */
3871 edge te1, te2, fe1, fe2;
3872 if (! extract_true_false_controlled_edges (idom1, vp1->block,
3873 &te1, &fe1)
3874 || ! extract_true_false_controlled_edges (idom2, vp2->block,
3875 &te2, &fe2))
3876 return false;
3878 /* Swap edges if the second condition is the inverted of the
3879 first. */
3880 if (inverted_p)
3881 std::swap (te2, fe2);
3883 /* ??? Handle VN_TOP specially. */
3884 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
3885 vp2->phiargs[te2->dest_idx])
3886 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
3887 vp2->phiargs[fe2->dest_idx]))
3888 return false;
3890 return true;
3893 default:
3894 return false;
3898 /* If the PHI nodes do not have compatible types
3899 they are not the same. */
3900 if (!types_compatible_p (vp1->type, vp2->type))
3901 return false;
3903 /* Any phi in the same block will have its arguments in the
3904 same edge order, because of how we store phi nodes. */
3905 for (unsigned i = 0; i < EDGE_COUNT (vp1->block->preds); ++i)
3907 tree phi1op = vp1->phiargs[i];
3908 tree phi2op = vp2->phiargs[i];
3909 if (phi1op == VN_TOP || phi2op == VN_TOP)
3910 continue;
3911 if (!expressions_equal_p (phi1op, phi2op))
3912 return false;
3915 return true;
3918 /* Lookup PHI in the current hash table, and return the resulting
3919 value number if it exists in the hash table. Return NULL_TREE if
3920 it does not exist in the hash table. */
3922 static tree
3923 vn_phi_lookup (gimple *phi, bool backedges_varying_p)
3925 vn_phi_s **slot;
3926 struct vn_phi_s *vp1;
3927 edge e;
3928 edge_iterator ei;
3930 vp1 = XALLOCAVAR (struct vn_phi_s,
3931 sizeof (struct vn_phi_s)
3932 + (gimple_phi_num_args (phi) - 1) * sizeof (tree));
3934 /* Canonicalize the SSA_NAME's to their value number. */
3935 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3937 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3938 if (TREE_CODE (def) == SSA_NAME
3939 && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
3940 def = SSA_VAL (def);
3941 vp1->phiargs[e->dest_idx] = def;
3943 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3944 vp1->block = gimple_bb (phi);
3945 /* Extract values of the controlling condition. */
3946 vp1->cclhs = NULL_TREE;
3947 vp1->ccrhs = NULL_TREE;
3948 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3949 if (EDGE_COUNT (idom1->succs) == 2)
3950 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3952 /* ??? We want to use SSA_VAL here. But possibly not
3953 allow VN_TOP. */
3954 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3955 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3957 vp1->hashcode = vn_phi_compute_hash (vp1);
3958 slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, NO_INSERT);
3959 if (!slot)
3960 return NULL_TREE;
3961 return (*slot)->result;
3964 /* Insert PHI into the current hash table with a value number of
3965 RESULT. */
3967 static vn_phi_t
3968 vn_phi_insert (gimple *phi, tree result, bool backedges_varying_p)
3970 vn_phi_s **slot;
3971 vn_phi_t vp1 = (vn_phi_t) obstack_alloc (&vn_tables_obstack,
3972 sizeof (vn_phi_s)
3973 + ((gimple_phi_num_args (phi) - 1)
3974 * sizeof (tree)));
3975 edge e;
3976 edge_iterator ei;
3978 /* Canonicalize the SSA_NAME's to their value number. */
3979 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3981 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3982 if (TREE_CODE (def) == SSA_NAME
3983 && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
3984 def = SSA_VAL (def);
3985 vp1->phiargs[e->dest_idx] = def;
3987 vp1->value_id = VN_INFO (result)->value_id;
3988 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3989 vp1->block = gimple_bb (phi);
3990 /* Extract values of the controlling condition. */
3991 vp1->cclhs = NULL_TREE;
3992 vp1->ccrhs = NULL_TREE;
3993 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3994 if (EDGE_COUNT (idom1->succs) == 2)
3995 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3997 /* ??? We want to use SSA_VAL here. But possibly not
3998 allow VN_TOP. */
3999 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
4000 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
4002 vp1->result = result;
4003 vp1->hashcode = vn_phi_compute_hash (vp1);
4005 slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
4006 gcc_assert (!*slot);
4008 *slot = vp1;
4009 vp1->next = last_inserted_phi;
4010 last_inserted_phi = vp1;
4011 return vp1;
4015 /* Return true if BB1 is dominated by BB2 taking into account edges
4016 that are not executable. */
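   /* Sketch of the idea: when BB1 has a single executable predecessor
      edge we may as well ask the question for that predecessor, and
      when BB2 has a single executable successor edge we may ask it for
      that successor.  The code below performs at most one such step in
      each direction.  */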
4018 static bool
4019 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
4021 edge_iterator ei;
4022 edge e;
4024 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4025 return true;
4027 /* Before iterating we'd like to know if there exists an
4028 (executable) path from bb2 to bb1 at all, if not we can
4029 directly return false. For now simply iterate once. */
4031 /* Iterate to the single executable bb1 predecessor. */
4032 if (EDGE_COUNT (bb1->preds) > 1)
4034 edge prede = NULL;
4035 FOR_EACH_EDGE (e, ei, bb1->preds)
4036 if (e->flags & EDGE_EXECUTABLE)
4038 if (prede)
4040 prede = NULL;
4041 break;
4043 prede = e;
4045 if (prede)
4047 bb1 = prede->src;
4049 /* Re-do the dominance check with changed bb1. */
4050 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4051 return true;
4055 /* Iterate to the single executable bb2 successor. */
4056 edge succe = NULL;
4057 FOR_EACH_EDGE (e, ei, bb2->succs)
4058 if (e->flags & EDGE_EXECUTABLE)
4060 if (succe)
4062 succe = NULL;
4063 break;
4065 succe = e;
4067 if (succe)
4069 /* Verify the reached block is only reached through succe.
4070 If there is only one edge we can spare us the dominator
4071 check and iterate directly. */
4072 if (EDGE_COUNT (succe->dest->preds) > 1)
4074 FOR_EACH_EDGE (e, ei, succe->dest->preds)
4075 if (e != succe
4076 && (e->flags & EDGE_EXECUTABLE))
4078 succe = NULL;
4079 break;
4082 if (succe)
4084 bb2 = succe->dest;
4086 /* Re-do the dominance check with changed bb2. */
4087 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4088 return true;
4092 /* We could now iterate updating bb1 / bb2. */
4093 return false;
4096 /* Set the value number of FROM to TO, return true if it has changed
4097 as a result. */
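   /* Informal summary of the lattice discipline enforced below: a value
      may go from VN_TOP to a constant or SSA name and from there only
      towards VARYING (the name itself); changing an already known
      non-constant value to a constant or to an undefined name is
      refused to keep the iteration monotone and terminating.  */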
4099 static inline bool
4100 set_ssa_val_to (tree from, tree to)
4102 vn_ssa_aux_t from_info = VN_INFO (from);
4103 tree currval = from_info->valnum; // SSA_VAL (from)
4104 poly_int64 toff, coff;
4106 /* The only things we allow as value numbers are ssa_names
4107 and invariants. So assert that here. We don't allow VN_TOP
4108 as visiting a stmt should produce a value-number other than
4109 that.
4110 ??? Still VN_TOP can happen for unreachable code, so force
4111 it to varying in that case. Not all code is prepared to
4112 get VN_TOP on valueization. */
4113 if (to == VN_TOP)
4115 /* ??? When iterating and visiting PHI <undef, backedge-value>
4116 for the first time we rightfully get VN_TOP and we need to
4117 preserve that to optimize for example gcc.dg/tree-ssa/ssa-sccvn-2.c.
4118 With SCCVN we were simply lucky we iterated the other PHI
4119 cycles first and thus visited the backedge-value DEF. */
4120 if (currval == VN_TOP)
4121 goto set_and_exit;
4122 if (dump_file && (dump_flags & TDF_DETAILS))
4123 fprintf (dump_file, "Forcing value number to varying on "
4124 "receiving VN_TOP\n");
4125 to = from;
4128 gcc_checking_assert (to != NULL_TREE
4129 && ((TREE_CODE (to) == SSA_NAME
4130 && (to == from || SSA_VAL (to) == to))
4131 || is_gimple_min_invariant (to)));
4133 if (from != to)
4135 if (currval == from)
4137 if (dump_file && (dump_flags & TDF_DETAILS))
4139 fprintf (dump_file, "Not changing value number of ");
4140 print_generic_expr (dump_file, from);
4141 fprintf (dump_file, " from VARYING to ");
4142 print_generic_expr (dump_file, to);
4143 fprintf (dump_file, "\n");
4145 return false;
4147 bool curr_invariant = is_gimple_min_invariant (currval);
4148 bool curr_undefined = (TREE_CODE (currval) == SSA_NAME
4149 && ssa_undefined_value_p (currval, false));
4150 if (currval != VN_TOP
4151 && !curr_invariant
4152 && !curr_undefined
4153 && is_gimple_min_invariant (to))
4155 if (dump_file && (dump_flags & TDF_DETAILS))
4157 fprintf (dump_file, "Forcing VARYING instead of changing "
4158 "value number of ");
4159 print_generic_expr (dump_file, from);
4160 fprintf (dump_file, " from ");
4161 print_generic_expr (dump_file, currval);
4162 fprintf (dump_file, " (non-constant) to ");
4163 print_generic_expr (dump_file, to);
4164 fprintf (dump_file, " (constant)\n");
4166 to = from;
4168 else if (currval != VN_TOP
4169 && !curr_undefined
4170 && TREE_CODE (to) == SSA_NAME
4171 && ssa_undefined_value_p (to, false))
4173 if (dump_file && (dump_flags & TDF_DETAILS))
4175 fprintf (dump_file, "Forcing VARYING instead of changing "
4176 "value number of ");
4177 print_generic_expr (dump_file, from);
4178 fprintf (dump_file, " from ");
4179 print_generic_expr (dump_file, currval);
4180 fprintf (dump_file, " (non-undefined) to ");
4181 print_generic_expr (dump_file, to);
4182 fprintf (dump_file, " (undefined)\n");
4184 to = from;
4186 else if (TREE_CODE (to) == SSA_NAME
4187 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
4188 to = from;
4191 set_and_exit:
4192 if (dump_file && (dump_flags & TDF_DETAILS))
4194 fprintf (dump_file, "Setting value number of ");
4195 print_generic_expr (dump_file, from);
4196 fprintf (dump_file, " to ");
4197 print_generic_expr (dump_file, to);
4200 if (currval != to
4201 && !operand_equal_p (currval, to, 0)
4202 /* Different undefined SSA names are not actually different. See
4203 PR82320 for a testcase where we'd otherwise not terminate iteration. */
4204 && !(TREE_CODE (currval) == SSA_NAME
4205 && TREE_CODE (to) == SSA_NAME
4206 && ssa_undefined_value_p (currval, false)
4207 && ssa_undefined_value_p (to, false))
4208 /* ??? For addresses involving volatile objects or types operand_equal_p
4209 does not reliably detect ADDR_EXPRs as equal. We know we are only
4210 getting invariant gimple addresses here, so we can use
4211 get_addr_base_and_unit_offset to do this comparison. */
4212 && !(TREE_CODE (currval) == ADDR_EXPR
4213 && TREE_CODE (to) == ADDR_EXPR
4214 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
4215 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
4216 && known_eq (coff, toff)))
4218 if (dump_file && (dump_flags & TDF_DETAILS))
4219 fprintf (dump_file, " (changed)\n");
4220 from_info->valnum = to;
4221 return true;
4223 if (dump_file && (dump_flags & TDF_DETAILS))
4224 fprintf (dump_file, "\n");
4225 return false;
4228 /* Set all definitions in STMT to value number themselves.
4229 Return true if a value number changed. */
4231 static bool
4232 defs_to_varying (gimple *stmt)
4234 bool changed = false;
4235 ssa_op_iter iter;
4236 def_operand_p defp;
4238 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
4240 tree def = DEF_FROM_PTR (defp);
4241 changed |= set_ssa_val_to (def, def);
4243 return changed;
4246 /* Visit a copy between LHS and RHS, return true if the value number
4247 changed. */
4249 static bool
4250 visit_copy (tree lhs, tree rhs)
4252 /* Valueize. */
4253 rhs = SSA_VAL (rhs);
4255 return set_ssa_val_to (lhs, rhs);
4258 /* Lookup a value for OP in type WIDE_TYPE where the value in type of OP
4259 is the same. */
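   /* For example (hypothetical GIMPLE), with OP = _2 of type short and
      WIDE_TYPE = int this returns an already available _3 = (int) _2,
      or, when _2 was defined as _2 = (short) _4 from an int _4, the
      valueized _4, or for an INTEGER_CST simply the constant extended
      to int.  */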
4261 static tree
4262 valueized_wider_op (tree wide_type, tree op)
4264 if (TREE_CODE (op) == SSA_NAME)
4265 op = vn_valueize (op);
4267 /* Either we have the op widened available. */
4268 tree ops[3] = {};
4269 ops[0] = op;
4270 tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
4271 wide_type, ops, NULL);
4272 if (tem)
4273 return tem;
4275 /* Or the op is truncated from some existing value. */
4276 if (TREE_CODE (op) == SSA_NAME)
4278 gimple *def = SSA_NAME_DEF_STMT (op);
4279 if (is_gimple_assign (def)
4280 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
4282 tem = gimple_assign_rhs1 (def);
4283 if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
4285 if (TREE_CODE (tem) == SSA_NAME)
4286 tem = vn_valueize (tem);
4287 return tem;
4292 /* For constants simply extend it. */
4293 if (TREE_CODE (op) == INTEGER_CST)
4294 return wide_int_to_tree (wide_type, wi::to_wide (op));
4296 return NULL_TREE;
4299 /* Visit a nary operator RHS, value number it, and return true if the
4300 value number of LHS has changed as a result. */
4302 static bool
4303 visit_nary_op (tree lhs, gassign *stmt)
4305 vn_nary_op_t vnresult;
4306 tree result = vn_nary_op_lookup_stmt (stmt, &vnresult);
4307 if (! result && vnresult)
4308 result = vn_nary_op_get_predicated_value (vnresult, gimple_bb (stmt));
4309 if (result)
4310 return set_ssa_val_to (lhs, result);
4312 /* Do some special pattern matching for redundancies of operations
4313 in different types. */
4314 enum tree_code code = gimple_assign_rhs_code (stmt);
4315 tree type = TREE_TYPE (lhs);
4316 tree rhs1 = gimple_assign_rhs1 (stmt);
4317 switch (code)
4319 CASE_CONVERT:
4320 /* Match arithmetic done in a different type where we can easily
4321 substitute the result from some earlier sign-changed or widened
4322 operation. */
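	  /* For example (hypothetical GIMPLE): given short _1 = a_2 + b_3
	     and int _4 = (int) _1, try to value-number _4 to an already
	     available int-typed addition of the widened (or sign-changed)
	     operands, masking the result with BIT_AND_EXPR in the
	     zero-extension case.  */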
4323 if (INTEGRAL_TYPE_P (type)
4324 && TREE_CODE (rhs1) == SSA_NAME
4325 /* We only handle sign-changes or zero-extension -> & mask. */
4326 && ((TYPE_UNSIGNED (TREE_TYPE (rhs1))
4327 && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
4328 || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
4330 gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
4331 if (def
4332 && (gimple_assign_rhs_code (def) == PLUS_EXPR
4333 || gimple_assign_rhs_code (def) == MINUS_EXPR
4334 || gimple_assign_rhs_code (def) == MULT_EXPR))
4336 tree ops[3] = {};
4337 /* Either we have the op widened available. */
4338 ops[0] = valueized_wider_op (type,
4339 gimple_assign_rhs1 (def));
4340 if (ops[0])
4341 ops[1] = valueized_wider_op (type,
4342 gimple_assign_rhs2 (def));
4343 if (ops[0] && ops[1])
4345 ops[0] = vn_nary_op_lookup_pieces
4346 (2, gimple_assign_rhs_code (def), type, ops, NULL);
4347 /* We have wider operation available. */
4348 if (ops[0]
4349 /* If the leader is a wrapping operation we can
4350 insert it for code hoisting w/o introducing
4351 undefined overflow. If it is not it has to
4352 be available. See PR86554. */
4353 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (ops[0]))
4354 || (rpo_avail && vn_context_bb
4355 && rpo_avail->eliminate_avail (vn_context_bb,
4356 ops[0]))))
4358 unsigned lhs_prec = TYPE_PRECISION (type);
4359 unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
4360 if (lhs_prec == rhs_prec)
4362 gimple_match_op match_op (gimple_match_cond::UNCOND,
4363 NOP_EXPR, type, ops[0]);
4364 result = vn_nary_build_or_lookup (&match_op);
4365 if (result)
4367 bool changed = set_ssa_val_to (lhs, result);
4368 vn_nary_op_insert_stmt (stmt, result);
4369 return changed;
4372 else
4374 tree mask = wide_int_to_tree
4375 (type, wi::mask (rhs_prec, false, lhs_prec));
4376 gimple_match_op match_op (gimple_match_cond::UNCOND,
4377 BIT_AND_EXPR,
4378 TREE_TYPE (lhs),
4379 ops[0], mask);
4380 result = vn_nary_build_or_lookup (&match_op);
4381 if (result)
4383 bool changed = set_ssa_val_to (lhs, result);
4384 vn_nary_op_insert_stmt (stmt, result);
4385 return changed;
4392 default:;
4395 bool changed = set_ssa_val_to (lhs, lhs);
4396 vn_nary_op_insert_stmt (stmt, lhs);
4397 return changed;
4400 /* Visit a call STMT storing into LHS. Return true if the value number
4401 of the LHS has changed as a result. */
4403 static bool
4404 visit_reference_op_call (tree lhs, gcall *stmt)
4406 bool changed = false;
4407 struct vn_reference_s vr1;
4408 vn_reference_t vnresult = NULL;
4409 tree vdef = gimple_vdef (stmt);
4411 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
4412 if (lhs && TREE_CODE (lhs) != SSA_NAME)
4413 lhs = NULL_TREE;
4415 vn_reference_lookup_call (stmt, &vnresult, &vr1);
4416 if (vnresult)
4418 if (vnresult->result_vdef && vdef)
4419 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
4420 else if (vdef)
4421 /* If the call was discovered to be pure or const reflect
4422 that as far as possible. */
4423 changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
4425 if (!vnresult->result && lhs)
4426 vnresult->result = lhs;
4428 if (vnresult->result && lhs)
4429 changed |= set_ssa_val_to (lhs, vnresult->result);
4431 else
4433 vn_reference_t vr2;
4434 vn_reference_s **slot;
4435 tree vdef_val = vdef;
4436 if (vdef)
4438 /* If we value numbered an indirect call's function to
4439 one not clobbering memory, value number its VDEF to its
4440 VUSE. */
4441 tree fn = gimple_call_fn (stmt);
4442 if (fn && TREE_CODE (fn) == SSA_NAME)
4444 fn = SSA_VAL (fn);
4445 if (TREE_CODE (fn) == ADDR_EXPR
4446 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
4447 && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
4448 & (ECF_CONST | ECF_PURE)))
4449 vdef_val = vuse_ssa_val (gimple_vuse (stmt));
4451 changed |= set_ssa_val_to (vdef, vdef_val);
4453 if (lhs)
4454 changed |= set_ssa_val_to (lhs, lhs);
4455 vr2 = XOBNEW (&vn_tables_obstack, vn_reference_s);
4456 vr2->vuse = vr1.vuse;
4457 /* As we are not walking the virtual operand chain we know the
4458 shared_lookup_references are still original so we can re-use
4459 them here. */
4460 vr2->operands = vr1.operands.copy ();
4461 vr2->type = vr1.type;
4462 vr2->set = vr1.set;
4463 vr2->hashcode = vr1.hashcode;
4464 vr2->result = lhs;
4465 vr2->result_vdef = vdef_val;
4466 vr2->value_id = 0;
4467 slot = valid_info->references->find_slot_with_hash (vr2, vr2->hashcode,
4468 INSERT);
4469 gcc_assert (!*slot);
4470 *slot = vr2;
4471 vr2->next = last_inserted_ref;
4472 last_inserted_ref = vr2;
4475 return changed;
4478 /* Visit a load from a reference operator RHS, part of STMT, value number it,
4479 and return true if the value number of the LHS has changed as a result. */
4481 static bool
4482 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
4484 bool changed = false;
4485 tree last_vuse;
4486 tree result;
4488 last_vuse = gimple_vuse (stmt);
4489 result = vn_reference_lookup (op, gimple_vuse (stmt),
4490 default_vn_walk_kind, NULL, true, &last_vuse);
4492 /* We handle type-punning through unions by value-numbering based
4493 on offset and size of the access. Be prepared to handle a
4494 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
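      /* E.g. (illustrative only): with union { int i; float f; } u, a
	 read of u.f after a store to u.i may get the value
	 VIEW_CONVERT_EXPR<float>(stored value).  */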
4495 if (result
4496 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
4498 /* We will be setting the value number of lhs to the value number
4499 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
4500 So first simplify and lookup this expression to see if it
4501 is already available. */
4502 gimple_match_op res_op (gimple_match_cond::UNCOND,
4503 VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
4504 result = vn_nary_build_or_lookup (&res_op);
4505 /* When building the conversion fails avoid inserting the reference
4506 again. */
4507 if (!result)
4508 return set_ssa_val_to (lhs, lhs);
4511 if (result)
4512 changed = set_ssa_val_to (lhs, result);
4513 else
4515 changed = set_ssa_val_to (lhs, lhs);
4516 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
4519 return changed;
4523 /* Visit a store to a reference operator LHS, part of STMT, value number it,
4524 and return true if the value number of the LHS has changed as a result. */
4526 static bool
4527 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
4529 bool changed = false;
4530 vn_reference_t vnresult = NULL;
4531 tree assign;
4532 bool resultsame = false;
4533 tree vuse = gimple_vuse (stmt);
4534 tree vdef = gimple_vdef (stmt);
4536 if (TREE_CODE (op) == SSA_NAME)
4537 op = SSA_VAL (op);
4539 /* First we want to lookup using the *vuses* from the store and see
4540 if the last store to this location with the same address
4541 had the same value.
4543 The vuses represent the memory state before the store. If the
4544 memory state, address, and value of the store are the same as the
4545 last store to this location, then this store will produce the
4546 same memory state as that store.
4548 In this case the vdef versions for this store are value numbered to those
4549 vuse versions, since they represent the same memory state after
4550 this store.
4552 Otherwise, the vdefs for the store are used when inserting into
4553 the table, since the store generates a new memory state. */
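   /* As a small illustration (invented names): for *p_1 = x_2; ...;
      *p_1 = x_2; with no intervening clobber of *p_1 the second store
      changes nothing, so its VDEF is value-numbered to its VUSE and
      the store can later be eliminated as redundant.  */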
4555 vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
4556 if (vnresult
4557 && vnresult->result)
4559 tree result = vnresult->result;
4560 gcc_checking_assert (TREE_CODE (result) != SSA_NAME
4561 || result == SSA_VAL (result));
4562 resultsame = expressions_equal_p (result, op);
4563 if (resultsame)
4565 /* If the TBAA state isn't compatible for downstream reads
4566 we cannot value-number the VDEFs the same. */
4567 alias_set_type set = get_alias_set (lhs);
4568 if (vnresult->set != set
4569 && ! alias_set_subset_of (set, vnresult->set))
4570 resultsame = false;
4574 if (!resultsame)
4576 /* Only perform the following when being called from PRE
4577 which embeds tail merging. */
4578 if (default_vn_walk_kind == VN_WALK)
4580 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
4581 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
4582 if (vnresult)
4584 VN_INFO (vdef)->visited = true;
4585 return set_ssa_val_to (vdef, vnresult->result_vdef);
4589 if (dump_file && (dump_flags & TDF_DETAILS))
4591 fprintf (dump_file, "No store match\n");
4592 fprintf (dump_file, "Value numbering store ");
4593 print_generic_expr (dump_file, lhs);
4594 fprintf (dump_file, " to ");
4595 print_generic_expr (dump_file, op);
4596 fprintf (dump_file, "\n");
4598 /* Have to set value numbers before insert, since insert is
4599 going to valueize the references in-place. */
4600 if (vdef)
4601 changed |= set_ssa_val_to (vdef, vdef);
4603 /* Do not insert structure copies into the tables. */
4604 if (is_gimple_min_invariant (op)
4605 || is_gimple_reg (op))
4606 vn_reference_insert (lhs, op, vdef, NULL);
4608 /* Only perform the following when being called from PRE
4609 which embeds tail merging. */
4610 if (default_vn_walk_kind == VN_WALK)
4612 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
4613 vn_reference_insert (assign, lhs, vuse, vdef);
4616 else
4618 /* We had a match, so value number the vdef to have the value
4619 number of the vuse it came from. */
4621 if (dump_file && (dump_flags & TDF_DETAILS))
4622 fprintf (dump_file, "Store matched earlier value, "
4623 "value numbering store vdefs to matching vuses.\n");
4625 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
4628 return changed;
4631 /* Visit and value number PHI, return true if the value number
4632 changed. When BACKEDGES_VARYING_P is true then assume all
4633 backedge values are varying. When INSERTED is not NULL then
4634 this is just an ahead query for a possible iteration, set INSERTED
4635 to true if we'd insert into the hashtable. */
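   /* For example (hypothetical GIMPLE), x_4 = PHI <x_1(2), x_1(3)>
      whose executable arguments all value-number to x_1 is given the
      value number x_1; VN_TOP and undefined arguments are ignored for
      this "all arguments the same" check.  */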
4637 static bool
4638 visit_phi (gimple *phi, bool *inserted, bool backedges_varying_p)
4640 tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
4641 tree backedge_val = NULL_TREE;
4642 bool seen_non_backedge = false;
4643 tree sameval_base = NULL_TREE;
4644 poly_int64 soff, doff;
4645 unsigned n_executable = 0;
4646 edge_iterator ei;
4647 edge e;
4649 /* TODO: We could check for this in initialization, and replace this
4650 with a gcc_assert. */
4651 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
4652 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
4654 /* We track whether a PHI was CSEd to avoid excessive iterations
4655 that would be necessary only because the PHI changed arguments
4656 but not value. */
4657 if (!inserted)
4658 gimple_set_plf (phi, GF_PLF_1, false);
4660 /* See if all non-TOP arguments have the same value. TOP is
4661 equivalent to everything, so we can ignore it. */
4662 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
4663 if (e->flags & EDGE_EXECUTABLE)
4665 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4667 ++n_executable;
4668 if (TREE_CODE (def) == SSA_NAME)
4670 if (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK))
4671 def = SSA_VAL (def);
4672 if (e->flags & EDGE_DFS_BACK)
4673 backedge_val = def;
4675 if (!(e->flags & EDGE_DFS_BACK))
4676 seen_non_backedge = true;
4677 if (def == VN_TOP)
4679 /* Ignore undefined defs for sameval but record one. */
4680 else if (TREE_CODE (def) == SSA_NAME
4681 && ! virtual_operand_p (def)
4682 && ssa_undefined_value_p (def, false))
4683 seen_undef = def;
4684 else if (sameval == VN_TOP)
4685 sameval = def;
4686 else if (!expressions_equal_p (def, sameval))
4688 /* We know we're arriving only with invariant addresses here,
4689 try harder comparing them. We can do some caching here
4690 which we cannot do in expressions_equal_p. */
4691 if (TREE_CODE (def) == ADDR_EXPR
4692 && TREE_CODE (sameval) == ADDR_EXPR
4693 && sameval_base != (void *)-1)
4695 if (!sameval_base)
4696 sameval_base = get_addr_base_and_unit_offset
4697 (TREE_OPERAND (sameval, 0), &soff);
4698 if (!sameval_base)
4699 sameval_base = (tree)(void *)-1;
4700 else if ((get_addr_base_and_unit_offset
4701 (TREE_OPERAND (def, 0), &doff) == sameval_base)
4702 && known_eq (soff, doff))
4703 continue;
4705 sameval = NULL_TREE;
4706 break;
4710 /* If the value we want to use is flowing over the backedge and we
4711 should take it as VARYING but it has a non-VARYING value, drop to
4712 VARYING.
4713 If we value-number a virtual operand never value-number to the
4714 value from the backedge as that confuses the alias-walking code.
4715 See gcc.dg/torture/pr87176.c. If the value is the same on a
4716 non-backedge everything is OK though. */
4717 bool visited_p;
4718 if ((backedge_val
4719 && !seen_non_backedge
4720 && TREE_CODE (backedge_val) == SSA_NAME
4721 && sameval == backedge_val
4722 && (SSA_NAME_IS_VIRTUAL_OPERAND (backedge_val)
4723 || SSA_VAL (backedge_val) != backedge_val))
4724 /* Do not value-number a virtual operand to something not visited though,
4725 given that allows us to escape a region in alias walking. */
4726 || (sameval
4727 && TREE_CODE (sameval) == SSA_NAME
4728 && !SSA_NAME_IS_DEFAULT_DEF (sameval)
4729 && SSA_NAME_IS_VIRTUAL_OPERAND (sameval)
4730 && (SSA_VAL (sameval, &visited_p), !visited_p)))
4731 /* Note this just drops to VARYING without inserting the PHI into
4732 the hashes. */
4733 result = PHI_RESULT (phi);
4734 /* If none of the edges was executable keep the value-number at VN_TOP,
4735 if only a single edge is executable use its value. */
4736 else if (n_executable <= 1)
4737 result = seen_undef ? seen_undef : sameval;
4738 /* If we saw only undefined values and VN_TOP use one of the
4739 undefined values. */
4740 else if (sameval == VN_TOP)
4741 result = seen_undef ? seen_undef : sameval;
4742 /* First see if it is equivalent to a phi node in this block. We prefer
4743 this as it allows IV elimination - see PRs 66502 and 67167. */
4744 else if ((result = vn_phi_lookup (phi, backedges_varying_p)))
4746 if (!inserted
4747 && TREE_CODE (result) == SSA_NAME
4748 && gimple_code (SSA_NAME_DEF_STMT (result)) == GIMPLE_PHI)
4750 gimple_set_plf (SSA_NAME_DEF_STMT (result), GF_PLF_1, true);
4751 if (dump_file && (dump_flags & TDF_DETAILS))
4753 fprintf (dump_file, "Marking CSEd to PHI node ");
4754 print_gimple_expr (dump_file, SSA_NAME_DEF_STMT (result),
4755 0, TDF_SLIM);
4756 fprintf (dump_file, "\n");
4760 /* If all values are the same use that, unless we've seen undefined
4761 values as well and the value isn't constant.
4762 CCP/copyprop have the same restriction to not remove uninit warnings. */
4763 else if (sameval
4764 && (! seen_undef || is_gimple_min_invariant (sameval)))
4765 result = sameval;
4766 else
4768 result = PHI_RESULT (phi);
4769 /* Only insert PHIs that are varying, for constant value numbers
4770 we mess up equivalences otherwise as we are only comparing
4771 the immediate controlling predicates. */
4772 vn_phi_insert (phi, result, backedges_varying_p);
4773 if (inserted)
4774 *inserted = true;
4777 return set_ssa_val_to (PHI_RESULT (phi), result);
4780 /* Try to simplify RHS using equivalences and constant folding. */
4782 static tree
4783 try_to_simplify (gassign *stmt)
4785 enum tree_code code = gimple_assign_rhs_code (stmt);
4786 tree tem;
4788 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
4789 in this case, there is no point in doing extra work. */
4790 if (code == SSA_NAME)
4791 return NULL_TREE;
4793 /* First try constant folding based on our current lattice. */
4794 mprts_hook = vn_lookup_simplify_result;
4795 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
4796 mprts_hook = NULL;
4797 if (tem
4798 && (TREE_CODE (tem) == SSA_NAME
4799 || is_gimple_min_invariant (tem)))
4800 return tem;
4802 return NULL_TREE;
4805 /* Visit and value number STMT, return true if the value number
4806 changed. */
4808 static bool
4809 visit_stmt (gimple *stmt, bool backedges_varying_p = false)
4811 bool changed = false;
4813 if (dump_file && (dump_flags & TDF_DETAILS))
4815 fprintf (dump_file, "Value numbering stmt = ");
4816 print_gimple_stmt (dump_file, stmt, 0);
4819 if (gimple_code (stmt) == GIMPLE_PHI)
4820 changed = visit_phi (stmt, NULL, backedges_varying_p);
4821 else if (gimple_has_volatile_ops (stmt))
4822 changed = defs_to_varying (stmt);
4823 else if (gassign *ass = dyn_cast <gassign *> (stmt))
4825 enum tree_code code = gimple_assign_rhs_code (ass);
4826 tree lhs = gimple_assign_lhs (ass);
4827 tree rhs1 = gimple_assign_rhs1 (ass);
4828 tree simplified;
4830 /* Shortcut for copies. Simplifying copies is pointless,
4831 since we copy the expression and value they represent. */
4832 if (code == SSA_NAME
4833 && TREE_CODE (lhs) == SSA_NAME)
4835 changed = visit_copy (lhs, rhs1);
4836 goto done;
4838 simplified = try_to_simplify (ass);
4839 if (simplified)
4841 if (dump_file && (dump_flags & TDF_DETAILS))
4843 fprintf (dump_file, "RHS ");
4844 print_gimple_expr (dump_file, ass, 0);
4845 fprintf (dump_file, " simplified to ");
4846 print_generic_expr (dump_file, simplified);
4847 fprintf (dump_file, "\n");
4850 /* Setting value numbers to constants will occasionally
4851 screw up phi congruence because constants are not
4852 uniquely associated with a single ssa name that can be
4853 looked up. */
4854 if (simplified
4855 && is_gimple_min_invariant (simplified)
4856 && TREE_CODE (lhs) == SSA_NAME)
4858 changed = set_ssa_val_to (lhs, simplified);
4859 goto done;
4861 else if (simplified
4862 && TREE_CODE (simplified) == SSA_NAME
4863 && TREE_CODE (lhs) == SSA_NAME)
4865 changed = visit_copy (lhs, simplified);
4866 goto done;
4869 if ((TREE_CODE (lhs) == SSA_NAME
4870 /* We can substitute SSA_NAMEs that are live over
4871 abnormal edges with their constant value. */
4872 && !(gimple_assign_copy_p (ass)
4873 && is_gimple_min_invariant (rhs1))
4874 && !(simplified
4875 && is_gimple_min_invariant (simplified))
4876 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4877 /* Stores or copies from SSA_NAMEs that are live over
4878 abnormal edges are a problem. */
4879 || (code == SSA_NAME
4880 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
4881 changed = defs_to_varying (ass);
4882 else if (REFERENCE_CLASS_P (lhs)
4883 || DECL_P (lhs))
4884 changed = visit_reference_op_store (lhs, rhs1, ass);
4885 else if (TREE_CODE (lhs) == SSA_NAME)
4887 if ((gimple_assign_copy_p (ass)
4888 && is_gimple_min_invariant (rhs1))
4889 || (simplified
4890 && is_gimple_min_invariant (simplified)))
4892 if (simplified)
4893 changed = set_ssa_val_to (lhs, simplified);
4894 else
4895 changed = set_ssa_val_to (lhs, rhs1);
4897 else
4899 /* Visit the original statement. */
4900 switch (vn_get_stmt_kind (ass))
4902 case VN_NARY:
4903 changed = visit_nary_op (lhs, ass);
4904 break;
4905 case VN_REFERENCE:
4906 changed = visit_reference_op_load (lhs, rhs1, ass);
4907 break;
4908 default:
4909 changed = defs_to_varying (ass);
4910 break;
4914 else
4915 changed = defs_to_varying (ass);
4917 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
4919 tree lhs = gimple_call_lhs (call_stmt);
4920 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4922 /* Try constant folding based on our current lattice. */
4923 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
4924 vn_valueize);
4925 if (simplified)
4927 if (dump_file && (dump_flags & TDF_DETAILS))
4929 fprintf (dump_file, "call ");
4930 print_gimple_expr (dump_file, call_stmt, 0);
4931 fprintf (dump_file, " simplified to ");
4932 print_generic_expr (dump_file, simplified);
4933 fprintf (dump_file, "\n");
4936 /* Setting value numbers to constants will occasionally
4937 screw up phi congruence because constants are not
4938 uniquely associated with a single ssa name that can be
4939 looked up. */
4940 if (simplified
4941 && is_gimple_min_invariant (simplified))
4943 changed = set_ssa_val_to (lhs, simplified);
4944 if (gimple_vdef (call_stmt))
4945 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4946 SSA_VAL (gimple_vuse (call_stmt)));
4947 goto done;
4949 else if (simplified
4950 && TREE_CODE (simplified) == SSA_NAME)
4952 changed = visit_copy (lhs, simplified);
4953 if (gimple_vdef (call_stmt))
4954 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4955 SSA_VAL (gimple_vuse (call_stmt)));
4956 goto done;
4958 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4960 changed = defs_to_varying (call_stmt);
4961 goto done;
4965 /* Pick up flags from a devirtualization target. */
4966 tree fn = gimple_call_fn (stmt);
4967 int extra_fnflags = 0;
4968 if (fn && TREE_CODE (fn) == SSA_NAME)
4970 fn = SSA_VAL (fn);
4971 if (TREE_CODE (fn) == ADDR_EXPR
4972 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4973 extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
4975 if (!gimple_call_internal_p (call_stmt)
4976 && (/* Calls to the same function with the same vuse
4977 and the same operands do not necessarily return the same
4978 value, unless they're pure or const. */
4979 ((gimple_call_flags (call_stmt) | extra_fnflags)
4980 & (ECF_PURE | ECF_CONST))
4981 /* If calls have a vdef, subsequent calls won't have
4982 the same incoming vuse. So, if 2 calls with vdef have the
4983 same vuse, we know they're not subsequent.
4984 We can value number 2 calls to the same function with the
4985 same vuse and the same operands which are not subsequent
4986 the same, because there is no code in the program that can
4987 compare the 2 values... */
4988 || (gimple_vdef (call_stmt)
4989 /* ... unless the call returns a pointer which does
4990 not alias with anything else. In which case the
4991 information that the values are distinct is encoded
4992 in the IL. */
4993 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
4994 /* Only perform the following when being called from PRE
4995 which embeds tail merging. */
4996 && default_vn_walk_kind == VN_WALK)))
4997 changed = visit_reference_op_call (lhs, call_stmt);
4998 else
4999 changed = defs_to_varying (call_stmt);
5001 else
5002 changed = defs_to_varying (stmt);
5003 done:
5004 return changed;
5008 /* Allocate a value number table. */
5010 static void
5011 allocate_vn_table (vn_tables_t table, unsigned size)
5013 table->phis = new vn_phi_table_type (size);
5014 table->nary = new vn_nary_op_table_type (size);
5015 table->references = new vn_reference_table_type (size);
5018 /* Free a value number table. */
5020 static void
5021 free_vn_table (vn_tables_t table)
5023 /* Walk over elements and release vectors. */
5024 vn_reference_iterator_type hir;
5025 vn_reference_t vr;
5026 FOR_EACH_HASH_TABLE_ELEMENT (*table->references, vr, vn_reference_t, hir)
5027 vr->operands.release ();
5028 delete table->phis;
5029 table->phis = NULL;
5030 delete table->nary;
5031 table->nary = NULL;
5032 delete table->references;
5033 table->references = NULL;
5036 /* Set *ID according to RESULT. */
5038 static void
5039 set_value_id_for_result (tree result, unsigned int *id)
5041 if (result && TREE_CODE (result) == SSA_NAME)
5042 *id = VN_INFO (result)->value_id;
5043 else if (result && is_gimple_min_invariant (result))
5044 *id = get_or_alloc_constant_value_id (result);
5045 else
5046 *id = get_next_value_id ();
5049 /* Set the value ids in the valid hash tables. */
5051 static void
5052 set_hashtable_value_ids (void)
5054 vn_nary_op_iterator_type hin;
5055 vn_phi_iterator_type hip;
5056 vn_reference_iterator_type hir;
5057 vn_nary_op_t vno;
5058 vn_reference_t vr;
5059 vn_phi_t vp;
5061 /* Now set the value ids of the things we had put in the hash
5062 table. */
5064 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
5065 if (! vno->predicated_values)
5066 set_value_id_for_result (vno->u.result, &vno->value_id);
5068 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
5069 set_value_id_for_result (vp->result, &vp->value_id);
5071 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
5072 hir)
5073 set_value_id_for_result (vr->result, &vr->value_id);
5076 /* Return the maximum value id we have ever seen. */
5078 unsigned int
5079 get_max_value_id (void)
5081 return next_value_id;
5084 /* Return the next unique value id. */
5086 unsigned int
5087 get_next_value_id (void)
5089 return next_value_id++;
5093 /* Compare two expressions E1 and E2 and return true if they are equal. */
5095 bool
5096 expressions_equal_p (tree e1, tree e2)
5098 /* The obvious case. */
5099 if (e1 == e2)
5100 return true;
5102 /* If either one is VN_TOP consider them equal. */
5103 if (e1 == VN_TOP || e2 == VN_TOP)
5104 return true;
5106 /* If only one of them is null, they cannot be equal. */
5107 if (!e1 || !e2)
5108 return false;
5110 /* Now perform the actual comparison. */
5111 if (TREE_CODE (e1) == TREE_CODE (e2)
5112 && operand_equal_p (e1, e2, OEP_PURE_SAME))
5113 return true;
5115 return false;
5119 /* Return true if the nary operation NARY may trap. This is a copy
5120 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
5122 bool
5123 vn_nary_may_trap (vn_nary_op_t nary)
5125 tree type;
5126 tree rhs2 = NULL_TREE;
5127 bool honor_nans = false;
5128 bool honor_snans = false;
5129 bool fp_operation = false;
5130 bool honor_trapv = false;
5131 bool handled, ret;
5132 unsigned i;
5134 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
5135 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
5136 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
5138 type = nary->type;
5139 fp_operation = FLOAT_TYPE_P (type);
5140 if (fp_operation)
5142 honor_nans = flag_trapping_math && !flag_finite_math_only;
5143 honor_snans = flag_signaling_nans != 0;
5145 else if (INTEGRAL_TYPE_P (type)
5146 && TYPE_OVERFLOW_TRAPS (type))
5147 honor_trapv = true;
5149 if (nary->length >= 2)
5150 rhs2 = nary->op[1];
5151 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
5152 honor_trapv,
5153 honor_nans, honor_snans, rhs2,
5154 &handled);
5155 if (handled
5156 && ret)
5157 return true;
5159 for (i = 0; i < nary->length; ++i)
5160 if (tree_could_trap_p (nary->op[i]))
5161 return true;
5163 return false;
5166 /* Return true if the reference operation REF may trap. */
5168 bool
5169 vn_reference_may_trap (vn_reference_t ref)
5171 switch (ref->operands[0].opcode)
5173 case MODIFY_EXPR:
5174 case CALL_EXPR:
5175 /* We do not handle calls. */
5176 case ADDR_EXPR:
5177 /* And toplevel address computations never trap. */
5178 return false;
5179 default:;
5182 vn_reference_op_t op;
5183 unsigned i;
5184 FOR_EACH_VEC_ELT (ref->operands, i, op)
5186 switch (op->opcode)
5188 case WITH_SIZE_EXPR:
5189 case TARGET_MEM_REF:
5190 /* Always variable. */
5191 return true;
5192 case COMPONENT_REF:
5193 if (op->op1 && TREE_CODE (op->op1) == SSA_NAME)
5194 return true;
5195 break;
5196 case ARRAY_RANGE_REF:
5197 case ARRAY_REF:
5198 if (TREE_CODE (op->op0) == SSA_NAME)
5199 return true;
5200 break;
5201 case MEM_REF:
5202 /* Nothing interesting in itself, the base is separate. */
5203 break;
5204 /* The following are the address bases. */
5205 case SSA_NAME:
5206 return true;
5207 case ADDR_EXPR:
5208 if (op->op0)
5209 return tree_could_trap_p (TREE_OPERAND (op->op0, 0));
5210 return false;
5211 default:;
5214 return false;
5217 eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
5218 bitmap inserted_exprs_)
5219 : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
5220 el_todo (0), eliminations (0), insertions (0),
5221 inserted_exprs (inserted_exprs_)
5223 need_eh_cleanup = BITMAP_ALLOC (NULL);
5224 need_ab_cleanup = BITMAP_ALLOC (NULL);
5227 eliminate_dom_walker::~eliminate_dom_walker ()
5229 BITMAP_FREE (need_eh_cleanup);
5230 BITMAP_FREE (need_ab_cleanup);
5233 /* Return a leader for OP that is available at the current point of the
5234 eliminate domwalk. */
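/* In short: constants are their own leader, while for SSA names the
   leader is looked up in the avail[] vector indexed by the SSA version
   of OP's value number (brief summary of the code below).  */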
5236 tree
5237 eliminate_dom_walker::eliminate_avail (basic_block, tree op)
5239 tree valnum = VN_INFO (op)->valnum;
5240 if (TREE_CODE (valnum) == SSA_NAME)
5242 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
5243 return valnum;
5244 if (avail.length () > SSA_NAME_VERSION (valnum))
5245 return avail[SSA_NAME_VERSION (valnum)];
5247 else if (is_gimple_min_invariant (valnum))
5248 return valnum;
5249 return NULL_TREE;
5252 /* At the current point of the eliminate domwalk make OP available. */
5254 void
5255 eliminate_dom_walker::eliminate_push_avail (basic_block, tree op)
5257 tree valnum = VN_INFO (op)->valnum;
5258 if (TREE_CODE (valnum) == SSA_NAME)
5260 if (avail.length () <= SSA_NAME_VERSION (valnum))
5261 avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
5262 tree pushop = op;
5263 if (avail[SSA_NAME_VERSION (valnum)])
5264 pushop = avail[SSA_NAME_VERSION (valnum)];
5265 avail_stack.safe_push (pushop);
5266 avail[SSA_NAME_VERSION (valnum)] = op;
5270 /* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
5271 the leader for the expression if insertion was successful. */
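/* Only simple single-statement expressions are materialized here:
   conversions, VIEW_CONVERT_EXPRs, BIT_FIELD_REFs and BIT_AND_EXPR
   with a constant mask, built on top of an available leader of the
   operand (summary of the checks below).  */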
5273 tree
5274 eliminate_dom_walker::eliminate_insert (basic_block bb,
5275 gimple_stmt_iterator *gsi, tree val)
5277 /* We can insert a sequence with a single assignment only. */
5278 gimple_seq stmts = VN_INFO (val)->expr;
5279 if (!gimple_seq_singleton_p (stmts))
5280 return NULL_TREE;
5281 gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
5282 if (!stmt
5283 || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
5284 && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
5285 && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
5286 && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
5287 || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
5288 return NULL_TREE;
5290 tree op = gimple_assign_rhs1 (stmt);
5291 if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
5292 || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5293 op = TREE_OPERAND (op, 0);
5294 tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (bb, op) : op;
5295 if (!leader)
5296 return NULL_TREE;
5298 tree res;
5299 stmts = NULL;
5300 if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5301 res = gimple_build (&stmts, BIT_FIELD_REF,
5302 TREE_TYPE (val), leader,
5303 TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
5304 TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
5305 else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
5306 res = gimple_build (&stmts, BIT_AND_EXPR,
5307 TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
5308 else
5309 res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
5310 TREE_TYPE (val), leader);
5311 if (TREE_CODE (res) != SSA_NAME
5312 || SSA_NAME_IS_DEFAULT_DEF (res)
5313 || gimple_bb (SSA_NAME_DEF_STMT (res)))
5315 gimple_seq_discard (stmts);
5317 /* During propagation we have to treat SSA info conservatively
5318 and thus we can end up simplifying the inserted expression
5319 at elimination time to something not defined in stmts. */
5320 /* But then this is a redundancy we failed to detect. Which means
5321 res now has two values. That doesn't play well with how
5322 we track availability here, so give up. */
5323 if (dump_file && (dump_flags & TDF_DETAILS))
5325 if (TREE_CODE (res) == SSA_NAME)
5326 res = eliminate_avail (bb, res);
5327 if (res)
5329 fprintf (dump_file, "Failed to insert expression for value ");
5330 print_generic_expr (dump_file, val);
5331 fprintf (dump_file, " which is really fully redundant to ");
5332 print_generic_expr (dump_file, res);
5333 fprintf (dump_file, "\n");
5337 return NULL_TREE;
5339 else
5341 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
5342 VN_INFO (res)->valnum = val;
5343 VN_INFO (res)->visited = true;
5346 insertions++;
5347 if (dump_file && (dump_flags & TDF_DETAILS))
5349 fprintf (dump_file, "Inserted ");
5350 print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
5353 return res;
5356 void
5357 eliminate_dom_walker::eliminate_stmt (basic_block b, gimple_stmt_iterator *gsi)
5359 tree sprime = NULL_TREE;
5360 gimple *stmt = gsi_stmt (*gsi);
5361 tree lhs = gimple_get_lhs (stmt);
5362 if (lhs && TREE_CODE (lhs) == SSA_NAME
5363 && !gimple_has_volatile_ops (stmt)
5364 /* See PR43491. Do not replace a global register variable when
5365 it is the RHS of an assignment. Do replace local register
5366 variables since gcc does not guarantee a local variable will
5367 be allocated in a register.
5368 ??? The fix isn't effective here. This should instead
5369 be ensured by not value-numbering them the same but treating
5370 them like volatiles? */
5371 && !(gimple_assign_single_p (stmt)
5372 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
5373 && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
5374 && is_global_var (gimple_assign_rhs1 (stmt)))))
5376 sprime = eliminate_avail (b, lhs);
5377 if (!sprime)
5379 /* If there is no existing usable leader but SCCVN thinks
5380 it has an expression it wants to use as replacement,
5381 insert that. */
5382 tree val = VN_INFO (lhs)->valnum;
5383 if (val != VN_TOP
5384 && TREE_CODE (val) == SSA_NAME
5385 && VN_INFO (val)->needs_insertion
5386 && VN_INFO (val)->expr != NULL
5387 && (sprime = eliminate_insert (b, gsi, val)) != NULL_TREE)
5388 eliminate_push_avail (b, sprime);
5391 /* If this now constitutes a copy, duplicate points-to
5392 and range info appropriately. This is especially
5393 important for inserted code. See tree-ssa-copy.c
5394 for similar code. */
5395 if (sprime
5396 && TREE_CODE (sprime) == SSA_NAME)
5398 basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
5399 if (POINTER_TYPE_P (TREE_TYPE (lhs))
5400 && SSA_NAME_PTR_INFO (lhs)
5401 && ! SSA_NAME_PTR_INFO (sprime))
5403 duplicate_ssa_name_ptr_info (sprime,
5404 SSA_NAME_PTR_INFO (lhs));
5405 if (b != sprime_b)
5406 mark_ptr_info_alignment_unknown
5407 (SSA_NAME_PTR_INFO (sprime));
5409 else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
5410 && SSA_NAME_RANGE_INFO (lhs)
5411 && ! SSA_NAME_RANGE_INFO (sprime)
5412 && b == sprime_b)
5413 duplicate_ssa_name_range_info (sprime,
5414 SSA_NAME_RANGE_TYPE (lhs),
5415 SSA_NAME_RANGE_INFO (lhs));
5418 /* Inhibit the use of an inserted PHI on a loop header when
5419 the address of the memory reference is a simple induction
5420 variable. In other cases the vectorizer won't do anything
5421 anyway (either it's loop invariant or a complicated
5422 expression). */
5423 if (sprime
5424 && TREE_CODE (sprime) == SSA_NAME
5425 && do_pre
5426 && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
5427 && loop_outer (b->loop_father)
5428 && has_zero_uses (sprime)
5429 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
5430 && gimple_assign_load_p (stmt))
5432 gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
5433 basic_block def_bb = gimple_bb (def_stmt);
5434 if (gimple_code (def_stmt) == GIMPLE_PHI
5435 && def_bb->loop_father->header == def_bb)
5437 loop_p loop = def_bb->loop_father;
5438 ssa_op_iter iter;
5439 tree op;
5440 bool found = false;
5441 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
5443 affine_iv iv;
5444 def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
5445 if (def_bb
5446 && flow_bb_inside_loop_p (loop, def_bb)
5447 && simple_iv (loop, loop, op, &iv, true))
5449 found = true;
5450 break;
5453 if (found)
5455 if (dump_file && (dump_flags & TDF_DETAILS))
5457 fprintf (dump_file, "Not replacing ");
5458 print_gimple_expr (dump_file, stmt, 0);
5459 fprintf (dump_file, " with ");
5460 print_generic_expr (dump_file, sprime);
5461 fprintf (dump_file, " which would add a loop"
5462 " carried dependence to loop %d\n",
5463 loop->num);
5465 /* Don't keep sprime available. */
5466 sprime = NULL_TREE;
5471 if (sprime)
5473 /* If we can propagate the value computed for LHS into
5474 all uses don't bother doing anything with this stmt. */
5475 if (may_propagate_copy (lhs, sprime))
5477 /* Mark it for removal. */
5478 to_remove.safe_push (stmt);
5480 /* ??? Don't count copy/constant propagations. */
5481 if (gimple_assign_single_p (stmt)
5482 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5483 || gimple_assign_rhs1 (stmt) == sprime))
5484 return;
5486 if (dump_file && (dump_flags & TDF_DETAILS))
5488 fprintf (dump_file, "Replaced ");
5489 print_gimple_expr (dump_file, stmt, 0);
5490 fprintf (dump_file, " with ");
5491 print_generic_expr (dump_file, sprime);
5492 fprintf (dump_file, " in all uses of ");
5493 print_gimple_stmt (dump_file, stmt, 0);
5496 eliminations++;
5497 return;
5500 /* If this is an assignment from our leader (which
5501 happens in the case the value-number is a constant)
5502 then there is nothing to do. */
5503 if (gimple_assign_single_p (stmt)
5504 && sprime == gimple_assign_rhs1 (stmt))
5505 return;
5507 /* Else replace its RHS. */
5508 if (dump_file && (dump_flags & TDF_DETAILS))
5510 fprintf (dump_file, "Replaced ");
5511 print_gimple_expr (dump_file, stmt, 0);
5512 fprintf (dump_file, " with ");
5513 print_generic_expr (dump_file, sprime);
5514 fprintf (dump_file, " in ");
5515 print_gimple_stmt (dump_file, stmt, 0);
5517 eliminations++;
5519 bool can_make_abnormal_goto = (is_gimple_call (stmt)
5520 && stmt_can_make_abnormal_goto (stmt));
5521 gimple *orig_stmt = stmt;
5522 if (!useless_type_conversion_p (TREE_TYPE (lhs),
5523 TREE_TYPE (sprime)))
5525 /* We preserve conversions to but not from function or method
5526 types. This asymmetry makes it necessary to re-instantiate
5527 conversions here. */
5528 if (POINTER_TYPE_P (TREE_TYPE (lhs))
5529 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (lhs))))
5530 sprime = fold_convert (TREE_TYPE (lhs), sprime);
5531 else
5532 gcc_unreachable ();
5534 tree vdef = gimple_vdef (stmt);
5535 tree vuse = gimple_vuse (stmt);
5536 propagate_tree_value_into_stmt (gsi, sprime);
5537 stmt = gsi_stmt (*gsi);
5538 update_stmt (stmt);
5539 /* In case the VDEF on the original stmt was released, value-number
5540 it to the VUSE. This is to make vuse_ssa_val able to skip
5541 released virtual operands. */
5542 if (vdef != gimple_vdef (stmt))
5544 gcc_assert (SSA_NAME_IN_FREE_LIST (vdef));
5545 VN_INFO (vdef)->valnum = vuse;
5548 /* If we removed EH side-effects from the statement, clean
5549 its EH information. */
5550 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
5552 bitmap_set_bit (need_eh_cleanup,
5553 gimple_bb (stmt)->index);
5554 if (dump_file && (dump_flags & TDF_DETAILS))
5555 fprintf (dump_file, " Removed EH side-effects.\n");
5558 /* Likewise for AB side-effects. */
5559 if (can_make_abnormal_goto
5560 && !stmt_can_make_abnormal_goto (stmt))
5562 bitmap_set_bit (need_ab_cleanup,
5563 gimple_bb (stmt)->index);
5564 if (dump_file && (dump_flags & TDF_DETAILS))
5565 fprintf (dump_file, " Removed AB side-effects.\n");
5568 return;
5572 /* If the statement is a scalar store, see if the expression
5573 has the same value number as its rhs. If so, the store is
5574 dead. */
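  /* E.g. (illustrative): for a = 1; ...; a = 1; where value numbering
     proved the stored value unchanged, the second store is queued for
     removal, subject to the alias-set check below.  */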
5575 if (gimple_assign_single_p (stmt)
5576 && !gimple_has_volatile_ops (stmt)
5577 && !is_gimple_reg (gimple_assign_lhs (stmt))
5578 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5579 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
5581 tree val;
5582 tree rhs = gimple_assign_rhs1 (stmt);
5583 vn_reference_t vnresult;
5584 val = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_WALKREWRITE,
5585 &vnresult, false);
5586 if (TREE_CODE (rhs) == SSA_NAME)
5587 rhs = VN_INFO (rhs)->valnum;
5588 if (val
5589 && operand_equal_p (val, rhs, 0))
5591 /* We can only remove the later store if the former aliases
5592 at least all accesses the later one does or if the store
5593 was to readonly memory storing the same value. */
5594 alias_set_type set = get_alias_set (lhs);
5595 if (! vnresult
5596 || vnresult->set == set
5597 || alias_set_subset_of (set, vnresult->set))
5599 if (dump_file && (dump_flags & TDF_DETAILS))
5601 fprintf (dump_file, "Deleted redundant store ");
5602 print_gimple_stmt (dump_file, stmt, 0);
5605 /* Queue stmt for removal. */
5606 to_remove.safe_push (stmt);
5607 return;
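   /* For example, given

	x_1 = *p_2;
	...
	*p_2 = x_1;

      the value looked up for the stored-to location equals the value of
      the stored RHS, so the later store writes back what is already in
      memory and is queued for removal as a redundant store.  */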
5612 /* If this is a control statement for which value numbering
5613 left edges unexecuted, force the condition in a way
5614 consistent with that. */
5615 if (gcond *cond = dyn_cast <gcond *> (stmt))
5617 if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
5618 ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
5620 if (dump_file && (dump_flags & TDF_DETAILS))
5622 fprintf (dump_file, "Removing unexecutable edge from ");
5623 print_gimple_stmt (dump_file, stmt, 0);
5625 if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
5626 == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
5627 gimple_cond_make_true (cond);
5628 else
5629 gimple_cond_make_false (cond);
5630 update_stmt (cond);
5631 el_todo |= TODO_cleanup_cfg;
5632 return;
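   /* For example, if value numbering found only the true edge of

	if (a_1 > 10)

      executable, the condition is forced to constant true (if (1 != 0)
      style) and the subsequent CFG cleanup removes the never-taken
      false edge together with blocks that become unreachable.  */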
5636 bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
5637 bool was_noreturn = (is_gimple_call (stmt)
5638 && gimple_call_noreturn_p (stmt));
5639 tree vdef = gimple_vdef (stmt);
5640 tree vuse = gimple_vuse (stmt);
5642 /* If we didn't replace the whole stmt (or propagate the result
5643 into all uses), replace all uses on this stmt with their
5644 leaders. */
5645 bool modified = false;
5646 use_operand_p use_p;
5647 ssa_op_iter iter;
5648 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5650 tree use = USE_FROM_PTR (use_p);
5651 /* ??? The call code above leaves stmt operands un-updated. */
5652 if (TREE_CODE (use) != SSA_NAME)
5653 continue;
5654 tree sprime;
5655 if (SSA_NAME_IS_DEFAULT_DEF (use))
5656 /* ??? For default defs BB shouldn't matter, but we have to
5657 solve the inconsistency between rpo eliminate and
5658 dom eliminate avail valueization first. */
5659 sprime = eliminate_avail (b, use);
5660 else
5661 /* Look for sth available at the definition block of the argument.
5662 This avoids inconsistencies between availability there which
5663 decides if the stmt can be removed and availability at the
5664 use site. The SSA property ensures that things available
5665 at the definition are also available at uses. */
5666 sprime = eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (use)), use);
5667 if (sprime && sprime != use
5668 && may_propagate_copy (use, sprime)
5669 /* We substitute into debug stmts to avoid excessive
5670 debug temporaries created by removed stmts, but we need
5671 to avoid doing so for inserted sprimes as we never want
5672 to create debug temporaries for them. */
5673 && (!inserted_exprs
5674 || TREE_CODE (sprime) != SSA_NAME
5675 || !is_gimple_debug (stmt)
5676 || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
5678 propagate_value (use_p, sprime);
5679 modified = true;
5683 /* Fold the stmt if modified; this canonicalizes MEM_REFs we propagated
5684 into, which is a requirement for the IPA devirt machinery. */
5685 gimple *old_stmt = stmt;
5686 if (modified)
5688 /* If a formerly non-invariant ADDR_EXPR is turned into an
5689 invariant one it was on a separate stmt. */
5690 if (gimple_assign_single_p (stmt)
5691 && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
5692 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
5693 gimple_stmt_iterator prev = *gsi;
5694 gsi_prev (&prev);
5695 if (fold_stmt (gsi))
5697 /* fold_stmt may have created new stmts in between
5698 the previous stmt and the folded stmt. Mark
5699 all defs created there as varying to not confuse
5700 the SCCVN machinery as we're using that even during
5701 elimination. */
5702 if (gsi_end_p (prev))
5703 prev = gsi_start_bb (b);
5704 else
5705 gsi_next (&prev);
5706 if (gsi_stmt (prev) != gsi_stmt (*gsi))
5709 tree def;
5710 ssa_op_iter dit;
5711 FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
5712 dit, SSA_OP_ALL_DEFS)
5713 /* As existing DEFs may move between stmts
5714 only process new ones. */
5715 if (! has_VN_INFO (def))
5717 VN_INFO (def)->valnum = def;
5718 VN_INFO (def)->visited = true;
5720 if (gsi_stmt (prev) == gsi_stmt (*gsi))
5721 break;
5722 gsi_next (&prev);
5724 while (1);
5726 stmt = gsi_stmt (*gsi);
5727 /* In case we folded the stmt away schedule the NOP for removal. */
5728 if (gimple_nop_p (stmt))
5729 to_remove.safe_push (stmt);
5732 /* Visit indirect calls and turn them into direct calls if
5733 possible using the devirtualization machinery. Do this before
5734 checking for required EH/abnormal/noreturn cleanup as devirtualization
5735 may expose more of those. */
5736 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
5738 tree fn = gimple_call_fn (call_stmt);
5739 if (fn
5740 && flag_devirtualize
5741 && virtual_method_call_p (fn))
5743 tree otr_type = obj_type_ref_class (fn);
5744 unsigned HOST_WIDE_INT otr_tok
5745 = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
5746 tree instance;
5747 ipa_polymorphic_call_context context (current_function_decl,
5748 fn, stmt, &instance);
5749 context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
5750 otr_type, stmt, NULL);
5751 bool final;
5752 vec <cgraph_node *> targets
5753 = possible_polymorphic_call_targets (obj_type_ref_class (fn),
5754 otr_tok, context, &final);
5755 if (dump_file)
5756 dump_possible_polymorphic_call_targets (dump_file,
5757 obj_type_ref_class (fn),
5758 otr_tok, context);
5759 if (final && targets.length () <= 1 && dbg_cnt (devirt))
5761 tree fn;
5762 if (targets.length () == 1)
5763 fn = targets[0]->decl;
5764 else
5765 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5766 if (dump_enabled_p ())
5768 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
5769 "converting indirect call to "
5770 "function %s\n",
5771 lang_hooks.decl_printable_name (fn, 2));
5773 gimple_call_set_fndecl (call_stmt, fn);
5774 /* If changing the call to __builtin_unreachable
5775 or similar noreturn function, adjust gimple_call_fntype
5776 too. */
5777 if (gimple_call_noreturn_p (call_stmt)
5778 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
5779 && TYPE_ARG_TYPES (TREE_TYPE (fn))
5780 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
5781 == void_type_node))
5782 gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
5783 maybe_remove_unused_call_args (cfun, call_stmt);
5784 modified = true;
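   /* For example, an indirect virtual call whose context analysis yields
      the single possible target C::foo is turned into a direct call to
      C::foo; if the set of possible targets is empty the call is
      replaced by a call to __builtin_unreachable.  */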
5789 if (modified)
5791 /* When changing a call into a noreturn call, cfg cleanup
5792 is needed to fix up the noreturn call. */
5793 if (!was_noreturn
5794 && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
5795 to_fixup.safe_push (stmt);
5796 /* When changing a condition or switch into one we know what
5797 edge will be executed, schedule a cfg cleanup. */
5798 if ((gimple_code (stmt) == GIMPLE_COND
5799 && (gimple_cond_true_p (as_a <gcond *> (stmt))
5800 || gimple_cond_false_p (as_a <gcond *> (stmt))))
5801 || (gimple_code (stmt) == GIMPLE_SWITCH
5802 && TREE_CODE (gimple_switch_index
5803 (as_a <gswitch *> (stmt))) == INTEGER_CST))
5804 el_todo |= TODO_cleanup_cfg;
5805 /* If we removed EH side-effects from the statement, clean
5806 its EH information. */
5807 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
5809 bitmap_set_bit (need_eh_cleanup,
5810 gimple_bb (stmt)->index);
5811 if (dump_file && (dump_flags & TDF_DETAILS))
5812 fprintf (dump_file, " Removed EH side-effects.\n");
5814 /* Likewise for AB side-effects. */
5815 if (can_make_abnormal_goto
5816 && !stmt_can_make_abnormal_goto (stmt))
5818 bitmap_set_bit (need_ab_cleanup,
5819 gimple_bb (stmt)->index);
5820 if (dump_file && (dump_flags & TDF_DETAILS))
5821 fprintf (dump_file, " Removed AB side-effects.\n");
5823 update_stmt (stmt);
5824 /* In case the VDEF on the original stmt was released, value-number
5825 it to the VUSE. This is to make vuse_ssa_val able to skip
5826 released virtual operands. */
5827 if (vdef && SSA_NAME_IN_FREE_LIST (vdef))
5828 VN_INFO (vdef)->valnum = vuse;
5831 /* Make new values available - for fully redundant LHS we
5832 continue with the next stmt above and skip this. */
5833 def_operand_p defp;
5834 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
5835 eliminate_push_avail (b, DEF_FROM_PTR (defp));
5838 /* Perform elimination for the basic-block B during the domwalk. */
5840 edge
5841 eliminate_dom_walker::before_dom_children (basic_block b)
5843 /* Mark new bb. */
5844 avail_stack.safe_push (NULL_TREE);
5846 /* Skip blocks marked unreachable during the SCCVN domwalk. */
5847 if (!(b->flags & BB_EXECUTABLE))
5848 return NULL;
5850 vn_context_bb = b;
5852 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
5854 gphi *phi = gsi.phi ();
5855 tree res = PHI_RESULT (phi);
5857 if (virtual_operand_p (res))
5859 gsi_next (&gsi);
5860 continue;
5863 tree sprime = eliminate_avail (b, res);
5864 if (sprime
5865 && sprime != res)
5867 if (dump_file && (dump_flags & TDF_DETAILS))
5869 fprintf (dump_file, "Replaced redundant PHI node defining ");
5870 print_generic_expr (dump_file, res);
5871 fprintf (dump_file, " with ");
5872 print_generic_expr (dump_file, sprime);
5873 fprintf (dump_file, "\n");
5876 /* If we inserted this PHI node ourselves, it's not an elimination. */
5877 if (! inserted_exprs
5878 || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
5879 eliminations++;
5881 /* If we will propagate into all uses don't bother to do
5882 anything. */
5883 if (may_propagate_copy (res, sprime))
5885 /* Mark the PHI for removal. */
5886 to_remove.safe_push (phi);
5887 gsi_next (&gsi);
5888 continue;
5891 remove_phi_node (&gsi, false);
5893 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
5894 sprime = fold_convert (TREE_TYPE (res), sprime);
5895 gimple *stmt = gimple_build_assign (res, sprime);
5896 gimple_stmt_iterator gsi2 = gsi_after_labels (b);
5897 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
5898 continue;
5901 eliminate_push_avail (b, res);
5902 gsi_next (&gsi);
5905 for (gimple_stmt_iterator gsi = gsi_start_bb (b);
5906 !gsi_end_p (gsi);
5907 gsi_next (&gsi))
5908 eliminate_stmt (b, &gsi);
5910 /* Replace destination PHI arguments. */
5911 edge_iterator ei;
5912 edge e;
5913 FOR_EACH_EDGE (e, ei, b->succs)
5914 if (e->flags & EDGE_EXECUTABLE)
5915 for (gphi_iterator gsi = gsi_start_phis (e->dest);
5916 !gsi_end_p (gsi);
5917 gsi_next (&gsi))
5919 gphi *phi = gsi.phi ();
5920 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
5921 tree arg = USE_FROM_PTR (use_p);
5922 if (TREE_CODE (arg) != SSA_NAME
5923 || virtual_operand_p (arg))
5924 continue;
5925 tree sprime = eliminate_avail (b, arg);
5926 if (sprime && may_propagate_copy (arg, sprime))
5927 propagate_value (use_p, sprime);
5930 vn_context_bb = NULL;
5932 return NULL;
5935 /* Make no longer available leaders no longer available. */
5937 void
5938 eliminate_dom_walker::after_dom_children (basic_block)
5940 tree entry;
5941 while ((entry = avail_stack.pop ()) != NULL_TREE)
5943 tree valnum = VN_INFO (entry)->valnum;
5944 tree old = avail[SSA_NAME_VERSION (valnum)];
5945 if (old == entry)
5946 avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
5947 else
5948 avail[SSA_NAME_VERSION (valnum)] = entry;
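   /* Leaders registered while processing the just-left block sit above
      the NULL_TREE marker pushed in before_dom_children.  If a popped
      entry is still the current leader for its value the slot is simply
      cleared; otherwise the popped entry is the leader it had shadowed
      and that one is re-installed.  */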
5952 /* Remove queued stmts and perform delayed cleanups. */
5954 unsigned
5955 eliminate_dom_walker::eliminate_cleanup (bool region_p)
5957 statistics_counter_event (cfun, "Eliminated", eliminations);
5958 statistics_counter_event (cfun, "Insertions", insertions);
5960 /* We cannot remove stmts during BB walk, especially not release SSA
5961 names there as this confuses the VN machinery. The stmts ending
5962 up in to_remove are either stores or simple copies.
5963 Remove stmts in reverse order to make debug stmt creation possible. */
5964 while (!to_remove.is_empty ())
5966 bool do_release_defs = true;
5967 gimple *stmt = to_remove.pop ();
5969 /* When we are value-numbering a region we do not require exit PHIs to
5970 be present so we have to make sure to deal with uses outside of the
5971 region of stmts that we thought are eliminated.
5972 ??? Note we may be confused by uses in dead regions we didn't run
5973 elimination on. Rather than checking individual uses we accept
5974 dead copies to be generated here (gcc.c-torture/execute/20060905-1.c
5975 contains such an example). */
5976 if (region_p)
5978 if (gphi *phi = dyn_cast <gphi *> (stmt))
5980 tree lhs = gimple_phi_result (phi);
5981 if (!has_zero_uses (lhs))
5983 if (dump_file && (dump_flags & TDF_DETAILS))
5984 fprintf (dump_file, "Keeping eliminated stmt live "
5985 "as copy because of out-of-region uses\n");
5986 tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
5987 gimple *copy = gimple_build_assign (lhs, sprime);
5988 gimple_stmt_iterator gsi
5989 = gsi_after_labels (gimple_bb (stmt));
5990 gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
5991 do_release_defs = false;
5994 else if (tree lhs = gimple_get_lhs (stmt))
5995 if (TREE_CODE (lhs) == SSA_NAME
5996 && !has_zero_uses (lhs))
5998 if (dump_file && (dump_flags & TDF_DETAILS))
5999 fprintf (dump_file, "Keeping eliminated stmt live "
6000 "as copy because of out-of-region uses\n");
6001 tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
6002 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
6003 if (is_gimple_assign (stmt))
6005 gimple_assign_set_rhs_from_tree (&gsi, sprime);
6006 stmt = gsi_stmt (gsi);
6007 update_stmt (stmt);
6008 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
6009 bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
6010 continue;
6012 else
6014 gimple *copy = gimple_build_assign (lhs, sprime);
6015 gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
6016 do_release_defs = false;
6021 if (dump_file && (dump_flags & TDF_DETAILS))
6023 fprintf (dump_file, "Removing dead stmt ");
6024 print_gimple_stmt (dump_file, stmt, 0, TDF_NONE);
6027 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
6028 if (gimple_code (stmt) == GIMPLE_PHI)
6029 remove_phi_node (&gsi, do_release_defs);
6030 else
6032 basic_block bb = gimple_bb (stmt);
6033 unlink_stmt_vdef (stmt);
6034 if (gsi_remove (&gsi, true))
6035 bitmap_set_bit (need_eh_cleanup, bb->index);
6036 if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
6037 bitmap_set_bit (need_ab_cleanup, bb->index);
6038 if (do_release_defs)
6039 release_defs (stmt);
6042 /* Removing a stmt may expose a forwarder block. */
6043 el_todo |= TODO_cleanup_cfg;
6046 /* Fixup stmts that became noreturn calls. This may require splitting
6047 blocks and thus isn't possible during the dominator walk. Do this
6048 in reverse order so we don't inadvertently remove a stmt we want to
6049 fixup by visiting a dominating now noreturn call first. */
6050 while (!to_fixup.is_empty ())
6052 gimple *stmt = to_fixup.pop ();
6054 if (dump_file && (dump_flags & TDF_DETAILS))
6056 fprintf (dump_file, "Fixing up noreturn call ");
6057 print_gimple_stmt (dump_file, stmt, 0);
6060 if (fixup_noreturn_call (stmt))
6061 el_todo |= TODO_cleanup_cfg;
6064 bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
6065 bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);
6067 if (do_eh_cleanup)
6068 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
6070 if (do_ab_cleanup)
6071 gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
6073 if (do_eh_cleanup || do_ab_cleanup)
6074 el_todo |= TODO_cleanup_cfg;
6076 return el_todo;
6079 /* Eliminate fully redundant computations. */
6081 unsigned
6082 eliminate_with_rpo_vn (bitmap inserted_exprs)
6084 eliminate_dom_walker walker (CDI_DOMINATORS, inserted_exprs);
6086 walker.walk (cfun->cfg->x_entry_block_ptr);
6087 return walker.eliminate_cleanup ();
6090 static unsigned
6091 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
6092 bool iterate, bool eliminate);
6094 void
6095 run_rpo_vn (vn_lookup_kind kind)
6097 default_vn_walk_kind = kind;
6098 do_rpo_vn (cfun, NULL, NULL, true, false);
6100 /* ??? Prune requirement of these. */
6101 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
6102 constant_value_ids = BITMAP_ALLOC (NULL);
6104 /* Initialize the value ids and prune out remaining VN_TOPs
6105 from dead code. */
6106 tree name;
6107 unsigned i;
6108 FOR_EACH_SSA_NAME (i, name, cfun)
6110 vn_ssa_aux_t info = VN_INFO (name);
6111 if (!info->visited
6112 || info->valnum == VN_TOP)
6113 info->valnum = name;
6114 if (info->valnum == name)
6115 info->value_id = get_next_value_id ();
6116 else if (is_gimple_min_invariant (info->valnum))
6117 info->value_id = get_or_alloc_constant_value_id (info->valnum);
6120 /* Propagate. */
6121 FOR_EACH_SSA_NAME (i, name, cfun)
6123 vn_ssa_aux_t info = VN_INFO (name);
6124 if (TREE_CODE (info->valnum) == SSA_NAME
6125 && info->valnum != name
6126 && info->value_id != VN_INFO (info->valnum)->value_id)
6127 info->value_id = VN_INFO (info->valnum)->value_id;
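   /* For example, if a_1 value-numbers to b_2 and b_2 to itself, a_1
      inherits b_2's value id above; an SSA name whose value number is
      the constant 4 instead gets the value id allocated for that
      constant.  */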
6130 set_hashtable_value_ids ();
6132 if (dump_file && (dump_flags & TDF_DETAILS))
6134 fprintf (dump_file, "Value numbers:\n");
6135 FOR_EACH_SSA_NAME (i, name, cfun)
6137 if (VN_INFO (name)->visited
6138 && SSA_VAL (name) != name)
6140 print_generic_expr (dump_file, name);
6141 fprintf (dump_file, " = ");
6142 print_generic_expr (dump_file, SSA_VAL (name));
6143 fprintf (dump_file, " (%04d)\n", VN_INFO (name)->value_id);
6149 /* Free VN associated data structures. */
6151 void
6152 free_rpo_vn (void)
6154 free_vn_table (valid_info);
6155 XDELETE (valid_info);
6156 obstack_free (&vn_tables_obstack, NULL);
6157 obstack_free (&vn_tables_insert_obstack, NULL);
6159 vn_ssa_aux_iterator_type it;
6160 vn_ssa_aux_t info;
6161 FOR_EACH_HASH_TABLE_ELEMENT (*vn_ssa_aux_hash, info, vn_ssa_aux_t, it)
6162 if (info->needs_insertion)
6163 release_ssa_name (info->name);
6164 obstack_free (&vn_ssa_aux_obstack, NULL);
6165 delete vn_ssa_aux_hash;
6167 delete constant_to_value_id;
6168 constant_to_value_id = NULL;
6169 BITMAP_FREE (constant_value_ids);
6172 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
6174 static tree
6175 vn_lookup_simplify_result (gimple_match_op *res_op)
6177 if (!res_op->code.is_tree_code ())
6178 return NULL_TREE;
6179 tree *ops = res_op->ops;
6180 unsigned int length = res_op->num_ops;
6181 if (res_op->code == CONSTRUCTOR
6182 /* ??? We're arriving here with SCCVN's view, a decomposed CONSTRUCTOR,
6183 while GIMPLE / match-and-simplify passes CONSTRUCTOR as a GENERIC tree. */
6184 && TREE_CODE (res_op->ops[0]) == CONSTRUCTOR)
6186 length = CONSTRUCTOR_NELTS (res_op->ops[0]);
6187 ops = XALLOCAVEC (tree, length);
6188 for (unsigned i = 0; i < length; ++i)
6189 ops[i] = CONSTRUCTOR_ELT (res_op->ops[0], i)->value;
6191 vn_nary_op_t vnresult = NULL;
6192 tree res = vn_nary_op_lookup_pieces (length, (tree_code) res_op->code,
6193 res_op->type, ops, &vnresult);
6194 /* If this is used from expression simplification make sure to
6195 return an available expression. */
6196 if (res && TREE_CODE (res) == SSA_NAME && mprts_hook && rpo_avail)
6197 res = rpo_avail->eliminate_avail (vn_context_bb, res);
6198 return res;
6201 rpo_elim::~rpo_elim ()
6203 /* Release the avail vectors. */
6204 for (rpo_avail_t::iterator i = m_rpo_avail.begin ();
6205 i != m_rpo_avail.end (); ++i)
6206 (*i).second.release ();
6209 /* Return a leader for OPs value that is valid at BB. */
6211 tree
6212 rpo_elim::eliminate_avail (basic_block bb, tree op)
6214 bool visited;
6215 tree valnum = SSA_VAL (op, &visited);
6216 /* If we didn't visit OP then it must be defined outside of the
6217 region we process and must also dominate it. So it is available. */
6218 if (!visited)
6219 return op;
6220 if (TREE_CODE (valnum) == SSA_NAME)
6222 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
6223 return valnum;
6224 vec<std::pair<int, int> > *av = m_rpo_avail.get (valnum);
6225 if (!av || av->is_empty ())
6226 return NULL_TREE;
6227 int i = av->length () - 1;
6228 if ((*av)[i].first == bb->index)
6229 /* On tramp3d 90% of the cases are here. */
6230 return ssa_name ((*av)[i].second);
6233 basic_block abb = BASIC_BLOCK_FOR_FN (cfun, (*av)[i].first);
6234 /* ??? During elimination we have to use availability at the
6235 definition site of a use we try to replace. This
6236 is required to not run into inconsistencies because
6237 of dominated_by_p_w_unex behavior and removing a definition
6238 while not replacing all uses.
6239 ??? We could try to consistently walk dominators
6240 ignoring non-executable regions. The nearest common
6241 dominator of bb and abb is where we can stop walking. We
6242 may also be able to "pre-compute" (bits of) the next immediate
6243 (non-)dominator during the RPO walk when marking edges as
6244 executable. */
6245 if (dominated_by_p_w_unex (bb, abb))
6247 tree leader = ssa_name ((*av)[i].second);
6248 /* Prevent eliminations that break loop-closed SSA. */
6249 if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
6250 && ! SSA_NAME_IS_DEFAULT_DEF (leader)
6251 && ! flow_bb_inside_loop_p (gimple_bb (SSA_NAME_DEF_STMT
6252 (leader))->loop_father,
6253 bb))
6254 return NULL_TREE;
6255 if (dump_file && (dump_flags & TDF_DETAILS))
6257 print_generic_expr (dump_file, leader);
6258 fprintf (dump_file, " is available for ");
6259 print_generic_expr (dump_file, valnum);
6260 fprintf (dump_file, "\n");
6262 /* On tramp3d 99% of the _remaining_ cases succeed at
6263 the first entry. */
6264 return leader;
6266 /* ??? Can we somehow skip to the immediate dominator
6267 RPO index (bb_to_rpo)? Again, maybe not worth, on
6268 tramp3d the worst number of elements in the vector is 9. */
6270 while (--i >= 0);
6272 else if (valnum != VN_TOP)
6273 /* valnum is is_gimple_min_invariant. */
6274 return valnum;
6275 return NULL_TREE;
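/* For example, if the availability vector for value x_3 holds the
   entries (bb2, x_3) and (bb5, y_7), a lookup from a block dominated by
   bb5 returns y_7, a lookup from a block dominated only by bb2 returns
   x_3, and the lookup yields NULL_TREE when neither recorded block
   dominates the lookup block.  */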
6278 /* Make LEADER a leader for its value at BB. */
6280 void
6281 rpo_elim::eliminate_push_avail (basic_block bb, tree leader)
6283 tree valnum = VN_INFO (leader)->valnum;
6284 if (valnum == VN_TOP)
6285 return;
6286 if (dump_file && (dump_flags & TDF_DETAILS))
6288 fprintf (dump_file, "Making available beyond BB%d ", bb->index);
6289 print_generic_expr (dump_file, leader);
6290 fprintf (dump_file, " for value ");
6291 print_generic_expr (dump_file, valnum);
6292 fprintf (dump_file, "\n");
6294 bool existed;
6295 vec<std::pair<int, int> > &av = m_rpo_avail.get_or_insert (valnum, &existed);
6296 if (!existed)
6298 new (&av) vec<std::pair<int, int> >;
6299 av = vNULL;
6300 av.reserve_exact (2);
6302 av.safe_push (std::make_pair (bb->index, SSA_NAME_VERSION (leader)));
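/* The availability of a value is thus a vector of
   (basic-block index, SSA name version) pairs appended in RPO order;
   eliminate_avail above walks it from the back so the most recently
   registered leader whose block dominates the lookup block wins.  */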
6305 /* Valueization hook for RPO VN plus required state. */
6307 tree
6308 rpo_vn_valueize (tree name)
6310 if (TREE_CODE (name) == SSA_NAME)
6312 vn_ssa_aux_t val = VN_INFO (name);
6313 if (val)
6315 tree tem = val->valnum;
6316 if (tem != VN_TOP && tem != name)
6318 if (TREE_CODE (tem) != SSA_NAME)
6319 return tem;
6320 /* For all values we only valueize to an available leader
6321 which means we can use SSA name info without restriction. */
6322 tem = rpo_avail->eliminate_avail (vn_context_bb, tem);
6323 if (tem)
6324 return tem;
6328 return name;
6331 /* Insert on PRED_E predicates implied by CODE OPS being true, besides the
6332 condition itself and its inverted form which the caller inserts. */
6334 static void
6335 insert_related_predicates_on_edge (enum tree_code code, tree *ops, edge pred_e)
6337 switch (code)
6339 case LT_EXPR:
6340 /* a < b -> a {!,<}= b */
6341 vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
6342 ops, boolean_true_node, 0, pred_e);
6343 vn_nary_op_insert_pieces_predicated (2, LE_EXPR, boolean_type_node,
6344 ops, boolean_true_node, 0, pred_e);
6345 /* a < b -> ! a {>,=} b */
6346 vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
6347 ops, boolean_false_node, 0, pred_e);
6348 vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
6349 ops, boolean_false_node, 0, pred_e);
6350 break;
6351 case GT_EXPR:
6352 /* a > b -> a {!,>}= b */
6353 vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
6354 ops, boolean_true_node, 0, pred_e);
6355 vn_nary_op_insert_pieces_predicated (2, GE_EXPR, boolean_type_node,
6356 ops, boolean_true_node, 0, pred_e);
6357 /* a > b -> ! a {<,=} b */
6358 vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
6359 ops, boolean_false_node, 0, pred_e);
6360 vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
6361 ops, boolean_false_node, 0, pred_e);
6362 break;
6363 case EQ_EXPR:
6364 /* a == b -> ! a {<,>} b */
6365 vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
6366 ops, boolean_false_node, 0, pred_e);
6367 vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
6368 ops, boolean_false_node, 0, pred_e);
6369 break;
6370 case LE_EXPR:
6371 case GE_EXPR:
6372 case NE_EXPR:
6373 /* Nothing besides inverted condition. */
6374 break;
6375 default:;
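/* For example, for a condition if (i_1 < n_2) the true edge records
   i_1 != n_2 and i_1 <= n_2 as true and i_1 > n_2 and i_1 == n_2 as
   false, so a later if (i_1 <= n_2) reached only via that edge can be
   simplified through the predicated lookup done in process_bb below.  */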
6379 /* Main stmt worker for RPO VN, process BB. */
6381 static unsigned
6382 process_bb (rpo_elim &avail, basic_block bb,
6383 bool bb_visited, bool iterate_phis, bool iterate, bool eliminate,
6384 bool do_region, bitmap exit_bbs, bool skip_phis)
6386 unsigned todo = 0;
6387 edge_iterator ei;
6388 edge e;
6390 vn_context_bb = bb;
6392 /* If we are in loop-closed SSA preserve this state. This is
6393 relevant when called on regions from outside of FRE/PRE. */
6394 bool lc_phi_nodes = false;
6395 if (!skip_phis
6396 && loops_state_satisfies_p (LOOP_CLOSED_SSA))
6397 FOR_EACH_EDGE (e, ei, bb->preds)
6398 if (e->src->loop_father != e->dest->loop_father
6399 && flow_loop_nested_p (e->dest->loop_father,
6400 e->src->loop_father))
6402 lc_phi_nodes = true;
6403 break;
6406 /* When we visit a loop header substitute into loop info. */
6407 if (!iterate && eliminate && bb->loop_father->header == bb)
6409 /* Keep fields in sync with substitute_in_loop_info. */
6410 if (bb->loop_father->nb_iterations)
6411 bb->loop_father->nb_iterations
6412 = simplify_replace_tree (bb->loop_father->nb_iterations,
6413 NULL_TREE, NULL_TREE, vn_valueize);
6416 /* Value-number all defs in the basic-block. */
6417 if (!skip_phis)
6418 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
6419 gsi_next (&gsi))
6421 gphi *phi = gsi.phi ();
6422 tree res = PHI_RESULT (phi);
6423 vn_ssa_aux_t res_info = VN_INFO (res);
6424 if (!bb_visited)
6426 gcc_assert (!res_info->visited);
6427 res_info->valnum = VN_TOP;
6428 res_info->visited = true;
6431 /* When not iterating force backedge values to varying. */
6432 visit_stmt (phi, !iterate_phis);
6433 if (virtual_operand_p (res))
6434 continue;
6436 /* Eliminate */
6437 /* The interesting case for correctness of how we handle backedges
6438 and availability is gcc.dg/tree-ssa/pr22230.c; for optimization
6439 it is gcc.dg/tree-ssa/ssa-sccvn-2.c. */
6440 tree val = res_info->valnum;
6441 if (res != val && !iterate && eliminate)
6443 if (tree leader = avail.eliminate_avail (bb, res))
6445 if (leader != res
6446 /* Preserve loop-closed SSA form. */
6447 && (! lc_phi_nodes
6448 || is_gimple_min_invariant (leader)))
6450 if (dump_file && (dump_flags & TDF_DETAILS))
6452 fprintf (dump_file, "Replaced redundant PHI node "
6453 "defining ");
6454 print_generic_expr (dump_file, res);
6455 fprintf (dump_file, " with ");
6456 print_generic_expr (dump_file, leader);
6457 fprintf (dump_file, "\n");
6459 avail.eliminations++;
6461 if (may_propagate_copy (res, leader))
6463 /* Schedule for removal. */
6464 avail.to_remove.safe_push (phi);
6465 continue;
6467 /* ??? Else generate a copy stmt. */
6471 /* Only make defs available that are not already available. But make
6472 sure loop-closed SSA PHI node defs are picked up for
6473 downstream uses. */
6474 if (lc_phi_nodes
6475 || res == val
6476 || ! avail.eliminate_avail (bb, res))
6477 avail.eliminate_push_avail (bb, res);
6480 /* For empty BBs mark outgoing edges executable. For non-empty BBs
6481 we do this when processing the last stmt as we have to do this
6482 before elimination which otherwise forces GIMPLE_CONDs to
6483 if (1 != 0) style when seeing non-executable edges. */
6484 if (gsi_end_p (gsi_start_bb (bb)))
6486 FOR_EACH_EDGE (e, ei, bb->succs)
6488 if (!(e->flags & EDGE_EXECUTABLE))
6490 if (dump_file && (dump_flags & TDF_DETAILS))
6491 fprintf (dump_file,
6492 "marking outgoing edge %d -> %d executable\n",
6493 e->src->index, e->dest->index);
6494 e->flags |= EDGE_EXECUTABLE;
6495 e->dest->flags |= BB_EXECUTABLE;
6497 else if (!(e->dest->flags & BB_EXECUTABLE))
6499 if (dump_file && (dump_flags & TDF_DETAILS))
6500 fprintf (dump_file,
6501 "marking destination block %d reachable\n",
6502 e->dest->index);
6503 e->dest->flags |= BB_EXECUTABLE;
6507 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
6508 !gsi_end_p (gsi); gsi_next (&gsi))
6510 ssa_op_iter i;
6511 tree op;
6512 if (!bb_visited)
6514 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
6516 vn_ssa_aux_t op_info = VN_INFO (op);
6517 gcc_assert (!op_info->visited);
6518 op_info->valnum = VN_TOP;
6519 op_info->visited = true;
6522 /* We somehow have to deal with uses that are not defined
6523 in the processed region. Forcing unvisited uses to
6524 varying here doesn't play well with def-use following during
6525 expression simplification, so we deal with this by checking
6526 the visited flag in SSA_VAL. */
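	/* For example, an SSA name defined outside the processed region
	   but used inside it never has its visited flag set; SSA_VAL
	   reports that and rpo_elim::eliminate_avail then treats the name
	   as trivially available.  */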
6529 visit_stmt (gsi_stmt (gsi));
6531 gimple *last = gsi_stmt (gsi);
6532 e = NULL;
6533 switch (gimple_code (last))
6535 case GIMPLE_SWITCH:
6536 e = find_taken_edge (bb, vn_valueize (gimple_switch_index
6537 (as_a <gswitch *> (last))));
6538 break;
6539 case GIMPLE_COND:
6541 tree lhs = vn_valueize (gimple_cond_lhs (last));
6542 tree rhs = vn_valueize (gimple_cond_rhs (last));
6543 tree val = gimple_simplify (gimple_cond_code (last),
6544 boolean_type_node, lhs, rhs,
6545 NULL, vn_valueize);
6546 /* If the condition didn't simplify, see if we have recorded
6547 an expression from edges taken so far. */
6548 if (! val || TREE_CODE (val) != INTEGER_CST)
6550 vn_nary_op_t vnresult;
6551 tree ops[2];
6552 ops[0] = lhs;
6553 ops[1] = rhs;
6554 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (last),
6555 boolean_type_node, ops,
6556 &vnresult);
6557 /* Did we get a predicated value? */
6558 if (! val && vnresult && vnresult->predicated_values)
6560 val = vn_nary_op_get_predicated_value (vnresult, bb);
6561 if (val && dump_file && (dump_flags & TDF_DETAILS))
6563 fprintf (dump_file, "Got predicated value ");
6564 print_generic_expr (dump_file, val, TDF_NONE);
6565 fprintf (dump_file, " for ");
6566 print_gimple_stmt (dump_file, last, TDF_SLIM);
6570 if (val)
6571 e = find_taken_edge (bb, val);
6572 if (! e)
6574 /* If we didn't manage to compute the taken edge then
6575 push predicated expressions for the condition itself
6576 and related conditions to the hashtables. This allows
6577 simplification of redundant conditions which is
6578 important as early cleanup. */
6579 edge true_e, false_e;
6580 extract_true_false_edges_from_block (bb, &true_e, &false_e);
6581 enum tree_code code = gimple_cond_code (last);
6582 enum tree_code icode
6583 = invert_tree_comparison (code, HONOR_NANS (lhs));
6584 tree ops[2];
6585 ops[0] = lhs;
6586 ops[1] = rhs;
6587 if (do_region
6588 && bitmap_bit_p (exit_bbs, true_e->dest->index))
6589 true_e = NULL;
6590 if (do_region
6591 && bitmap_bit_p (exit_bbs, false_e->dest->index))
6592 false_e = NULL;
6593 if (true_e)
6594 vn_nary_op_insert_pieces_predicated
6595 (2, code, boolean_type_node, ops,
6596 boolean_true_node, 0, true_e);
6597 if (false_e)
6598 vn_nary_op_insert_pieces_predicated
6599 (2, code, boolean_type_node, ops,
6600 boolean_false_node, 0, false_e);
6601 if (icode != ERROR_MARK)
6603 if (true_e)
6604 vn_nary_op_insert_pieces_predicated
6605 (2, icode, boolean_type_node, ops,
6606 boolean_false_node, 0, true_e);
6607 if (false_e)
6608 vn_nary_op_insert_pieces_predicated
6609 (2, icode, boolean_type_node, ops,
6610 boolean_true_node, 0, false_e);
6612 /* Relax for non-integers, inverted condition handled
6613 above. */
6614 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
6616 if (true_e)
6617 insert_related_predicates_on_edge (code, ops, true_e);
6618 if (false_e)
6619 insert_related_predicates_on_edge (icode, ops, false_e);
6622 break;
6624 case GIMPLE_GOTO:
6625 e = find_taken_edge (bb, vn_valueize (gimple_goto_dest (last)));
6626 break;
6627 default:
6628 e = NULL;
6630 if (e)
6632 todo = TODO_cleanup_cfg;
6633 if (!(e->flags & EDGE_EXECUTABLE))
6635 if (dump_file && (dump_flags & TDF_DETAILS))
6636 fprintf (dump_file,
6637 "marking known outgoing %sedge %d -> %d executable\n",
6638 e->flags & EDGE_DFS_BACK ? "back-" : "",
6639 e->src->index, e->dest->index);
6640 e->flags |= EDGE_EXECUTABLE;
6641 e->dest->flags |= BB_EXECUTABLE;
6643 else if (!(e->dest->flags & BB_EXECUTABLE))
6645 if (dump_file && (dump_flags & TDF_DETAILS))
6646 fprintf (dump_file,
6647 "marking destination block %d reachable\n",
6648 e->dest->index);
6649 e->dest->flags |= BB_EXECUTABLE;
6652 else if (gsi_one_before_end_p (gsi))
6654 FOR_EACH_EDGE (e, ei, bb->succs)
6656 if (!(e->flags & EDGE_EXECUTABLE))
6658 if (dump_file && (dump_flags & TDF_DETAILS))
6659 fprintf (dump_file,
6660 "marking outgoing edge %d -> %d executable\n",
6661 e->src->index, e->dest->index);
6662 e->flags |= EDGE_EXECUTABLE;
6663 e->dest->flags |= BB_EXECUTABLE;
6665 else if (!(e->dest->flags & BB_EXECUTABLE))
6667 if (dump_file && (dump_flags & TDF_DETAILS))
6668 fprintf (dump_file,
6669 "marking destination block %d reachable\n",
6670 e->dest->index);
6671 e->dest->flags |= BB_EXECUTABLE;
6676 /* Eliminate. That also pushes to avail. */
6677 if (eliminate && ! iterate)
6678 avail.eliminate_stmt (bb, &gsi);
6679 else
6680 /* If not eliminating, make all not already available defs
6681 available. */
6682 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_DEF)
6683 if (! avail.eliminate_avail (bb, op))
6684 avail.eliminate_push_avail (bb, op);
6687 /* Eliminate in destination PHI arguments. Always substitute in dest
6688 PHIs, even for non-executable edges. This handles region
6689 exits PHIs. */
6690 if (!iterate && eliminate)
6691 FOR_EACH_EDGE (e, ei, bb->succs)
6692 for (gphi_iterator gsi = gsi_start_phis (e->dest);
6693 !gsi_end_p (gsi); gsi_next (&gsi))
6695 gphi *phi = gsi.phi ();
6696 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
6697 tree arg = USE_FROM_PTR (use_p);
6698 if (TREE_CODE (arg) != SSA_NAME
6699 || virtual_operand_p (arg))
6700 continue;
6701 tree sprime;
6702 if (SSA_NAME_IS_DEFAULT_DEF (arg))
6704 sprime = SSA_VAL (arg);
6705 gcc_assert (TREE_CODE (sprime) != SSA_NAME
6706 || SSA_NAME_IS_DEFAULT_DEF (sprime));
6708 else
6709 /* Look for sth available at the definition block of the argument.
6710 This avoids inconsistencies between availability there which
6711 decides if the stmt can be removed and availability at the
6712 use site. The SSA property ensures that things available
6713 at the definition are also available at uses. */
6714 sprime = avail.eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (arg)),
6715 arg);
6716 if (sprime
6717 && sprime != arg
6718 && may_propagate_copy (arg, sprime))
6719 propagate_value (use_p, sprime);
6722 vn_context_bb = NULL;
6723 return todo;
6726 /* Unwind state per basic-block. */
6728 struct unwind_state
6730 /* Times this block has been visited. */
6731 unsigned visited;
6732 /* Whether to handle this as iteration point or whether to treat
6733 incoming backedge PHI values as varying. */
6734 bool iterate;
6735 /* Maximum RPO index this block is reachable from. */
6736 int max_rpo;
6737 /* Unwind state. */
6738 void *ob_top;
6739 vn_reference_t ref_top;
6740 vn_phi_t phi_top;
6741 vn_nary_op_t nary_top;
6744 /* Unwind the RPO VN state for iteration. */
6746 static void
6747 do_unwind (unwind_state *to, int rpo_idx, rpo_elim &avail, int *bb_to_rpo)
6749 gcc_assert (to->iterate);
6750 for (; last_inserted_nary != to->nary_top;
6751 last_inserted_nary = last_inserted_nary->next)
6753 vn_nary_op_t *slot;
6754 slot = valid_info->nary->find_slot_with_hash
6755 (last_inserted_nary, last_inserted_nary->hashcode, NO_INSERT);
6756 /* Predication causes the need to restore previous state. */
6757 if ((*slot)->unwind_to)
6758 *slot = (*slot)->unwind_to;
6759 else
6760 valid_info->nary->clear_slot (slot);
6762 for (; last_inserted_phi != to->phi_top;
6763 last_inserted_phi = last_inserted_phi->next)
6765 vn_phi_t *slot;
6766 slot = valid_info->phis->find_slot_with_hash
6767 (last_inserted_phi, last_inserted_phi->hashcode, NO_INSERT);
6768 valid_info->phis->clear_slot (slot);
6770 for (; last_inserted_ref != to->ref_top;
6771 last_inserted_ref = last_inserted_ref->next)
6773 vn_reference_t *slot;
6774 slot = valid_info->references->find_slot_with_hash
6775 (last_inserted_ref, last_inserted_ref->hashcode, NO_INSERT);
6776 (*slot)->operands.release ();
6777 valid_info->references->clear_slot (slot);
6779 obstack_free (&vn_tables_obstack, to->ob_top);
6781 /* Prune entries made available in blocks at RPO index rpo_idx or later from avail. */
6782 /* ??? This is O(number-of-values-in-region) which is
6783 O(region-size) rather than O(iteration-piece). */
6784 for (rpo_elim::rpo_avail_t::iterator i
6785 = avail.m_rpo_avail.begin ();
6786 i != avail.m_rpo_avail.end (); ++i)
6788 while (! (*i).second.is_empty ())
6790 if (bb_to_rpo[(*i).second.last ().first] < rpo_idx)
6791 break;
6792 (*i).second.pop ();
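/* Unwinding thus discards all hashtable entries and obstack memory
   created since block RPO_IDX was last visited and prunes availability
   registered in blocks at RPO positions >= RPO_IDX, so iteration can
   restart at RPO_IDX from the state recorded for it.  */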
6797 /* Do VN on a SEME region specified by ENTRY and EXIT_BBS in FN.
6798 If ITERATE is true then treat backedges optimistically as not
6799 executed and iterate. If ELIMINATE is true then perform
6800 elimination, otherwise leave that to the caller. */
6802 static unsigned
6803 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
6804 bool iterate, bool eliminate)
6806 unsigned todo = 0;
6808 /* We currently do not support region-based iteration when
6809 elimination is requested. */
6810 gcc_assert (!entry || !iterate || !eliminate);
6811 /* When iterating we need loop info up-to-date. */
6812 gcc_assert (!iterate || !loops_state_satisfies_p (LOOPS_NEED_FIXUP));
6814 bool do_region = entry != NULL;
6815 if (!do_region)
6817 entry = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fn));
6818 exit_bbs = BITMAP_ALLOC (NULL);
6819 bitmap_set_bit (exit_bbs, EXIT_BLOCK);
6822 /* Clear EDGE_DFS_BACK on "all" entry edges, RPO order compute will
6823 re-mark those that are contained in the region. */
6824 edge_iterator ei;
6825 edge e;
6826 FOR_EACH_EDGE (e, ei, entry->dest->preds)
6827 e->flags &= ~EDGE_DFS_BACK;
6829 int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS);
6830 int n = rev_post_order_and_mark_dfs_back_seme
6831 (fn, entry, exit_bbs, !loops_state_satisfies_p (LOOPS_NEED_FIXUP), rpo);
6832 /* rev_post_order_and_mark_dfs_back_seme fills RPO in reverse order. */
6833 for (int i = 0; i < n / 2; ++i)
6834 std::swap (rpo[i], rpo[n-i-1]);
6836 if (!do_region)
6837 BITMAP_FREE (exit_bbs);
6839 /* If there are any non-DFS_BACK edges into entry->dest skip
6840 processing PHI nodes for that block. This supports
6841 value-numbering loop bodies w/o the actual loop. */
6842 FOR_EACH_EDGE (e, ei, entry->dest->preds)
6843 if (e != entry
6844 && !(e->flags & EDGE_DFS_BACK))
6845 break;
6846 bool skip_entry_phis = e != NULL;
6847 if (skip_entry_phis && dump_file && (dump_flags & TDF_DETAILS))
6848 fprintf (dump_file, "Region does not contain all edges into "
6849 "the entry block, skipping its PHIs.\n");
6851 int *bb_to_rpo = XNEWVEC (int, last_basic_block_for_fn (fn));
6852 for (int i = 0; i < n; ++i)
6853 bb_to_rpo[rpo[i]] = i;
6855 unwind_state *rpo_state = XNEWVEC (unwind_state, n);
6857 rpo_elim avail (entry->dest);
6858 rpo_avail = &avail;
6860 /* Verify we have no extra entries into the region. */
6861 if (flag_checking && do_region)
6863 auto_bb_flag bb_in_region (fn);
6864 for (int i = 0; i < n; ++i)
6866 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6867 bb->flags |= bb_in_region;
6869 /* We can't merge the first two loops because we cannot rely
6870 on EDGE_DFS_BACK for edges not within the region. But if
6871 we decide to always have the bb_in_region flag we can
6872 do the checking during the RPO walk itself (but then it's
6873 also easy to handle MEME conservatively). */
6874 for (int i = 0; i < n; ++i)
6876 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6877 edge e;
6878 edge_iterator ei;
6879 FOR_EACH_EDGE (e, ei, bb->preds)
6880 gcc_assert (e == entry
6881 || (skip_entry_phis && bb == entry->dest)
6882 || (e->src->flags & bb_in_region));
6884 for (int i = 0; i < n; ++i)
6886 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6887 bb->flags &= ~bb_in_region;
6891 /* Create the VN state. For the initial size of the various hashtables
6892 use a heuristic based on region size and number of SSA names. */
6893 unsigned region_size = (((unsigned HOST_WIDE_INT)n * num_ssa_names)
6894 / (n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS));
6895 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
6896 next_value_id = 1;
6898 vn_ssa_aux_hash = new hash_table <vn_ssa_aux_hasher> (region_size * 2);
6899 gcc_obstack_init (&vn_ssa_aux_obstack);
6901 gcc_obstack_init (&vn_tables_obstack);
6902 gcc_obstack_init (&vn_tables_insert_obstack);
6903 valid_info = XCNEW (struct vn_tables_s);
6904 allocate_vn_table (valid_info, region_size);
6905 last_inserted_ref = NULL;
6906 last_inserted_phi = NULL;
6907 last_inserted_nary = NULL;
6909 vn_valueize = rpo_vn_valueize;
6911 /* Initialize the unwind state and edge/BB executable state. */
6912 bool need_max_rpo_iterate = false;
6913 for (int i = 0; i < n; ++i)
6915 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6916 rpo_state[i].visited = 0;
6917 rpo_state[i].max_rpo = i;
6918 bb->flags &= ~BB_EXECUTABLE;
6919 bool has_backedges = false;
6920 edge e;
6921 edge_iterator ei;
6922 FOR_EACH_EDGE (e, ei, bb->preds)
6924 if (e->flags & EDGE_DFS_BACK)
6925 has_backedges = true;
6926 e->flags &= ~EDGE_EXECUTABLE;
6927 if (iterate || e == entry || (skip_entry_phis && bb == entry->dest))
6928 continue;
6929 if (bb_to_rpo[e->src->index] > i)
6931 rpo_state[i].max_rpo = MAX (rpo_state[i].max_rpo,
6932 bb_to_rpo[e->src->index]);
6933 need_max_rpo_iterate = true;
6935 else
6936 rpo_state[i].max_rpo
6937 = MAX (rpo_state[i].max_rpo,
6938 rpo_state[bb_to_rpo[e->src->index]].max_rpo);
6940 rpo_state[i].iterate = iterate && has_backedges;
6942 entry->flags |= EDGE_EXECUTABLE;
6943 entry->dest->flags |= BB_EXECUTABLE;
6945 /* When there are irreducible regions the simplistic max_rpo computation
6946 above for the case of backedges doesn't work and we need to iterate
6947 until there are no more changes. */
6948 unsigned nit = 0;
6949 while (need_max_rpo_iterate)
6951 nit++;
6952 need_max_rpo_iterate = false;
6953 for (int i = 0; i < n; ++i)
6955 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6956 edge e;
6957 edge_iterator ei;
6958 FOR_EACH_EDGE (e, ei, bb->preds)
6960 if (e == entry || (skip_entry_phis && bb == entry->dest))
6961 continue;
6962 int max_rpo = MAX (rpo_state[i].max_rpo,
6963 rpo_state[bb_to_rpo[e->src->index]].max_rpo);
6964 if (rpo_state[i].max_rpo != max_rpo)
6966 rpo_state[i].max_rpo = max_rpo;
6967 need_max_rpo_iterate = true;
6972 statistics_histogram_event (cfun, "RPO max_rpo iterations", nit);
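/* The max_rpo of a block is the largest RPO index it is reachable
   from; when backedges feed information from later RPO positions the
   single forward pass above is not enough for irreducible regions, so
   the fixed-point loop keeps propagating until nothing changes.  */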
6974 /* As a heuristic to improve compile-time we handle only the N innermost
6975 loops and the outermost one optimistically. */
6976 if (iterate)
6978 loop_p loop;
6979 unsigned max_depth = PARAM_VALUE (PARAM_RPO_VN_MAX_LOOP_DEPTH);
6980 FOR_EACH_LOOP (loop, LI_ONLY_INNERMOST)
6981 if (loop_depth (loop) > max_depth)
6982 for (unsigned i = 2;
6983 i < loop_depth (loop) - max_depth; ++i)
6985 basic_block header = superloop_at_depth (loop, i)->header;
6986 bool non_latch_backedge = false;
6987 edge e;
6988 edge_iterator ei;
6989 FOR_EACH_EDGE (e, ei, header->preds)
6990 if (e->flags & EDGE_DFS_BACK)
6992 /* There can be a non-latch backedge into the header
6993 which is part of an outer irreducible region. We
6994 cannot avoid iterating this block then. */
6995 if (!dominated_by_p (CDI_DOMINATORS,
6996 e->src, e->dest))
6998 if (dump_file && (dump_flags & TDF_DETAILS))
6999 fprintf (dump_file, "non-latch backedge %d -> %d "
7000 "forces iteration of loop %d\n",
7001 e->src->index, e->dest->index, loop->num);
7002 non_latch_backedge = true;
7004 else
7005 e->flags |= EDGE_EXECUTABLE;
7007 rpo_state[bb_to_rpo[header->index]].iterate = non_latch_backedge;
7011 uint64_t nblk = 0;
7012 int idx = 0;
7013 if (iterate)
7014 /* Go and process all blocks, iterating as necessary. */
7017 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
7019 /* If the block has incoming backedges remember unwind state. This
7020 is required even for non-executable blocks since in irreducible
7021 regions we might reach them via the backedge and re-start iterating
7022 from there.
7023 Note we can individually mark blocks with incoming backedges to
7024 not iterate where we then handle PHIs conservatively. We do that
7025 heuristically to reduce compile-time for degenerate cases. */
7026 if (rpo_state[idx].iterate)
7028 rpo_state[idx].ob_top = obstack_alloc (&vn_tables_obstack, 0);
7029 rpo_state[idx].ref_top = last_inserted_ref;
7030 rpo_state[idx].phi_top = last_inserted_phi;
7031 rpo_state[idx].nary_top = last_inserted_nary;
7034 if (!(bb->flags & BB_EXECUTABLE))
7036 if (dump_file && (dump_flags & TDF_DETAILS))
7037 fprintf (dump_file, "Block %d: BB%d found not executable\n",
7038 idx, bb->index);
7039 idx++;
7040 continue;
7043 if (dump_file && (dump_flags & TDF_DETAILS))
7044 fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
7045 nblk++;
7046 todo |= process_bb (avail, bb,
7047 rpo_state[idx].visited != 0,
7048 rpo_state[idx].iterate,
7049 iterate, eliminate, do_region, exit_bbs, false);
7050 rpo_state[idx].visited++;
7052 /* Check whether changed values flow over executable outgoing backedges
7053 and whether those change destination PHI values (that's the thing we
7054 can easily verify). Reduce over all such edges to the farthest
7055 away PHI. */
7056 int iterate_to = -1;
7057 edge_iterator ei;
7058 edge e;
7059 FOR_EACH_EDGE (e, ei, bb->succs)
7060 if ((e->flags & (EDGE_DFS_BACK|EDGE_EXECUTABLE))
7061 == (EDGE_DFS_BACK|EDGE_EXECUTABLE)
7062 && rpo_state[bb_to_rpo[e->dest->index]].iterate)
7064 int destidx = bb_to_rpo[e->dest->index];
7065 if (!rpo_state[destidx].visited)
7067 if (dump_file && (dump_flags & TDF_DETAILS))
7068 fprintf (dump_file, "Unvisited destination %d\n",
7069 e->dest->index);
7070 if (iterate_to == -1 || destidx < iterate_to)
7071 iterate_to = destidx;
7072 continue;
7074 if (dump_file && (dump_flags & TDF_DETAILS))
7075 fprintf (dump_file, "Looking for changed values of backedge"
7076 " %d->%d destination PHIs\n",
7077 e->src->index, e->dest->index);
7078 vn_context_bb = e->dest;
7079 gphi_iterator gsi;
7080 for (gsi = gsi_start_phis (e->dest);
7081 !gsi_end_p (gsi); gsi_next (&gsi))
7083 bool inserted = false;
7084 /* While we'd ideally just iterate on value changes
7085 we CSE PHIs and do that even across basic-block
7086 boundaries. So even hashtable state changes can
7087 be important (which is roughly equivalent to
7088 PHI argument value changes). To not excessively
7089 iterate because of that we track whether a PHI
7090 was CSEd to with GF_PLF_1. */
7091 bool phival_changed;
7092 if ((phival_changed = visit_phi (gsi.phi (),
7093 &inserted, false))
7094 || (inserted && gimple_plf (gsi.phi (), GF_PLF_1)))
7096 if (!phival_changed
7097 && dump_file && (dump_flags & TDF_DETAILS))
7098 fprintf (dump_file, "PHI was CSEd and hashtable "
7099 "state (changed)\n");
7100 if (iterate_to == -1 || destidx < iterate_to)
7101 iterate_to = destidx;
7102 break;
7105 vn_context_bb = NULL;
7107 if (iterate_to != -1)
7109 do_unwind (&rpo_state[iterate_to], iterate_to, avail, bb_to_rpo);
7110 idx = iterate_to;
7111 if (dump_file && (dump_flags & TDF_DETAILS))
7112 fprintf (dump_file, "Iterating to %d BB%d\n",
7113 iterate_to, rpo[iterate_to]);
7114 continue;
7117 idx++;
7119 while (idx < n);
7121 else /* !iterate */
7123 /* Process all blocks greedily with a worklist that enforces RPO
7124 processing of reachable blocks. */
7125 auto_bitmap worklist;
7126 bitmap_set_bit (worklist, 0);
7127 while (!bitmap_empty_p (worklist))
7129 int idx = bitmap_first_set_bit (worklist);
7130 bitmap_clear_bit (worklist, idx);
7131 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
7132 gcc_assert ((bb->flags & BB_EXECUTABLE)
7133 && !rpo_state[idx].visited);
7135 if (dump_file && (dump_flags & TDF_DETAILS))
7136 fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
7138 /* When we run into predecessor edges where we cannot trust their
7139 executable state, mark them executable so PHI processing will
7140 be conservative.
7141 ??? Do we need to force arguments flowing over that edge
7142 to be varying or will they even always be? */
7143 edge_iterator ei;
7144 edge e;
7145 FOR_EACH_EDGE (e, ei, bb->preds)
7146 if (!(e->flags & EDGE_EXECUTABLE)
7147 && (bb == entry->dest
7148 || (!rpo_state[bb_to_rpo[e->src->index]].visited
7149 && (rpo_state[bb_to_rpo[e->src->index]].max_rpo
7150 >= (int)idx))))
7152 if (dump_file && (dump_flags & TDF_DETAILS))
7153 fprintf (dump_file, "Cannot trust state of predecessor "
7154 "edge %d -> %d, marking executable\n",
7155 e->src->index, e->dest->index);
7156 e->flags |= EDGE_EXECUTABLE;
7159 nblk++;
7160 todo |= process_bb (avail, bb, false, false, false, eliminate,
7161 do_region, exit_bbs,
7162 skip_entry_phis && bb == entry->dest);
7163 rpo_state[idx].visited++;
7165 FOR_EACH_EDGE (e, ei, bb->succs)
7166 if ((e->flags & EDGE_EXECUTABLE)
7167 && e->dest->index != EXIT_BLOCK
7168 && (!do_region || !bitmap_bit_p (exit_bbs, e->dest->index))
7169 && !rpo_state[bb_to_rpo[e->dest->index]].visited)
7170 bitmap_set_bit (worklist, bb_to_rpo[e->dest->index]);
7174 /* Gather and report statistics if statistics or a dump file is active. */
7175 int nex = 0;
7176 unsigned max_visited = 1;
7177 for (int i = 0; i < n; ++i)
7179 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7180 if (bb->flags & BB_EXECUTABLE)
7181 nex++;
7182 statistics_histogram_event (cfun, "RPO block visited times",
7183 rpo_state[i].visited);
7184 if (rpo_state[i].visited > max_visited)
7185 max_visited = rpo_state[i].visited;
7187 unsigned nvalues = 0, navail = 0;
7188 for (rpo_elim::rpo_avail_t::iterator i = avail.m_rpo_avail.begin ();
7189 i != avail.m_rpo_avail.end (); ++i)
7191 nvalues++;
7192 navail += (*i).second.length ();
7194 statistics_counter_event (cfun, "RPO blocks", n);
7195 statistics_counter_event (cfun, "RPO blocks visited", nblk);
7196 statistics_counter_event (cfun, "RPO blocks executable", nex);
7197 statistics_histogram_event (cfun, "RPO iterations", 10*nblk / nex);
7198 statistics_histogram_event (cfun, "RPO num values", nvalues);
7199 statistics_histogram_event (cfun, "RPO num avail", navail);
7200 statistics_histogram_event (cfun, "RPO num lattice",
7201 vn_ssa_aux_hash->elements ());
7202 if (dump_file && (dump_flags & (TDF_DETAILS|TDF_STATS)))
7204 fprintf (dump_file, "RPO iteration over %d blocks visited %" PRIu64
7205 " blocks in total discovering %d executable blocks iterating "
7206 "%d.%d times, a block was visited max. %u times\n",
7207 n, nblk, nex,
7208 (int)((10*nblk / nex)/10), (int)((10*nblk / nex)%10),
7209 max_visited);
7210 fprintf (dump_file, "RPO tracked %d values available at %d locations "
7211 "and %" PRIu64 " lattice elements\n",
7212 nvalues, navail, (uint64_t) vn_ssa_aux_hash->elements ());
7215 if (eliminate)
7217 /* When !iterate we already performed elimination during the RPO
7218 walk. */
7219 if (iterate)
7221 /* Elimination for region-based VN needs to be done within the
7222 RPO walk. */
7223 gcc_assert (! do_region);
7224 /* Note we can't use avail.walk here because that gets confused
7225 by the existing availability and it will be less efficient
7226 as well. */
7227 todo |= eliminate_with_rpo_vn (NULL);
7229 else
7230 todo |= avail.eliminate_cleanup (do_region);
7233 vn_valueize = NULL;
7234 rpo_avail = NULL;
7236 XDELETEVEC (bb_to_rpo);
7237 XDELETEVEC (rpo);
7238 XDELETEVEC (rpo_state);
7240 return todo;
7243 /* Region-based entry for RPO VN. Performs value-numbering and elimination
7244 on the SEME region specified by ENTRY and EXIT_BBS. If ENTRY is not
7245 the only edge into the region at ENTRY->dest PHI nodes in ENTRY->dest
7246 are not considered. */
7248 unsigned
7249 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs)
7251 default_vn_walk_kind = VN_WALKREWRITE;
7252 unsigned todo = do_rpo_vn (fn, entry, exit_bbs, false, true);
7253 free_rpo_vn ();
7254 return todo;
7258 namespace {
7260 const pass_data pass_data_fre =
7262 GIMPLE_PASS, /* type */
7263 "fre", /* name */
7264 OPTGROUP_NONE, /* optinfo_flags */
7265 TV_TREE_FRE, /* tv_id */
7266 ( PROP_cfg | PROP_ssa ), /* properties_required */
7267 0, /* properties_provided */
7268 0, /* properties_destroyed */
7269 0, /* todo_flags_start */
7270 0, /* todo_flags_finish */
7273 class pass_fre : public gimple_opt_pass
7275 public:
7276 pass_fre (gcc::context *ctxt)
7277 : gimple_opt_pass (pass_data_fre, ctxt), may_iterate (true)
7280 /* opt_pass methods: */
7281 opt_pass * clone () { return new pass_fre (m_ctxt); }
7282 void set_pass_param (unsigned int n, bool param)
7284 gcc_assert (n == 0);
7285 may_iterate = param;
7287 virtual bool gate (function *)
7289 return flag_tree_fre != 0 && (may_iterate || optimize > 1);
7291 virtual unsigned int execute (function *);
7293 private:
7294 bool may_iterate;
7295 }; // class pass_fre
7297 unsigned int
7298 pass_fre::execute (function *fun)
7300 unsigned todo = 0;
7302 /* At -O[1g] use the cheap non-iterating mode. */
7303 bool iterate_p = may_iterate && (optimize > 1);
7304 calculate_dominance_info (CDI_DOMINATORS);
7305 if (iterate_p)
7306 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
7308 default_vn_walk_kind = VN_WALKREWRITE;
7309 todo = do_rpo_vn (fun, NULL, NULL, iterate_p, true);
7310 free_rpo_vn ();
7312 if (iterate_p)
7313 loop_optimizer_finalize ();
7315 return todo;
7318 } // anon namespace
7320 gimple_opt_pass *
7321 make_pass_fre (gcc::context *ctxt)
7323 return new pass_fre (ctxt);
7326 #undef BB_EXECUTABLE