gcc/tree-ssa-sccvn.c
/* SCC value numbering for trees
   Copyright (C) 2006-2018 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "ssa.h"
#include "expmed.h"
#include "insn-config.h"
#include "memmodel.h"
#include "emit-rtl.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "cfganal.h"
#include "tree-inline.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "flags.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "dumpfile.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-cfg.h"
#include "domwalk.h"
#include "gimple-iterator.h"
#include "gimple-match.h"
#include "stringpool.h"
#include "attribs.h"
#include "tree-pass.h"
#include "statistics.h"
#include "langhooks.h"
#include "ipa-utils.h"
#include "dbgcnt.h"
#include "tree-cfgcleanup.h"
#include "tree-ssa-loop.h"
#include "tree-scalar-evolution.h"
#include "tree-ssa-sccvn.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.
   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.
*/
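
/* As an illustrative sketch (not part of the original sources),
   consider value-numbering this GIMPLE loop body, where i_1 and i_3
   form a cycle (an SCC) in the SSA graph:

       # i_1 = PHI <0(2), i_3(3)>
       i_3 = i_1 + 1;

   Alternative 2 above iterates only { i_1, i_3 } with the optimistic
   table until their value numbers stabilize; operands defined outside
   the SCC (here the constant 0) are guaranteed to be valued already
   when the SCC is popped off the stack.  */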
/* There's no BB_EXECUTABLE but we can use BB_VISITED.  */
#define BB_EXECUTABLE BB_VISITED

static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;
/* vn_nary_op hashtable helpers.  */

struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
{
  typedef vn_nary_op_s *compare_type;
  static inline hashval_t hash (const vn_nary_op_s *);
  static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
};

/* Return the computed hashcode for nary operation P1.  */

inline hashval_t
vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
{
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

inline bool
vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
{
  return vno1 == vno2 || vn_nary_op_eq (vno1, vno2);
}

typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
/* vn_phi hashtable helpers.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);

struct vn_phi_hasher : nofree_ptr_hash <vn_phi_s>
{
  static inline hashval_t hash (const vn_phi_s *);
  static inline bool equal (const vn_phi_s *, const vn_phi_s *);
};

/* Return the computed hashcode for phi operation P1.  */

inline hashval_t
vn_phi_hasher::hash (const vn_phi_s *vp1)
{
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

inline bool
vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
{
  return vp1 == vp2 || vn_phi_eq (vp1, vp2);
}

typedef hash_table<vn_phi_hasher> vn_phi_table_type;
typedef vn_phi_table_type::iterator vn_phi_iterator_type;
/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
	  /* We do not care for differences in type qualification.  */
	  && (vro1->type == vro2->type
	      || (vro1->type && vro2->type
		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
					 TYPE_MAIN_VARIANT (vro2->type))))
	  && expressions_equal_p (vro1->op0, vro2->op0)
	  && expressions_equal_p (vro1->op1, vro2->op1)
	  && expressions_equal_p (vro1->op2, vro2->op2));
}
/* Free a reference operation structure VR.  */

static inline void
free_reference (vn_reference_s *vr)
{
  vr->operands.release ();
}
/* vn_reference hashtable helpers.  */

struct vn_reference_hasher : nofree_ptr_hash <vn_reference_s>
{
  static inline hashval_t hash (const vn_reference_s *);
  static inline bool equal (const vn_reference_s *, const vn_reference_s *);
};

/* Return the hashcode for a given reference operation P1.  */

inline hashval_t
vn_reference_hasher::hash (const vn_reference_s *vr1)
{
  return vr1->hashcode;
}

inline bool
vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
{
  return v == c || vn_reference_eq (v, c);
}

typedef hash_table<vn_reference_hasher> vn_reference_table_type;
typedef vn_reference_table_type::iterator vn_reference_iterator_type;
/* The set of VN hashtables.  */

typedef struct vn_tables_s
{
  vn_nary_op_table_type *nary;
  vn_phi_table_type *phis;
  vn_reference_table_type *references;
} *vn_tables_t;
/* vn_constant hashtable helpers.  */

struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
{
  static inline hashval_t hash (const vn_constant_s *);
  static inline bool equal (const vn_constant_s *, const vn_constant_s *);
};

/* Hash table hash function for vn_constant_t.  */

inline hashval_t
vn_constant_hasher::hash (const vn_constant_s *vc1)
{
  return vc1->hashcode;
}

/* Hash table equality function for vn_constant_t.  */

inline bool
vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
{
  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}
static hash_table<vn_constant_hasher> *constant_to_value_id;
static bitmap constant_value_ids;

/* Obstack we allocate the vn-tables elements from.  */
static obstack vn_tables_obstack;
/* Special obstack we never unwind.  */
static obstack vn_tables_insert_obstack;

static vn_reference_t last_inserted_ref;
static vn_phi_t last_inserted_phi;
static vn_nary_op_t last_inserted_nary;

/* Valid hashtables storing information we have proven to be
   correct.  */
static vn_tables_t valid_info;
/* Valueization hook.  Valueize NAME if it is an SSA name, otherwise
   just return it.  */
tree (*vn_valueize) (tree);

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;
/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

struct vn_ssa_aux_hasher : typed_noop_remove <vn_ssa_aux_t>
{
  typedef vn_ssa_aux_t value_type;
  typedef tree compare_type;
  static inline hashval_t hash (const value_type &);
  static inline bool equal (const value_type &, const compare_type &);
  static inline void mark_deleted (value_type &) {}
  static inline void mark_empty (value_type &e) { e = NULL; }
  static inline bool is_deleted (value_type &) { return false; }
  static inline bool is_empty (value_type &e) { return e == NULL; }
};

hashval_t
vn_ssa_aux_hasher::hash (const value_type &entry)
{
  return SSA_NAME_VERSION (entry->name);
}

bool
vn_ssa_aux_hasher::equal (const value_type &entry, const compare_type &name)
{
  return name == entry->name;
}

static hash_table<vn_ssa_aux_hasher> *vn_ssa_aux_hash;
typedef hash_table<vn_ssa_aux_hasher>::iterator vn_ssa_aux_iterator_type;
static struct obstack vn_ssa_aux_obstack;
static vn_nary_op_t vn_nary_op_insert_stmt (gimple *, tree);
static unsigned int vn_nary_length_from_stmt (gimple *);
static vn_nary_op_t alloc_vn_nary_op_noinit (unsigned int, obstack *);
static vn_nary_op_t vn_nary_op_insert_into (vn_nary_op_t,
					    vn_nary_op_table_type *, bool);
static void init_vn_nary_op_from_stmt (vn_nary_op_t, gimple *);
static void init_vn_nary_op_from_pieces (vn_nary_op_t, unsigned int,
					 enum tree_code, tree, tree *);
static tree vn_lookup_simplify_result (gimple_match_op *);
/* Return whether there is value numbering information for a given SSA name.  */

bool
has_VN_INFO (tree name)
{
  return vn_ssa_aux_hash->find_with_hash (name, SSA_NAME_VERSION (name));
}
vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t *res
    = vn_ssa_aux_hash->find_slot_with_hash (name, SSA_NAME_VERSION (name),
					    INSERT);
  if (*res != NULL)
    return *res;

  vn_ssa_aux_t newinfo = *res = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  newinfo->name = name;
  newinfo->valnum = VN_TOP;
  /* We are using the visited flag to handle uses with defs not within the
     region being value-numbered.  */
  newinfo->visited = false;

  /* Given we create the VN_INFOs on-demand now we have to do initialization
     different than VN_TOP here.  */
  if (SSA_NAME_IS_DEFAULT_DEF (name))
    switch (TREE_CODE (SSA_NAME_VAR (name)))
      {
      case VAR_DECL:
	/* All undefined vars are VARYING.  */
	newinfo->valnum = name;
	newinfo->visited = true;
	break;

      case PARM_DECL:
	/* Parameters are VARYING but we can record a condition
	   if we know it is a non-NULL pointer.  */
	newinfo->visited = true;
	newinfo->valnum = name;
	if (POINTER_TYPE_P (TREE_TYPE (name))
	    && nonnull_arg_p (SSA_NAME_VAR (name)))
	  {
	    tree ops[2];
	    ops[0] = name;
	    ops[1] = build_int_cst (TREE_TYPE (name), 0);
	    vn_nary_op_t nary;
	    /* Allocate from non-unwinding stack.  */
	    nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
	    init_vn_nary_op_from_pieces (nary, 2, NE_EXPR,
					 boolean_type_node, ops);
	    nary->predicated_values = 0;
	    nary->u.result = boolean_true_node;
	    vn_nary_op_insert_into (nary, valid_info->nary, true);
	    gcc_assert (nary->unwind_to == NULL);
	    /* Also do not link it into the undo chain.  */
	    last_inserted_nary = nary->next;
	    nary->next = (vn_nary_op_t)(void *)-1;
	    nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
	    init_vn_nary_op_from_pieces (nary, 2, EQ_EXPR,
					 boolean_type_node, ops);
	    nary->predicated_values = 0;
	    nary->u.result = boolean_false_node;
	    vn_nary_op_insert_into (nary, valid_info->nary, true);
	    gcc_assert (nary->unwind_to == NULL);
	    last_inserted_nary = nary->next;
	    nary->next = (vn_nary_op_t)(void *)-1;
	    if (dump_file && (dump_flags & TDF_DETAILS))
	      {
		fprintf (dump_file, "Recording ");
		print_generic_expr (dump_file, name, TDF_SLIM);
		fprintf (dump_file, " != 0\n");
	      }
	  }
	break;

      case RESULT_DECL:
	/* If the result is passed by invisible reference the default
	   def is initialized, otherwise it's uninitialized.  Still
	   undefined is varying.  */
	newinfo->visited = true;
	newinfo->valnum = name;
	break;

      default:
	gcc_unreachable ();
      }
  return newinfo;
}
/* Return the SSA value of X.  */

inline tree
SSA_VAL (tree x, bool *visited = NULL)
{
  vn_ssa_aux_t tem = vn_ssa_aux_hash->find_with_hash (x, SSA_NAME_VERSION (x));
  if (visited)
    *visited = tem && tem->visited;
  return tem && tem->visited ? tem->valnum : x;
}
/* Return whether X was visited.  */

inline bool
SSA_VISITED (tree x)
{
  vn_ssa_aux_t tem = vn_ssa_aux_hash->find_with_hash (x, SSA_NAME_VERSION (x));
  return tem && tem->visited;
}
/* Return the SSA value of the VUSE x, supporting released VDEFs
   during elimination which will value-number the VDEF to the
   associated VUSE (but not substitute in the whole lattice).  */

static inline tree
vuse_ssa_val (tree x)
{
  if (!x)
    return NULL_TREE;

  do
    {
      x = SSA_VAL (x);
      gcc_assert (x != VN_TOP);
    }
  while (SSA_NAME_IN_FREE_LIST (x));

  return x;
}
/* Similar to the above but used as callback for walk_non_aliases_vuses
   and thus should stop at unvisited VUSE to not walk across region
   boundaries.  */

static tree
vuse_valueize (tree vuse)
{
  do
    {
      bool visited;
      vuse = SSA_VAL (vuse, &visited);
      if (!visited)
	return NULL_TREE;
      gcc_assert (vuse != VN_TOP);
    }
  while (SSA_NAME_IN_FREE_LIST (vuse));
  return vuse;
}
/* Return the vn_kind the expression computed by the stmt should be
   associated with.  */

enum vn_kind
vn_get_stmt_kind (gimple *stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      return VN_REFERENCE;
    case GIMPLE_PHI:
      return VN_PHI;
    case GIMPLE_ASSIGN:
      {
	enum tree_code code = gimple_assign_rhs_code (stmt);
	tree rhs1 = gimple_assign_rhs1 (stmt);
	switch (get_gimple_rhs_class (code))
	  {
	  case GIMPLE_UNARY_RHS:
	  case GIMPLE_BINARY_RHS:
	  case GIMPLE_TERNARY_RHS:
	    return VN_NARY;
	  case GIMPLE_SINGLE_RHS:
	    switch (TREE_CODE_CLASS (code))
	      {
	      case tcc_reference:
		/* VOP-less references can go through unary case.  */
		if ((code == REALPART_EXPR
		     || code == IMAGPART_EXPR
		     || code == VIEW_CONVERT_EXPR
		     || code == BIT_FIELD_REF)
		    && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
		  return VN_NARY;

		/* Fallthrough.  */
	      case tcc_declaration:
		return VN_REFERENCE;

	      case tcc_constant:
		return VN_CONSTANT;

	      default:
		if (code == ADDR_EXPR)
		  return (is_gimple_min_invariant (rhs1)
			  ? VN_CONSTANT : VN_REFERENCE);
		else if (code == CONSTRUCTOR)
		  return VN_NARY;
		return VN_NONE;
	      }
	  default:
	    return VN_NONE;
	  }
      }
    default:
      return VN_NONE;
    }
}
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
  if (slot)
    return (*slot)->value_id;
  return 0;
}
/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  /* If the hashtable isn't initialized we're not running from PRE and thus
     do not need value-ids.  */
  if (!constant_to_value_id)
    return 0;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, INSERT);
  if (*slot)
    return (*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}
/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}
/* Compute the hash for a reference operand VRO1.  */

static void
vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
{
  hstate.add_int (vro1->opcode);
  if (vro1->op0)
    inchash::add_expr (vro1->op0, hstate);
  if (vro1->op1)
    inchash::add_expr (vro1->op1, hstate);
  if (vro1->op2)
    inchash::add_expr (vro1->op2, hstate);
}
/* Compute a hash for the reference operation VR1 and return it.  */

static hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  inchash::hash hstate;
  hashval_t result;
  int i;
  vn_reference_op_t vro;
  poly_int64 off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
	deref = true;
      else if (vro->opcode != ADDR_EXPR)
	deref = false;
      if (maybe_ne (vro->off, -1))
	{
	  if (known_eq (off, -1))
	    off = 0;
	  off += vro->off;
	}
      else
	{
	  if (maybe_ne (off, -1)
	      && maybe_ne (off, 0))
	    hstate.add_poly_int (off);
	  off = -1;
	  if (deref
	      && vro->opcode == ADDR_EXPR)
	    {
	      if (vro->op0)
		{
		  tree op = TREE_OPERAND (vro->op0, 0);
		  hstate.add_int (TREE_CODE (op));
		  inchash::add_expr (op, hstate);
		}
	    }
	  else
	    vn_reference_op_compute_hash (vro, hstate);
	}
    }
  result = hstate.end ();
  /* ??? We would ICE later if we hash instead of adding that in.  */
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
/* Return true if reference operations VR1 and VR2 are equivalent.  This
   means they have the same set of operands and vuses.  */

bool
vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
{
  unsigned i, j;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
	return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
	   && (TYPE_PRECISION (vr1->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
	   && (TYPE_PRECISION (vr2->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      poly_int64 off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; vr1->operands.iterate (i, &vro1); i++)
	{
	  if (vro1->opcode == MEM_REF)
	    deref1 = true;
	  /* Do not look through a storage order barrier.  */
	  else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
	    return false;
	  if (known_eq (vro1->off, -1))
	    break;
	  off1 += vro1->off;
	}
      for (; vr2->operands.iterate (j, &vro2); j++)
	{
	  if (vro2->opcode == MEM_REF)
	    deref2 = true;
	  /* Do not look through a storage order barrier.  */
	  else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
	    return false;
	  if (known_eq (vro2->off, -1))
	    break;
	  off2 += vro2->off;
	}
      if (maybe_ne (off1, off2))
	return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
	{
	  memset (&tem1, 0, sizeof (tem1));
	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
	  tem1.type = TREE_TYPE (tem1.op0);
	  tem1.opcode = TREE_CODE (tem1.op0);
	  vro1 = &tem1;
	  deref1 = false;
	}
      if (deref2 && vro2->opcode == ADDR_EXPR)
	{
	  memset (&tem2, 0, sizeof (tem2));
	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
	  tem2.type = TREE_TYPE (tem2.op0);
	  tem2.opcode = TREE_CODE (tem2.op0);
	  vro2 = &tem2;
	  deref2 = false;
	}
      if (deref1 != deref2)
	return false;
      if (!vn_reference_op_eq (vro1, vro2))
	return false;
      ++j;
      ++i;
    }
  while (vr1->operands.length () != i
	 || vr2->operands.length () != j);

  return true;
}
/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      result->reserve (3);

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      temp.clique = MR_DEPENDENCE_CLIQUE (ref);
      temp.base = MR_DEPENDENCE_BASE (ref);
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      result->quick_push (temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */
  tree orig = ref;
  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
	{
	case MODIFY_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  break;
	case WITH_SIZE_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.off = 0;
	  break;
	case MEM_REF:
	  /* The base address gets its own vn_reference_op_s structure.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  if (!mem_ref_offset (ref).to_shwi (&temp.off))
	    temp.off = -1;
	  temp.clique = MR_DEPENDENCE_CLIQUE (ref);
	  temp.base = MR_DEPENDENCE_BASE (ref);
	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
	  break;
	case BIT_FIELD_REF:
	  /* Record bits, position and storage order.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
	    temp.off = -1;
	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
	  break;
	case COMPONENT_REF:
	  /* The field decl is enough to unambiguously specify the field,
	     a matching type is not necessary and a mismatching type
	     is always a spurious difference.  */
	  temp.type = NULL_TREE;
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  {
	    tree this_offset = component_ref_field_offset (ref);
	    if (this_offset
		&& poly_int_tree_p (this_offset))
	      {
		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
		  {
		    poly_offset_int off
		      = (wi::to_poly_offset (this_offset)
			 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
		    /* Prohibit value-numbering zero offset components
		       of addresses the same before the pass folding
		       __builtin_object_size had a chance to run
		       (checking cfun->after_inlining does the
		       trick here).  */
		    if (TREE_CODE (orig) != ADDR_EXPR
			|| maybe_ne (off, 0)
			|| cfun->after_inlining)
		      off.to_shwi (&temp.off);
		  }
	      }
	  }
	  break;
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  {
	    tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
	    /* Record index as operand.  */
	    temp.op0 = TREE_OPERAND (ref, 1);
	    /* Always record lower bounds and element size.  */
	    temp.op1 = array_ref_low_bound (ref);
	    /* But record element size in units of the type alignment.  */
	    temp.op2 = TREE_OPERAND (ref, 3);
	    temp.align = eltype->type_common.align;
	    if (! temp.op2)
	      temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
				     size_int (TYPE_ALIGN_UNIT (eltype)));
	    if (poly_int_tree_p (temp.op0)
		&& poly_int_tree_p (temp.op1)
		&& TREE_CODE (temp.op2) == INTEGER_CST)
	      {
		poly_offset_int off = ((wi::to_poly_offset (temp.op0)
					- wi::to_poly_offset (temp.op1))
				       * wi::to_offset (temp.op2)
				       * vn_ref_op_align_unit (&temp));
		off.to_shwi (&temp.off);
	      }
	  }
	  break;
	case VAR_DECL:
	  if (DECL_HARD_REGISTER (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthru.  */
	case PARM_DECL:
	case CONST_DECL:
	case RESULT_DECL:
	  /* Canonicalize decls to MEM[&decl] which is what we end up with
	     when valueizing MEM[ptr] with ptr = &decl.  */
	  temp.opcode = MEM_REF;
	  temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
	  temp.off = 0;
	  result->safe_push (temp);
	  temp.opcode = ADDR_EXPR;
	  temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
	  temp.type = TREE_TYPE (temp.op0);
	  temp.off = -1;
	  break;
	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case FIXED_CST:
	case CONSTRUCTOR:
	case SSA_NAME:
	  temp.op0 = ref;
	  break;
	case ADDR_EXPR:
	  if (is_gimple_min_invariant (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  break;
	  /* These are only interesting for their operands, their
	     existence, and their type.  They will never be the last
	     ref in the chain of references (IE they require an
	     operand), so we don't have to put anything
	     for op* as it will be handled by the iteration.  */
	case REALPART_EXPR:
	  temp.off = 0;
	  break;
	case VIEW_CONVERT_EXPR:
	  temp.off = 0;
	  temp.reverse = storage_order_barrier_p (ref);
	  break;
	case IMAGPART_EXPR:
	  /* This is only interesting for its constant offset.  */
	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
	  break;
	default:
	  gcc_unreachable ();
	}
      result->safe_push (temp);

      if (REFERENCE_CLASS_P (ref)
	  || TREE_CODE (ref) == MODIFY_EXPR
	  || TREE_CODE (ref) == WITH_SIZE_EXPR
	  || (TREE_CODE (ref) == ADDR_EXPR
	      && !is_gimple_min_invariant (ref)))
	ref = TREE_OPERAND (ref, 0);
      else
	ref = NULL_TREE;
    }
}
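
/* As an illustrative example (not part of the original sources), a
   load from a.b[i_1] is decomposed by copy_reference_ops_from_ref,
   outermost operation first, roughly into

       { ARRAY_REF <i_1>, COMPONENT_REF <b>, MEM_REF <0>, ADDR_EXPR <&a> }

   where the trailing MEM_REF/ADDR_EXPR pair is the canonicalization of
   the decl a to MEM[&a] described above.  */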
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
			       alias_set_type set, tree type,
			       vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  poly_offset_int offset = 0;
  poly_offset_int max_size;
  poly_offset_int size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = &ops[0];
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (type);
      else
	size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE
      && poly_int_tree_p (size_tree))
    size = wi::to_poly_offset (size_tree);

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (ops, i, op)
    {
      switch (op->opcode)
	{
	/* These may be in the reference ops, but we cannot do anything
	   sensible with them here.  */
	case ADDR_EXPR:
	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
	  if (base != NULL_TREE
	      && TREE_CODE (base) == MEM_REF
	      && op->op0
	      && DECL_P (TREE_OPERAND (op->op0, 0)))
	    {
	      vn_reference_op_t pop = &ops[i-1];
	      base = TREE_OPERAND (op->op0, 0);
	      if (known_eq (pop->off, -1))
		{
		  max_size = -1;
		  offset = 0;
		}
	      else
		offset += pop->off * BITS_PER_UNIT;
	      op0_p = NULL;
	      break;
	    }
	  /* Fallthru.  */
	case CALL_EXPR:
	  return false;

	/* Record the base objects.  */
	case MEM_REF:
	  base_alias_set = get_deref_alias_set (op->op0);
	  *op0_p = build2 (MEM_REF, op->type,
			   NULL_TREE, op->op0);
	  MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
	  MR_DEPENDENCE_BASE (*op0_p) = op->base;
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case VAR_DECL:
	case PARM_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  *op0_p = op->op0;
	  op0_p = NULL;
	  break;

	/* And now the usual component-reference style ops.  */
	case BIT_FIELD_REF:
	  offset += wi::to_poly_offset (op->op1);
	  break;

	case COMPONENT_REF:
	  {
	    tree field = op->op0;
	    /* We do not have a complete COMPONENT_REF tree here so we
	       cannot use component_ref_field_offset.  Do the interesting
	       parts manually.  */
	    tree this_offset = DECL_FIELD_OFFSET (field);

	    if (op->op1 || !poly_int_tree_p (this_offset))
	      max_size = -1;
	    else
	      {
		poly_offset_int woffset = (wi::to_poly_offset (this_offset)
					   << LOG2_BITS_PER_UNIT);
		woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
		offset += woffset;
	      }
	    break;
	  }

	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* We recorded the lower bound and the element size.  */
	  if (!poly_int_tree_p (op->op0)
	      || !poly_int_tree_p (op->op1)
	      || TREE_CODE (op->op2) != INTEGER_CST)
	    max_size = -1;
	  else
	    {
	      poly_offset_int woffset
		= wi::sext (wi::to_poly_offset (op->op0)
			    - wi::to_poly_offset (op->op1),
			    TYPE_PRECISION (TREE_TYPE (op->op0)));
	      woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
	      woffset <<= LOG2_BITS_PER_UNIT;
	      offset += woffset;
	    }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  offset += size;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case CONSTRUCTOR:
	case CONST_DECL:
	  return false;

	default:
	  return false;
	}
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
    {
      ref->offset = 0;
      ref->size = -1;
      ref->max_size = -1;
      return true;
    }

  if (!offset.to_shwi (&ref->offset))
    {
      ref->offset = 0;
      ref->max_size = -1;
      return true;
    }

  if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
    ref->max_size = -1;

  return true;
}
/* Copy the operations present in the call statement CALL into RESULT,
   a vector of vn_reference_op_s's.  */

static void
copy_reference_ops_from_call (gcall *call,
			      vec<vn_reference_op_s> *result)
{
  vn_reference_op_s temp;
  unsigned i;
  tree lhs = gimple_call_lhs (call);
  int lr;

  /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
     different.  By adding the lhs here in the vector, we ensure that the
     hashcode is different, guaranteeing a different value number.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    {
      memset (&temp, 0, sizeof (temp));
      temp.opcode = MODIFY_EXPR;
      temp.type = TREE_TYPE (lhs);
      temp.op0 = lhs;
      temp.off = -1;
      result->safe_push (temp);
    }

  /* Copy the type, opcode, function, static chain and EH region, if any.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
    temp.op2 = size_int (lr);
  temp.off = -1;
  result->safe_push (temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static bool
vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
			    unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  tree addr_base;
  poly_int64 addr_offset = 0;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
					     &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != TREE_OPERAND (op->op0, 0))
    {
      poly_offset_int off
	= (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
				  SIGNED)
	   + addr_offset);
      mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (tree_fits_shwi_p (mem_op->op0))
	mem_op->off = tree_to_shwi (mem_op->op0);
      else
	mem_op->off = -1;
      return true;
    }
  return false;
}
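
/* For example (an illustrative sketch, not part of the original
   sources), given the ops for MEM[&a.b, 8] where field b sits at unit
   offset 4 within a, the code above rewrites the ADDR_EXPR operand to
   &a and folds the offsets so the ops describe MEM[&a, 12].  */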
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static bool
vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
				     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  gimple *def_stmt;
  enum tree_code code;
  poly_offset_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return false;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return false;

  off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      poly_int64 addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						 &addr_offset);
      /* If that didn't work because the address isn't invariant propagate
	 the reference tree from the address operation in case the current
	 dereference isn't offsetted.  */
      if (!addr_base
	  && *i_p == ops->length () - 1
	  && known_eq (off, 0)
	  /* This makes us disable this transform for PRE where the
	     reference ops might be also used for code insertion which
	     is invalid.  */
	  && default_vn_walk_kind == VN_WALKREWRITE)
	{
	  auto_vec<vn_reference_op_s, 32> tem;
	  copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
	  /* Make sure to preserve TBAA info.  The only objects not
	     wrapped in MEM_REFs that can have their address taken are
	     STRING_CSTs.  */
	  if (tem.length () >= 2
	      && tem[tem.length () - 2].opcode == MEM_REF)
	    {
	      vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
	      new_mem_op->op0
		= wide_int_to_tree (TREE_TYPE (mem_op->op0),
				    wi::to_poly_wide (new_mem_op->op0));
	    }
	  else
	    gcc_assert (tem.last ().opcode == STRING_CST);
	  ops->pop ();
	  ops->pop ();
	  ops->safe_splice (tem);
	  --*i_p;
	  return true;
	}
      if (!addr_base
	  || TREE_CODE (addr_base) != MEM_REF
	  || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base, 0))))
	return false;

      off += addr_offset;
      off += mem_ref_offset (addr_base);
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
	  || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
	  /* Make sure to not endlessly recurse.
	     See gcc.dg/tree-ssa/20040408-1.c for an example.  Can easily
	     happen when we value-number a PHI to its backedge value.  */
	  || SSA_VAL (ptr) == op->op0
	  || !poly_int_tree_p (ptroff))
	return false;

      off += wi::to_poly_offset (ptroff);
      op->op0 = ptr;
    }

  mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (tree_fits_shwi_p (mem_op->op0))
    mem_op->off = tree_to_shwi (mem_op->op0);
  else
    mem_op->off = -1;
  /* ??? Can end up with endless recursion here!?
     gcc.c-torture/execute/strcmp-1.c  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
  return true;
}
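
/* An illustrative sketch (not part of the original sources): with

       ptr_1 = ptr_2 + 4;
       ... = MEM[ptr_1, 8];

   the POINTER_PLUS_EXPR arm above rewrites the ops to MEM[ptr_2, 12],
   and the final recursion keeps folding as long as the new base is
   again an SSA name defined by an ADDR_EXPR or POINTER_PLUS_EXPR.  */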
/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  vec<vn_reference_op_s> operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = &operands[0];
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && fndecl_built_in_p (TREE_OPERAND (op->op0, 0))
      && operands.length () >= 2
      && operands.length () <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = &operands[1];
      if (operands.length () > 2)
	arg1 = &operands[2];
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
	  || (arg0->opcode == ADDR_EXPR
	      && is_gimple_min_invariant (arg0->op0)))
	anyconst = true;
      if (arg1
	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
	      || (arg1->opcode == ADDR_EXPR
		  && is_gimple_min_invariant (arg1->op0))))
	anyconst = true;
      if (anyconst)
	{
	  tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
					 arg1 ? 2 : 1,
					 arg0->op0,
					 arg1 ? arg1->op0 : NULL);
	  if (folded
	      && TREE_CODE (folded) == NOP_EXPR)
	    folded = TREE_OPERAND (folded, 0);
	  if (folded
	      && is_gimple_min_invariant (folded))
	    return folded;
	}
    }

  /* Simplify reads from constants or constant initializers.  */
  else if (BITS_PER_UNIT == 8
	   && COMPLETE_TYPE_P (ref->type)
	   && is_gimple_reg_type (ref->type))
    {
      poly_int64 off = 0;
      HOST_WIDE_INT size;
      if (INTEGRAL_TYPE_P (ref->type))
	size = TYPE_PRECISION (ref->type);
      else if (tree_fits_shwi_p (TYPE_SIZE (ref->type)))
	size = tree_to_shwi (TYPE_SIZE (ref->type));
      else
	return NULL_TREE;
      if (size % BITS_PER_UNIT != 0
	  || size > MAX_BITSIZE_MODE_ANY_MODE)
	return NULL_TREE;
      size /= BITS_PER_UNIT;
      unsigned i;
      for (i = 0; i < operands.length (); ++i)
	{
	  if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
	    {
	      ++i;
	      break;
	    }
	  if (known_eq (operands[i].off, -1))
	    return NULL_TREE;
	  off += operands[i].off;
	  if (operands[i].opcode == MEM_REF)
	    {
	      ++i;
	      break;
	    }
	}
      vn_reference_op_t base = &operands[--i];
      tree ctor = error_mark_node;
      tree decl = NULL_TREE;
      if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
	ctor = base->op0;
      else if (base->opcode == MEM_REF
	       && base[1].opcode == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
	{
	  decl = TREE_OPERAND (base[1].op0, 0);
	  if (TREE_CODE (decl) == STRING_CST)
	    ctor = decl;
	  else
	    ctor = ctor_for_folding (decl);
	}
      if (ctor == NULL_TREE)
	return build_zero_cst (ref->type);
      else if (ctor != error_mark_node)
	{
	  HOST_WIDE_INT const_off;
	  if (decl)
	    {
	      tree res = fold_ctor_reference (ref->type, ctor,
					      off * BITS_PER_UNIT,
					      size * BITS_PER_UNIT, decl);
	      if (res)
		{
		  STRIP_USELESS_TYPE_CONVERSION (res);
		  if (is_gimple_min_invariant (res))
		    return res;
		}
	    }
	  else if (off.is_constant (&const_off))
	    {
	      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
	      int len = native_encode_expr (ctor, buf, size, const_off);
	      if (len > 0)
		return native_interpret_expr (ref->type, buf, len);
	    }
	}
    }

  return NULL_TREE;
}
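
/* For example (an illustrative sketch, not part of the original
   sources), a load of a[1] with

       static const int a[2] = { 1, 2 };

   reaches the decl's initializer via ctor_for_folding and folds to 2,
   while a read from a decl known to be zero-initialized (for which
   ctor_for_folding returns NULL_TREE) folds to build_zero_cst of the
   reference type.  */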
/* Return true if OPS contain a storage order barrier.  */

static bool
contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;

  FOR_EACH_VEC_ELT (ops, i, op)
    if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
      return true;

  return false;
}
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static vec<vn_reference_op_s>
valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything,
		 bool with_avail = false)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
	{
	  tree tem = with_avail ? vn_valueize (vro->op0) : SSA_VAL (vro->op0);
	  if (tem != vro->op0)
	    {
	      *valueized_anything = true;
	      vro->op0 = tem;
	    }
	  /* If it transforms from an SSA_NAME to a constant, update
	     the opcode.  */
	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
	    vro->opcode = TREE_CODE (vro->op0);
	}
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
	{
	  tree tem = with_avail ? vn_valueize (vro->op1) : SSA_VAL (vro->op1);
	  if (tem != vro->op1)
	    {
	      *valueized_anything = true;
	      vro->op1 = tem;
	    }
	}
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
	{
	  tree tem = with_avail ? vn_valueize (vro->op2) : SSA_VAL (vro->op2);
	  if (tem != vro->op2)
	    {
	      *valueized_anything = true;
	      vro->op2 = tem;
	    }
	}
      /* If it transforms from an SSA_NAME to an address, fold with
	 a preceding indirect reference.  */
      if (i > 0
	  && vro->op0
	  && TREE_CODE (vro->op0) == ADDR_EXPR
	  && orig[i - 1].opcode == MEM_REF)
	{
	  if (vn_reference_fold_indirect (&orig, &i))
	    *valueized_anything = true;
	}
      else if (i > 0
	       && vro->opcode == SSA_NAME
	       && orig[i - 1].opcode == MEM_REF)
	{
	  if (vn_reference_maybe_forwprop_address (&orig, &i))
	    *valueized_anything = true;
	}
      /* If it transforms a non-constant ARRAY_REF into a constant
	 one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
	       && known_eq (vro->off, -1)
	       && poly_int_tree_p (vro->op0)
	       && poly_int_tree_p (vro->op1)
	       && TREE_CODE (vro->op2) == INTEGER_CST)
	{
	  poly_offset_int off = ((wi::to_poly_offset (vro->op0)
				  - wi::to_poly_offset (vro->op1))
				 * wi::to_offset (vro->op2)
				 * vn_ref_op_align_unit (vro));
	  off.to_shwi (&vro->off);
	}
    }

  return orig;
}
static vec<vn_reference_op_s>
valueize_refs (vec<vn_reference_op_s> orig)
{
  bool tem;
  return valueize_refs_1 (orig, &tem);
}

static vec<vn_reference_op_s> shared_lookup_references;
/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
					      valueized_anything);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_call (gcall *call)
{
  if (!call)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}
/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  vn_reference_s **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
	*vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}
/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
		       unsigned int cnt, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  vn_reference_s **slot;
  hashval_t hash;

  /* This bounds the stmt walks we perform on reference lookups
     to O(1) instead of O(N) where N is the number of dominating
     stores.  */
  if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
    return (void *)-1;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = vuse_ssa_val (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}
/* Lookup an existing or insert a new vn_reference entry into the
   value table for the VUSE, SET, TYPE, OPERANDS reference which
   has the value VALUE which is either a constant or an SSA name.  */

static vn_reference_t
vn_reference_lookup_or_insert_for_pieces (tree vuse,
					  alias_set_type set,
					  tree type,
					  vec<vn_reference_op_s,
					      va_heap> operands,
					  tree value)
{
  vn_reference_s vr1;
  vn_reference_t result;
  unsigned value_id;
  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = operands;
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if (vn_reference_lookup_1 (&vr1, &result))
    return result;
  if (TREE_CODE (value) == SSA_NAME)
    value_id = VN_INFO (value)->value_id;
  else
    value_id = get_or_alloc_constant_value_id (value);
  return vn_reference_insert_pieces (vuse, set, type,
				     operands.copy (), value, value_id);
}
/* Return a value-number for RCODE OPS... either by looking up an existing
   value-number for the simplified result or by inserting the operation if
   INSERT is true.  */

static tree
vn_nary_build_or_lookup_1 (gimple_match_op *res_op, bool insert)
{
  tree result = NULL_TREE;
  /* We will be creating a value number for
       RCODE (OPS...).
     So first simplify and lookup this expression to see if it
     is already available.  */
  mprts_hook = vn_lookup_simplify_result;
  bool res = false;
  switch (TREE_CODE_LENGTH ((tree_code) res_op->code))
    {
    case 1:
      res = gimple_resimplify1 (NULL, res_op, vn_valueize);
      break;
    case 2:
      res = gimple_resimplify2 (NULL, res_op, vn_valueize);
      break;
    case 3:
      res = gimple_resimplify3 (NULL, res_op, vn_valueize);
      break;
    }
  mprts_hook = NULL;
  gimple *new_stmt = NULL;
  if (res
      && gimple_simplified_result_is_gimple_val (res_op))
    {
      /* The expression is already available.  */
      result = res_op->ops[0];
      /* Valueize it, simplification returns sth in AVAIL only.  */
      if (TREE_CODE (result) == SSA_NAME)
	result = SSA_VAL (result);
    }
  else
    {
      tree val = vn_lookup_simplify_result (res_op);
      if (!val && insert)
	{
	  gimple_seq stmts = NULL;
	  result = maybe_push_res_to_seq (res_op, &stmts);
	  if (result)
	    {
	      gcc_assert (gimple_seq_singleton_p (stmts));
	      new_stmt = gimple_seq_first_stmt (stmts);
	    }
	}
      else
	/* The expression is already available.  */
	result = val;
    }
  if (new_stmt)
    {
      /* The expression is not yet available, value-number lhs to
	 the new SSA_NAME we created.  */
      /* Initialize value-number information properly.  */
      vn_ssa_aux_t result_info = VN_INFO (result);
      result_info->valnum = result;
      result_info->value_id = get_next_value_id ();
      result_info->visited = 1;
      gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
					  new_stmt);
      result_info->needs_insertion = true;
      /* ??? PRE phi-translation inserts NARYs without corresponding
	 SSA name result.  Re-use those but set their result according
	 to the stmt we just built.  */
      vn_nary_op_t nary = NULL;
      vn_nary_op_lookup_stmt (new_stmt, &nary);
      if (nary)
	{
	  gcc_assert (! nary->predicated_values && nary->u.result == NULL_TREE);
	  nary->u.result = gimple_assign_lhs (new_stmt);
	}
      /* As all "inserted" statements are singleton SCCs, insert
	 to the valid table.  This is strictly needed to
	 avoid re-generating new value SSA_NAMEs for the same
	 expression during SCC iteration over and over (the
	 optimistic table gets cleared after each iteration).
	 We do not need to insert into the optimistic table, as
	 lookups there will fall back to the valid table.  */
      else
	{
	  unsigned int length = vn_nary_length_from_stmt (new_stmt);
	  vn_nary_op_t vno1
	    = alloc_vn_nary_op_noinit (length, &vn_tables_insert_obstack);
	  vno1->value_id = result_info->value_id;
	  vno1->length = length;
	  vno1->predicated_values = 0;
	  vno1->u.result = result;
	  init_vn_nary_op_from_stmt (vno1, new_stmt);
	  vn_nary_op_insert_into (vno1, valid_info->nary, true);
	  /* Also do not link it into the undo chain.  */
	  last_inserted_nary = vno1->next;
	  vno1->next = (vn_nary_op_t)(void *)-1;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Inserting name ");
	  print_generic_expr (dump_file, result);
	  fprintf (dump_file, " for expression ");
	  print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}
    }
  return result;
}
/* Return a value-number for RCODE OPS... either by looking up an existing
   value-number for the simplified result or by inserting the operation.  */

static tree
vn_nary_build_or_lookup (gimple_match_op *res_op)
{
  return vn_nary_build_or_lookup_1 (res_op, true);
}
/* Try to simplify the expression RCODE OPS... of type TYPE and return
   its value if present.  */

tree
vn_nary_simplify (vn_nary_op_t nary)
{
  if (nary->length > gimple_match_op::MAX_NUM_OPS)
    return NULL_TREE;
  gimple_match_op op (gimple_match_cond::UNCOND, nary->opcode,
		      nary->type, nary->length);
  memcpy (op.ops, nary->op, sizeof (tree) * nary->length);
  return vn_nary_build_or_lookup_1 (&op, false);
}
1876 basic_block vn_context_bb;
1878 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1879 from the statement defining VUSE and if not successful tries to
1880 translate *REFP and VR_ through an aggregate copy at the definition
1881 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1882 of *REF and *VR. If only disambiguation was performed then
1883 *DISAMBIGUATE_ONLY is set to true. */
1885 static void *
1886 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1887 bool *disambiguate_only)
1889 vn_reference_t vr = (vn_reference_t)vr_;
1890 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1891 tree base = ao_ref_base (ref);
1892 HOST_WIDE_INT offseti, maxsizei;
1893 static vec<vn_reference_op_s> lhs_ops;
1894 ao_ref lhs_ref;
1895 bool lhs_ref_ok = false;
1896 poly_int64 copy_size;
1898 /* First try to disambiguate after value-replacing in the definitions LHS. */
1899 if (is_gimple_assign (def_stmt))
1901 tree lhs = gimple_assign_lhs (def_stmt);
1902 bool valueized_anything = false;
1903 /* Avoid re-allocation overhead. */
1904 lhs_ops.truncate (0);
1905 basic_block saved_rpo_bb = vn_context_bb;
1906 vn_context_bb = gimple_bb (def_stmt);
1907 copy_reference_ops_from_ref (lhs, &lhs_ops);
1908 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything, true);
1909 vn_context_bb = saved_rpo_bb;
1910 if (valueized_anything)
1912 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1913 get_alias_set (lhs),
1914 TREE_TYPE (lhs), lhs_ops);
1915 if (lhs_ref_ok
1916 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1918 *disambiguate_only = true;
1919 return NULL;
1922 else
1924 ao_ref_init (&lhs_ref, lhs);
1925 lhs_ref_ok = true;
1928 /* If we reach a clobbering statement try to skip it and see if
1929 we find a VN result with exactly the same value as the
1930 possible clobber. In this case we can ignore the clobber
1931 and return the found value.
1932 Note that we don't need to worry about partial overlapping
1933 accesses as we then can use TBAA to disambiguate against the
1934 clobbering statement when looking up a load (thus the
1935 VN_WALKREWRITE guard). */
1936 if (vn_walk_kind == VN_WALKREWRITE
1937 && is_gimple_reg_type (TREE_TYPE (lhs))
1938 && types_compatible_p (TREE_TYPE (lhs), vr->type))
1940 tree *saved_last_vuse_ptr = last_vuse_ptr;
1941 /* Do not update last_vuse_ptr in vn_reference_lookup_2. */
1942 last_vuse_ptr = NULL;
1943 tree saved_vuse = vr->vuse;
1944 hashval_t saved_hashcode = vr->hashcode;
1945 void *res = vn_reference_lookup_2 (ref,
1946 gimple_vuse (def_stmt), 0, vr);
1947 /* Need to restore vr->vuse and vr->hashcode. */
1948 vr->vuse = saved_vuse;
1949 vr->hashcode = saved_hashcode;
1950 last_vuse_ptr = saved_last_vuse_ptr;
1951 if (res && res != (void *)-1)
1953 vn_reference_t vnresult = (vn_reference_t) res;
1954 if (vnresult->result
1955 && operand_equal_p (vnresult->result,
1956 gimple_assign_rhs1 (def_stmt), 0))
1957 return res;
1961 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1962 && gimple_call_num_args (def_stmt) <= 4)
1964 /* For builtin calls, valueize their arguments and call the
1965 alias oracle again. Valueization may improve points-to
1966 info of pointers and constify size and position arguments.
1967 Originally this was motivated by PR61034, which has
1968 conditional calls to free falsely clobbering ref because
1969 of imprecise points-to info of the argument. */
1970 tree oldargs[4];
1971 bool valueized_anything = false;
1972 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1974 oldargs[i] = gimple_call_arg (def_stmt, i);
1975 tree val = vn_valueize (oldargs[i]);
1976 if (val != oldargs[i])
1978 gimple_call_set_arg (def_stmt, i, val);
1979 valueized_anything = true;
1982 if (valueized_anything)
1984 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1985 ref);
1986 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1987 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1988 if (!res)
1990 *disambiguate_only = true;
1991 return NULL;
1996 if (*disambiguate_only)
1997 return (void *)-1;
1999 /* If we cannot constrain the size of the reference we cannot
2000 test if anything kills it. */
2001 if (!ref->max_size_known_p ())
2002 return (void *)-1;
2004 poly_int64 offset = ref->offset;
2005 poly_int64 maxsize = ref->max_size;
2007 /* We can't deduce anything useful from clobbers. */
2008 if (gimple_clobber_p (def_stmt))
2009 return (void *)-1;
2011 /* def_stmt may-defs *ref. See if we can derive a value for *ref
2012 from that definition.
2013 1) Memset. */
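/* Example (ours): for

     memset (&a, 0, sizeof (a));
     ... = a.i;

   a load fully covered by the memset destination is value-numbered
   to zero; a nonzero constant byte is handled by replicating it
   into a buffer and using native_interpret_expr.  */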
2014 if (is_gimple_reg_type (vr->type)
2015 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
2016 && (integer_zerop (gimple_call_arg (def_stmt, 1))
2017 || ((TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST
2018 || (INTEGRAL_TYPE_P (vr->type) && known_eq (ref->size, 8)))
2019 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
2020 && offset.is_constant (&offseti)
2021 && offseti % BITS_PER_UNIT == 0))
2022 && poly_int_tree_p (gimple_call_arg (def_stmt, 2))
2023 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2024 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME))
2026 tree base2;
2027 poly_int64 offset2, size2, maxsize2;
2028 bool reverse;
2029 tree ref2 = gimple_call_arg (def_stmt, 0);
2030 if (TREE_CODE (ref2) == SSA_NAME)
2032 ref2 = SSA_VAL (ref2);
2033 if (TREE_CODE (ref2) == SSA_NAME
2034 && (TREE_CODE (base) != MEM_REF
2035 || TREE_OPERAND (base, 0) != ref2))
2037 gimple *def_stmt = SSA_NAME_DEF_STMT (ref2);
2038 if (gimple_assign_single_p (def_stmt)
2039 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2040 ref2 = gimple_assign_rhs1 (def_stmt);
2043 if (TREE_CODE (ref2) == ADDR_EXPR)
2045 ref2 = TREE_OPERAND (ref2, 0);
2046 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
2047 &reverse);
2048 if (!known_size_p (maxsize2)
2049 || !known_eq (maxsize2, size2)
2050 || !operand_equal_p (base, base2, OEP_ADDRESS_OF))
2051 return (void *)-1;
2053 else if (TREE_CODE (ref2) == SSA_NAME)
2055 poly_int64 soff;
2056 if (TREE_CODE (base) != MEM_REF
2057 || !(mem_ref_offset (base) << LOG2_BITS_PER_UNIT).to_shwi (&soff))
2058 return (void *)-1;
2059 offset += soff;
2060 offset2 = 0;
2061 if (TREE_OPERAND (base, 0) != ref2)
2063 gimple *def = SSA_NAME_DEF_STMT (ref2);
2064 if (is_gimple_assign (def)
2065 && gimple_assign_rhs_code (def) == POINTER_PLUS_EXPR
2066 && gimple_assign_rhs1 (def) == TREE_OPERAND (base, 0)
2067 && poly_int_tree_p (gimple_assign_rhs2 (def))
2068 && (wi::to_poly_offset (gimple_assign_rhs2 (def))
2069 << LOG2_BITS_PER_UNIT).to_shwi (&offset2))
2071 ref2 = gimple_assign_rhs1 (def);
2072 if (TREE_CODE (ref2) == SSA_NAME)
2073 ref2 = SSA_VAL (ref2);
2075 else
2076 return (void *)-1;
2079 else
2080 return (void *)-1;
2081 tree len = gimple_call_arg (def_stmt, 2);
2082 if (known_subrange_p (offset, maxsize, offset2,
2083 wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
2085 tree val;
2086 if (integer_zerop (gimple_call_arg (def_stmt, 1)))
2087 val = build_zero_cst (vr->type);
2088 else if (INTEGRAL_TYPE_P (vr->type)
2089 && known_eq (ref->size, 8))
2091 gimple_match_op res_op (gimple_match_cond::UNCOND, NOP_EXPR,
2092 vr->type, gimple_call_arg (def_stmt, 1));
2093 val = vn_nary_build_or_lookup (&res_op);
2094 if (!val
2095 || (TREE_CODE (val) == SSA_NAME
2096 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2097 return (void *)-1;
2099 else
2101 unsigned len = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr->type));
2102 unsigned char *buf = XALLOCAVEC (unsigned char, len);
2103 memset (buf, TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 1)),
2104 len);
2105 val = native_interpret_expr (vr->type, buf, len);
2106 if (!val)
2107 return (void *)-1;
2109 return vn_reference_lookup_or_insert_for_pieces
2110 (vuse, vr->set, vr->type, vr->operands, val);
2114 /* 2) Assignment from an empty CONSTRUCTOR. */
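/* Example (ours): after

     struct S s = {};
     ... = s.x;

   a read covered by the store of the empty CONSTRUCTOR is
   value-numbered to zero.  */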
2115 else if (is_gimple_reg_type (vr->type)
2116 && gimple_assign_single_p (def_stmt)
2117 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
2118 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
2120 tree base2;
2121 poly_int64 offset2, size2, maxsize2;
2122 bool reverse;
2123 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2124 &offset2, &size2, &maxsize2, &reverse);
2125 if (known_size_p (maxsize2)
2126 && operand_equal_p (base, base2, 0)
2127 && known_subrange_p (offset, maxsize, offset2, size2))
2129 tree val = build_zero_cst (vr->type);
2130 return vn_reference_lookup_or_insert_for_pieces
2131 (vuse, vr->set, vr->type, vr->operands, val);
2135 /* 3) Assignment from a constant. We can use fold's native encode/interpret
2136 routines to extract the assigned bits. */
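/* Example (ours): for

     u.ll = 0x1122334455667788;
     ... = u.c[2];

   the constant is encoded to bytes with native_encode_expr and the
   bytes addressed by the load are re-interpreted in the type of the
   load (possibly via a wider integer type when precisions differ).  */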
2137 else if (known_eq (ref->size, maxsize)
2138 && is_gimple_reg_type (vr->type)
2139 && !contains_storage_order_barrier_p (vr->operands)
2140 && gimple_assign_single_p (def_stmt)
2141 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
2142 /* native_encode and native_decode operate on arrays of bytes
2143 and so fundamentally need a compile-time size and offset. */
2144 && maxsize.is_constant (&maxsizei)
2145 && maxsizei % BITS_PER_UNIT == 0
2146 && offset.is_constant (&offseti)
2147 && offseti % BITS_PER_UNIT == 0
2148 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
2149 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2150 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
2152 tree base2;
2153 HOST_WIDE_INT offset2, size2;
2154 bool reverse;
2155 base2 = get_ref_base_and_extent_hwi (gimple_assign_lhs (def_stmt),
2156 &offset2, &size2, &reverse);
2157 if (base2
2158 && !reverse
2159 && size2 % BITS_PER_UNIT == 0
2160 && offset2 % BITS_PER_UNIT == 0
2161 && operand_equal_p (base, base2, 0)
2162 && known_subrange_p (offseti, maxsizei, offset2, size2))
2164 /* We support up to 512-bit values (for V8DFmode). */
2165 unsigned char buffer[64];
2166 int len;
2168 tree rhs = gimple_assign_rhs1 (def_stmt);
2169 if (TREE_CODE (rhs) == SSA_NAME)
2170 rhs = SSA_VAL (rhs);
2171 len = native_encode_expr (rhs,
2172 buffer, sizeof (buffer),
2173 (offseti - offset2) / BITS_PER_UNIT);
2174 if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
2176 tree type = vr->type;
2177 /* Make sure to interpret in a type that has a range
2178 covering the whole access size. */
2179 if (INTEGRAL_TYPE_P (vr->type)
2180 && maxsizei != TYPE_PRECISION (vr->type))
2181 type = build_nonstandard_integer_type (maxsizei,
2182 TYPE_UNSIGNED (type));
2183 tree val = native_interpret_expr (type, buffer,
2184 maxsizei / BITS_PER_UNIT);
2185 /* If we chop off bits because the type's precision doesn't
2186 match the memory access size, this is ok when optimizing
2187 reads but not when called from the DSE code during
2188 elimination. */
2189 if (val
2190 && type != vr->type)
2192 if (! int_fits_type_p (val, vr->type))
2193 val = NULL_TREE;
2194 else
2195 val = fold_convert (vr->type, val);
2198 if (val)
2199 return vn_reference_lookup_or_insert_for_pieces
2200 (vuse, vr->set, vr->type, vr->operands, val);
2205 /* 4) Assignment from an SSA name whose definition we may be able
2206 to access pieces from. */
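/* Example (ours): for a register-typed store and a piece-wise load
   such as

     *p = v_1;              <- V4SI vector store
     x = ((int *)p)[1];     <- load of one lane

   the load is re-expressed as BIT_FIELD_REF <v_1, 32, 32> and
   value-numbered as an n-ary operation.  */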
2207 else if (known_eq (ref->size, maxsize)
2208 && is_gimple_reg_type (vr->type)
2209 && !contains_storage_order_barrier_p (vr->operands)
2210 && gimple_assign_single_p (def_stmt)
2211 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
2213 tree base2;
2214 poly_int64 offset2, size2, maxsize2;
2215 bool reverse;
2216 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2217 &offset2, &size2, &maxsize2,
2218 &reverse);
2219 if (!reverse
2220 && known_size_p (maxsize2)
2221 && known_eq (maxsize2, size2)
2222 && operand_equal_p (base, base2, 0)
2223 && known_subrange_p (offset, maxsize, offset2, size2)
2224 /* ??? We can't handle bitfield precision extracts without
2225 either using an alternate type for the BIT_FIELD_REF and
2226 then doing a conversion or possibly adjusting the offset
2227 according to endianness. */
2228 && (! INTEGRAL_TYPE_P (vr->type)
2229 || known_eq (ref->size, TYPE_PRECISION (vr->type)))
2230 && multiple_p (ref->size, BITS_PER_UNIT))
2232 gimple_match_op op (gimple_match_cond::UNCOND,
2233 BIT_FIELD_REF, vr->type,
2234 vn_valueize (gimple_assign_rhs1 (def_stmt)),
2235 bitsize_int (ref->size),
2236 bitsize_int (offset - offset2));
2237 tree val = vn_nary_build_or_lookup (&op);
2238 if (val
2239 && (TREE_CODE (val) != SSA_NAME
2240 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2242 vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
2243 (vuse, vr->set, vr->type, vr->operands, val);
2244 return res;
2249 /* 5) For aggregate copies translate the reference through them if
2250 the copy kills ref. */
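/* Example (ours): for

     a = b;        <- aggregate copy that kills a
     x = a.f;      <- lookup of a.f

   the operand vector for a.f is rewritten into one for b.f and the
   walk continues from the copy's VUSE.  */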
2251 else if (vn_walk_kind == VN_WALKREWRITE
2252 && gimple_assign_single_p (def_stmt)
2253 && (DECL_P (gimple_assign_rhs1 (def_stmt))
2254 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
2255 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
2257 tree base2;
2258 int i, j, k;
2259 auto_vec<vn_reference_op_s> rhs;
2260 vn_reference_op_t vro;
2261 ao_ref r;
2263 if (!lhs_ref_ok)
2264 return (void *)-1;
2266 /* See if the assignment kills REF. */
2267 base2 = ao_ref_base (&lhs_ref);
2268 if (!lhs_ref.max_size_known_p ()
2269 || (base != base2
2270 && (TREE_CODE (base) != MEM_REF
2271 || TREE_CODE (base2) != MEM_REF
2272 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
2273 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
2274 TREE_OPERAND (base2, 1))))
2275 || !stmt_kills_ref_p (def_stmt, ref))
2276 return (void *)-1;
2278 /* Find the common base of ref and the lhs. lhs_ops already
2279 contains valueized operands for the lhs. */
2280 i = vr->operands.length () - 1;
2281 j = lhs_ops.length () - 1;
2282 while (j >= 0 && i >= 0
2283 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
2285 i--;
2286 j--;
2289 /* ??? The innermost op should always be a MEM_REF and we already
2290 checked that the assignment to the lhs kills vr. Thus for
2291 aggregate copies using char[] types the vn_reference_op_eq
2292 may fail when comparing types for compatibility. But we really
2293 don't care here - further lookups with the rewritten operands
2294 will simply fail if we messed up types too badly. */
2295 poly_int64 extra_off = 0;
2296 if (j == 0 && i >= 0
2297 && lhs_ops[0].opcode == MEM_REF
2298 && maybe_ne (lhs_ops[0].off, -1))
2300 if (known_eq (lhs_ops[0].off, vr->operands[i].off))
2301 i--, j--;
2302 else if (vr->operands[i].opcode == MEM_REF
2303 && maybe_ne (vr->operands[i].off, -1))
2305 extra_off = vr->operands[i].off - lhs_ops[0].off;
2306 i--, j--;
2310 /* i now points to the first additional op.
2311 ??? LHS may not be completely contained in VR, one or more
2312 VIEW_CONVERT_EXPRs could be in its way. We could at least
2313 try handling outermost VIEW_CONVERT_EXPRs. */
2314 if (j != -1)
2315 return (void *)-1;
2317 /* Punt if the additional ops contain a storage order barrier. */
2318 for (k = i; k >= 0; k--)
2320 vro = &vr->operands[k];
2321 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
2322 return (void *)-1;
2325 /* Now re-write REF to be based on the rhs of the assignment. */
2326 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
2328 /* Apply an extra offset to the inner MEM_REF of the RHS. */
2329 if (maybe_ne (extra_off, 0))
2331 if (rhs.length () < 2)
2332 return (void *)-1;
2333 int ix = rhs.length () - 2;
2334 if (rhs[ix].opcode != MEM_REF
2335 || known_eq (rhs[ix].off, -1))
2336 return (void *)-1;
2337 rhs[ix].off += extra_off;
2338 rhs[ix].op0 = int_const_binop (PLUS_EXPR, rhs[ix].op0,
2339 build_int_cst (TREE_TYPE (rhs[ix].op0),
2340 extra_off));
2343 /* We need to pre-pend vr->operands[0..i] to rhs. */
2344 vec<vn_reference_op_s> old = vr->operands;
2345 if (i + 1 + rhs.length () > vr->operands.length ())
2346 vr->operands.safe_grow (i + 1 + rhs.length ());
2347 else
2348 vr->operands.truncate (i + 1 + rhs.length ());
2349 FOR_EACH_VEC_ELT (rhs, j, vro)
2350 vr->operands[i + 1 + j] = *vro;
2351 vr->operands = valueize_refs (vr->operands);
2352 if (old == shared_lookup_references)
2353 shared_lookup_references = vr->operands;
2354 vr->hashcode = vn_reference_compute_hash (vr);
2356 /* Try folding the new reference to a constant. */
2357 tree val = fully_constant_vn_reference_p (vr);
2358 if (val)
2359 return vn_reference_lookup_or_insert_for_pieces
2360 (vuse, vr->set, vr->type, vr->operands, val);
2362 /* Adjust *ref from the new operands. */
2363 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2364 return (void *)-1;
2365 /* This can happen with bitfields. */
2366 if (maybe_ne (ref->size, r.size))
2367 return (void *)-1;
2368 *ref = r;
2370 /* Do not update last seen VUSE after translating. */
2371 last_vuse_ptr = NULL;
2373 /* Keep looking for the adjusted *REF / VR pair. */
2374 return NULL;
2377 /* 6) For memcpy copies translate the reference through them if
2378 the copy kills ref. */
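/* Example (ours): for

     memcpy (&a, &b, sizeof (a));
     x = a.f;      <- contained in the copied region

   the lookup is rewritten into a MEM_REF based on &b at the
   corresponding offset and the walk continues from the call's
   VUSE.  */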
2379 else if (vn_walk_kind == VN_WALKREWRITE
2380 && is_gimple_reg_type (vr->type)
2381 /* ??? Handle BCOPY as well. */
2382 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2383 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2384 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2385 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2386 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2387 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2388 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2389 && poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size))
2391 tree lhs, rhs;
2392 ao_ref r;
2393 poly_int64 rhs_offset, lhs_offset;
2394 vn_reference_op_s op;
2395 poly_uint64 mem_offset;
2396 poly_int64 at, byte_maxsize;
2398 /* Only handle non-variable, addressable refs. */
2399 if (maybe_ne (ref->size, maxsize)
2400 || !multiple_p (offset, BITS_PER_UNIT, &at)
2401 || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
2402 return (void *)-1;
2404 /* Extract a pointer base and an offset for the destination. */
2405 lhs = gimple_call_arg (def_stmt, 0);
2406 lhs_offset = 0;
2407 if (TREE_CODE (lhs) == SSA_NAME)
2409 lhs = vn_valueize (lhs);
2410 if (TREE_CODE (lhs) == SSA_NAME)
2412 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2413 if (gimple_assign_single_p (def_stmt)
2414 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2415 lhs = gimple_assign_rhs1 (def_stmt);
2418 if (TREE_CODE (lhs) == ADDR_EXPR)
2420 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2421 &lhs_offset);
2422 if (!tem)
2423 return (void *)-1;
2424 if (TREE_CODE (tem) == MEM_REF
2425 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2427 lhs = TREE_OPERAND (tem, 0);
2428 if (TREE_CODE (lhs) == SSA_NAME)
2429 lhs = vn_valueize (lhs);
2430 lhs_offset += mem_offset;
2432 else if (DECL_P (tem))
2433 lhs = build_fold_addr_expr (tem);
2434 else
2435 return (void *)-1;
2437 if (TREE_CODE (lhs) != SSA_NAME
2438 && TREE_CODE (lhs) != ADDR_EXPR)
2439 return (void *)-1;
2441 /* Extract a pointer base and an offset for the source. */
2442 rhs = gimple_call_arg (def_stmt, 1);
2443 rhs_offset = 0;
2444 if (TREE_CODE (rhs) == SSA_NAME)
2445 rhs = vn_valueize (rhs);
2446 if (TREE_CODE (rhs) == ADDR_EXPR)
2448 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2449 &rhs_offset);
2450 if (!tem)
2451 return (void *)-1;
2452 if (TREE_CODE (tem) == MEM_REF
2453 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2455 rhs = TREE_OPERAND (tem, 0);
2456 rhs_offset += mem_offset;
2458 else if (DECL_P (tem)
2459 || TREE_CODE (tem) == STRING_CST)
2460 rhs = build_fold_addr_expr (tem);
2461 else
2462 return (void *)-1;
2464 if (TREE_CODE (rhs) != SSA_NAME
2465 && TREE_CODE (rhs) != ADDR_EXPR)
2466 return (void *)-1;
2468 /* The bases of the destination and the references have to agree. */
2469 if (TREE_CODE (base) == MEM_REF)
2471 if (TREE_OPERAND (base, 0) != lhs
2472 || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
2473 return (void *) -1;
2474 at += mem_offset;
2476 else if (!DECL_P (base)
2477 || TREE_CODE (lhs) != ADDR_EXPR
2478 || TREE_OPERAND (lhs, 0) != base)
2479 return (void *)-1;
2481 /* If the access is completely outside of the memcpy destination
2482 area there is no aliasing. */
2483 if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
2484 return NULL;
2485 /* And the access has to be contained within the memcpy destination. */
2486 if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
2487 return (void *)-1;
2489 /* Make room for 2 operands in the new reference. */
2490 if (vr->operands.length () < 2)
2492 vec<vn_reference_op_s> old = vr->operands;
2493 vr->operands.safe_grow_cleared (2);
2494 if (old == shared_lookup_references)
2495 shared_lookup_references = vr->operands;
2497 else
2498 vr->operands.truncate (2);
2500 /* The looked-through reference is a simple MEM_REF. */
2501 memset (&op, 0, sizeof (op));
2502 op.type = vr->type;
2503 op.opcode = MEM_REF;
2504 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
2505 op.off = at - lhs_offset + rhs_offset;
2506 vr->operands[0] = op;
2507 op.type = TREE_TYPE (rhs);
2508 op.opcode = TREE_CODE (rhs);
2509 op.op0 = rhs;
2510 op.off = -1;
2511 vr->operands[1] = op;
2512 vr->hashcode = vn_reference_compute_hash (vr);
2514 /* Try folding the new reference to a constant. */
2515 tree val = fully_constant_vn_reference_p (vr);
2516 if (val)
2517 return vn_reference_lookup_or_insert_for_pieces
2518 (vuse, vr->set, vr->type, vr->operands, val);
2520 /* Adjust *ref from the new operands. */
2521 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2522 return (void *)-1;
2523 /* This can happen with bitfields. */
2524 if (maybe_ne (ref->size, r.size))
2525 return (void *)-1;
2526 *ref = r;
2528 /* Do not update last seen VUSE after translating. */
2529 last_vuse_ptr = NULL;
2531 /* Keep looking for the adjusted *REF / VR pair. */
2532 return NULL;
2535 /* Bail out and stop walking. */
2536 return (void *)-1;
2539 /* Return a reference op vector from OP that can be used for
2540 vn_reference_lookup_pieces. The caller is responsible for releasing
2541 the vector. */
2543 vec<vn_reference_op_s>
2544 vn_reference_operands_for_lookup (tree op)
2546 bool valueized;
2547 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
2550 /* Lookup a reference operation by its parts, in the current hash table.
2551 Returns the resulting value number if it exists in the hash table,
2552 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2553 vn_reference_t stored in the hashtable if something is found. */
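/* Usage sketch (ours, assuming the usual pairing with
   vn_reference_operands_for_lookup; not from the original source):

     vec<vn_reference_op_s> ops = vn_reference_operands_for_lookup (op);
     vn_reference_t res;
     tree val = vn_reference_lookup_pieces (vuse, get_alias_set (op),
					    TREE_TYPE (op), ops, &res,
					    VN_WALK);
     ops.release ();
*/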
2555 tree
2556 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2557 vec<vn_reference_op_s> operands,
2558 vn_reference_t *vnresult, vn_lookup_kind kind)
2560 struct vn_reference_s vr1;
2561 vn_reference_t tmp;
2562 tree cst;
2564 if (!vnresult)
2565 vnresult = &tmp;
2566 *vnresult = NULL;
2568 vr1.vuse = vuse_ssa_val (vuse);
2569 shared_lookup_references.truncate (0);
2570 shared_lookup_references.safe_grow (operands.length ());
2571 memcpy (shared_lookup_references.address (),
2572 operands.address (),
2573 sizeof (vn_reference_op_s)
2574 * operands.length ());
2575 vr1.operands = operands = shared_lookup_references
2576 = valueize_refs (shared_lookup_references);
2577 vr1.type = type;
2578 vr1.set = set;
2579 vr1.hashcode = vn_reference_compute_hash (&vr1);
2580 if ((cst = fully_constant_vn_reference_p (&vr1)))
2581 return cst;
2583 vn_reference_lookup_1 (&vr1, vnresult);
2584 if (!*vnresult
2585 && kind != VN_NOWALK
2586 && vr1.vuse)
2588 ao_ref r;
2589 vn_walk_kind = kind;
2590 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2591 *vnresult =
2592 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2593 vn_reference_lookup_2,
2594 vn_reference_lookup_3,
2595 vuse_valueize, &vr1);
2596 gcc_checking_assert (vr1.operands == shared_lookup_references);
2599 if (*vnresult)
2600 return (*vnresult)->result;
2602 return NULL_TREE;
2605 /* Lookup OP in the current hash table, and return the resulting value
2606 number if it exists in the hash table. Return NULL_TREE if it does
2607 not exist in the hash table or if the result field of the structure
2608 was NULL. VNRESULT will be filled in with the vn_reference_t
2609 stored in the hashtable if one exists. When TBAA_P is false assume
2610 we are looking up a store and treat it as having alias-set zero. */
2612 tree
2613 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2614 vn_reference_t *vnresult, bool tbaa_p)
2616 vec<vn_reference_op_s> operands;
2617 struct vn_reference_s vr1;
2618 tree cst;
2619 bool valueized_anything;
2621 if (vnresult)
2622 *vnresult = NULL;
2624 vr1.vuse = vuse_ssa_val (vuse);
2625 vr1.operands = operands
2626 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
2627 vr1.type = TREE_TYPE (op);
2628 vr1.set = tbaa_p ? get_alias_set (op) : 0;
2629 vr1.hashcode = vn_reference_compute_hash (&vr1);
2630 if ((cst = fully_constant_vn_reference_p (&vr1)))
2631 return cst;
2633 if (kind != VN_NOWALK
2634 && vr1.vuse)
2636 vn_reference_t wvnresult;
2637 ao_ref r;
2638 /* Make sure to use a valueized reference if we valueized anything.
2639 Otherwise preserve the full reference for advanced TBAA. */
2640 if (!valueized_anything
2641 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2642 vr1.operands))
2643 ao_ref_init (&r, op);
2644 if (! tbaa_p)
2645 r.ref_alias_set = r.base_alias_set = 0;
2646 vn_walk_kind = kind;
2647 wvnresult =
2648 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2649 vn_reference_lookup_2,
2650 vn_reference_lookup_3,
2651 vuse_valueize, &vr1);
2652 gcc_checking_assert (vr1.operands == shared_lookup_references);
2653 if (wvnresult)
2655 if (vnresult)
2656 *vnresult = wvnresult;
2657 return wvnresult->result;
2660 return NULL_TREE;
2663 return vn_reference_lookup_1 (&vr1, vnresult);
2666 /* Lookup CALL in the current hash table and return the entry in
2667 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2669 void
2670 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2671 vn_reference_t vr)
2673 if (vnresult)
2674 *vnresult = NULL;
2676 tree vuse = gimple_vuse (call);
2678 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2679 vr->operands = valueize_shared_reference_ops_from_call (call);
2680 vr->type = gimple_expr_type (call);
2681 vr->set = 0;
2682 vr->hashcode = vn_reference_compute_hash (vr);
2683 vn_reference_lookup_1 (vr, vnresult);
2686 /* Insert OP into the current hash table with a value number of RESULT. */
2688 static void
2689 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2691 vn_reference_s **slot;
2692 vn_reference_t vr1;
2693 bool tem;
2695 vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
2696 if (TREE_CODE (result) == SSA_NAME)
2697 vr1->value_id = VN_INFO (result)->value_id;
2698 else
2699 vr1->value_id = get_or_alloc_constant_value_id (result);
2700 vr1->vuse = vuse_ssa_val (vuse);
2701 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2702 vr1->type = TREE_TYPE (op);
2703 vr1->set = get_alias_set (op);
2704 vr1->hashcode = vn_reference_compute_hash (vr1);
2705 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2706 vr1->result_vdef = vdef;
2708 slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2709 INSERT);
2711 /* Because IL walking on reference lookup can end up visiting
2712 a def that is only to be visited later in iteration order
2713 when we are about to make an irreducible region reducible,
2714 the def can effectively be processed and its ref inserted
2715 by vn_reference_lookup_3 already. So we cannot assert (!*slot)
2716 but can save a lookup if we deal with already-inserted refs here. */
2717 if (*slot)
2719 /* We cannot assert that we have the same value either because
2720 when disentangling an irreducible region we may end up visiting
2721 a use before the corresponding def. That's a missed optimization
2722 only though. See gcc.dg/tree-ssa/pr87126.c for example. */
2723 if (dump_file && (dump_flags & TDF_DETAILS)
2724 && !operand_equal_p ((*slot)->result, vr1->result, 0))
2726 fprintf (dump_file, "Keeping old value ");
2727 print_generic_expr (dump_file, (*slot)->result);
2728 fprintf (dump_file, " because of collision\n");
2730 free_reference (vr1);
2731 obstack_free (&vn_tables_obstack, vr1);
2732 return;
2735 *slot = vr1;
2736 vr1->next = last_inserted_ref;
2737 last_inserted_ref = vr1;
2740 /* Insert a reference by its pieces into the current hash table with
2741 a value number of RESULT. Return the resulting reference
2742 structure we created. */
2744 vn_reference_t
2745 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2746 vec<vn_reference_op_s> operands,
2747 tree result, unsigned int value_id)
2750 vn_reference_s **slot;
2751 vn_reference_t vr1;
2753 vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
2754 vr1->value_id = value_id;
2755 vr1->vuse = vuse_ssa_val (vuse);
2756 vr1->operands = valueize_refs (operands);
2757 vr1->type = type;
2758 vr1->set = set;
2759 vr1->hashcode = vn_reference_compute_hash (vr1);
2760 if (result && TREE_CODE (result) == SSA_NAME)
2761 result = SSA_VAL (result);
2762 vr1->result = result;
2764 slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2765 INSERT);
2767 /* At this point we should have all the things inserted that we have
2768 seen before, and we should never try inserting something that
2769 already exists. */
2770 gcc_assert (!*slot);
2772 *slot = vr1;
2773 vr1->next = last_inserted_ref;
2774 last_inserted_ref = vr1;
2775 return vr1;
2778 /* Compute and return the hash value for nary operation VNO1. */
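/* Sketch (ours): operands of commutative operations are put into a
   canonical order before hashing, so for example

     _1 = b_2 + a_3;
     _4 = a_3 + b_2;

   hash and compare equal; comparisons are likewise canonicalized by
   swapping operands and the comparison code together.  */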
2780 static hashval_t
2781 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2783 inchash::hash hstate;
2784 unsigned i;
2786 for (i = 0; i < vno1->length; ++i)
2787 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2788 vno1->op[i] = SSA_VAL (vno1->op[i]);
2790 if (((vno1->length == 2
2791 && commutative_tree_code (vno1->opcode))
2792 || (vno1->length == 3
2793 && commutative_ternary_tree_code (vno1->opcode)))
2794 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2795 std::swap (vno1->op[0], vno1->op[1]);
2796 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2797 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2799 std::swap (vno1->op[0], vno1->op[1]);
2800 vno1->opcode = swap_tree_comparison (vno1->opcode);
2803 hstate.add_int (vno1->opcode);
2804 for (i = 0; i < vno1->length; ++i)
2805 inchash::add_expr (vno1->op[i], hstate);
2807 return hstate.end ();
2810 /* Compare nary operations VNO1 and VNO2 and return true if they are
2811 equivalent. */
2813 bool
2814 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2816 unsigned i;
2818 if (vno1->hashcode != vno2->hashcode)
2819 return false;
2821 if (vno1->length != vno2->length)
2822 return false;
2824 if (vno1->opcode != vno2->opcode
2825 || !types_compatible_p (vno1->type, vno2->type))
2826 return false;
2828 for (i = 0; i < vno1->length; ++i)
2829 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2830 return false;
2832 /* BIT_INSERT_EXPR has an implicit operand as the type precision
2833 of op1. Need to check to make sure they are the same. */
2834 if (vno1->opcode == BIT_INSERT_EXPR
2835 && TREE_CODE (vno1->op[1]) == INTEGER_CST
2836 && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
2837 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
2838 return false;
2840 return true;
2843 /* Initialize VNO from the pieces provided. */
2845 static void
2846 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2847 enum tree_code code, tree type, tree *ops)
2849 vno->opcode = code;
2850 vno->length = length;
2851 vno->type = type;
2852 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2855 /* Initialize VNO from OP. */
2857 static void
2858 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2860 unsigned i;
2862 vno->opcode = TREE_CODE (op);
2863 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2864 vno->type = TREE_TYPE (op);
2865 for (i = 0; i < vno->length; ++i)
2866 vno->op[i] = TREE_OPERAND (op, i);
2869 /* Return the number of operands for a vn_nary ops structure from STMT. */
2871 static unsigned int
2872 vn_nary_length_from_stmt (gimple *stmt)
2874 switch (gimple_assign_rhs_code (stmt))
2876 case REALPART_EXPR:
2877 case IMAGPART_EXPR:
2878 case VIEW_CONVERT_EXPR:
2879 return 1;
2881 case BIT_FIELD_REF:
2882 return 3;
2884 case CONSTRUCTOR:
2885 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2887 default:
2888 return gimple_num_ops (stmt) - 1;
2892 /* Initialize VNO from STMT. */
2894 static void
2895 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
2897 unsigned i;
2899 vno->opcode = gimple_assign_rhs_code (stmt);
2900 vno->type = gimple_expr_type (stmt);
2901 switch (vno->opcode)
2903 case REALPART_EXPR:
2904 case IMAGPART_EXPR:
2905 case VIEW_CONVERT_EXPR:
2906 vno->length = 1;
2907 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2908 break;
2910 case BIT_FIELD_REF:
2911 vno->length = 3;
2912 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2913 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2914 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2915 break;
2917 case CONSTRUCTOR:
2918 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2919 for (i = 0; i < vno->length; ++i)
2920 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2921 break;
2923 default:
2924 gcc_checking_assert (!gimple_assign_single_p (stmt));
2925 vno->length = gimple_num_ops (stmt) - 1;
2926 for (i = 0; i < vno->length; ++i)
2927 vno->op[i] = gimple_op (stmt, i + 1);
2931 /* Compute the hashcode for VNO and look for it in the hash table;
2932 return the resulting value number if it exists in the hash table.
2933 Return NULL_TREE if it does not exist in the hash table or if the
2934 result field of the operation is NULL. VNRESULT will contain the
2935 vn_nary_op_t from the hashtable if it exists. */
2937 static tree
2938 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2940 vn_nary_op_s **slot;
2942 if (vnresult)
2943 *vnresult = NULL;
2945 vno->hashcode = vn_nary_op_compute_hash (vno);
2946 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
2947 if (!slot)
2948 return NULL_TREE;
2949 if (vnresult)
2950 *vnresult = *slot;
2951 return (*slot)->predicated_values ? NULL_TREE : (*slot)->u.result;
2954 /* Lookup a n-ary operation by its pieces and return the resulting value
2955 number if it exists in the hash table. Return NULL_TREE if it does
2956 not exist in the hash table or if the result field of the operation
2957 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2958 if it exists. */
2960 tree
2961 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2962 tree type, tree *ops, vn_nary_op_t *vnresult)
2964 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2965 sizeof_vn_nary_op (length));
2966 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2967 return vn_nary_op_lookup_1 (vno1, vnresult);
2970 /* Lookup OP in the current hash table, and return the resulting value
2971 number if it exists in the hash table. Return NULL_TREE if it does
2972 not exist in the hash table or if the result field of the operation
2973 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2974 if it exists. */
2976 tree
2977 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2979 vn_nary_op_t vno1
2980 = XALLOCAVAR (struct vn_nary_op_s,
2981 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2982 init_vn_nary_op_from_op (vno1, op);
2983 return vn_nary_op_lookup_1 (vno1, vnresult);
2986 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2987 value number if it exists in the hash table. Return NULL_TREE if
2988 it does not exist in the hash table. VNRESULT will contain the
2989 vn_nary_op_t from the hashtable if it exists. */
2991 tree
2992 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
2994 vn_nary_op_t vno1
2995 = XALLOCAVAR (struct vn_nary_op_s,
2996 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2997 init_vn_nary_op_from_stmt (vno1, stmt);
2998 return vn_nary_op_lookup_1 (vno1, vnresult);
3001 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
3003 static vn_nary_op_t
3004 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
3006 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
3009 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
3010 obstack. */
3012 static vn_nary_op_t
3013 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
3015 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length, &vn_tables_obstack);
3017 vno1->value_id = value_id;
3018 vno1->length = length;
3019 vno1->predicated_values = 0;
3020 vno1->u.result = result;
3022 return vno1;
3025 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
3026 VNO->HASHCODE first. */
3028 static vn_nary_op_t
3029 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
3030 bool compute_hash)
3032 vn_nary_op_s **slot;
3034 if (compute_hash)
3036 vno->hashcode = vn_nary_op_compute_hash (vno);
3037 gcc_assert (! vno->predicated_values
3038 || (! vno->u.values->next
3039 && vno->u.values->valid_dominated_by_p[0] != EXIT_BLOCK
3040 && vno->u.values->valid_dominated_by_p[1] == EXIT_BLOCK));
3043 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
3044 vno->unwind_to = *slot;
3045 if (*slot)
3047 /* Prefer non-predicated values.
3048 ??? Only if those are constant, otherwise, with constant predicated
3049 value, turn them into predicated values with entry-block validity
3050 (??? but we always find the first valid result currently). */
3051 if ((*slot)->predicated_values
3052 && ! vno->predicated_values)
3054 /* ??? We cannot remove *slot from the unwind stack list.
3055 For the moment we deal with this by skipping not found
3056 entries but this isn't ideal ... */
3057 *slot = vno;
3058 /* ??? Maintain a stack of states we can unwind in
3059 vn_nary_op_s? But how far do we unwind? In reality
3060 we need to push change records somewhere... Or not
3061 unwind vn_nary_op_s and linking them but instead
3062 unwind the results "list", linking that, which also
3063 doesn't move on hashtable resize. */
3064 /* We can also have a ->unwind_to recording *slot there.
3065 That way we can make u.values a fixed size array with
3066 recording the number of entries but of course we then
3067 have always N copies for each unwind_to-state. Or we
3068 make sure to only ever append and each unwinding will
3069 pop off one entry (but how to deal with predicated
3070 replaced with non-predicated here?) */
3071 vno->next = last_inserted_nary;
3072 last_inserted_nary = vno;
3073 return vno;
3075 else if (vno->predicated_values
3076 && ! (*slot)->predicated_values)
3077 return *slot;
3078 else if (vno->predicated_values
3079 && (*slot)->predicated_values)
3081 /* ??? Factor this all into a insert_single_predicated_value
3082 routine. */
3083 gcc_assert (!vno->u.values->next && vno->u.values->n == 1);
3084 basic_block vno_bb
3085 = BASIC_BLOCK_FOR_FN (cfun, vno->u.values->valid_dominated_by_p[0]);
3086 vn_pval *nval = vno->u.values;
3087 vn_pval **next = &vno->u.values;
3088 bool found = false;
3089 for (vn_pval *val = (*slot)->u.values; val; val = val->next)
3091 if (expressions_equal_p (val->result, vno->u.values->result))
3093 found = true;
3094 for (unsigned i = 0; i < val->n; ++i)
3096 basic_block val_bb
3097 = BASIC_BLOCK_FOR_FN (cfun,
3098 val->valid_dominated_by_p[i]);
3099 if (dominated_by_p (CDI_DOMINATORS, vno_bb, val_bb))
3100 /* Value registered with more generic predicate. */
3101 return *slot;
3102 else if (dominated_by_p (CDI_DOMINATORS, val_bb, vno_bb))
3103 /* Shouldn't happen, we insert in RPO order. */
3104 gcc_unreachable ();
3106 /* Append value. */
3107 *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
3108 sizeof (vn_pval)
3109 + val->n * sizeof (int));
3110 (*next)->next = NULL;
3111 (*next)->result = val->result;
3112 (*next)->n = val->n + 1;
3113 memcpy ((*next)->valid_dominated_by_p,
3114 val->valid_dominated_by_p,
3115 val->n * sizeof (int));
3116 (*next)->valid_dominated_by_p[val->n] = vno_bb->index;
3117 next = &(*next)->next;
3118 if (dump_file && (dump_flags & TDF_DETAILS))
3119 fprintf (dump_file, "Appending predicate to value.\n");
3120 continue;
3122 /* Copy other predicated values. */
3123 *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
3124 sizeof (vn_pval)
3125 + (val->n-1) * sizeof (int));
3126 memcpy (*next, val, sizeof (vn_pval) + (val->n-1) * sizeof (int));
3127 (*next)->next = NULL;
3128 next = &(*next)->next;
3130 if (!found)
3131 *next = nval;
3133 *slot = vno;
3134 vno->next = last_inserted_nary;
3135 last_inserted_nary = vno;
3136 return vno;
3139 /* While we do not want to insert things twice it's awkward to
3140 avoid it in the case where visit_nary_op pattern-matches stuff
3141 and ends up simplifying the replacement to itself. We then
3142 get two inserts, one from visit_nary_op and one from
3143 vn_nary_build_or_lookup.
3144 So allow inserts with the same value number. */
3145 if ((*slot)->u.result == vno->u.result)
3146 return *slot;
3149 /* ??? There's also optimistic vs. previous committed state merging
3150 that is problematic for the case of unwinding. */
3152 /* ??? We should return NULL if we do not use 'vno' and have the
3153 caller release it. */
3154 gcc_assert (!*slot);
3156 *slot = vno;
3157 vno->next = last_inserted_nary;
3158 last_inserted_nary = vno;
3159 return vno;
3162 /* Insert an n-ary operation into the current hash table using its
3163 pieces. Return the vn_nary_op_t structure we created and put in
3164 the hashtable. */
3166 vn_nary_op_t
3167 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
3168 tree type, tree *ops,
3169 tree result, unsigned int value_id)
3171 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
3172 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
3173 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
3176 static vn_nary_op_t
3177 vn_nary_op_insert_pieces_predicated (unsigned int length, enum tree_code code,
3178 tree type, tree *ops,
3179 tree result, unsigned int value_id,
3180 edge pred_e)
3182 /* ??? Currently tracking BBs. */
3183 if (! single_pred_p (pred_e->dest))
3185 /* Never record for backedges. */
3186 if (pred_e->flags & EDGE_DFS_BACK)
3187 return NULL;
3188 edge_iterator ei;
3189 edge e;
3190 int cnt = 0;
3191 /* Ignore backedges. */
3192 FOR_EACH_EDGE (e, ei, pred_e->dest->preds)
3193 if (! dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
3194 cnt++;
3195 if (cnt != 1)
3196 return NULL;
3198 if (dump_file && (dump_flags & TDF_DETAILS)
3199 /* ??? Fix dumping, but currently we only get comparisons. */
3200 && TREE_CODE_CLASS (code) == tcc_comparison)
3202 fprintf (dump_file, "Recording on edge %d->%d ", pred_e->src->index,
3203 pred_e->dest->index);
3204 print_generic_expr (dump_file, ops[0], TDF_SLIM);
3205 fprintf (dump_file, " %s ", get_tree_code_name (code));
3206 print_generic_expr (dump_file, ops[1], TDF_SLIM);
3207 fprintf (dump_file, " == %s\n",
3208 integer_zerop (result) ? "false" : "true");
3210 vn_nary_op_t vno1 = alloc_vn_nary_op (length, NULL_TREE, value_id);
3211 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
3212 vno1->predicated_values = 1;
3213 vno1->u.values = (vn_pval *) obstack_alloc (&vn_tables_obstack,
3214 sizeof (vn_pval));
3215 vno1->u.values->next = NULL;
3216 vno1->u.values->result = result;
3217 vno1->u.values->n = 1;
3218 vno1->u.values->valid_dominated_by_p[0] = pred_e->dest->index;
3219 vno1->u.values->valid_dominated_by_p[1] = EXIT_BLOCK;
3220 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
3223 static bool
3224 dominated_by_p_w_unex (basic_block bb1, basic_block bb2);
3226 static tree
3227 vn_nary_op_get_predicated_value (vn_nary_op_t vno, basic_block bb)
3229 if (! vno->predicated_values)
3230 return vno->u.result;
3231 for (vn_pval *val = vno->u.values; val; val = val->next)
3232 for (unsigned i = 0; i < val->n; ++i)
3233 if (dominated_by_p_w_unex (bb,
3234 BASIC_BLOCK_FOR_FN
3235 (cfun, val->valid_dominated_by_p[i])))
3236 return val->result;
3237 return NULL_TREE;
3240 /* Insert OP into the current hash table with a value number of
3241 RESULT. Return the vn_nary_op_t structure we created and put in
3242 the hashtable. */
3244 vn_nary_op_t
3245 vn_nary_op_insert (tree op, tree result)
3247 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
3248 vn_nary_op_t vno1;
3250 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
3251 init_vn_nary_op_from_op (vno1, op);
3252 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
3255 /* Insert the rhs of STMT into the current hash table with a value number of
3256 RESULT. */
3258 static vn_nary_op_t
3259 vn_nary_op_insert_stmt (gimple *stmt, tree result)
3261 vn_nary_op_t vno1
3262 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
3263 result, VN_INFO (result)->value_id);
3264 init_vn_nary_op_from_stmt (vno1, stmt);
3265 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
3268 /* Compute a hashcode for PHI operation VP1 and return it. */
3270 static inline hashval_t
3271 vn_phi_compute_hash (vn_phi_t vp1)
3273 inchash::hash hstate (EDGE_COUNT (vp1->block->preds) > 2
3274 ? vp1->block->index : EDGE_COUNT (vp1->block->preds));
3275 tree phi1op;
3276 tree type;
3277 edge e;
3278 edge_iterator ei;
3280 /* If all PHI arguments are constants we need to distinguish
3281 the PHI node via its type. */
3282 type = vp1->type;
3283 hstate.merge_hash (vn_hash_type (type));
3285 FOR_EACH_EDGE (e, ei, vp1->block->preds)
3287 /* Don't hash backedge values; they need to be handled as VN_TOP
3288 for optimistic value-numbering. */
3289 if (e->flags & EDGE_DFS_BACK)
3290 continue;
3292 phi1op = vp1->phiargs[e->dest_idx];
3293 if (phi1op == VN_TOP)
3294 continue;
3295 inchash::add_expr (phi1op, hstate);
3298 return hstate.end ();
3302 /* Return true if COND1 and COND2 represent the same condition, set
3303 *INVERTED_P if one needs to be inverted to make it the same as
3304 the other. */
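/* Example (ours): if (a < b) and if (b > a) represent the same
   condition with *INVERTED_P false; if (a >= b) represents it with
   *INVERTED_P true (modulo NaN handling via HONOR_NANS).  */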
3306 static bool
3307 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
3308 gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
3310 enum tree_code code1 = gimple_cond_code (cond1);
3311 enum tree_code code2 = gimple_cond_code (cond2);
3313 *inverted_p = false;
3314 if (code1 == code2)
3316 else if (code1 == swap_tree_comparison (code2))
3317 std::swap (lhs2, rhs2);
3318 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
3319 *inverted_p = true;
3320 else if (code1 == invert_tree_comparison
3321 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
3323 std::swap (lhs2, rhs2);
3324 *inverted_p = true;
3326 else
3327 return false;
3329 return ((expressions_equal_p (lhs1, lhs2)
3330 && expressions_equal_p (rhs1, rhs2))
3331 || (commutative_tree_code (code1)
3332 && expressions_equal_p (lhs1, rhs2)
3333 && expressions_equal_p (rhs1, lhs2)));
3336 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
3338 static int
3339 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
3341 if (vp1->hashcode != vp2->hashcode)
3342 return false;
3344 if (vp1->block != vp2->block)
3346 if (EDGE_COUNT (vp1->block->preds) != EDGE_COUNT (vp2->block->preds))
3347 return false;
3349 switch (EDGE_COUNT (vp1->block->preds))
3351 case 1:
3352 /* Single-arg PHIs are just copies. */
3353 break;
3355 case 2:
3357 /* Rule out backedges into the PHI. */
3358 if (vp1->block->loop_father->header == vp1->block
3359 || vp2->block->loop_father->header == vp2->block)
3360 return false;
3362 /* If the PHI nodes do not have compatible types
3363 they are not the same. */
3364 if (!types_compatible_p (vp1->type, vp2->type))
3365 return false;
3367 basic_block idom1
3368 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3369 basic_block idom2
3370 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
3371 /* If the immediate dominators end in switch stmts, multiple
3372 values may end up in the same PHI arg via intermediate
3373 CFG merges. */
3374 if (EDGE_COUNT (idom1->succs) != 2
3375 || EDGE_COUNT (idom2->succs) != 2)
3376 return false;
3378 /* Verify the controlling stmt is the same. */
3379 gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
3380 gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
3381 if (! last1 || ! last2)
3382 return false;
3383 bool inverted_p;
3384 if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
3385 last2, vp2->cclhs, vp2->ccrhs,
3386 &inverted_p))
3387 return false;
3389 /* Get at true/false controlled edges into the PHI. */
3390 edge te1, te2, fe1, fe2;
3391 if (! extract_true_false_controlled_edges (idom1, vp1->block,
3392 &te1, &fe1)
3393 || ! extract_true_false_controlled_edges (idom2, vp2->block,
3394 &te2, &fe2))
3395 return false;
3397 /* Swap edges if the second condition is the inverted of the
3398 first. */
3399 if (inverted_p)
3400 std::swap (te2, fe2);
3402 /* ??? Handle VN_TOP specially. */
3403 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
3404 vp2->phiargs[te2->dest_idx])
3405 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
3406 vp2->phiargs[fe2->dest_idx]))
3407 return false;
3409 return true;
3412 default:
3413 return false;
3417 /* If the PHI nodes do not have compatible types
3418 they are not the same. */
3419 if (!types_compatible_p (vp1->type, vp2->type))
3420 return false;
3422 /* Any phi in the same block will have its arguments in the
3423 same edge order, because of how we store phi nodes. */
3424 for (unsigned i = 0; i < EDGE_COUNT (vp1->block->preds); ++i)
3426 tree phi1op = vp1->phiargs[i];
3427 tree phi2op = vp2->phiargs[i];
3428 if (phi1op == VN_TOP || phi2op == VN_TOP)
3429 continue;
3430 if (!expressions_equal_p (phi1op, phi2op))
3431 return false;
3434 return true;
3437 /* Lookup PHI in the current hash table, and return the resulting
3438 value number if it exists in the hash table. Return NULL_TREE if
3439 it does not exist in the hash table. */
3441 static tree
3442 vn_phi_lookup (gimple *phi, bool backedges_varying_p)
3444 vn_phi_s **slot;
3445 struct vn_phi_s *vp1;
3446 edge e;
3447 edge_iterator ei;
3449 vp1 = XALLOCAVAR (struct vn_phi_s,
3450 sizeof (struct vn_phi_s)
3451 + (gimple_phi_num_args (phi) - 1) * sizeof (tree));
3453 /* Canonicalize the SSA_NAME's to their value number. */
3454 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3456 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3457 if (TREE_CODE (def) == SSA_NAME
3458 && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
3459 def = SSA_VAL (def);
3460 vp1->phiargs[e->dest_idx] = def;
3462 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3463 vp1->block = gimple_bb (phi);
3464 /* Extract values of the controlling condition. */
3465 vp1->cclhs = NULL_TREE;
3466 vp1->ccrhs = NULL_TREE;
3467 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3468 if (EDGE_COUNT (idom1->succs) == 2)
3469 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3471 /* ??? We want to use SSA_VAL here. But possibly not
3472 allow VN_TOP. */
3473 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3474 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3476 vp1->hashcode = vn_phi_compute_hash (vp1);
3477 slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, NO_INSERT);
3478 if (!slot)
3479 return NULL_TREE;
3480 return (*slot)->result;
3483 /* Insert PHI into the current hash table with a value number of
3484 RESULT. */
3486 static vn_phi_t
3487 vn_phi_insert (gimple *phi, tree result, bool backedges_varying_p)
3489 vn_phi_s **slot;
3490 vn_phi_t vp1 = (vn_phi_t) obstack_alloc (&vn_tables_obstack,
3491 sizeof (vn_phi_s)
3492 + ((gimple_phi_num_args (phi) - 1)
3493 * sizeof (tree)));
3494 edge e;
3495 edge_iterator ei;
3497 /* Canonicalize the SSA_NAME's to their value number. */
3498 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3500 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3501 if (TREE_CODE (def) == SSA_NAME
3502 && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
3503 def = SSA_VAL (def);
3504 vp1->phiargs[e->dest_idx] = def;
3506 vp1->value_id = VN_INFO (result)->value_id;
3507 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3508 vp1->block = gimple_bb (phi);
3509 /* Extract values of the controlling condition. */
3510 vp1->cclhs = NULL_TREE;
3511 vp1->ccrhs = NULL_TREE;
3512 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3513 if (EDGE_COUNT (idom1->succs) == 2)
3514 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3516 /* ??? We want to use SSA_VAL here. But possibly not
3517 allow VN_TOP. */
3518 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3519 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3521 vp1->result = result;
3522 vp1->hashcode = vn_phi_compute_hash (vp1);
3524 slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
3525 gcc_assert (!*slot);
3527 *slot = vp1;
3528 vp1->next = last_inserted_phi;
3529 last_inserted_phi = vp1;
3530 return vp1;
3534 /* Return true if BB1 is dominated by BB2 taking into account edges
3535 that are not executable. */
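/* Example (ours): if bb2 ends in a condition already known to be
   true, only one of its successor edges is executable.  A block bb1
   reachable only through that successor is then effectively
   dominated by the successor even when the static dominator tree
   says otherwise; we detect this by iterating through single
   executable predecessor/successor edges.  */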
3537 static bool
3538 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
3540 edge_iterator ei;
3541 edge e;
3543 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3544 return true;
3546 /* Before iterating we'd like to know if there exists an
3547 (executable) path from bb2 to bb1 at all; if not we can
3548 directly return false. For now simply iterate once. */
3550 /* Iterate to the single executable bb1 predecessor. */
3551 if (EDGE_COUNT (bb1->preds) > 1)
3553 edge prede = NULL;
3554 FOR_EACH_EDGE (e, ei, bb1->preds)
3555 if (e->flags & EDGE_EXECUTABLE)
3557 if (prede)
3559 prede = NULL;
3560 break;
3562 prede = e;
3564 if (prede)
3566 bb1 = prede->src;
3568 /* Re-do the dominance check with changed bb1. */
3569 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3570 return true;
3574 /* Iterate to the single executable bb2 successor. */
3575 edge succe = NULL;
3576 FOR_EACH_EDGE (e, ei, bb2->succs)
3577 if (e->flags & EDGE_EXECUTABLE)
3579 if (succe)
3581 succe = NULL;
3582 break;
3584 succe = e;
3586 if (succe)
3588 /* Verify the reached block is only reached through succe.
3589 If there is only one edge we can spare us the dominator
3590 check and iterate directly. */
3591 if (EDGE_COUNT (succe->dest->preds) > 1)
3593 FOR_EACH_EDGE (e, ei, succe->dest->preds)
3594 if (e != succe
3595 && (e->flags & EDGE_EXECUTABLE))
3597 succe = NULL;
3598 break;
3601 if (succe)
3603 bb2 = succe->dest;
3605 /* Re-do the dominance check with changed bb2. */
3606 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3607 return true;
3611 /* We could now iterate updating bb1 / bb2. */
3612 return false;
3615 /* Set the value number of FROM to TO, return true if it has changed
3616 as a result. */
3618 static inline bool
3619 set_ssa_val_to (tree from, tree to)
3621 vn_ssa_aux_t from_info = VN_INFO (from);
3622 tree currval = from_info->valnum; // SSA_VAL (from)
3623 poly_int64 toff, coff;
3625 /* The only thing we allow as value numbers are ssa_names
3626 and invariants. So assert that here. We don't allow VN_TOP
3627 as visiting a stmt should produce a value-number other than
3628 that.
3629 ??? Still VN_TOP can happen for unreachable code, so force
3630 it to varying in that case. Not all code is prepared to
3631 get VN_TOP on valueization. */
3632 if (to == VN_TOP)
3634 /* ??? When iterating and visiting PHI <undef, backedge-value>
3635 for the first time we rightfully get VN_TOP and we need to
3636 preserve that to optimize for example gcc.dg/tree-ssa/ssa-sccvn-2.c.
3637 With SCCVN we were simply lucky we iterated the other PHI
3638 cycles first and thus visited the backedge-value DEF. */
3639 if (currval == VN_TOP)
3640 goto set_and_exit;
3641 if (dump_file && (dump_flags & TDF_DETAILS))
3642 fprintf (dump_file, "Forcing value number to varying on "
3643 "receiving VN_TOP\n");
3644 to = from;
3647 gcc_checking_assert (to != NULL_TREE
3648 && ((TREE_CODE (to) == SSA_NAME
3649 && (to == from || SSA_VAL (to) == to))
3650 || is_gimple_min_invariant (to)));
3652 if (from != to)
3654 if (currval == from)
3656 if (dump_file && (dump_flags & TDF_DETAILS))
3658 fprintf (dump_file, "Not changing value number of ");
3659 print_generic_expr (dump_file, from);
3660 fprintf (dump_file, " from VARYING to ");
3661 print_generic_expr (dump_file, to);
3662 fprintf (dump_file, "\n");
3664 return false;
3666 else if (currval != VN_TOP
3667 && ! is_gimple_min_invariant (currval)
3668 && ! ssa_undefined_value_p (currval, false)
3669 && is_gimple_min_invariant (to))
3671 if (dump_file && (dump_flags & TDF_DETAILS))
3673 fprintf (dump_file, "Forcing VARYING instead of changing "
3674 "value number of ");
3675 print_generic_expr (dump_file, from);
3676 fprintf (dump_file, " from ");
3677 print_generic_expr (dump_file, currval);
3678 fprintf (dump_file, " (non-constant) to ");
3679 print_generic_expr (dump_file, to);
3680 fprintf (dump_file, " (constant)\n");
3682 to = from;
3684 else if (TREE_CODE (to) == SSA_NAME
3685 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3686 to = from;
3689 set_and_exit:
3690 if (dump_file && (dump_flags & TDF_DETAILS))
3692 fprintf (dump_file, "Setting value number of ");
3693 print_generic_expr (dump_file, from);
3694 fprintf (dump_file, " to ");
3695 print_generic_expr (dump_file, to);
3698 if (currval != to
3699 && !operand_equal_p (currval, to, 0)
3700 /* Different undefined SSA names are not actually different. See
3701 PR82320 for a testcase where we'd otherwise not terminate iteration. */
3702 && !(TREE_CODE (currval) == SSA_NAME
3703 && TREE_CODE (to) == SSA_NAME
3704 && ssa_undefined_value_p (currval, false)
3705 && ssa_undefined_value_p (to, false))
3706 /* ??? For addresses involving volatile objects or types operand_equal_p
3707 does not reliably detect ADDR_EXPRs as equal. We know we are only
3708 getting invariant gimple addresses here, so can use
3709 get_addr_base_and_unit_offset to do this comparison. */
3710 && !(TREE_CODE (currval) == ADDR_EXPR
3711 && TREE_CODE (to) == ADDR_EXPR
3712 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3713 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3714 && known_eq (coff, toff)))
3716 if (dump_file && (dump_flags & TDF_DETAILS))
3717 fprintf (dump_file, " (changed)\n");
3718 from_info->valnum = to;
3719 return true;
3721 if (dump_file && (dump_flags & TDF_DETAILS))
3722 fprintf (dump_file, "\n");
3723 return false;
3726 /* Set all definitions in STMT to value number to themselves.
3727 Return true if a value number changed. */
3729 static bool
3730 defs_to_varying (gimple *stmt)
3732 bool changed = false;
3733 ssa_op_iter iter;
3734 def_operand_p defp;
3736 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3738 tree def = DEF_FROM_PTR (defp);
3739 changed |= set_ssa_val_to (def, def);
3741 return changed;
3744 /* Visit a copy between LHS and RHS, return true if the value number
3745 changed. */
3747 static bool
3748 visit_copy (tree lhs, tree rhs)
3750 /* Valueize. */
3751 rhs = SSA_VAL (rhs);
3753 return set_ssa_val_to (lhs, rhs);
3756 /* Lookup a value for OP in type WIDE_TYPE where the value in the type
3757 of OP is the same. */
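/* Example (ours): for op a_1 and wide_type long this returns an
   existing (long) a_1 value, the wider source a_1 was truncated
   from, or, for an INTEGER_CST, the constant extended to long.  */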
3759 static tree
3760 valueized_wider_op (tree wide_type, tree op)
3762 if (TREE_CODE (op) == SSA_NAME)
3763 op = vn_valueize (op);
3765 /* Either the op is already available in the wider type. */
3766 tree ops[3] = {};
3767 ops[0] = op;
3768 tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
3769 wide_type, ops, NULL);
3770 if (tem)
3771 return tem;
3773 /* Or the op is truncated from some existing value. */
3774 if (TREE_CODE (op) == SSA_NAME)
3776 gimple *def = SSA_NAME_DEF_STMT (op);
3777 if (is_gimple_assign (def)
3778 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3780 tem = gimple_assign_rhs1 (def);
3781 if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
3783 if (TREE_CODE (tem) == SSA_NAME)
3784 tem = vn_valueize (tem);
3785 return tem;
3790 /* For a constant simply extend it. */
3791 if (TREE_CODE (op) == INTEGER_CST)
3792 return wide_int_to_tree (wide_type, wi::to_wide (op));
3794 return NULL_TREE;
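/* A minimal source-level example (hypothetical, not from this file) of
   the redundancy valueized_wider_op and the CASE_CONVERT handling below
   are after: the extension of the narrow multiply can reuse the wider
   multiply that already exists, as WIDE & 0xff for the zero-extension
   case, or directly for a pure sign-change.  */

static unsigned int
sketch_widen_example (unsigned char a, unsigned char b)
{
  unsigned int wide = (unsigned int) a * (unsigned int) b;
  unsigned char narrow = (unsigned char) (a * b);
  /* (unsigned int) narrow can be value-numbered to wide & 0xff
     instead of keeping the truncate/extend pair alive.  */
  return wide + (unsigned int) narrow;
}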
3797 /* Visit a nary operator RHS, value number it, and return true if the
3798 value number of LHS has changed as a result. */
3800 static bool
3801 visit_nary_op (tree lhs, gassign *stmt)
3803 vn_nary_op_t vnresult;
3804 tree result = vn_nary_op_lookup_stmt (stmt, &vnresult);
3805 if (! result && vnresult)
3806 result = vn_nary_op_get_predicated_value (vnresult, gimple_bb (stmt));
3807 if (result)
3808 return set_ssa_val_to (lhs, result);
3810 /* Do some special pattern matching for redundancies of operations
3811 in different types. */
3812 enum tree_code code = gimple_assign_rhs_code (stmt);
3813 tree type = TREE_TYPE (lhs);
3814 tree rhs1 = gimple_assign_rhs1 (stmt);
3815 switch (code)
3817 CASE_CONVERT:
3818 /* Match arithmetic done in a different type where we can easily
3819 substitute the result from some earlier sign-changed or widened
3820 operation. */
3821 if (INTEGRAL_TYPE_P (type)
3822 && TREE_CODE (rhs1) == SSA_NAME
3823 /* We only handle sign-changes or zero-extension -> & mask. */
3824 && ((TYPE_UNSIGNED (TREE_TYPE (rhs1))
3825 && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
3826 || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
3828 gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
3829 if (def
3830 && (gimple_assign_rhs_code (def) == PLUS_EXPR
3831 || gimple_assign_rhs_code (def) == MINUS_EXPR
3832 || gimple_assign_rhs_code (def) == MULT_EXPR))
3834 tree ops[3] = {};
3835 /* See whether the operands are available widened to TYPE. */
3836 ops[0] = valueized_wider_op (type,
3837 gimple_assign_rhs1 (def));
3838 if (ops[0])
3839 ops[1] = valueized_wider_op (type,
3840 gimple_assign_rhs2 (def));
3841 if (ops[0] && ops[1])
3843 ops[0] = vn_nary_op_lookup_pieces
3844 (2, gimple_assign_rhs_code (def), type, ops, NULL);
3845 /* We have the wider operation available. */
3846 if (ops[0])
3848 unsigned lhs_prec = TYPE_PRECISION (type);
3849 unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
3850 if (lhs_prec == rhs_prec)
3852 gimple_match_op match_op (gimple_match_cond::UNCOND,
3853 NOP_EXPR, type, ops[0]);
3854 result = vn_nary_build_or_lookup (&match_op);
3855 if (result)
3857 bool changed = set_ssa_val_to (lhs, result);
3858 vn_nary_op_insert_stmt (stmt, result);
3859 return changed;
3862 else
3864 tree mask = wide_int_to_tree
3865 (type, wi::mask (rhs_prec, false, lhs_prec));
3866 gimple_match_op match_op (gimple_match_cond::UNCOND,
3867 BIT_AND_EXPR,
3868 TREE_TYPE (lhs),
3869 ops[0], mask);
3870 result = vn_nary_build_or_lookup (&match_op);
3871 if (result)
3873 bool changed = set_ssa_val_to (lhs, result);
3874 vn_nary_op_insert_stmt (stmt, result);
3875 return changed;
3882 default:;
3885 bool changed = set_ssa_val_to (lhs, lhs);
3886 vn_nary_op_insert_stmt (stmt, lhs);
3887 return changed;
3890 /* Visit a call STMT storing into LHS. Return true if the value number
3891 of the LHS has changed as a result. */
3893 static bool
3894 visit_reference_op_call (tree lhs, gcall *stmt)
3896 bool changed = false;
3897 struct vn_reference_s vr1;
3898 vn_reference_t vnresult = NULL;
3899 tree vdef = gimple_vdef (stmt);
3901 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
3902 if (lhs && TREE_CODE (lhs) != SSA_NAME)
3903 lhs = NULL_TREE;
3905 vn_reference_lookup_call (stmt, &vnresult, &vr1);
3906 if (vnresult)
3908 if (vnresult->result_vdef && vdef)
3909 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
3910 else if (vdef)
3911 /* If the call was discovered to be pure or const reflect
3912 that as far as possible. */
3913 changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
3915 if (!vnresult->result && lhs)
3916 vnresult->result = lhs;
3918 if (vnresult->result && lhs)
3919 changed |= set_ssa_val_to (lhs, vnresult->result);
3921 else
3923 vn_reference_t vr2;
3924 vn_reference_s **slot;
3925 tree vdef_val = vdef;
3926 if (vdef)
3928 /* If we value numbered an indirect call's function to
3929 one not clobbering memory, value number its VDEF to its
3930 VUSE. */
3931 tree fn = gimple_call_fn (stmt);
3932 if (fn && TREE_CODE (fn) == SSA_NAME)
3934 fn = SSA_VAL (fn);
3935 if (TREE_CODE (fn) == ADDR_EXPR
3936 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
3937 && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
3938 & (ECF_CONST | ECF_PURE)))
3939 vdef_val = vuse_ssa_val (gimple_vuse (stmt));
3941 changed |= set_ssa_val_to (vdef, vdef_val);
3943 if (lhs)
3944 changed |= set_ssa_val_to (lhs, lhs);
3945 vr2 = XOBNEW (&vn_tables_obstack, vn_reference_s);
3946 vr2->vuse = vr1.vuse;
3947 /* As we are not walking the virtual operand chain we know the
3948 shared_lookup_references are still original so we can re-use
3949 them here. */
3950 vr2->operands = vr1.operands.copy ();
3951 vr2->type = vr1.type;
3952 vr2->set = vr1.set;
3953 vr2->hashcode = vr1.hashcode;
3954 vr2->result = lhs;
3955 vr2->result_vdef = vdef_val;
3956 slot = valid_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3957 INSERT);
3958 gcc_assert (!*slot);
3959 *slot = vr2;
3960 vr2->next = last_inserted_ref;
3961 last_inserted_ref = vr2;
3964 return changed;
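/* A hedged source-level illustration (invented names, not part of this
   file) of what visit_reference_op_call achieves: two calls to a pure
   function with the same arguments and the same incoming memory state
   receive the same value number, so the second call is fully
   redundant.  */

extern int sketch_pure_fn (int) __attribute__ ((pure));

static int
sketch_call_example (int x)
{
  int a = sketch_pure_fn (x);
  int b = sketch_pure_fn (x);  /* same vuse and operands: CSEd to A */
  return a + b;
}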
3967 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3968 and return true if the value number of the LHS has changed as a result. */
3970 static bool
3971 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
3973 bool changed = false;
3974 tree last_vuse;
3975 tree result;
3977 last_vuse = gimple_vuse (stmt);
3978 last_vuse_ptr = &last_vuse;
3979 result = vn_reference_lookup (op, gimple_vuse (stmt),
3980 default_vn_walk_kind, NULL, true);
3981 last_vuse_ptr = NULL;
3983 /* We handle type-punning through unions by value-numbering based
3984 on offset and size of the access. Be prepared to handle a
3985 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
3986 if (result
3987 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3989 /* We will be setting the value number of lhs to the value number
3990 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3991 So first simplify and look up this expression to see if it
3992 is already available. */
3993 gimple_match_op res_op (gimple_match_cond::UNCOND,
3994 VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
3995 result = vn_nary_build_or_lookup (&res_op);
3996 /* When building the conversion fails avoid inserting the reference
3997 again. */
3998 if (!result)
3999 return set_ssa_val_to (lhs, lhs);
4002 if (result)
4003 changed = set_ssa_val_to (lhs, result);
4004 else
4006 changed = set_ssa_val_to (lhs, lhs);
4007 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
4010 return changed;
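/* A minimal example (hypothetical source) of the union type-punning the
   load handling above supports: the float load reads exactly the bytes
   the int store wrote, so the reference lookup finds the stored value
   and the type mismatch is papered over with a VIEW_CONVERT_EXPR.  */

static float
sketch_pun_example (void)
{
  union { int i; float f; } u;
  u.i = 0x3f800000;  /* bit pattern of 1.0f on IEEE-754 targets */
  return u.f;        /* value-numbered via the int store above */
}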
4014 /* Visit a store to a reference operator LHS, part of STMT, value number it,
4015 and return true if the value number of the LHS has changed as a result. */
4017 static bool
4018 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
4020 bool changed = false;
4021 vn_reference_t vnresult = NULL;
4022 tree assign;
4023 bool resultsame = false;
4024 tree vuse = gimple_vuse (stmt);
4025 tree vdef = gimple_vdef (stmt);
4027 if (TREE_CODE (op) == SSA_NAME)
4028 op = SSA_VAL (op);
4030 /* First we want to look up using the *vuses* from the store and see
4031 if the last store to this location with the same address had the
4032 same value.
4034 The vuses represent the memory state before the store. If the
4035 memory state, address, and value of the store are the same as those
4036 of the last store to this location, then this store will produce the
4037 same memory state as that store.
4039 In this case the vdef versions for this store are value numbered to those
4040 vuse versions, since they represent the same memory state after
4041 this store.
4043 Otherwise, the vdefs for the store are used when inserting into
4044 the table, since the store generates a new memory state. */
4046 vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
4047 if (vnresult
4048 && vnresult->result)
4050 tree result = vnresult->result;
4051 gcc_checking_assert (TREE_CODE (result) != SSA_NAME
4052 || result == SSA_VAL (result));
4053 resultsame = expressions_equal_p (result, op);
4054 if (resultsame)
4056 /* If the TBAA state isn't compatible for downstream reads
4057 we cannot value-number the VDEFs the same. */
4058 alias_set_type set = get_alias_set (lhs);
4059 if (vnresult->set != set
4060 && ! alias_set_subset_of (set, vnresult->set))
4061 resultsame = false;
4065 if (!resultsame)
4067 /* Only perform the following when being called from PRE
4068 which embeds tail merging. */
4069 if (default_vn_walk_kind == VN_WALK)
4071 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
4072 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
4073 if (vnresult)
4075 VN_INFO (vdef)->visited = true;
4076 return set_ssa_val_to (vdef, vnresult->result_vdef);
4080 if (dump_file && (dump_flags & TDF_DETAILS))
4082 fprintf (dump_file, "No store match\n");
4083 fprintf (dump_file, "Value numbering store ");
4084 print_generic_expr (dump_file, lhs);
4085 fprintf (dump_file, " to ");
4086 print_generic_expr (dump_file, op);
4087 fprintf (dump_file, "\n");
4089 /* Have to set value numbers before insert, since insert is
4090 going to valueize the references in-place. */
4091 if (vdef)
4092 changed |= set_ssa_val_to (vdef, vdef);
4094 /* Do not insert structure copies into the tables. */
4095 if (is_gimple_min_invariant (op)
4096 || is_gimple_reg (op))
4097 vn_reference_insert (lhs, op, vdef, NULL);
4099 /* Only perform the following when being called from PRE
4100 which embeds tail merging. */
4101 if (default_vn_walk_kind == VN_WALK)
4103 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
4104 vn_reference_insert (assign, lhs, vuse, vdef);
4107 else
4109 /* We had a match, so value number the vdef to have the value
4110 number of the vuse it came from. */
4112 if (dump_file && (dump_flags & TDF_DETAILS))
4113 fprintf (dump_file, "Store matched earlier value, "
4114 "value numbering store vdefs to matching vuses.\n");
4116 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
4119 return changed;
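/* A hedged source-level example (not part of this file) of the lookup
   strategy visit_reference_op_store just implemented: the second store
   writes the value the location already holds, so its VDEF is
   value-numbered to the incoming VUSE and later passes can delete the
   store.  */

static int sketch_g;

static void
sketch_store_example (void)
{
  sketch_g = 42;  /* creates a new memory state */
  sketch_g = 42;  /* same address and value: memory state unchanged */
}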
4122 /* Visit and value number PHI, return true if the value number
4123 changed. When BACKEDGES_VARYING_P is true then assume all
4124 backedge values are varying. When INSERTED is not NULL then
4125 this is just an ahead query for a possible iteration; set INSERTED
4126 to true if we'd insert into the hashtable. */
4128 static bool
4129 visit_phi (gimple *phi, bool *inserted, bool backedges_varying_p)
4131 tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
4132 tree backedge_val = NULL_TREE;
4133 bool seen_non_backedge = false;
4134 tree sameval_base = NULL_TREE;
4135 poly_int64 soff, doff;
4136 unsigned n_executable = 0;
4137 edge_iterator ei;
4138 edge e;
4140 /* TODO: We could check for this in initialization, and replace this
4141 with a gcc_assert. */
4142 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
4143 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
4145 /* We track whether a PHI was CSEd to, to avoid excessive iterations
4146 that would be necessary only because the PHI changed arguments
4147 but not value. */
4148 if (!inserted)
4149 gimple_set_plf (phi, GF_PLF_1, false);
4151 /* See if all non-TOP arguments have the same value. TOP is
4152 equivalent to everything, so we can ignore it. */
4153 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
4154 if (e->flags & EDGE_EXECUTABLE)
4156 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4158 ++n_executable;
4159 if (TREE_CODE (def) == SSA_NAME)
4161 if (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK))
4162 def = SSA_VAL (def);
4163 if (e->flags & EDGE_DFS_BACK)
4164 backedge_val = def;
4166 if (!(e->flags & EDGE_DFS_BACK))
4167 seen_non_backedge = true;
4168 if (def == VN_TOP)
4170 /* Ignore undefined defs for sameval but record one. */
4171 else if (TREE_CODE (def) == SSA_NAME
4172 && ! virtual_operand_p (def)
4173 && ssa_undefined_value_p (def, false))
4174 seen_undef = def;
4175 else if (sameval == VN_TOP)
4176 sameval = def;
4177 else if (!expressions_equal_p (def, sameval))
4179 /* We know we're arriving only with invariant addresses here,
4180 try harder comparing them. We can do some caching here
4181 which we cannot do in expressions_equal_p. */
4182 if (TREE_CODE (def) == ADDR_EXPR
4183 && TREE_CODE (sameval) == ADDR_EXPR
4184 && sameval_base != (void *)-1)
4186 if (!sameval_base)
4187 sameval_base = get_addr_base_and_unit_offset
4188 (TREE_OPERAND (sameval, 0), &soff);
4189 if (!sameval_base)
4190 sameval_base = (tree)(void *)-1;
4191 else if ((get_addr_base_and_unit_offset
4192 (TREE_OPERAND (def, 0), &doff) == sameval_base)
4193 && known_eq (soff, doff))
4194 continue;
4196 sameval = NULL_TREE;
4197 break;
4201 /* If the value we want to use is the backedge value and that wasn't
4202 visited yet, or if we should take it as VARYING but it has a
4203 non-VARYING value, drop to VARYING. This only happens when not iterating.
4204 If we value-number a virtual operand never value-number to the
4205 value from the backedge as that confuses the alias-walking code.
4206 See gcc.dg/torture/pr87176.c. If the value is the same on a
4207 non-backedge everything is OK though. */
4208 if (backedge_val
4209 && !seen_non_backedge
4210 && TREE_CODE (backedge_val) == SSA_NAME
4211 && sameval == backedge_val
4212 && (SSA_NAME_IS_VIRTUAL_OPERAND (backedge_val)
4213 || !SSA_VISITED (backedge_val)
4214 || SSA_VAL (backedge_val) != backedge_val))
4215 /* Note this just drops to VARYING without inserting the PHI into
4216 the hashes. */
4217 result = PHI_RESULT (phi);
4218 /* If none of the edges was executable keep the value-number at VN_TOP;
4219 if only a single edge is executable use its value. */
4220 else if (n_executable <= 1)
4221 result = seen_undef ? seen_undef : sameval;
4222 /* If we saw only undefined values and VN_TOP use one of the
4223 undefined values. */
4224 else if (sameval == VN_TOP)
4225 result = seen_undef ? seen_undef : sameval;
4226 /* First see if it is equivalent to a phi node in this block. We prefer
4227 this as it allows IV elimination - see PRs 66502 and 67167. */
4228 else if ((result = vn_phi_lookup (phi, backedges_varying_p)))
4230 if (!inserted
4231 && TREE_CODE (result) == SSA_NAME
4232 && gimple_code (SSA_NAME_DEF_STMT (result)) == GIMPLE_PHI)
4234 gimple_set_plf (SSA_NAME_DEF_STMT (result), GF_PLF_1, true);
4235 if (dump_file && (dump_flags & TDF_DETAILS))
4237 fprintf (dump_file, "Marking CSEd to PHI node ");
4238 print_gimple_expr (dump_file, SSA_NAME_DEF_STMT (result),
4239 0, TDF_SLIM);
4240 fprintf (dump_file, "\n");
4244 /* If all values are the same use that, unless we've seen undefined
4245 values as well and the value isn't constant.
4246 CCP/copyprop have the same restriction to not remove uninit warnings. */
4247 else if (sameval
4248 && (! seen_undef || is_gimple_min_invariant (sameval)))
4249 result = sameval;
4250 else
4252 result = PHI_RESULT (phi);
4253 /* Only insert PHIs that are varying; for constant value numbers
4254 we would mess up equivalences otherwise, as we are only comparing
4255 the immediate controlling predicates. */
4256 vn_phi_insert (phi, result, backedges_varying_p);
4257 if (inserted)
4258 *inserted = true;
4261 return set_ssa_val_to (PHI_RESULT (phi), result);
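/* A minimal illustration (hypothetical source) of the "all arguments
   equal" case in visit_phi: both executable predecessors feed the same
   constant into the PHI, so its result is value-numbered to 5 rather
   than to itself.  */

static int
sketch_phi_example (int c)
{
  int x;
  if (c)
    x = 5;
  else
    x = 5;
  return x;  /* PHI <5(then), 5(else)> gets value number 5 */
}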
4264 /* Try to simplify RHS using equivalences and constant folding. */
4266 static tree
4267 try_to_simplify (gassign *stmt)
4269 enum tree_code code = gimple_assign_rhs_code (stmt);
4270 tree tem;
4272 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
4273 in this case; there is no point in doing extra work. */
4274 if (code == SSA_NAME)
4275 return NULL_TREE;
4277 /* First try constant folding based on our current lattice. */
4278 mprts_hook = vn_lookup_simplify_result;
4279 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
4280 mprts_hook = NULL;
4281 if (tem
4282 && (TREE_CODE (tem) == SSA_NAME
4283 || is_gimple_min_invariant (tem)))
4284 return tem;
4286 return NULL_TREE;
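/* A hedged example (invented source) of a simplification the
   lattice-based folding in try_to_simplify enables: once B is
   value-numbered to A, the subtraction folds to 0 even though its
   syntactic operands differ.  */

static int
sketch_simplify_example (int a)
{
  int b = a;      /* B value-numbers to A */
  return a - b;   /* valueized to A - A and folded to 0 */
}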
4289 /* Visit and value number STMT, return true if the value number
4290 changed. */
4292 static bool
4293 visit_stmt (gimple *stmt, bool backedges_varying_p = false)
4295 bool changed = false;
4297 if (dump_file && (dump_flags & TDF_DETAILS))
4299 fprintf (dump_file, "Value numbering stmt = ");
4300 print_gimple_stmt (dump_file, stmt, 0);
4303 if (gimple_code (stmt) == GIMPLE_PHI)
4304 changed = visit_phi (stmt, NULL, backedges_varying_p);
4305 else if (gimple_has_volatile_ops (stmt))
4306 changed = defs_to_varying (stmt);
4307 else if (gassign *ass = dyn_cast <gassign *> (stmt))
4309 enum tree_code code = gimple_assign_rhs_code (ass);
4310 tree lhs = gimple_assign_lhs (ass);
4311 tree rhs1 = gimple_assign_rhs1 (ass);
4312 tree simplified;
4314 /* Shortcut for copies. Simplifying copies is pointless,
4315 since we copy the expression and value they represent. */
4316 if (code == SSA_NAME
4317 && TREE_CODE (lhs) == SSA_NAME)
4319 changed = visit_copy (lhs, rhs1);
4320 goto done;
4322 simplified = try_to_simplify (ass);
4323 if (simplified)
4325 if (dump_file && (dump_flags & TDF_DETAILS))
4327 fprintf (dump_file, "RHS ");
4328 print_gimple_expr (dump_file, ass, 0);
4329 fprintf (dump_file, " simplified to ");
4330 print_generic_expr (dump_file, simplified);
4331 fprintf (dump_file, "\n");
4334 /* Setting value numbers to constants will occasionally
4335 screw up phi congruence because constants are not
4336 uniquely associated with a single ssa name that can be
4337 looked up. */
4338 if (simplified
4339 && is_gimple_min_invariant (simplified)
4340 && TREE_CODE (lhs) == SSA_NAME)
4342 changed = set_ssa_val_to (lhs, simplified);
4343 goto done;
4345 else if (simplified
4346 && TREE_CODE (simplified) == SSA_NAME
4347 && TREE_CODE (lhs) == SSA_NAME)
4349 changed = visit_copy (lhs, simplified);
4350 goto done;
4353 if ((TREE_CODE (lhs) == SSA_NAME
4354 /* We can substitute SSA_NAMEs that are live over
4355 abnormal edges with their constant value. */
4356 && !(gimple_assign_copy_p (ass)
4357 && is_gimple_min_invariant (rhs1))
4358 && !(simplified
4359 && is_gimple_min_invariant (simplified))
4360 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4361 /* Stores or copies from SSA_NAMEs that are live over
4362 abnormal edges are a problem. */
4363 || (code == SSA_NAME
4364 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
4365 changed = defs_to_varying (ass);
4366 else if (REFERENCE_CLASS_P (lhs)
4367 || DECL_P (lhs))
4368 changed = visit_reference_op_store (lhs, rhs1, ass);
4369 else if (TREE_CODE (lhs) == SSA_NAME)
4371 if ((gimple_assign_copy_p (ass)
4372 && is_gimple_min_invariant (rhs1))
4373 || (simplified
4374 && is_gimple_min_invariant (simplified)))
4376 if (simplified)
4377 changed = set_ssa_val_to (lhs, simplified);
4378 else
4379 changed = set_ssa_val_to (lhs, rhs1);
4381 else
4383 /* Visit the original statement. */
4384 switch (vn_get_stmt_kind (ass))
4386 case VN_NARY:
4387 changed = visit_nary_op (lhs, ass);
4388 break;
4389 case VN_REFERENCE:
4390 changed = visit_reference_op_load (lhs, rhs1, ass);
4391 break;
4392 default:
4393 changed = defs_to_varying (ass);
4394 break;
4398 else
4399 changed = defs_to_varying (ass);
4401 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
4403 tree lhs = gimple_call_lhs (call_stmt);
4404 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4406 /* Try constant folding based on our current lattice. */
4407 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
4408 vn_valueize);
4409 if (simplified)
4411 if (dump_file && (dump_flags & TDF_DETAILS))
4413 fprintf (dump_file, "call ");
4414 print_gimple_expr (dump_file, call_stmt, 0);
4415 fprintf (dump_file, " simplified to ");
4416 print_generic_expr (dump_file, simplified);
4417 fprintf (dump_file, "\n");
4420 /* Setting value numbers to constants will occasionally
4421 screw up phi congruence because constants are not
4422 uniquely associated with a single ssa name that can be
4423 looked up. */
4424 if (simplified
4425 && is_gimple_min_invariant (simplified))
4427 changed = set_ssa_val_to (lhs, simplified);
4428 if (gimple_vdef (call_stmt))
4429 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4430 SSA_VAL (gimple_vuse (call_stmt)));
4431 goto done;
4433 else if (simplified
4434 && TREE_CODE (simplified) == SSA_NAME)
4436 changed = visit_copy (lhs, simplified);
4437 if (gimple_vdef (call_stmt))
4438 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4439 SSA_VAL (gimple_vuse (call_stmt)));
4440 goto done;
4442 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4444 changed = defs_to_varying (call_stmt);
4445 goto done;
4449 /* Pick up flags from a devirtualization target. */
4450 tree fn = gimple_call_fn (stmt);
4451 int extra_fnflags = 0;
4452 if (fn && TREE_CODE (fn) == SSA_NAME)
4454 fn = SSA_VAL (fn);
4455 if (TREE_CODE (fn) == ADDR_EXPR
4456 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4457 extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
4459 if (!gimple_call_internal_p (call_stmt)
4460 && (/* Calls to the same function with the same vuse
4461 and the same operands do not necessarily return the same
4462 value, unless they're pure or const. */
4463 ((gimple_call_flags (call_stmt) | extra_fnflags)
4464 & (ECF_PURE | ECF_CONST))
4465 /* If calls have a vdef, subsequent calls won't have
4466 the same incoming vuse. So, if 2 calls with vdef have the
4467 same vuse, we know they're not subsequent.
4468 We can value number 2 non-subsequent calls to the same
4469 function with the same vuse and the same operands to the
4470 same value, because there is no code in the program that can
4471 compare the 2 values... */
4472 || (gimple_vdef (call_stmt)
4473 /* ... unless the call returns a pointer which does
4474 not alias with anything else. In which case the
4475 information that the values are distinct is encoded
4476 in the IL. */
4477 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
4478 /* Only perform the following when being called from PRE
4479 which embeds tail merging. */
4480 && default_vn_walk_kind == VN_WALK)))
4481 changed = visit_reference_op_call (lhs, call_stmt);
4482 else
4483 changed = defs_to_varying (call_stmt);
4485 else
4486 changed = defs_to_varying (stmt);
4487 done:
4488 return changed;
4492 /* Allocate a value number table. */
4494 static void
4495 allocate_vn_table (vn_tables_t table, unsigned size)
4497 table->phis = new vn_phi_table_type (size);
4498 table->nary = new vn_nary_op_table_type (size);
4499 table->references = new vn_reference_table_type (size);
4502 /* Free a value number table. */
4504 static void
4505 free_vn_table (vn_tables_t table)
4507 /* Walk over elements and release vectors. */
4508 vn_reference_iterator_type hir;
4509 vn_reference_t vr;
4510 FOR_EACH_HASH_TABLE_ELEMENT (*table->references, vr, vn_reference_t, hir)
4511 vr->operands.release ();
4512 delete table->phis;
4513 table->phis = NULL;
4514 delete table->nary;
4515 table->nary = NULL;
4516 delete table->references;
4517 table->references = NULL;
4520 /* Set *ID according to RESULT. */
4522 static void
4523 set_value_id_for_result (tree result, unsigned int *id)
4525 if (result && TREE_CODE (result) == SSA_NAME)
4526 *id = VN_INFO (result)->value_id;
4527 else if (result && is_gimple_min_invariant (result))
4528 *id = get_or_alloc_constant_value_id (result);
4529 else
4530 *id = get_next_value_id ();
4533 /* Set the value ids in the valid hash tables. */
4535 static void
4536 set_hashtable_value_ids (void)
4538 vn_nary_op_iterator_type hin;
4539 vn_phi_iterator_type hip;
4540 vn_reference_iterator_type hir;
4541 vn_nary_op_t vno;
4542 vn_reference_t vr;
4543 vn_phi_t vp;
4545 /* Now set the value ids of the things we had put in the hash
4546 table. */
4548 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4549 if (! vno->predicated_values)
4550 set_value_id_for_result (vno->u.result, &vno->value_id);
4552 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4553 set_value_id_for_result (vp->result, &vp->value_id);
4555 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4556 hir)
4557 set_value_id_for_result (vr->result, &vr->value_id);
4560 /* Return the maximum value id we have ever seen. */
4562 unsigned int
4563 get_max_value_id (void)
4565 return next_value_id;
4568 /* Return the next unique value id. */
4570 unsigned int
4571 get_next_value_id (void)
4573 return next_value_id++;
4577 /* Compare two expressions E1 and E2 and return true if they are equal. */
4579 bool
4580 expressions_equal_p (tree e1, tree e2)
4582 /* The obvious case. */
4583 if (e1 == e2)
4584 return true;
4586 /* If either one is VN_TOP consider them equal. */
4587 if (e1 == VN_TOP || e2 == VN_TOP)
4588 return true;
4590 /* If only one of them is null, they cannot be equal. */
4591 if (!e1 || !e2)
4592 return false;
4594 /* Now perform the actual comparison. */
4595 if (TREE_CODE (e1) == TREE_CODE (e2)
4596 && operand_equal_p (e1, e2, OEP_PURE_SAME))
4597 return true;
4599 return false;
4603 /* Return true if the nary operation NARY may trap. This is a copy
4604 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
4606 bool
4607 vn_nary_may_trap (vn_nary_op_t nary)
4609 tree type;
4610 tree rhs2 = NULL_TREE;
4611 bool honor_nans = false;
4612 bool honor_snans = false;
4613 bool fp_operation = false;
4614 bool honor_trapv = false;
4615 bool handled, ret;
4616 unsigned i;
4618 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
4619 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
4620 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
4622 type = nary->type;
4623 fp_operation = FLOAT_TYPE_P (type);
4624 if (fp_operation)
4626 honor_nans = flag_trapping_math && !flag_finite_math_only;
4627 honor_snans = flag_signaling_nans != 0;
4629 else if (INTEGRAL_TYPE_P (type)
4630 && TYPE_OVERFLOW_TRAPS (type))
4631 honor_trapv = true;
4633 if (nary->length >= 2)
4634 rhs2 = nary->op[1];
4635 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
4636 honor_trapv,
4637 honor_nans, honor_snans, rhs2,
4638 &handled);
4639 if (handled
4640 && ret)
4641 return true;
4643 for (i = 0; i < nary->length; ++i)
4644 if (tree_could_trap_p (nary->op[i]))
4645 return true;
4647 return false;
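/* A small hedged example of an operation vn_nary_may_trap must flag:
   integer division can trap at runtime, so an expression like this may
   not be speculated to a place where B could be zero.  */

static int
sketch_trap_example (int a, int b)
{
  return a / b;  /* traps when b == 0 (or on INT_MIN / -1 overflow) */
}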
4651 class eliminate_dom_walker : public dom_walker
4653 public:
4654 eliminate_dom_walker (cdi_direction, bitmap);
4655 ~eliminate_dom_walker ();
4657 virtual edge before_dom_children (basic_block);
4658 virtual void after_dom_children (basic_block);
4660 virtual tree eliminate_avail (basic_block, tree op);
4661 virtual void eliminate_push_avail (basic_block, tree op);
4662 tree eliminate_insert (basic_block, gimple_stmt_iterator *gsi, tree val);
4664 void eliminate_stmt (basic_block, gimple_stmt_iterator *);
4666 unsigned eliminate_cleanup (bool region_p = false);
4668 bool do_pre;
4669 unsigned int el_todo;
4670 unsigned int eliminations;
4671 unsigned int insertions;
4673 /* SSA names that had their defs inserted by PRE if do_pre. */
4674 bitmap inserted_exprs;
4676 /* Blocks with statements that have had their EH properties changed. */
4677 bitmap need_eh_cleanup;
4679 /* Blocks with statements that have had their AB properties changed. */
4680 bitmap need_ab_cleanup;
4682 /* Local state for the eliminate domwalk. */
4683 auto_vec<gimple *> to_remove;
4684 auto_vec<gimple *> to_fixup;
4685 auto_vec<tree> avail;
4686 auto_vec<tree> avail_stack;
4689 eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
4690 bitmap inserted_exprs_)
4691 : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
4692 el_todo (0), eliminations (0), insertions (0),
4693 inserted_exprs (inserted_exprs_)
4695 need_eh_cleanup = BITMAP_ALLOC (NULL);
4696 need_ab_cleanup = BITMAP_ALLOC (NULL);
4699 eliminate_dom_walker::~eliminate_dom_walker ()
4701 BITMAP_FREE (need_eh_cleanup);
4702 BITMAP_FREE (need_ab_cleanup);
4705 /* Return a leader for OP that is available at the current point of the
4706 eliminate domwalk. */
4708 tree
4709 eliminate_dom_walker::eliminate_avail (basic_block, tree op)
4711 tree valnum = VN_INFO (op)->valnum;
4712 if (TREE_CODE (valnum) == SSA_NAME)
4714 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
4715 return valnum;
4716 if (avail.length () > SSA_NAME_VERSION (valnum))
4717 return avail[SSA_NAME_VERSION (valnum)];
4719 else if (is_gimple_min_invariant (valnum))
4720 return valnum;
4721 return NULL_TREE;
4724 /* At the current point of the eliminate domwalk make OP available. */
4726 void
4727 eliminate_dom_walker::eliminate_push_avail (basic_block, tree op)
4729 tree valnum = VN_INFO (op)->valnum;
4730 if (TREE_CODE (valnum) == SSA_NAME)
4732 if (avail.length () <= SSA_NAME_VERSION (valnum))
4733 avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
4734 tree pushop = op;
4735 if (avail[SSA_NAME_VERSION (valnum)])
4736 pushop = avail[SSA_NAME_VERSION (valnum)];
4737 avail_stack.safe_push (pushop);
4738 avail[SSA_NAME_VERSION (valnum)] = op;
4742 /* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
4743 the leader for the expression if insertion was successful. */
4745 tree
4746 eliminate_dom_walker::eliminate_insert (basic_block bb,
4747 gimple_stmt_iterator *gsi, tree val)
4749 /* We can insert a sequence with a single assignment only. */
4750 gimple_seq stmts = VN_INFO (val)->expr;
4751 if (!gimple_seq_singleton_p (stmts))
4752 return NULL_TREE;
4753 gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
4754 if (!stmt
4755 || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
4756 && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
4757 && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
4758 && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
4759 || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
4760 return NULL_TREE;
4762 tree op = gimple_assign_rhs1 (stmt);
4763 if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
4764 || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
4765 op = TREE_OPERAND (op, 0);
4766 tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (bb, op) : op;
4767 if (!leader)
4768 return NULL_TREE;
4770 tree res;
4771 stmts = NULL;
4772 if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
4773 res = gimple_build (&stmts, BIT_FIELD_REF,
4774 TREE_TYPE (val), leader,
4775 TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
4776 TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
4777 else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
4778 res = gimple_build (&stmts, BIT_AND_EXPR,
4779 TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
4780 else
4781 res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
4782 TREE_TYPE (val), leader);
4783 if (TREE_CODE (res) != SSA_NAME
4784 || SSA_NAME_IS_DEFAULT_DEF (res)
4785 || gimple_bb (SSA_NAME_DEF_STMT (res)))
4787 gimple_seq_discard (stmts);
4789 /* During propagation we have to treat SSA info conservatively
4790 and thus we can end up simplifying the inserted expression
4791 at elimination time to something not defined in stmts. */
4792 /* But then this is a redundancy we failed to detect. Which means
4793 res now has two values. That doesn't play well with how
4794 we track availability here, so give up. */
4795 if (dump_file && (dump_flags & TDF_DETAILS))
4797 if (TREE_CODE (res) == SSA_NAME)
4798 res = eliminate_avail (bb, res);
4799 if (res)
4801 fprintf (dump_file, "Failed to insert expression for value ");
4802 print_generic_expr (dump_file, val);
4803 fprintf (dump_file, " which is really fully redundant to ");
4804 print_generic_expr (dump_file, res);
4805 fprintf (dump_file, "\n");
4809 return NULL_TREE;
4811 else
4813 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
4814 VN_INFO (res)->valnum = val;
4815 VN_INFO (res)->visited = true;
4818 insertions++;
4819 if (dump_file && (dump_flags & TDF_DETAILS))
4821 fprintf (dump_file, "Inserted ");
4822 print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
4825 return res;
4828 void
4829 eliminate_dom_walker::eliminate_stmt (basic_block b, gimple_stmt_iterator *gsi)
4831 tree sprime = NULL_TREE;
4832 gimple *stmt = gsi_stmt (*gsi);
4833 tree lhs = gimple_get_lhs (stmt);
4834 if (lhs && TREE_CODE (lhs) == SSA_NAME
4835 && !gimple_has_volatile_ops (stmt)
4836 /* See PR43491. Do not replace a global register variable when
4837 it is the RHS of an assignment. Do replace local register
4838 variables since gcc does not guarantee a local variable will
4839 be allocated in a register.
4840 ??? The fix isn't effective here. This should instead
4841 be ensured by not value-numbering them the same but treating
4842 them like volatiles? */
4843 && !(gimple_assign_single_p (stmt)
4844 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
4845 && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
4846 && is_global_var (gimple_assign_rhs1 (stmt)))))
4848 sprime = eliminate_avail (b, lhs);
4849 if (!sprime)
4851 /* If there is no existing usable leader but SCCVN thinks
4852 it has an expression it wants to use as replacement,
4853 insert that. */
4854 tree val = VN_INFO (lhs)->valnum;
4855 if (val != VN_TOP
4856 && TREE_CODE (val) == SSA_NAME
4857 && VN_INFO (val)->needs_insertion
4858 && VN_INFO (val)->expr != NULL
4859 && (sprime = eliminate_insert (b, gsi, val)) != NULL_TREE)
4860 eliminate_push_avail (b, sprime);
4863 /* If this now constitutes a copy duplicate points-to
4864 and range info appropriately. This is especially
4865 important for inserted code. See tree-ssa-copy.c
4866 for similar code. */
4867 if (sprime
4868 && TREE_CODE (sprime) == SSA_NAME)
4870 basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
4871 if (POINTER_TYPE_P (TREE_TYPE (lhs))
4872 && SSA_NAME_PTR_INFO (lhs)
4873 && ! SSA_NAME_PTR_INFO (sprime))
4875 duplicate_ssa_name_ptr_info (sprime,
4876 SSA_NAME_PTR_INFO (lhs));
4877 if (b != sprime_b)
4878 mark_ptr_info_alignment_unknown
4879 (SSA_NAME_PTR_INFO (sprime));
4881 else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
4882 && SSA_NAME_RANGE_INFO (lhs)
4883 && ! SSA_NAME_RANGE_INFO (sprime)
4884 && b == sprime_b)
4885 duplicate_ssa_name_range_info (sprime,
4886 SSA_NAME_RANGE_TYPE (lhs),
4887 SSA_NAME_RANGE_INFO (lhs));
4890 /* Inhibit the use of an inserted PHI on a loop header when
4891 the address of the memory reference is a simple induction
4892 variable. In other cases the vectorizer won't do anything
4893 anyway (either it's loop invariant or a complicated
4894 expression). */
4895 if (sprime
4896 && TREE_CODE (sprime) == SSA_NAME
4897 && do_pre
4898 && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
4899 && loop_outer (b->loop_father)
4900 && has_zero_uses (sprime)
4901 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
4902 && gimple_assign_load_p (stmt))
4904 gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
4905 basic_block def_bb = gimple_bb (def_stmt);
4906 if (gimple_code (def_stmt) == GIMPLE_PHI
4907 && def_bb->loop_father->header == def_bb)
4909 loop_p loop = def_bb->loop_father;
4910 ssa_op_iter iter;
4911 tree op;
4912 bool found = false;
4913 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
4915 affine_iv iv;
4916 def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
4917 if (def_bb
4918 && flow_bb_inside_loop_p (loop, def_bb)
4919 && simple_iv (loop, loop, op, &iv, true))
4921 found = true;
4922 break;
4925 if (found)
4927 if (dump_file && (dump_flags & TDF_DETAILS))
4929 fprintf (dump_file, "Not replacing ");
4930 print_gimple_expr (dump_file, stmt, 0);
4931 fprintf (dump_file, " with ");
4932 print_generic_expr (dump_file, sprime);
4933 fprintf (dump_file, " which would add a loop"
4934 " carried dependence to loop %d\n",
4935 loop->num);
4937 /* Don't keep sprime available. */
4938 sprime = NULL_TREE;
4943 if (sprime)
4945 /* If we can propagate the value computed for LHS into
4946 all uses don't bother doing anything with this stmt. */
4947 if (may_propagate_copy (lhs, sprime))
4949 /* Mark it for removal. */
4950 to_remove.safe_push (stmt);
4952 /* ??? Don't count copy/constant propagations. */
4953 if (gimple_assign_single_p (stmt)
4954 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
4955 || gimple_assign_rhs1 (stmt) == sprime))
4956 return;
4958 if (dump_file && (dump_flags & TDF_DETAILS))
4960 fprintf (dump_file, "Replaced ");
4961 print_gimple_expr (dump_file, stmt, 0);
4962 fprintf (dump_file, " with ");
4963 print_generic_expr (dump_file, sprime);
4964 fprintf (dump_file, " in all uses of ");
4965 print_gimple_stmt (dump_file, stmt, 0);
4968 eliminations++;
4969 return;
4972 /* If this is an assignment from our leader (which
4973 happens in the case the value-number is a constant)
4974 then there is nothing to do. */
4975 if (gimple_assign_single_p (stmt)
4976 && sprime == gimple_assign_rhs1 (stmt))
4977 return;
4979 /* Else replace its RHS. */
4980 bool can_make_abnormal_goto
4981 = is_gimple_call (stmt)
4982 && stmt_can_make_abnormal_goto (stmt);
4984 if (dump_file && (dump_flags & TDF_DETAILS))
4986 fprintf (dump_file, "Replaced ");
4987 print_gimple_expr (dump_file, stmt, 0);
4988 fprintf (dump_file, " with ");
4989 print_generic_expr (dump_file, sprime);
4990 fprintf (dump_file, " in ");
4991 print_gimple_stmt (dump_file, stmt, 0);
4994 eliminations++;
4995 gimple *orig_stmt = stmt;
4996 if (!useless_type_conversion_p (TREE_TYPE (lhs),
4997 TREE_TYPE (sprime)))
4998 sprime = fold_convert (TREE_TYPE (lhs), sprime);
4999 tree vdef = gimple_vdef (stmt);
5000 tree vuse = gimple_vuse (stmt);
5001 propagate_tree_value_into_stmt (gsi, sprime);
5002 stmt = gsi_stmt (*gsi);
5003 update_stmt (stmt);
5004 /* In case the VDEF on the original stmt was released, value-number
5005 it to the VUSE. This is to make vuse_ssa_val able to skip
5006 released virtual operands. */
5007 if (vdef != gimple_vdef (stmt))
5009 gcc_assert (SSA_NAME_IN_FREE_LIST (vdef));
5010 VN_INFO (vdef)->valnum = vuse;
5013 /* If we removed EH side-effects from the statement, clean
5014 its EH information. */
5015 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
5017 bitmap_set_bit (need_eh_cleanup,
5018 gimple_bb (stmt)->index);
5019 if (dump_file && (dump_flags & TDF_DETAILS))
5020 fprintf (dump_file, " Removed EH side-effects.\n");
5023 /* Likewise for AB side-effects. */
5024 if (can_make_abnormal_goto
5025 && !stmt_can_make_abnormal_goto (stmt))
5027 bitmap_set_bit (need_ab_cleanup,
5028 gimple_bb (stmt)->index);
5029 if (dump_file && (dump_flags & TDF_DETAILS))
5030 fprintf (dump_file, " Removed AB side-effects.\n");
5033 return;
5037 /* If the statement is a scalar store, see if the expression
5038 has the same value number as its rhs. If so, the store is
5039 dead. */
5040 if (gimple_assign_single_p (stmt)
5041 && !gimple_has_volatile_ops (stmt)
5042 && !is_gimple_reg (gimple_assign_lhs (stmt))
5043 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5044 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
5046 tree val;
5047 tree rhs = gimple_assign_rhs1 (stmt);
5048 vn_reference_t vnresult;
5049 val = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_WALKREWRITE,
5050 &vnresult, false);
5051 if (TREE_CODE (rhs) == SSA_NAME)
5052 rhs = VN_INFO (rhs)->valnum;
5053 if (val
5054 && operand_equal_p (val, rhs, 0))
5056 /* We can only remove the later store if the former aliases
5057 at least all accesses the later one does or if the store
5058 was to readonly memory storing the same value. */
5059 alias_set_type set = get_alias_set (lhs);
5060 if (! vnresult
5061 || vnresult->set == set
5062 || alias_set_subset_of (set, vnresult->set))
5064 if (dump_file && (dump_flags & TDF_DETAILS))
5066 fprintf (dump_file, "Deleted redundant store ");
5067 print_gimple_stmt (dump_file, stmt, 0);
5070 /* Queue stmt for removal. */
5071 to_remove.safe_push (stmt);
5072 return;
5077 /* If this is a control statement for which value numbering left
5078 one of the edges unexecuted, force the condition in a way
5079 consistent with that. */
5080 if (gcond *cond = dyn_cast <gcond *> (stmt))
5082 if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
5083 ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
5085 if (dump_file && (dump_flags & TDF_DETAILS))
5087 fprintf (dump_file, "Removing unexecutable edge from ");
5088 print_gimple_stmt (dump_file, stmt, 0);
5090 if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
5091 == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
5092 gimple_cond_make_true (cond);
5093 else
5094 gimple_cond_make_false (cond);
5095 update_stmt (cond);
5096 el_todo |= TODO_cleanup_cfg;
5097 return;
5101 bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
5102 bool was_noreturn = (is_gimple_call (stmt)
5103 && gimple_call_noreturn_p (stmt));
5104 tree vdef = gimple_vdef (stmt);
5105 tree vuse = gimple_vuse (stmt);
5107 /* If we didn't replace the whole stmt (or propagate the result
5108 into all uses), replace all uses on this stmt with their
5109 leaders. */
5110 bool modified = false;
5111 use_operand_p use_p;
5112 ssa_op_iter iter;
5113 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5115 tree use = USE_FROM_PTR (use_p);
5116 /* ??? The call code above leaves stmt operands un-updated. */
5117 if (TREE_CODE (use) != SSA_NAME)
5118 continue;
5119 tree sprime;
5120 if (SSA_NAME_IS_DEFAULT_DEF (use))
5121 /* ??? For default defs BB shouldn't matter, but we have to
5122 solve the inconsistency between rpo eliminate and
5123 dom eliminate avail valueization first. */
5124 sprime = eliminate_avail (b, use);
5125 else
5126 /* Look for something available at the definition block of the argument.
5127 This avoids inconsistencies between availability there which
5128 decides if the stmt can be removed and availability at the
5129 use site. The SSA property ensures that things available
5130 at the definition are also available at uses. */
5131 sprime = eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (use)), use);
5132 if (sprime && sprime != use
5133 && may_propagate_copy (use, sprime)
5134 /* We substitute into debug stmts to avoid excessive
5135 debug temporaries created by removed stmts, but we need
5136 to avoid doing so for inserted sprimes as we never want
5137 to create debug temporaries for them. */
5138 && (!inserted_exprs
5139 || TREE_CODE (sprime) != SSA_NAME
5140 || !is_gimple_debug (stmt)
5141 || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
5143 propagate_value (use_p, sprime);
5144 modified = true;
5148 /* Fold the stmt if modified; this canonicalizes MEM_REFs we propagated
5149 into, which is a requirement for the IPA devirt machinery. */
5150 gimple *old_stmt = stmt;
5151 if (modified)
5153 /* If a formerly non-invariant ADDR_EXPR is turned into an
5154 invariant one it was on a separate stmt. */
5155 if (gimple_assign_single_p (stmt)
5156 && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
5157 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
5158 gimple_stmt_iterator prev = *gsi;
5159 gsi_prev (&prev);
5160 if (fold_stmt (gsi))
5162 /* fold_stmt may have created new stmts in between
5163 the previous stmt and the folded stmt. Mark
5164 all defs created there as varying to not confuse
5165 the SCCVN machinery as we're using that even during
5166 elimination. */
5167 if (gsi_end_p (prev))
5168 prev = gsi_start_bb (b);
5169 else
5170 gsi_next (&prev);
5171 if (gsi_stmt (prev) != gsi_stmt (*gsi))
5174 tree def;
5175 ssa_op_iter dit;
5176 FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
5177 dit, SSA_OP_ALL_DEFS)
5178 /* As existing DEFs may move between stmts
5179 only process new ones. */
5180 if (! has_VN_INFO (def))
5182 VN_INFO (def)->valnum = def;
5183 VN_INFO (def)->visited = true;
5185 if (gsi_stmt (prev) == gsi_stmt (*gsi))
5186 break;
5187 gsi_next (&prev);
5189 while (1);
5191 stmt = gsi_stmt (*gsi);
5192 /* In case we folded the stmt away schedule the NOP for removal. */
5193 if (gimple_nop_p (stmt))
5194 to_remove.safe_push (stmt);
5197 /* Visit indirect calls and turn them into direct calls if
5198 possible using the devirtualization machinery. Do this before
5199 checking for required EH/abnormal/noreturn cleanup as devirt
5200 may expose more of those. */
5201 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
5203 tree fn = gimple_call_fn (call_stmt);
5204 if (fn
5205 && flag_devirtualize
5206 && virtual_method_call_p (fn))
5208 tree otr_type = obj_type_ref_class (fn);
5209 unsigned HOST_WIDE_INT otr_tok
5210 = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
5211 tree instance;
5212 ipa_polymorphic_call_context context (current_function_decl,
5213 fn, stmt, &instance);
5214 context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
5215 otr_type, stmt);
5216 bool final;
5217 vec <cgraph_node *> targets
5218 = possible_polymorphic_call_targets (obj_type_ref_class (fn),
5219 otr_tok, context, &final);
5220 if (dump_file)
5221 dump_possible_polymorphic_call_targets (dump_file,
5222 obj_type_ref_class (fn),
5223 otr_tok, context);
5224 if (final && targets.length () <= 1 && dbg_cnt (devirt))
5226 tree fn;
5227 if (targets.length () == 1)
5228 fn = targets[0]->decl;
5229 else
5230 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5231 if (dump_enabled_p ())
5233 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
5234 "converting indirect call to "
5235 "function %s\n",
5236 lang_hooks.decl_printable_name (fn, 2));
5238 gimple_call_set_fndecl (call_stmt, fn);
5239 /* If changing the call to __builtin_unreachable
5240 or similar noreturn function, adjust gimple_call_fntype
5241 too. */
5242 if (gimple_call_noreturn_p (call_stmt)
5243 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
5244 && TYPE_ARG_TYPES (TREE_TYPE (fn))
5245 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
5246 == void_type_node))
5247 gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
5248 maybe_remove_unused_call_args (cfun, call_stmt);
5249 modified = true;
5254 if (modified)
5256 /* When changing a call into a noreturn call, cfg cleanup
5257 is needed to fix up the noreturn call. */
5258 if (!was_noreturn
5259 && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
5260 to_fixup.safe_push (stmt);
5261 /* When changing a condition or switch into one we know what
5262 edge will be executed, schedule a cfg cleanup. */
5263 if ((gimple_code (stmt) == GIMPLE_COND
5264 && (gimple_cond_true_p (as_a <gcond *> (stmt))
5265 || gimple_cond_false_p (as_a <gcond *> (stmt))))
5266 || (gimple_code (stmt) == GIMPLE_SWITCH
5267 && TREE_CODE (gimple_switch_index
5268 (as_a <gswitch *> (stmt))) == INTEGER_CST))
5269 el_todo |= TODO_cleanup_cfg;
5270 /* If we removed EH side-effects from the statement, clean
5271 its EH information. */
5272 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
5274 bitmap_set_bit (need_eh_cleanup,
5275 gimple_bb (stmt)->index);
5276 if (dump_file && (dump_flags & TDF_DETAILS))
5277 fprintf (dump_file, " Removed EH side-effects.\n");
5279 /* Likewise for AB side-effects. */
5280 if (can_make_abnormal_goto
5281 && !stmt_can_make_abnormal_goto (stmt))
5283 bitmap_set_bit (need_ab_cleanup,
5284 gimple_bb (stmt)->index);
5285 if (dump_file && (dump_flags & TDF_DETAILS))
5286 fprintf (dump_file, " Removed AB side-effects.\n");
5288 update_stmt (stmt);
5289 /* In case the VDEF on the original stmt was released, value-number
5290 it to the VUSE. This is to make vuse_ssa_val able to skip
5291 released virtual operands. */
5292 if (vdef && SSA_NAME_IN_FREE_LIST (vdef))
5293 VN_INFO (vdef)->valnum = vuse;
5296 /* Make new values available - for fully redundant LHS we
5297 continue with the next stmt above and skip this. */
5298 def_operand_p defp;
5299 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
5300 eliminate_push_avail (b, DEF_FROM_PTR (defp));
5303 /* Perform elimination for the basic-block B during the domwalk. */
5305 edge
5306 eliminate_dom_walker::before_dom_children (basic_block b)
5308 /* Mark new bb. */
5309 avail_stack.safe_push (NULL_TREE);
5311 /* Skip unreachable blocks marked unreachable during the SCCVN domwalk. */
5312 if (!(b->flags & BB_EXECUTABLE))
5313 return NULL;
5315 vn_context_bb = b;
5317 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
5319 gphi *phi = gsi.phi ();
5320 tree res = PHI_RESULT (phi);
5322 if (virtual_operand_p (res))
5324 gsi_next (&gsi);
5325 continue;
5328 tree sprime = eliminate_avail (b, res);
5329 if (sprime
5330 && sprime != res)
5332 if (dump_file && (dump_flags & TDF_DETAILS))
5334 fprintf (dump_file, "Replaced redundant PHI node defining ");
5335 print_generic_expr (dump_file, res);
5336 fprintf (dump_file, " with ");
5337 print_generic_expr (dump_file, sprime);
5338 fprintf (dump_file, "\n");
5341 /* If we inserted this PHI node ourself, it's not an elimination. */
5342 if (! inserted_exprs
5343 || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
5344 eliminations++;
5346 /* If we will propagate into all uses don't bother to do
5347 anything. */
5348 if (may_propagate_copy (res, sprime))
5350 /* Mark the PHI for removal. */
5351 to_remove.safe_push (phi);
5352 gsi_next (&gsi);
5353 continue;
5356 remove_phi_node (&gsi, false);
5358 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
5359 sprime = fold_convert (TREE_TYPE (res), sprime);
5360 gimple *stmt = gimple_build_assign (res, sprime);
5361 gimple_stmt_iterator gsi2 = gsi_after_labels (b);
5362 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
5363 continue;
5366 eliminate_push_avail (b, res);
5367 gsi_next (&gsi);
5370 for (gimple_stmt_iterator gsi = gsi_start_bb (b);
5371 !gsi_end_p (gsi);
5372 gsi_next (&gsi))
5373 eliminate_stmt (b, &gsi);
5375 /* Replace destination PHI arguments. */
5376 edge_iterator ei;
5377 edge e;
5378 FOR_EACH_EDGE (e, ei, b->succs)
5379 if (e->flags & EDGE_EXECUTABLE)
5380 for (gphi_iterator gsi = gsi_start_phis (e->dest);
5381 !gsi_end_p (gsi);
5382 gsi_next (&gsi))
5384 gphi *phi = gsi.phi ();
5385 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
5386 tree arg = USE_FROM_PTR (use_p);
5387 if (TREE_CODE (arg) != SSA_NAME
5388 || virtual_operand_p (arg))
5389 continue;
5390 tree sprime = eliminate_avail (b, arg);
5391 if (sprime && may_propagate_copy (arg, sprime))
5392 propagate_value (use_p, sprime);
5395 vn_context_bb = NULL;
5397 return NULL;
5400 /* Make no longer available leaders no longer available. */
5402 void
5403 eliminate_dom_walker::after_dom_children (basic_block)
5405 tree entry;
5406 while ((entry = avail_stack.pop ()) != NULL_TREE)
5408 tree valnum = VN_INFO (entry)->valnum;
5409 tree old = avail[SSA_NAME_VERSION (valnum)];
5410 if (old == entry)
5411 avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
5412 else
5413 avail[SSA_NAME_VERSION (valnum)] = entry;
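/* A standalone sketch (plain C, invented names and sizes) of the scoped
   availability scheme implemented by eliminate_push_avail and
   after_dom_children above: entering a dominator-tree level pushes a
   sentinel, each push records the shadowed leader, and leaving the
   level unwinds to the sentinel, restoring the previous state.  */

#define SKETCH_NVALS 16

static int sketch_avail[SKETCH_NVALS];           /* value -> leader, 0 = none */
static struct { int val, old; } sketch_undo[64]; /* val < 0 marks a sentinel */
static int sketch_top;

static void
sketch_enter_block (void)
{
  sketch_undo[sketch_top++].val = -1;  /* sentinel for this level */
}

static void
sketch_push_avail (int val, int leader)
{
  /* Save the shadowed leader so leaving the block restores it.  */
  sketch_undo[sketch_top].val = val;
  sketch_undo[sketch_top].old = sketch_avail[val];
  sketch_top++;
  sketch_avail[val] = leader;
}

static void
sketch_leave_block (void)
{
  while (sketch_undo[--sketch_top].val >= 0)
    sketch_avail[sketch_undo[sketch_top].val] = sketch_undo[sketch_top].old;
}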
5417 /* Remove queued stmts and perform delayed cleanups. */
5419 unsigned
5420 eliminate_dom_walker::eliminate_cleanup (bool region_p)
5422 statistics_counter_event (cfun, "Eliminated", eliminations);
5423 statistics_counter_event (cfun, "Insertions", insertions);
5425 /* We cannot remove stmts during BB walk, especially not release SSA
5426 names there as this confuses the VN machinery. The stmts ending
5427 up in to_remove are either stores or simple copies.
5428 Remove stmts in reverse order to make debug stmt creation possible. */
5429 while (!to_remove.is_empty ())
5431 bool do_release_defs = true;
5432 gimple *stmt = to_remove.pop ();
5434 /* When we are value-numbering a region we do not require exit PHIs to
5435 be present so we have to make sure to deal with uses outside of the
5436 region of stmts that we thought were eliminated.
5437 ??? Note we may be confused by uses in dead regions we didn't run
5438 elimination on. Rather than checking individual uses we accept
5439 dead copies to be generated here (gcc.c-torture/execute/20060905-1.c
5440 contains such example). */
5441 if (region_p)
5443 if (gphi *phi = dyn_cast <gphi *> (stmt))
5445 tree lhs = gimple_phi_result (phi);
5446 if (!has_zero_uses (lhs))
5448 if (dump_file && (dump_flags & TDF_DETAILS))
5449 fprintf (dump_file, "Keeping eliminated stmt live "
5450 "as copy because of out-of-region uses\n");
5451 tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
5452 gimple *copy = gimple_build_assign (lhs, sprime);
5453 gimple_stmt_iterator gsi
5454 = gsi_after_labels (gimple_bb (stmt));
5455 gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
5456 do_release_defs = false;
5459 else if (tree lhs = gimple_get_lhs (stmt))
5460 if (TREE_CODE (lhs) == SSA_NAME
5461 && !has_zero_uses (lhs))
5463 if (dump_file && (dump_flags & TDF_DETAILS))
5464 fprintf (dump_file, "Keeping eliminated stmt live "
5465 "as copy because of out-of-region uses\n");
5466 tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
5467 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5468 if (is_gimple_assign (stmt))
5470 gimple_assign_set_rhs_from_tree (&gsi, sprime);
5471 update_stmt (gsi_stmt (gsi));
5472 continue;
5474 else
5476 gimple *copy = gimple_build_assign (lhs, sprime);
5477 gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
5478 do_release_defs = false;
5483 if (dump_file && (dump_flags & TDF_DETAILS))
5485 fprintf (dump_file, "Removing dead stmt ");
5486 print_gimple_stmt (dump_file, stmt, 0, TDF_NONE);
5489 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5490 if (gimple_code (stmt) == GIMPLE_PHI)
5491 remove_phi_node (&gsi, do_release_defs);
5492 else
5494 basic_block bb = gimple_bb (stmt);
5495 unlink_stmt_vdef (stmt);
5496 if (gsi_remove (&gsi, true))
5497 bitmap_set_bit (need_eh_cleanup, bb->index);
5498 if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
5499 bitmap_set_bit (need_ab_cleanup, bb->index);
5500 if (do_release_defs)
5501 release_defs (stmt);
5504 /* Removing a stmt may expose a forwarder block. */
5505 el_todo |= TODO_cleanup_cfg;
5508 /* Fixup stmts that became noreturn calls. This may require splitting
5509 blocks and thus isn't possible during the dominator walk. Do this
5510 in reverse order so we don't inadvertently remove a stmt we want to
5511 fixup by visiting a dominating now noreturn call first. */
5512 while (!to_fixup.is_empty ())
5514 gimple *stmt = to_fixup.pop ();
5516 if (dump_file && (dump_flags & TDF_DETAILS))
5518 fprintf (dump_file, "Fixing up noreturn call ");
5519 print_gimple_stmt (dump_file, stmt, 0);
5522 if (fixup_noreturn_call (stmt))
5523 el_todo |= TODO_cleanup_cfg;
5526 bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
5527 bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);
5529 if (do_eh_cleanup)
5530 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
5532 if (do_ab_cleanup)
5533 gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
5535 if (do_eh_cleanup || do_ab_cleanup)
5536 el_todo |= TODO_cleanup_cfg;
5538 return el_todo;
5541 /* Eliminate fully redundant computations. */
5543 unsigned
5544 eliminate_with_rpo_vn (bitmap inserted_exprs)
5546 eliminate_dom_walker walker (CDI_DOMINATORS, inserted_exprs);
5548 walker.walk (cfun->cfg->x_entry_block_ptr);
5549 return walker.eliminate_cleanup ();
5552 static unsigned
5553 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
5554 bool iterate, bool eliminate);
5556 void
5557 run_rpo_vn (vn_lookup_kind kind)
5559 default_vn_walk_kind = kind;
5560 do_rpo_vn (cfun, NULL, NULL, true, false);
5562 /* ??? Prune requirement of these. */
5563 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
5564 constant_value_ids = BITMAP_ALLOC (NULL);
5566 /* Initialize the value ids and prune out remaining VN_TOPs
5567 from dead code. */
5568 tree name;
5569 unsigned i;
5570 FOR_EACH_SSA_NAME (i, name, cfun)
5572 vn_ssa_aux_t info = VN_INFO (name);
5573 if (!info->visited
5574 || info->valnum == VN_TOP)
5575 info->valnum = name;
5576 if (info->valnum == name)
5577 info->value_id = get_next_value_id ();
5578 else if (is_gimple_min_invariant (info->valnum))
5579 info->value_id = get_or_alloc_constant_value_id (info->valnum);
5582 /* Propagate. */
5583 FOR_EACH_SSA_NAME (i, name, cfun)
5585 vn_ssa_aux_t info = VN_INFO (name);
5586 if (TREE_CODE (info->valnum) == SSA_NAME
5587 && info->valnum != name
5588 && info->value_id != VN_INFO (info->valnum)->value_id)
5589 info->value_id = VN_INFO (info->valnum)->value_id;
5592 set_hashtable_value_ids ();
5594 if (dump_file && (dump_flags & TDF_DETAILS))
5596 fprintf (dump_file, "Value numbers:\n");
5597 FOR_EACH_SSA_NAME (i, name, cfun)
5599 if (VN_INFO (name)->visited
5600 && SSA_VAL (name) != name)
5602 print_generic_expr (dump_file, name);
5603 fprintf (dump_file, " = ");
5604 print_generic_expr (dump_file, SSA_VAL (name));
5605 fprintf (dump_file, " (%04d)\n", VN_INFO (name)->value_id);
5611 /* Free VN associated data structures. */
5613 void
5614 free_rpo_vn (void)
5616 free_vn_table (valid_info);
5617 XDELETE (valid_info);
5618 obstack_free (&vn_tables_obstack, NULL);
5619 obstack_free (&vn_tables_insert_obstack, NULL);
5621 vn_ssa_aux_iterator_type it;
5622 vn_ssa_aux_t info;
5623 FOR_EACH_HASH_TABLE_ELEMENT (*vn_ssa_aux_hash, info, vn_ssa_aux_t, it)
5624 if (info->needs_insertion)
5625 release_ssa_name (info->name);
5626 obstack_free (&vn_ssa_aux_obstack, NULL);
5627 delete vn_ssa_aux_hash;
5629 delete constant_to_value_id;
5630 constant_to_value_id = NULL;
5631 BITMAP_FREE (constant_value_ids);
5634 /* Adaptor to the elimination engine using RPO availability. */
5636 class rpo_elim : public eliminate_dom_walker
5638 public:
5639 rpo_elim(basic_block entry_)
5640 : eliminate_dom_walker (CDI_DOMINATORS, NULL), entry (entry_) {}
5641 ~rpo_elim();
5643 virtual tree eliminate_avail (basic_block, tree op);
5645 virtual void eliminate_push_avail (basic_block, tree);
5647 basic_block entry;
5648 /* Instead of having a local availability lattice for each
5649 basic-block and availability at X defined as union of
5650 the local availabilities at X and its dominators we're
5651 turning this upside down and track availability per
5652 value since values are usually made available at very
5653 few points (at least one).
5654 So we have a value -> vec<location, leader> map where
5655 LOCATION specifies the basic-block where LEADER is made
5656 available for VALUE. We push to this vector in RPO
5657 order, thus for iteration we can simply pop the last
5658 entries.
5659 LOCATION is the basic-block index and LEADER is the
5660 SSA name version of the leader. */
5661 /* ??? We'd like to use auto_vec here with embedded storage
5662 but that doesn't play well until we can provide move
5663 constructors and use std::move on hash-table expansion.
5664 So for now this is a bit more expensive than necessary.
5665 We eventually want to switch to a chaining scheme like
5666 for hashtable entries for unwinding, which would make
5667 it possible to make the vector part of the vn_ssa_aux structure. */
5668 typedef hash_map<tree, vec<std::pair<int, int> > > rpo_avail_t;
5669 rpo_avail_t m_rpo_avail;
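/* Illustrative sketch only (not part of the pass): if value _5 is
   given leaders in BB2 and later in BB4, the map conceptually holds
     m_rpo_avail[_5] = { (2, SSA version of the BB2 leader),
                         (4, SSA version of the BB4 leader) }
   and, since pushes happen in RPO order, eliminate_avail below can
   scan the vector from the back.  */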
5672 /* Global RPO state for access from hooks. */
5673 static rpo_elim *rpo_avail;
5675 /* Hook for maybe_push_res_to_seq, look up the expression in the VN tables. */
5677 static tree
5678 vn_lookup_simplify_result (gimple_match_op *res_op)
5680 if (!res_op->code.is_tree_code ())
5681 return NULL_TREE;
5682 tree *ops = res_op->ops;
5683 unsigned int length = res_op->num_ops;
5684 if (res_op->code == CONSTRUCTOR
5685 /* ??? We arrive here with SCCVN's view, a decomposed CONSTRUCTOR,
5686 while GIMPLE / match-and-simplify has CONSTRUCTOR as a GENERIC tree. */
5687 && TREE_CODE (res_op->ops[0]) == CONSTRUCTOR)
5689 length = CONSTRUCTOR_NELTS (res_op->ops[0]);
5690 ops = XALLOCAVEC (tree, length);
5691 for (unsigned i = 0; i < length; ++i)
5692 ops[i] = CONSTRUCTOR_ELT (res_op->ops[0], i)->value;
5694 vn_nary_op_t vnresult = NULL;
5695 tree res = vn_nary_op_lookup_pieces (length, (tree_code) res_op->code,
5696 res_op->type, ops, &vnresult);
5697 /* If this is used from expression simplification make sure to
5698 return an available expression. */
5699 if (res && TREE_CODE (res) == SSA_NAME && mprts_hook && rpo_avail)
5700 res = rpo_avail->eliminate_avail (vn_context_bb, res);
5701 return res;
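/* Example of the CONSTRUCTOR special-casing above (a data-flow sketch,
   nothing executes here): match-and-simplify may hand us a single
   GENERIC CONSTRUCTOR operand like { _1, _2 } while the VN tables
   store constructors decomposed into their element values, so the
   lookup is done on the extracted elements _1 and _2 instead.  */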
5704 rpo_elim::~rpo_elim ()
5706 /* Release the avail vectors. */
5707 for (rpo_avail_t::iterator i = m_rpo_avail.begin ();
5708 i != m_rpo_avail.end (); ++i)
5709 (*i).second.release ();
5712 /* Return a leader for OPs value that is valid at BB. */
5714 tree
5715 rpo_elim::eliminate_avail (basic_block bb, tree op)
5717 bool visited;
5718 tree valnum = SSA_VAL (op, &visited);
5719 /* If we didn't visit OP then it must be defined outside of the
5720 region we process and must also dominate it. So it is available. */
5721 if (!visited)
5722 return op;
5723 if (TREE_CODE (valnum) == SSA_NAME)
5725 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
5726 return valnum;
5727 vec<std::pair<int, int> > *av = m_rpo_avail.get (valnum);
5728 if (!av || av->is_empty ())
5729 return NULL_TREE;
5730 int i = av->length () - 1;
5731 if ((*av)[i].first == bb->index)
5732 /* On tramp3d 90% of the cases are here. */
5733 return ssa_name ((*av)[i].second);
5736 basic_block abb = BASIC_BLOCK_FOR_FN (cfun, (*av)[i].first);
5737 /* ??? During elimination we have to use availability at the
5738 definition site of a use we try to replace. This
5739 is required to not run into inconsistencies because
5740 of dominated_by_p_w_unex behavior and removing a definition
5741 while not replacing all uses.
5742 ??? We could try to consistently walk dominators
5743 ignoring non-executable regions. The nearest common
5744 dominator of bb and abb is where we can stop walking. We
5745 may also be able to "pre-compute" (bits of) the next immediate
5746 (non-)dominator during the RPO walk when marking edges as
5747 executable. */
5748 if (dominated_by_p_w_unex (bb, abb))
5750 tree leader = ssa_name ((*av)[i].second);
5751 /* Prevent eliminations that break loop-closed SSA. */
5752 if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
5753 && ! SSA_NAME_IS_DEFAULT_DEF (leader)
5754 && ! flow_bb_inside_loop_p (gimple_bb (SSA_NAME_DEF_STMT
5755 (leader))->loop_father,
5756 bb))
5757 return NULL_TREE;
5758 if (dump_file && (dump_flags & TDF_DETAILS))
5760 print_generic_expr (dump_file, leader);
5761 fprintf (dump_file, " is available for ");
5762 print_generic_expr (dump_file, valnum);
5763 fprintf (dump_file, "\n");
5765 /* On tramp3d 99% of the _remaining_ cases succeed at
5766 the first entry. */
5767 return leader;
5769 /* ??? Can we somehow skip to the immediate dominator
5770 RPO index (bb_to_rpo)? Again, maybe not worth it; on
5771 tramp3d the worst number of elements in the vector is 9. */
5773 while (--i >= 0);
5775 else if (valnum != VN_TOP)
5776 /* valnum is is_gimple_min_invariant. */
5777 return valnum;
5778 return NULL_TREE;
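/* Summarized as a sketch (illustration only), the lookup above is

     valnum = SSA_VAL (op);
     if valnum is invariant -> return valnum
     if valnum is a default def -> return valnum
     scan m_rpo_avail[valnum] from the last entry backwards and
       return the first leader whose block dominates BB (modulo
       non-executable regions), refusing leaders that would break
       loop-closed SSA

   and NULL_TREE signals that no leader is available at BB.  */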
5781 /* Make LEADER a leader for its value at BB. */
5783 void
5784 rpo_elim::eliminate_push_avail (basic_block bb, tree leader)
5786 tree valnum = VN_INFO (leader)->valnum;
5787 if (valnum == VN_TOP)
5788 return;
5789 if (dump_file && (dump_flags & TDF_DETAILS))
5791 fprintf (dump_file, "Making available beyond BB%d ", bb->index);
5792 print_generic_expr (dump_file, leader);
5793 fprintf (dump_file, " for value ");
5794 print_generic_expr (dump_file, valnum);
5795 fprintf (dump_file, "\n");
5797 bool existed;
5798 vec<std::pair<int, int> > &av = m_rpo_avail.get_or_insert (valnum, &existed);
5799 if (!existed)
5801 new (&av) vec<std::pair<int, int> >;
5802 av = vNULL;
5803 av.reserve_exact (2);
5805 av.safe_push (std::make_pair (bb->index, SSA_NAME_VERSION (leader)));
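/* Since blocks are visited in RPO order the per-value vector stays
   sorted by RPO index.  As a made-up example, pushes from BB2 and
   then BB5 (in that RPO order) yield { (2, v2), (5, v5) }, and
   unwinding to an RPO index between them pops only (5, v5); see
   do_unwind below.  */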
5808 /* Valueization hook for RPO VN plus required state. */
5810 tree
5811 rpo_vn_valueize (tree name)
5813 if (TREE_CODE (name) == SSA_NAME)
5815 vn_ssa_aux_t val = VN_INFO (name);
5816 if (val)
5818 tree tem = val->valnum;
5819 if (tem != VN_TOP && tem != name)
5821 if (TREE_CODE (tem) != SSA_NAME)
5822 return tem;
5823 /* For all values we only valueize to an available leader
5824 which means we can use SSA name info without restriction. */
5825 tem = rpo_avail->eliminate_avail (vn_context_bb, tem);
5826 if (tem)
5827 return tem;
5831 return name;
5834 /* Insert on PRED_E predicates derived from CODE OPS being true besides the
5835 inverted condition. */
5837 static void
5838 insert_related_predicates_on_edge (enum tree_code code, tree *ops, edge pred_e)
5840 switch (code)
5842 case LT_EXPR:
5843 /* a < b -> a {!,<}= b */
5844 vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
5845 ops, boolean_true_node, 0, pred_e);
5846 vn_nary_op_insert_pieces_predicated (2, LE_EXPR, boolean_type_node,
5847 ops, boolean_true_node, 0, pred_e);
5848 /* a < b -> ! a {>,=} b */
5849 vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
5850 ops, boolean_false_node, 0, pred_e);
5851 vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
5852 ops, boolean_false_node, 0, pred_e);
5853 break;
5854 case GT_EXPR:
5855 /* a > b -> a {!,>}= b */
5856 vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
5857 ops, boolean_true_node, 0, pred_e);
5858 vn_nary_op_insert_pieces_predicated (2, GE_EXPR, boolean_type_node,
5859 ops, boolean_true_node, 0, pred_e);
5860 /* a > b -> ! a {<,=} b */
5861 vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
5862 ops, boolean_false_node, 0, pred_e);
5863 vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
5864 ops, boolean_false_node, 0, pred_e);
5865 break;
5866 case EQ_EXPR:
5867 /* a == b -> ! a {<,>} b */
5868 vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
5869 ops, boolean_false_node, 0, pred_e);
5870 vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
5871 ops, boolean_false_node, 0, pred_e);
5872 break;
5873 case LE_EXPR:
5874 case GE_EXPR:
5875 case NE_EXPR:
5876 /* Nothing besides inverted condition. */
5877 break;
5878 default:;
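/* Worked example for the switch above: for a true predicate a < b
   we additionally record a != b and a <= b as true and a > b and
   a == b as false on PRED_E; the condition itself and its
   inversion are recorded by the caller.  */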
5882 /* Main stmt worker for RPO VN, process BB. */
5884 static unsigned
5885 process_bb (rpo_elim &avail, basic_block bb,
5886 bool bb_visited, bool iterate_phis, bool iterate, bool eliminate,
5887 bool do_region, bitmap exit_bbs)
5889 unsigned todo = 0;
5890 edge_iterator ei;
5891 edge e;
5893 vn_context_bb = bb;
5895 /* If we are in loop-closed SSA preserve this state. This is
5896 relevant when called on regions from outside of FRE/PRE. */
5897 bool lc_phi_nodes = false;
5898 if (loops_state_satisfies_p (LOOP_CLOSED_SSA))
5899 FOR_EACH_EDGE (e, ei, bb->preds)
5900 if (e->src->loop_father != e->dest->loop_father
5901 && flow_loop_nested_p (e->dest->loop_father,
5902 e->src->loop_father))
5904 lc_phi_nodes = true;
5905 break;
5908 /* Value-number all defs in the basic-block. */
5909 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
5910 gsi_next (&gsi))
5912 gphi *phi = gsi.phi ();
5913 tree res = PHI_RESULT (phi);
5914 vn_ssa_aux_t res_info = VN_INFO (res);
5915 if (!bb_visited)
5917 gcc_assert (!res_info->visited);
5918 res_info->valnum = VN_TOP;
5919 res_info->visited = true;
5922 /* When not iterating force backedge values to varying. */
5923 visit_stmt (phi, !iterate_phis);
5924 if (virtual_operand_p (res))
5925 continue;
5927 /* Eliminate */
5928 /* The interesting case is gcc.dg/tree-ssa/pr22230.c for correctness
5929 how we handle backedges and availability.
5930 And gcc.dg/tree-ssa/ssa-sccvn-2.c for optimization. */
5931 tree val = res_info->valnum;
5932 if (res != val && !iterate && eliminate)
5934 if (tree leader = avail.eliminate_avail (bb, res))
5936 if (leader != res
5937 /* Preserve loop-closed SSA form. */
5938 && (! lc_phi_nodes
5939 || is_gimple_min_invariant (leader)))
5941 if (dump_file && (dump_flags & TDF_DETAILS))
5943 fprintf (dump_file, "Replaced redundant PHI node "
5944 "defining ");
5945 print_generic_expr (dump_file, res);
5946 fprintf (dump_file, " with ");
5947 print_generic_expr (dump_file, leader);
5948 fprintf (dump_file, "\n");
5950 avail.eliminations++;
5952 if (may_propagate_copy (res, leader))
5954 /* Schedule for removal. */
5955 avail.to_remove.safe_push (phi);
5956 continue;
5958 /* ??? Else generate a copy stmt. */
5962 /* Only make defs available that are not already available. But
5963 make sure loop-closed SSA PHI node defs are picked up for
5964 downstream uses. */
5965 if (lc_phi_nodes
5966 || res == val
5967 || ! avail.eliminate_avail (bb, res))
5968 avail.eliminate_push_avail (bb, res);
5971 /* For empty BBs mark outgoing edges executable. For non-empty BBs
5972 we do this when processing the last stmt as we have to do this
5973 before elimination, which otherwise forces GIMPLE_CONDs to
5974 if (1 != 0) style when seeing non-executable edges. */
5975 if (gsi_end_p (gsi_start_bb (bb)))
5977 FOR_EACH_EDGE (e, ei, bb->succs)
5979 if (e->flags & EDGE_EXECUTABLE)
5980 continue;
5981 if (dump_file && (dump_flags & TDF_DETAILS))
5982 fprintf (dump_file,
5983 "marking outgoing edge %d -> %d executable\n",
5984 e->src->index, e->dest->index);
5985 gcc_checking_assert (iterate || !(e->flags & EDGE_DFS_BACK));
5986 e->flags |= EDGE_EXECUTABLE;
5987 e->dest->flags |= BB_EXECUTABLE;
5990 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
5991 !gsi_end_p (gsi); gsi_next (&gsi))
5993 ssa_op_iter i;
5994 tree op;
5995 if (!bb_visited)
5997 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
5999 vn_ssa_aux_t op_info = VN_INFO (op);
6000 gcc_assert (!op_info->visited);
6001 op_info->valnum = VN_TOP;
6002 op_info->visited = true;
6005 /* We somehow have to deal with uses that are not defined
6006 in the processed region. Forcing unvisited uses to
6007 varying here doesn't play well with def-use following during
6008 expression simplification, so we deal with this by checking
6009 the visited flag in SSA_VAL. */
6012 visit_stmt (gsi_stmt (gsi));
6014 gimple *last = gsi_stmt (gsi);
6015 e = NULL;
6016 switch (gimple_code (last))
6018 case GIMPLE_SWITCH:
6019 e = find_taken_edge (bb, vn_valueize (gimple_switch_index
6020 (as_a <gswitch *> (last))));
6021 break;
6022 case GIMPLE_COND:
6024 tree lhs = vn_valueize (gimple_cond_lhs (last));
6025 tree rhs = vn_valueize (gimple_cond_rhs (last));
6026 tree val = gimple_simplify (gimple_cond_code (last),
6027 boolean_type_node, lhs, rhs,
6028 NULL, vn_valueize);
6029 /* If the condition didn't simplify, see if we have recorded
6030 an expression from edges taken so far. */
6031 if (! val || TREE_CODE (val) != INTEGER_CST)
6033 vn_nary_op_t vnresult;
6034 tree ops[2];
6035 ops[0] = lhs;
6036 ops[1] = rhs;
6037 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (last),
6038 boolean_type_node, ops,
6039 &vnresult);
6040 /* Did we get a predicated value? */
6041 if (! val && vnresult && vnresult->predicated_values)
6043 val = vn_nary_op_get_predicated_value (vnresult, bb);
6044 if (val && dump_file && (dump_flags & TDF_DETAILS))
6046 fprintf (dump_file, "Got predicated value ");
6047 print_generic_expr (dump_file, val, TDF_NONE);
6048 fprintf (dump_file, " for ");
6049 print_gimple_stmt (dump_file, last, TDF_SLIM);
6053 if (val)
6054 e = find_taken_edge (bb, val);
6055 if (! e)
6057 /* If we didn't manage to compute the taken edge then
6058 push predicated expressions for the condition itself
6059 and related conditions to the hashtables. This allows
6060 simplification of redundant conditions which is
6061 important as early cleanup. */
6062 edge true_e, false_e;
6063 extract_true_false_edges_from_block (bb, &true_e, &false_e);
6064 enum tree_code code = gimple_cond_code (last);
6065 enum tree_code icode
6066 = invert_tree_comparison (code, HONOR_NANS (lhs));
6067 tree ops[2];
6068 ops[0] = lhs;
6069 ops[1] = rhs;
6070 if (do_region
6071 && bitmap_bit_p (exit_bbs, true_e->dest->index))
6072 true_e = NULL;
6073 if (do_region
6074 && bitmap_bit_p (exit_bbs, false_e->dest->index))
6075 false_e = NULL;
6076 if (true_e)
6077 vn_nary_op_insert_pieces_predicated
6078 (2, code, boolean_type_node, ops,
6079 boolean_true_node, 0, true_e);
6080 if (false_e)
6081 vn_nary_op_insert_pieces_predicated
6082 (2, code, boolean_type_node, ops,
6083 boolean_false_node, 0, false_e);
6084 if (icode != ERROR_MARK)
6086 if (true_e)
6087 vn_nary_op_insert_pieces_predicated
6088 (2, icode, boolean_type_node, ops,
6089 boolean_false_node, 0, true_e);
6090 if (false_e)
6091 vn_nary_op_insert_pieces_predicated
6092 (2, icode, boolean_type_node, ops,
6093 boolean_true_node, 0, false_e);
6095 /* Relax for non-integers, inverted condition handled
6096 above. */
6097 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
6099 if (true_e)
6100 insert_related_predicates_on_edge (code, ops, true_e);
6101 if (false_e)
6102 insert_related_predicates_on_edge (icode, ops, false_e);
6105 break;
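/* Sketch of the predicated-value mechanism used above: for
     if (a_1 < b_2) goto bb3; else goto bb4;
   we record a_1 < b_2 == true on the edge to bb3 and == false on
   the edge to bb4.  A dominated re-test of a_1 < b_2 then finds
   the recorded value via vn_nary_op_get_predicated_value and its
   taken edge becomes known.  */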
6107 case GIMPLE_GOTO:
6108 e = find_taken_edge (bb, vn_valueize (gimple_goto_dest (last)));
6109 break;
6110 default:
6111 e = NULL;
6113 if (e)
6115 todo = TODO_cleanup_cfg;
6116 if (!(e->flags & EDGE_EXECUTABLE))
6118 if (dump_file && (dump_flags & TDF_DETAILS))
6119 fprintf (dump_file,
6120 "marking known outgoing %sedge %d -> %d executable\n",
6121 e->flags & EDGE_DFS_BACK ? "back-" : "",
6122 e->src->index, e->dest->index);
6123 gcc_checking_assert (iterate || !(e->flags & EDGE_DFS_BACK));
6124 e->flags |= EDGE_EXECUTABLE;
6125 e->dest->flags |= BB_EXECUTABLE;
6128 else if (gsi_one_before_end_p (gsi))
6130 FOR_EACH_EDGE (e, ei, bb->succs)
6132 if (e->flags & EDGE_EXECUTABLE)
6133 continue;
6134 if (dump_file && (dump_flags & TDF_DETAILS))
6135 fprintf (dump_file,
6136 "marking outgoing edge %d -> %d executable\n",
6137 e->src->index, e->dest->index);
6138 gcc_checking_assert (iterate || !(e->flags & EDGE_DFS_BACK));
6139 e->flags |= EDGE_EXECUTABLE;
6140 e->dest->flags |= BB_EXECUTABLE;
6144 /* Eliminate. That also pushes to avail. */
6145 if (eliminate && ! iterate)
6146 avail.eliminate_stmt (bb, &gsi);
6147 else
6148 /* If not eliminating, make all not already available defs
6149 available. */
6150 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_DEF)
6151 if (! avail.eliminate_avail (bb, op))
6152 avail.eliminate_push_avail (bb, op);
6155 /* Eliminate in destination PHI arguments. Always substitute in dest
6156 PHIs, even for non-executable edges. This handles region
6157 exits PHIs. */
6158 if (!iterate && eliminate)
6159 FOR_EACH_EDGE (e, ei, bb->succs)
6160 for (gphi_iterator gsi = gsi_start_phis (e->dest);
6161 !gsi_end_p (gsi); gsi_next (&gsi))
6163 gphi *phi = gsi.phi ();
6164 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
6165 tree arg = USE_FROM_PTR (use_p);
6166 if (TREE_CODE (arg) != SSA_NAME
6167 || virtual_operand_p (arg))
6168 continue;
6169 tree sprime;
6170 if (SSA_NAME_IS_DEFAULT_DEF (arg))
6172 sprime = SSA_VAL (arg);
6173 gcc_assert (TREE_CODE (sprime) != SSA_NAME
6174 || SSA_NAME_IS_DEFAULT_DEF (sprime));
6176 else
6177 /* Look for sth available at the definition block of the argument.
6178 This avoids inconsistencies between availability there which
6179 decides if the stmt can be removed and availability at the
6180 use site. The SSA property ensures that things available
6181 at the definition are also available at uses. */
6182 sprime = avail.eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (arg)),
6183 arg);
6184 if (sprime
6185 && sprime != arg
6186 && may_propagate_copy (arg, sprime))
6187 propagate_value (use_p, sprime);
6190 vn_context_bb = NULL;
6191 return todo;
6194 /* Unwind state per basic-block. */
6196 struct unwind_state
6198 /* Times this block has been visited. */
6199 unsigned visited;
6200 /* Whether to handle this as iteration point or whether to treat
6201 incoming backedge PHI values as varying. */
6202 bool iterate;
6203 void *ob_top;
6204 vn_reference_t ref_top;
6205 vn_phi_t phi_top;
6206 vn_nary_op_t nary_top;
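/* The *_top members and OB_TOP are watermarks captured when the
   block is first visited: do_unwind below pops every hashtable
   insertion and obstack allocation made after them, restoring the
   VN state from which the iteration started.  */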
6209 /* Unwind the RPO VN state for iteration. */
6211 static void
6212 do_unwind (unwind_state *to, int rpo_idx, rpo_elim &avail, int *bb_to_rpo)
6214 gcc_assert (to->iterate);
6215 for (; last_inserted_nary != to->nary_top;
6216 last_inserted_nary = last_inserted_nary->next)
6218 vn_nary_op_t *slot;
6219 slot = valid_info->nary->find_slot_with_hash
6220 (last_inserted_nary, last_inserted_nary->hashcode, NO_INSERT);
6221 /* Predication causes the need to restore previous state. */
6222 if ((*slot)->unwind_to)
6223 *slot = (*slot)->unwind_to;
6224 else
6225 valid_info->nary->clear_slot (slot);
6227 for (; last_inserted_phi != to->phi_top;
6228 last_inserted_phi = last_inserted_phi->next)
6230 vn_phi_t *slot;
6231 slot = valid_info->phis->find_slot_with_hash
6232 (last_inserted_phi, last_inserted_phi->hashcode, NO_INSERT);
6233 valid_info->phis->clear_slot (slot);
6235 for (; last_inserted_ref != to->ref_top;
6236 last_inserted_ref = last_inserted_ref->next)
6238 vn_reference_t *slot;
6239 slot = valid_info->references->find_slot_with_hash
6240 (last_inserted_ref, last_inserted_ref->hashcode, NO_INSERT);
6241 (*slot)->operands.release ();
6242 valid_info->references->clear_slot (slot);
6244 obstack_free (&vn_tables_obstack, to->ob_top);
6246 /* Prune entries in [rpo_idx, end] from avail. */
6247 /* ??? This is O(number-of-values-in-region) which is
6248 O(region-size) rather than O(iteration-piece). */
6249 for (rpo_elim::rpo_avail_t::iterator i
6250 = avail.m_rpo_avail.begin ();
6251 i != avail.m_rpo_avail.end (); ++i)
6253 while (! (*i).second.is_empty ())
6255 if (bb_to_rpo[(*i).second.last ().first] < rpo_idx)
6256 break;
6257 (*i).second.pop ();
6262 /* Do VN on a SEME region specified by ENTRY and EXIT_BBS in FN.
6263 If ITERATE is true then treat backedges optimistically as not
6264 executed and iterate. If ELIMINATE is true then perform
6265 elimination, otherwise leave that to the caller. */
6267 static unsigned
6268 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
6269 bool iterate, bool eliminate)
6271 unsigned todo = 0;
6273 /* We currently do not support region-based iteration when
6274 elimination is requested. */
6275 gcc_assert (!entry || !iterate || !eliminate);
6276 /* When iterating we need loop info up-to-date. */
6277 gcc_assert (!iterate || !loops_state_satisfies_p (LOOPS_NEED_FIXUP));
6279 bool do_region = entry != NULL;
6280 if (!do_region)
6282 entry = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fn));
6283 exit_bbs = BITMAP_ALLOC (NULL);
6284 bitmap_set_bit (exit_bbs, EXIT_BLOCK);
6287 int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS);
6288 int n = rev_post_order_and_mark_dfs_back_seme (fn, entry, exit_bbs,
6289 iterate, rpo);
6290 /* rev_post_order_and_mark_dfs_back_seme fills RPO in reverse order. */
6291 for (int i = 0; i < n / 2; ++i)
6292 std::swap (rpo[i], rpo[n-i-1]);
6294 if (!do_region)
6295 BITMAP_FREE (exit_bbs);
6297 int *bb_to_rpo = XNEWVEC (int, last_basic_block_for_fn (fn));
6298 for (int i = 0; i < n; ++i)
6299 bb_to_rpo[rpo[i]] = i;
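/* bb_to_rpo is now the inverse of rpo, i.e. bb_to_rpo[rpo[i]] == i;
   with made-up numbers, rpo = { 2, 3, 5, 4 } gives BB5 the RPO
   index 2.  */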
6301 unwind_state *rpo_state = XNEWVEC (unwind_state, n);
6303 rpo_elim avail (entry->dest);
6304 rpo_avail = &avail;
6306 /* Verify we have no extra entries into the region. */
6307 if (flag_checking && do_region)
6309 auto_bb_flag bb_in_region (fn);
6310 for (int i = 0; i < n; ++i)
6312 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6313 bb->flags |= bb_in_region;
6315 /* We can't merge the first two loops because we cannot rely
6316 on EDGE_DFS_BACK for edges not within the region. But if
6317 we decide to always have the bb_in_region flag we can
6318 do the checking during the RPO walk itself (but then it's
6319 also easy to handle MEME conservatively). */
6320 for (int i = 0; i < n; ++i)
6322 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6323 edge e;
6324 edge_iterator ei;
6325 FOR_EACH_EDGE (e, ei, bb->preds)
6326 gcc_assert (e == entry || (e->src->flags & bb_in_region));
6328 for (int i = 0; i < n; ++i)
6330 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6331 bb->flags &= ~bb_in_region;
6335 /* Create the VN state. For the initial size of the various hashtables
6336 use a heuristic based on region size and number of SSA names. */
6337 unsigned region_size = (((unsigned HOST_WIDE_INT)n * num_ssa_names)
6338 / (n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS));
6339 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
6341 vn_ssa_aux_hash = new hash_table <vn_ssa_aux_hasher> (region_size * 2);
6342 gcc_obstack_init (&vn_ssa_aux_obstack);
6344 gcc_obstack_init (&vn_tables_obstack);
6345 gcc_obstack_init (&vn_tables_insert_obstack);
6346 valid_info = XCNEW (struct vn_tables_s);
6347 allocate_vn_table (valid_info, region_size);
6348 last_inserted_ref = NULL;
6349 last_inserted_phi = NULL;
6350 last_inserted_nary = NULL;
6352 vn_valueize = rpo_vn_valueize;
6354 /* Initialize the unwind state and edge/BB executable state. */
6355 for (int i = 0; i < n; ++i)
6357 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6358 rpo_state[i].visited = 0;
6359 bb->flags &= ~BB_EXECUTABLE;
6360 bool has_backedges = false;
6361 edge e;
6362 edge_iterator ei;
6363 FOR_EACH_EDGE (e, ei, bb->preds)
6365 if (e->flags & EDGE_DFS_BACK)
6366 has_backedges = true;
6367 if (! iterate && (e->flags & EDGE_DFS_BACK))
6369 e->flags |= EDGE_EXECUTABLE;
6370 /* ??? Strictly speaking we only need to unconditionally
6371 process a block when it is in an irreducible region,
6372 thus when it may be reachable via the backedge only. */
6373 bb->flags |= BB_EXECUTABLE;
6375 else
6376 e->flags &= ~EDGE_EXECUTABLE;
6378 rpo_state[i].iterate = iterate && has_backedges;
6380 entry->flags |= EDGE_EXECUTABLE;
6381 entry->dest->flags |= BB_EXECUTABLE;
6383 /* As a heuristic to improve compile-time we handle only the N innermost
6384 loops and the outermost one optimistically. */
6385 if (iterate)
6387 loop_p loop;
6388 unsigned max_depth = PARAM_VALUE (PARAM_RPO_VN_MAX_LOOP_DEPTH);
6389 FOR_EACH_LOOP (loop, LI_ONLY_INNERMOST)
6390 if (loop_depth (loop) > max_depth)
6391 for (unsigned i = 2;
6392 i < loop_depth (loop) - max_depth; ++i)
6394 basic_block header = superloop_at_depth (loop, i)->header;
6395 bool non_latch_backedge = false;
6396 edge e;
6397 edge_iterator ei;
6398 FOR_EACH_EDGE (e, ei, header->preds)
6399 if (e->flags & EDGE_DFS_BACK)
6401 e->flags |= EDGE_EXECUTABLE;
6402 e->dest->flags |= BB_EXECUTABLE;
6403 /* There can be a non-latch backedge into the header
6404 which is part of an outer irreducible region. We
6405 cannot avoid iterating this block then. */
6406 if (!dominated_by_p (CDI_DOMINATORS,
6407 e->src, e->dest))
6409 if (dump_file && (dump_flags & TDF_DETAILS))
6410 fprintf (dump_file, "non-latch backedge %d -> %d "
6411 "forces iteration of loop %d\n",
6412 e->src->index, e->dest->index, loop->num);
6413 non_latch_backedge = true;
6416 rpo_state[bb_to_rpo[header->index]].iterate = non_latch_backedge;
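/* Made-up illustration of the above: with
   PARAM_RPO_VN_MAX_LOOP_DEPTH == 7 and an innermost loop at
   depth 10 the inner loop runs for i == 2 only, so just the
   depth-2 superloop header gets its latch backedges pre-marked
   executable and is not iterated; the outermost and the innermost
   loops keep the optimistic backedge treatment.  */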
6420 /* Go and process all blocks, iterating as necessary. */
6421 int idx = 0;
6422 uint64_t nblk = 0;
6425 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
6427 /* If the block has incoming backedges remember unwind state. This
6428 is required even for non-executable blocks since in irreducible
6429 regions we might reach them via the backedge and re-start iterating
6430 from there.
6431 Note we can individually mark blocks with incoming backedges to
6432 not iterate where we then handle PHIs conservatively. We do that
6433 heuristically to reduce compile-time for degenerate cases. */
6434 if (rpo_state[idx].iterate)
6436 rpo_state[idx].ob_top = obstack_alloc (&vn_tables_obstack, 0);
6437 rpo_state[idx].ref_top = last_inserted_ref;
6438 rpo_state[idx].phi_top = last_inserted_phi;
6439 rpo_state[idx].nary_top = last_inserted_nary;
6442 if (!(bb->flags & BB_EXECUTABLE))
6444 if (dump_file && (dump_flags & TDF_DETAILS))
6445 fprintf (dump_file, "Block %d: BB%d found not executable\n",
6446 idx, bb->index);
6447 idx++;
6448 continue;
6451 if (dump_file && (dump_flags & TDF_DETAILS))
6452 fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
6453 nblk++;
6454 todo |= process_bb (avail, bb,
6455 rpo_state[idx].visited != 0,
6456 rpo_state[idx].iterate,
6457 iterate, eliminate, do_region, exit_bbs);
6458 rpo_state[idx].visited++;
6460 if (iterate)
6462 /* Check whether changed values flow over executable outgoing
6463 backedges and whether those change destination PHI values
6464 (that's the thing we can easily verify). Reduce over all such
6465 edges to the farthest away PHI. */
6466 int iterate_to = -1;
6467 edge_iterator ei;
6468 edge e;
6469 FOR_EACH_EDGE (e, ei, bb->succs)
6470 if ((e->flags & (EDGE_DFS_BACK|EDGE_EXECUTABLE))
6471 == (EDGE_DFS_BACK|EDGE_EXECUTABLE)
6472 && rpo_state[bb_to_rpo[e->dest->index]].iterate)
6474 if (dump_file && (dump_flags & TDF_DETAILS))
6475 fprintf (dump_file, "Looking for changed values of backedge "
6476 "%d->%d destination PHIs\n",
6477 e->src->index, e->dest->index);
6478 vn_context_bb = e->dest;
6479 gphi_iterator gsi;
6480 for (gsi = gsi_start_phis (e->dest);
6481 !gsi_end_p (gsi); gsi_next (&gsi))
6483 bool inserted = false;
6484 /* While we'd ideally just iterate on value changes
6485 we CSE PHIs and do that even across basic-block
6486 boundaries. So even hashtable state changes can
6487 be important (which is roughly equivalent to
6488 PHI argument value changes). To avoid excessive
6489 iteration because of that we track whether a PHI
6490 was CSEd to using the GF_PLF_1 flag. */
6491 bool phival_changed;
6492 if ((phival_changed = visit_phi (gsi.phi (),
6493 &inserted, false))
6494 || (inserted && gimple_plf (gsi.phi (), GF_PLF_1)))
6496 if (!phival_changed
6497 && dump_file && (dump_flags & TDF_DETAILS))
6498 fprintf (dump_file, "PHI was CSEd and hashtable "
6499 "state (changed)\n");
6500 int destidx = bb_to_rpo[e->dest->index];
6501 if (iterate_to == -1
6502 || destidx < iterate_to)
6503 iterate_to = destidx;
6504 break;
6507 vn_context_bb = NULL;
6509 if (iterate_to != -1)
6511 do_unwind (&rpo_state[iterate_to], iterate_to,
6512 avail, bb_to_rpo);
6513 idx = iterate_to;
6514 if (dump_file && (dump_flags & TDF_DETAILS))
6515 fprintf (dump_file, "Iterating to %d BB%d\n",
6516 iterate_to, rpo[iterate_to]);
6517 continue;
6521 idx++;
6523 while (idx < n);
6525 /* Report statistics if a statistics or dump file is active. */
6526 int nex = 0;
6527 unsigned max_visited = 1;
6528 for (int i = 0; i < n; ++i)
6530 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6531 if (bb->flags & BB_EXECUTABLE)
6532 nex++;
6533 statistics_histogram_event (cfun, "RPO block visited times",
6534 rpo_state[i].visited);
6535 if (rpo_state[i].visited > max_visited)
6536 max_visited = rpo_state[i].visited;
6538 unsigned nvalues = 0, navail = 0;
6539 for (rpo_elim::rpo_avail_t::iterator i = avail.m_rpo_avail.begin ();
6540 i != avail.m_rpo_avail.end (); ++i)
6542 nvalues++;
6543 navail += (*i).second.length ();
6545 statistics_counter_event (cfun, "RPO blocks", n);
6546 statistics_counter_event (cfun, "RPO blocks visited", nblk);
6547 statistics_counter_event (cfun, "RPO blocks executable", nex);
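/* NBLK / NEX, the average number of visits per executable block, is
   scaled by 10 so one decimal digit survives the integer math;
   e.g. a recorded histogram value of 23 means 2.3 visits on
   average, matching the "%d.%d" dump output below.  */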
6548 statistics_histogram_event (cfun, "RPO iterations", 10*nblk / nex);
6549 statistics_histogram_event (cfun, "RPO num values", nvalues);
6550 statistics_histogram_event (cfun, "RPO num avail", navail);
6551 statistics_histogram_event (cfun, "RPO num lattice",
6552 vn_ssa_aux_hash->elements ());
6553 if (dump_file && (dump_flags & (TDF_DETAILS|TDF_STATS)))
6555 fprintf (dump_file, "RPO iteration over %d blocks visited %" PRIu64
6556 " blocks in total discovering %d executable blocks iterating "
6557 "%d.%d times, a block was visited max. %u times\n",
6558 n, nblk, nex,
6559 (int)((10*nblk / nex)/10), (int)((10*nblk / nex)%10),
6560 max_visited);
6561 fprintf (dump_file, "RPO tracked %d values available at %d locations "
6562 "and %" PRIu64 " lattice elements\n",
6563 nvalues, navail, (uint64_t) vn_ssa_aux_hash->elements ());
6566 if (eliminate)
6568 /* When !iterate we already performed elimination during the RPO
6569 walk. */
6570 if (iterate)
6572 /* Elimination for region-based VN needs to be done within the
6573 RPO walk. */
6574 gcc_assert (! do_region);
6575 /* Note we can't use avail.walk here because that gets confused
6576 by the existing availability and it will be less efficient
6577 as well. */
6578 todo |= eliminate_with_rpo_vn (NULL);
6580 else
6581 todo |= avail.eliminate_cleanup (do_region);
6584 vn_valueize = NULL;
6585 rpo_avail = NULL;
6587 XDELETEVEC (bb_to_rpo);
6588 XDELETEVEC (rpo);
6590 return todo;
6593 /* Region-based entry for RPO VN. Performs value-numbering and elimination
6594 on the SEME region specified by ENTRY and EXIT_BBS. */
6596 unsigned
6597 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs)
6599 default_vn_walk_kind = VN_WALKREWRITE;
6600 unsigned todo = do_rpo_vn (fn, entry, exit_bbs, false, true);
6601 free_rpo_vn ();
6602 return todo;
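/* A hypothetical caller running VN plus elimination over a SEME
   region could look like this (all names illustrative):

     bitmap exit_bbs = BITMAP_ALLOC (NULL);
     bitmap_set_bit (exit_bbs, region_exit_bb->index);
     unsigned todo = do_rpo_vn (cfun, region_entry_edge, exit_bbs);
     BITMAP_FREE (exit_bbs);

   Since this entry point passes iterate == false, backedges inside
   the region are handled pessimistically.  */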
6606 namespace {
6608 const pass_data pass_data_fre =
6610 GIMPLE_PASS, /* type */
6611 "fre", /* name */
6612 OPTGROUP_NONE, /* optinfo_flags */
6613 TV_TREE_FRE, /* tv_id */
6614 ( PROP_cfg | PROP_ssa ), /* properties_required */
6615 0, /* properties_provided */
6616 0, /* properties_destroyed */
6617 0, /* todo_flags_start */
6618 0, /* todo_flags_finish */
6621 class pass_fre : public gimple_opt_pass
6623 public:
6624 pass_fre (gcc::context *ctxt)
6625 : gimple_opt_pass (pass_data_fre, ctxt)
6628 /* opt_pass methods: */
6629 opt_pass * clone () { return new pass_fre (m_ctxt); }
6630 virtual bool gate (function *) { return flag_tree_fre != 0; }
6631 virtual unsigned int execute (function *);
6633 }; // class pass_fre
6635 unsigned int
6636 pass_fre::execute (function *fun)
6638 unsigned todo = 0;
6640 /* At -O[1g] use the cheap non-iterating mode. */
6641 calculate_dominance_info (CDI_DOMINATORS);
6642 if (optimize > 1)
6643 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
6645 default_vn_walk_kind = VN_WALKREWRITE;
6646 todo = do_rpo_vn (fun, NULL, NULL, optimize > 1, true);
6647 free_rpo_vn ();
6649 if (optimize > 1)
6650 loop_optimizer_finalize ();
6652 return todo;
6655 } // anon namespace
6657 gimple_opt_pass *
6658 make_pass_fre (gcc::context *ctxt)
6660 return new pass_fre (ctxt);
6663 #undef BB_EXECUTABLE