gcc/tree-ssa-sccvn.c  (official-gcc.git, as of 2018-09-03)
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2018 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "ssa.h"
29 #include "expmed.h"
30 #include "insn-config.h"
31 #include "memmodel.h"
32 #include "emit-rtl.h"
33 #include "cgraph.h"
34 #include "gimple-pretty-print.h"
35 #include "alias.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "cfganal.h"
39 #include "tree-inline.h"
40 #include "internal-fn.h"
41 #include "gimple-fold.h"
42 #include "tree-eh.h"
43 #include "gimplify.h"
44 #include "flags.h"
45 #include "dojump.h"
46 #include "explow.h"
47 #include "calls.h"
48 #include "varasm.h"
49 #include "stmt.h"
50 #include "expr.h"
51 #include "tree-dfa.h"
52 #include "tree-ssa.h"
53 #include "dumpfile.h"
54 #include "cfgloop.h"
55 #include "params.h"
56 #include "tree-ssa-propagate.h"
57 #include "tree-cfg.h"
58 #include "domwalk.h"
59 #include "gimple-iterator.h"
60 #include "gimple-match.h"
61 #include "stringpool.h"
62 #include "attribs.h"
63 #include "tree-pass.h"
64 #include "statistics.h"
65 #include "langhooks.h"
66 #include "ipa-utils.h"
67 #include "dbgcnt.h"
68 #include "tree-cfgcleanup.h"
69 #include "tree-ssa-loop.h"
70 #include "tree-scalar-evolution.h"
71 #include "tree-ssa-sccvn.h"
73 /* This algorithm is based on the SCC algorithm presented by Keith
74 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
75 (http://citeseer.ist.psu.edu/41805.html). In
76 straight line code, it is equivalent to a regular hash based value
77 numbering that is performed in reverse postorder.
79 For code with cycles, there are two alternatives, both of which
80 require keeping the hashtables separate from the actual list of
81 value numbers for SSA names.
83 1. Iterate value numbering in an RPO walk of the blocks, removing
84 all the entries from the hashtable after each iteration (but
85 keeping the SSA name->value number mapping between iterations).
86 Iterate until it does not change.
88 2. Perform value numbering as part of an SCC walk on the SSA graph,
89 iterating only the cycles in the SSA graph until they do not change
90 (using a separate, optimistic hashtable for value numbering the SCC
91 operands).
93 The second is not just faster in practice (because most SSA graph
94 cycles do not involve all the variables in the graph), it also has
95 some nice properties.
97 One of these nice properties is that when we pop an SCC off the
98 stack, we are guaranteed to have processed all the operands coming from
99 *outside of that SCC*, so we do not need to do anything special to
100 ensure they have value numbers.
102 Another nice property is that the SCC walk is done as part of a DFS
103 of the SSA graph, which makes it easy to perform combining and
104 simplifying operations at the same time.
106 The code below is deliberately written in a way that makes it easy
107 to separate the SCC walk from the other work it does.
109 In order to propagate constants through the code, we track which
110 expressions contain constants, and use those while folding. In
111 theory, we could also track expressions whose value numbers are
112 replaced, in case we end up folding based on expression
113 identities.
115 In order to value number memory, we assign value numbers to vuses.
116 This enables us to note that, for example, stores to the same
117 address of the same value from the same starting memory states are
118 equivalent.
119 TODO:
121 1. We can iterate only the changing portions of the SCCs, but
122 I have not seen an SCC big enough for this to be a win.
123 2. If you differentiate between phi nodes for loops and phi nodes
124 for if-then-else, you can properly consider phi nodes in different
125 blocks for equivalence.
126 3. We could value number vuses in more cases, particularly, whole
127 structure copies. */
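/* As an illustration (not part of the implementation), consider a simple
   counted loop:

     <bb 3>:
       i_1 = PHI <0(2), i_3(3)>
       a_2 = i_1 * 4;
       i_3 = i_1 + 1;
       if (i_3 < n_4(D)) goto <bb 3>; else goto <bb 4>;

   The names i_1 and i_3 reference each other through the PHI node and so
   form one SCC of the SSA graph; they are value-numbered together, iterating
   with the optimistic table until their numbers stabilize.  A straight-line
   name such as a_2 only depends on operands outside its (singleton) SCC,
   which are already numbered when it is popped, so a single hash lookup
   suffices for it.  */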
130 /* There's no BB_EXECUTABLE but we can use BB_VISITED. */
131 #define BB_EXECUTABLE BB_VISITED
133 static tree *last_vuse_ptr;
134 static vn_lookup_kind vn_walk_kind;
135 static vn_lookup_kind default_vn_walk_kind;
137 /* vn_nary_op hashtable helpers. */
139 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
141 typedef vn_nary_op_s *compare_type;
142 static inline hashval_t hash (const vn_nary_op_s *);
143 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
146 /* Return the computed hashcode for nary operation P1. */
148 inline hashval_t
149 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
151 return vno1->hashcode;
154 /* Compare nary operations P1 and P2 and return true if they are
155 equivalent. */
157 inline bool
158 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
160 return vno1 == vno2 || vn_nary_op_eq (vno1, vno2);
163 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
164 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
167 /* vn_phi hashtable helpers. */
169 static int
170 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
172 struct vn_phi_hasher : nofree_ptr_hash <vn_phi_s>
174 static inline hashval_t hash (const vn_phi_s *);
175 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
178 /* Return the computed hashcode for phi operation P1. */
180 inline hashval_t
181 vn_phi_hasher::hash (const vn_phi_s *vp1)
183 return vp1->hashcode;
186 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
188 inline bool
189 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
191 return vp1 == vp2 || vn_phi_eq (vp1, vp2);
194 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
195 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
198 /* Compare two reference operands P1 and P2 for equality. Return true if
199 they are equal, and false otherwise. */
201 static int
202 vn_reference_op_eq (const void *p1, const void *p2)
204 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
205 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
207 return (vro1->opcode == vro2->opcode
208 /* We do not care for differences in type qualification. */
209 && (vro1->type == vro2->type
210 || (vro1->type && vro2->type
211 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
212 TYPE_MAIN_VARIANT (vro2->type))))
213 && expressions_equal_p (vro1->op0, vro2->op0)
214 && expressions_equal_p (vro1->op1, vro2->op1)
215 && expressions_equal_p (vro1->op2, vro2->op2));
218 /* Free a reference operation structure VP. */
220 static inline void
221 free_reference (vn_reference_s *vr)
223 vr->operands.release ();
227 /* vn_reference hashtable helpers. */
229 struct vn_reference_hasher : nofree_ptr_hash <vn_reference_s>
231 static inline hashval_t hash (const vn_reference_s *);
232 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
235 /* Return the hashcode for a given reference operation P1. */
237 inline hashval_t
238 vn_reference_hasher::hash (const vn_reference_s *vr1)
240 return vr1->hashcode;
243 inline bool
244 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
246 return v == c || vn_reference_eq (v, c);
249 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
250 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
253 /* The set of VN hashtables. */
255 typedef struct vn_tables_s
257 vn_nary_op_table_type *nary;
258 vn_phi_table_type *phis;
259 vn_reference_table_type *references;
260 } *vn_tables_t;
263 /* vn_constant hashtable helpers. */
265 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
267 static inline hashval_t hash (const vn_constant_s *);
268 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
271 /* Hash table hash function for vn_constant_t. */
273 inline hashval_t
274 vn_constant_hasher::hash (const vn_constant_s *vc1)
276 return vc1->hashcode;
279 /* Hash table equality function for vn_constant_t. */
281 inline bool
282 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
284 if (vc1->hashcode != vc2->hashcode)
285 return false;
287 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
290 static hash_table<vn_constant_hasher> *constant_to_value_id;
291 static bitmap constant_value_ids;
294 /* Obstack we allocate the vn-tables elements from. */
295 static obstack vn_tables_obstack;
296 /* Special obstack we never unwind. */
297 static obstack vn_tables_insert_obstack;
299 static vn_reference_t last_inserted_ref;
300 static vn_phi_t last_inserted_phi;
301 static vn_nary_op_t last_inserted_nary;
303 /* Valid hashtables storing information we have proven to be
304 correct. */
305 static vn_tables_t valid_info;
308 /* Valueization hook. Valueize NAME if it is an SSA name, otherwise
309 just return it. */
310 tree (*vn_valueize) (tree);
313 /* This represents the top of the VN lattice, which is the universal
314 value. */
316 tree VN_TOP;
318 /* Unique counter for our value ids. */
320 static unsigned int next_value_id;
323 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
324 are allocated on an obstack for locality reasons, and to free them
325 without looping over the vec. */
327 struct vn_ssa_aux_hasher : typed_noop_remove <vn_ssa_aux_t>
329 typedef vn_ssa_aux_t value_type;
330 typedef tree compare_type;
331 static inline hashval_t hash (const value_type &);
332 static inline bool equal (const value_type &, const compare_type &);
333 static inline void mark_deleted (value_type &) {}
334 static inline void mark_empty (value_type &e) { e = NULL; }
335 static inline bool is_deleted (value_type &) { return false; }
336 static inline bool is_empty (value_type &e) { return e == NULL; }
339 hashval_t
340 vn_ssa_aux_hasher::hash (const value_type &entry)
342 return SSA_NAME_VERSION (entry->name);
345 bool
346 vn_ssa_aux_hasher::equal (const value_type &entry, const compare_type &name)
348 return name == entry->name;
351 static hash_table<vn_ssa_aux_hasher> *vn_ssa_aux_hash;
352 typedef hash_table<vn_ssa_aux_hasher>::iterator vn_ssa_aux_iterator_type;
353 static struct obstack vn_ssa_aux_obstack;
355 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *, tree);
356 static unsigned int vn_nary_length_from_stmt (gimple *);
357 static vn_nary_op_t alloc_vn_nary_op_noinit (unsigned int, obstack *);
358 static vn_nary_op_t vn_nary_op_insert_into (vn_nary_op_t,
359 vn_nary_op_table_type *, bool);
360 static void init_vn_nary_op_from_stmt (vn_nary_op_t, gimple *);
361 static void init_vn_nary_op_from_pieces (vn_nary_op_t, unsigned int,
362 enum tree_code, tree, tree *);
363 static tree vn_lookup_simplify_result (gimple_match_op *);
365 /* Return whether there is value numbering information for a given SSA name. */
367 bool
368 has_VN_INFO (tree name)
370 return vn_ssa_aux_hash->find_with_hash (name, SSA_NAME_VERSION (name));
373 vn_ssa_aux_t
374 VN_INFO (tree name)
376 vn_ssa_aux_t *res
377 = vn_ssa_aux_hash->find_slot_with_hash (name, SSA_NAME_VERSION (name),
378 INSERT);
379 if (*res != NULL)
380 return *res;
382 vn_ssa_aux_t newinfo = *res = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
383 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
384 newinfo->name = name;
385 newinfo->valnum = VN_TOP;
386 /* We are using the visited flag to handle uses with defs not within the
387 region being value-numbered. */
388 newinfo->visited = false;
390 /* Given we now create the VN_INFOs on demand, we have to do initialization
391 different from just VN_TOP here. */
392 if (SSA_NAME_IS_DEFAULT_DEF (name))
393 switch (TREE_CODE (SSA_NAME_VAR (name)))
395 case VAR_DECL:
396 /* All undefined vars are VARYING. */
397 newinfo->valnum = name;
398 newinfo->visited = true;
399 break;
401 case PARM_DECL:
402 /* Parameters are VARYING but we can record a condition
403 if we know it is a non-NULL pointer. */
404 newinfo->visited = true;
405 newinfo->valnum = name;
406 if (POINTER_TYPE_P (TREE_TYPE (name))
407 && nonnull_arg_p (SSA_NAME_VAR (name)))
409 tree ops[2];
410 ops[0] = name;
411 ops[1] = build_int_cst (TREE_TYPE (name), 0);
412 vn_nary_op_t nary;
413 /* Allocate from non-unwinding stack. */
414 nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
415 init_vn_nary_op_from_pieces (nary, 2, NE_EXPR,
416 boolean_type_node, ops);
417 nary->predicated_values = 0;
418 nary->u.result = boolean_true_node;
419 vn_nary_op_insert_into (nary, valid_info->nary, true);
420 gcc_assert (nary->unwind_to == NULL);
421 /* Also do not link it into the undo chain. */
422 last_inserted_nary = nary->next;
423 nary->next = (vn_nary_op_t)(void *)-1;
424 nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
425 init_vn_nary_op_from_pieces (nary, 2, EQ_EXPR,
426 boolean_type_node, ops);
427 nary->predicated_values = 0;
428 nary->u.result = boolean_false_node;
429 vn_nary_op_insert_into (nary, valid_info->nary, true);
430 gcc_assert (nary->unwind_to == NULL);
431 last_inserted_nary = nary->next;
432 nary->next = (vn_nary_op_t)(void *)-1;
433 if (dump_file && (dump_flags & TDF_DETAILS))
435 fprintf (dump_file, "Recording ");
436 print_generic_expr (dump_file, name, TDF_SLIM);
437 fprintf (dump_file, " != 0\n");
440 break;
442 case RESULT_DECL:
443 /* If the result is passed by invisible reference the default
444 def is initialized, otherwise it's uninitialized. Still
445 undefined is varying. */
446 newinfo->visited = true;
447 newinfo->valnum = name;
448 break;
450 default:
451 gcc_unreachable ();
453 return newinfo;
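/* Illustrative example (not part of the pass): for a pointer parameter of a
   function declared with __attribute__((nonnull)), say int *p, the PARM_DECL
   case above records for the default definition p_1(D) the two nary facts

     p_1(D) != 0  ->  true
     p_1(D) == 0  ->  false

   in the always-valid nary table, so later lookups of these comparisons
   fold to constants.  */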
456 /* Return the SSA value of X. */
458 inline tree
459 SSA_VAL (tree x, bool *visited = NULL)
461 vn_ssa_aux_t tem = vn_ssa_aux_hash->find_with_hash (x, SSA_NAME_VERSION (x));
462 if (visited)
463 *visited = tem && tem->visited;
464 return tem && tem->visited ? tem->valnum : x;
467 /* Return whether X was visited. */
469 inline bool
470 SSA_VISITED (tree x)
472 vn_ssa_aux_t tem = vn_ssa_aux_hash->find_with_hash (x, SSA_NAME_VERSION (x));
473 return tem && tem->visited;
476 /* Return the SSA value of the VUSE x, supporting released VDEFs
477 during elimination which will value-number the VDEF to the
478 associated VUSE (but not substitute in the whole lattice). */
480 static inline tree
481 vuse_ssa_val (tree x)
483 if (!x)
484 return NULL_TREE;
488 x = SSA_VAL (x);
489 gcc_assert (x != VN_TOP);
491 while (SSA_NAME_IN_FREE_LIST (x));
493 return x;
497 /* Return the vn_kind the expression computed by the stmt should be
498 associated with. */
500 enum vn_kind
501 vn_get_stmt_kind (gimple *stmt)
503 switch (gimple_code (stmt))
505 case GIMPLE_CALL:
506 return VN_REFERENCE;
507 case GIMPLE_PHI:
508 return VN_PHI;
509 case GIMPLE_ASSIGN:
511 enum tree_code code = gimple_assign_rhs_code (stmt);
512 tree rhs1 = gimple_assign_rhs1 (stmt);
513 switch (get_gimple_rhs_class (code))
515 case GIMPLE_UNARY_RHS:
516 case GIMPLE_BINARY_RHS:
517 case GIMPLE_TERNARY_RHS:
518 return VN_NARY;
519 case GIMPLE_SINGLE_RHS:
520 switch (TREE_CODE_CLASS (code))
522 case tcc_reference:
523 /* VOP-less references can go through unary case. */
524 if ((code == REALPART_EXPR
525 || code == IMAGPART_EXPR
526 || code == VIEW_CONVERT_EXPR
527 || code == BIT_FIELD_REF)
528 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
529 return VN_NARY;
531 /* Fallthrough. */
532 case tcc_declaration:
533 return VN_REFERENCE;
535 case tcc_constant:
536 return VN_CONSTANT;
538 default:
539 if (code == ADDR_EXPR)
540 return (is_gimple_min_invariant (rhs1)
541 ? VN_CONSTANT : VN_REFERENCE);
542 else if (code == CONSTRUCTOR)
543 return VN_NARY;
544 return VN_NONE;
546 default:
547 return VN_NONE;
550 default:
551 return VN_NONE;
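/* A few illustrative classifications (GIMPLE sketches, not exhaustive):

     _1 = a_2 + b_3;            -> VN_NARY
     _4 = MEM[(int *)p_5];      -> VN_REFERENCE
     _6 = PHI <_1(3), _4(4)>    -> VN_PHI
     _7 = foo (x_8);            -> VN_REFERENCE (GIMPLE_CALL)
     _9 = 42;                   -> VN_CONSTANT  */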
555 /* Lookup a value id for CONSTANT and return it. If it does not
556 exist returns 0. */
558 unsigned int
559 get_constant_value_id (tree constant)
561 vn_constant_s **slot;
562 struct vn_constant_s vc;
564 vc.hashcode = vn_hash_constant_with_type (constant);
565 vc.constant = constant;
566 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
567 if (slot)
568 return (*slot)->value_id;
569 return 0;
572 /* Lookup a value id for CONSTANT, and if it does not exist, create a
573 new one and return it. If it does exist, return it. */
575 unsigned int
576 get_or_alloc_constant_value_id (tree constant)
578 vn_constant_s **slot;
579 struct vn_constant_s vc;
580 vn_constant_t vcp;
582 /* If the hashtable isn't initialized we're not running from PRE and thus
583 do not need value-ids. */
584 if (!constant_to_value_id)
585 return 0;
587 vc.hashcode = vn_hash_constant_with_type (constant);
588 vc.constant = constant;
589 slot = constant_to_value_id->find_slot (&vc, INSERT);
590 if (*slot)
591 return (*slot)->value_id;
593 vcp = XNEW (struct vn_constant_s);
594 vcp->hashcode = vc.hashcode;
595 vcp->constant = constant;
596 vcp->value_id = get_next_value_id ();
597 *slot = vcp;
598 bitmap_set_bit (constant_value_ids, vcp->value_id);
599 return vcp->value_id;
602 /* Return true if V is a value id for a constant. */
604 bool
605 value_id_constant_p (unsigned int v)
607 return bitmap_bit_p (constant_value_ids, v);
610 /* Compute the hash for a reference operand VRO1. */
612 static void
613 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
615 hstate.add_int (vro1->opcode);
616 if (vro1->op0)
617 inchash::add_expr (vro1->op0, hstate);
618 if (vro1->op1)
619 inchash::add_expr (vro1->op1, hstate);
620 if (vro1->op2)
621 inchash::add_expr (vro1->op2, hstate);
624 /* Compute a hash for the reference operation VR1 and return it. */
626 static hashval_t
627 vn_reference_compute_hash (const vn_reference_t vr1)
629 inchash::hash hstate;
630 hashval_t result;
631 int i;
632 vn_reference_op_t vro;
633 poly_int64 off = -1;
634 bool deref = false;
636 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
638 if (vro->opcode == MEM_REF)
639 deref = true;
640 else if (vro->opcode != ADDR_EXPR)
641 deref = false;
642 if (maybe_ne (vro->off, -1))
644 if (known_eq (off, -1))
645 off = 0;
646 off += vro->off;
648 else
650 if (maybe_ne (off, -1)
651 && maybe_ne (off, 0))
652 hstate.add_poly_int (off);
653 off = -1;
654 if (deref
655 && vro->opcode == ADDR_EXPR)
657 if (vro->op0)
659 tree op = TREE_OPERAND (vro->op0, 0);
660 hstate.add_int (TREE_CODE (op));
661 inchash::add_expr (op, hstate);
664 else
665 vn_reference_op_compute_hash (vro, hstate);
668 result = hstate.end ();
669 /* ??? We would ICE later if we hash instead of adding that in. */
670 if (vr1->vuse)
671 result += SSA_NAME_VERSION (vr1->vuse);
673 return result;
676 /* Return true if reference operations VR1 and VR2 are equivalent. This
677 means they have the same set of operands and vuses. */
679 bool
680 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
682 unsigned i, j;
684 /* Early out if this is not a hash collision. */
685 if (vr1->hashcode != vr2->hashcode)
686 return false;
688 /* The VOP needs to be the same. */
689 if (vr1->vuse != vr2->vuse)
690 return false;
692 /* If the operands are the same we are done. */
693 if (vr1->operands == vr2->operands)
694 return true;
696 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
697 return false;
699 if (INTEGRAL_TYPE_P (vr1->type)
700 && INTEGRAL_TYPE_P (vr2->type))
702 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
703 return false;
705 else if (INTEGRAL_TYPE_P (vr1->type)
706 && (TYPE_PRECISION (vr1->type)
707 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
708 return false;
709 else if (INTEGRAL_TYPE_P (vr2->type)
710 && (TYPE_PRECISION (vr2->type)
711 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
712 return false;
714 i = 0;
715 j = 0;
718 poly_int64 off1 = 0, off2 = 0;
719 vn_reference_op_t vro1, vro2;
720 vn_reference_op_s tem1, tem2;
721 bool deref1 = false, deref2 = false;
722 for (; vr1->operands.iterate (i, &vro1); i++)
724 if (vro1->opcode == MEM_REF)
725 deref1 = true;
726 /* Do not look through a storage order barrier. */
727 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
728 return false;
729 if (known_eq (vro1->off, -1))
730 break;
731 off1 += vro1->off;
733 for (; vr2->operands.iterate (j, &vro2); j++)
735 if (vro2->opcode == MEM_REF)
736 deref2 = true;
737 /* Do not look through a storage order barrier. */
738 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
739 return false;
740 if (known_eq (vro2->off, -1))
741 break;
742 off2 += vro2->off;
744 if (maybe_ne (off1, off2))
745 return false;
746 if (deref1 && vro1->opcode == ADDR_EXPR)
748 memset (&tem1, 0, sizeof (tem1));
749 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
750 tem1.type = TREE_TYPE (tem1.op0);
751 tem1.opcode = TREE_CODE (tem1.op0);
752 vro1 = &tem1;
753 deref1 = false;
755 if (deref2 && vro2->opcode == ADDR_EXPR)
757 memset (&tem2, 0, sizeof (tem2));
758 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
759 tem2.type = TREE_TYPE (tem2.op0);
760 tem2.opcode = TREE_CODE (tem2.op0);
761 vro2 = &tem2;
762 deref2 = false;
764 if (deref1 != deref2)
765 return false;
766 if (!vn_reference_op_eq (vro1, vro2))
767 return false;
768 ++j;
769 ++i;
771 while (vr1->operands.length () != i
772 || vr2->operands.length () != j);
774 return true;
777 /* Copy the operations present in load/store REF into RESULT, a vector of
778 vn_reference_op_s's. */
780 static void
781 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
783 if (TREE_CODE (ref) == TARGET_MEM_REF)
785 vn_reference_op_s temp;
787 result->reserve (3);
789 memset (&temp, 0, sizeof (temp));
790 temp.type = TREE_TYPE (ref);
791 temp.opcode = TREE_CODE (ref);
792 temp.op0 = TMR_INDEX (ref);
793 temp.op1 = TMR_STEP (ref);
794 temp.op2 = TMR_OFFSET (ref);
795 temp.off = -1;
796 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
797 temp.base = MR_DEPENDENCE_BASE (ref);
798 result->quick_push (temp);
800 memset (&temp, 0, sizeof (temp));
801 temp.type = NULL_TREE;
802 temp.opcode = ERROR_MARK;
803 temp.op0 = TMR_INDEX2 (ref);
804 temp.off = -1;
805 result->quick_push (temp);
807 memset (&temp, 0, sizeof (temp));
808 temp.type = NULL_TREE;
809 temp.opcode = TREE_CODE (TMR_BASE (ref));
810 temp.op0 = TMR_BASE (ref);
811 temp.off = -1;
812 result->quick_push (temp);
813 return;
816 /* For non-calls, store the information that makes up the address. */
817 tree orig = ref;
818 while (ref)
820 vn_reference_op_s temp;
822 memset (&temp, 0, sizeof (temp));
823 temp.type = TREE_TYPE (ref);
824 temp.opcode = TREE_CODE (ref);
825 temp.off = -1;
827 switch (temp.opcode)
829 case MODIFY_EXPR:
830 temp.op0 = TREE_OPERAND (ref, 1);
831 break;
832 case WITH_SIZE_EXPR:
833 temp.op0 = TREE_OPERAND (ref, 1);
834 temp.off = 0;
835 break;
836 case MEM_REF:
837 /* The base address gets its own vn_reference_op_s structure. */
838 temp.op0 = TREE_OPERAND (ref, 1);
839 if (!mem_ref_offset (ref).to_shwi (&temp.off))
840 temp.off = -1;
841 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
842 temp.base = MR_DEPENDENCE_BASE (ref);
843 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
844 break;
845 case BIT_FIELD_REF:
846 /* Record bits, position and storage order. */
847 temp.op0 = TREE_OPERAND (ref, 1);
848 temp.op1 = TREE_OPERAND (ref, 2);
849 if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
850 temp.off = -1;
851 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
852 break;
853 case COMPONENT_REF:
854 /* The field decl is enough to unambiguously specify the field,
855 a matching type is not necessary and a mismatching type
856 is always a spurious difference. */
857 temp.type = NULL_TREE;
858 temp.op0 = TREE_OPERAND (ref, 1);
859 temp.op1 = TREE_OPERAND (ref, 2);
861 tree this_offset = component_ref_field_offset (ref);
862 if (this_offset
863 && poly_int_tree_p (this_offset))
865 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
866 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
868 poly_offset_int off
869 = (wi::to_poly_offset (this_offset)
870 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
871 /* Prohibit value-numbering zero offset components
872 of addresses the same before the pass folding
873 __builtin_object_size had a chance to run
874 (checking cfun->after_inlining does the
875 trick here). */
876 if (TREE_CODE (orig) != ADDR_EXPR
877 || maybe_ne (off, 0)
878 || cfun->after_inlining)
879 off.to_shwi (&temp.off);
883 break;
884 case ARRAY_RANGE_REF:
885 case ARRAY_REF:
887 tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
888 /* Record index as operand. */
889 temp.op0 = TREE_OPERAND (ref, 1);
890 /* Always record lower bounds and element size. */
891 temp.op1 = array_ref_low_bound (ref);
892 /* But record element size in units of the type alignment. */
893 temp.op2 = TREE_OPERAND (ref, 3);
894 temp.align = eltype->type_common.align;
895 if (! temp.op2)
896 temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
897 size_int (TYPE_ALIGN_UNIT (eltype)));
898 if (poly_int_tree_p (temp.op0)
899 && poly_int_tree_p (temp.op1)
900 && TREE_CODE (temp.op2) == INTEGER_CST)
902 poly_offset_int off = ((wi::to_poly_offset (temp.op0)
903 - wi::to_poly_offset (temp.op1))
904 * wi::to_offset (temp.op2)
905 * vn_ref_op_align_unit (&temp));
906 off.to_shwi (&temp.off);
909 break;
910 case VAR_DECL:
911 if (DECL_HARD_REGISTER (ref))
913 temp.op0 = ref;
914 break;
916 /* Fallthru. */
917 case PARM_DECL:
918 case CONST_DECL:
919 case RESULT_DECL:
920 /* Canonicalize decls to MEM[&decl] which is what we end up with
921 when valueizing MEM[ptr] with ptr = &decl. */
922 temp.opcode = MEM_REF;
923 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
924 temp.off = 0;
925 result->safe_push (temp);
926 temp.opcode = ADDR_EXPR;
927 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
928 temp.type = TREE_TYPE (temp.op0);
929 temp.off = -1;
930 break;
931 case STRING_CST:
932 case INTEGER_CST:
933 case COMPLEX_CST:
934 case VECTOR_CST:
935 case REAL_CST:
936 case FIXED_CST:
937 case CONSTRUCTOR:
938 case SSA_NAME:
939 temp.op0 = ref;
940 break;
941 case ADDR_EXPR:
942 if (is_gimple_min_invariant (ref))
944 temp.op0 = ref;
945 break;
947 break;
948 /* These are only interesting for their operands, their
949 existence, and their type. They will never be the last
950 ref in the chain of references (i.e. they require an
951 operand), so we don't have to put anything
952 for op* as it will be handled by the iteration. */
953 case REALPART_EXPR:
954 temp.off = 0;
955 break;
956 case VIEW_CONVERT_EXPR:
957 temp.off = 0;
958 temp.reverse = storage_order_barrier_p (ref);
959 break;
960 case IMAGPART_EXPR:
961 /* This is only interesting for its constant offset. */
962 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
963 break;
964 default:
965 gcc_unreachable ();
967 result->safe_push (temp);
969 if (REFERENCE_CLASS_P (ref)
970 || TREE_CODE (ref) == MODIFY_EXPR
971 || TREE_CODE (ref) == WITH_SIZE_EXPR
972 || (TREE_CODE (ref) == ADDR_EXPR
973 && !is_gimple_min_invariant (ref)))
974 ref = TREE_OPERAND (ref, 0);
975 else
976 ref = NULL_TREE;
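/* As an illustrative sketch (not normative), a load like

     x_1 = a.b[i_2];

   with a a local VAR_DECL is decomposed outermost-to-innermost into roughly

     ARRAY_REF     (op0 = i_2, op1 = low bound, op2 = scaled element size)
     COMPONENT_REF (op0 = FIELD_DECL b)
     MEM_REF       (op0 = 0)
     ADDR_EXPR     (op0 = &a)

   where the decl itself is canonicalized to MEM_REF/ADDR_EXPR as in the
   VAR_DECL/PARM_DECL case above.  */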
980 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
981 operands in *OPS, the reference alias set SET and the reference type TYPE.
982 Return true if something useful was produced. */
984 bool
985 ao_ref_init_from_vn_reference (ao_ref *ref,
986 alias_set_type set, tree type,
987 vec<vn_reference_op_s> ops)
989 vn_reference_op_t op;
990 unsigned i;
991 tree base = NULL_TREE;
992 tree *op0_p = &base;
993 poly_offset_int offset = 0;
994 poly_offset_int max_size;
995 poly_offset_int size = -1;
996 tree size_tree = NULL_TREE;
997 alias_set_type base_alias_set = -1;
999 /* First get the final access size from just the outermost expression. */
1000 op = &ops[0];
1001 if (op->opcode == COMPONENT_REF)
1002 size_tree = DECL_SIZE (op->op0);
1003 else if (op->opcode == BIT_FIELD_REF)
1004 size_tree = op->op0;
1005 else
1007 machine_mode mode = TYPE_MODE (type);
1008 if (mode == BLKmode)
1009 size_tree = TYPE_SIZE (type);
1010 else
1011 size = GET_MODE_BITSIZE (mode);
1013 if (size_tree != NULL_TREE
1014 && poly_int_tree_p (size_tree))
1015 size = wi::to_poly_offset (size_tree);
1017 /* Initially, maxsize is the same as the accessed element size.
1018 In the following it will only grow (or become -1). */
1019 max_size = size;
1021 /* Compute cumulative bit-offset for nested component-refs and array-refs,
1022 and find the ultimate containing object. */
1023 FOR_EACH_VEC_ELT (ops, i, op)
1025 switch (op->opcode)
1027 /* These may be in the reference ops, but we cannot do anything
1028 sensible with them here. */
1029 case ADDR_EXPR:
1030 /* Apart from ADDR_EXPR arguments to MEM_REF. */
1031 if (base != NULL_TREE
1032 && TREE_CODE (base) == MEM_REF
1033 && op->op0
1034 && DECL_P (TREE_OPERAND (op->op0, 0)))
1036 vn_reference_op_t pop = &ops[i-1];
1037 base = TREE_OPERAND (op->op0, 0);
1038 if (known_eq (pop->off, -1))
1040 max_size = -1;
1041 offset = 0;
1043 else
1044 offset += pop->off * BITS_PER_UNIT;
1045 op0_p = NULL;
1046 break;
1048 /* Fallthru. */
1049 case CALL_EXPR:
1050 return false;
1052 /* Record the base objects. */
1053 case MEM_REF:
1054 base_alias_set = get_deref_alias_set (op->op0);
1055 *op0_p = build2 (MEM_REF, op->type,
1056 NULL_TREE, op->op0);
1057 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
1058 MR_DEPENDENCE_BASE (*op0_p) = op->base;
1059 op0_p = &TREE_OPERAND (*op0_p, 0);
1060 break;
1062 case VAR_DECL:
1063 case PARM_DECL:
1064 case RESULT_DECL:
1065 case SSA_NAME:
1066 *op0_p = op->op0;
1067 op0_p = NULL;
1068 break;
1070 /* And now the usual component-reference style ops. */
1071 case BIT_FIELD_REF:
1072 offset += wi::to_poly_offset (op->op1);
1073 break;
1075 case COMPONENT_REF:
1077 tree field = op->op0;
1078 /* We do not have a complete COMPONENT_REF tree here so we
1079 cannot use component_ref_field_offset. Do the interesting
1080 parts manually. */
1081 tree this_offset = DECL_FIELD_OFFSET (field);
1083 if (op->op1 || !poly_int_tree_p (this_offset))
1084 max_size = -1;
1085 else
1087 poly_offset_int woffset = (wi::to_poly_offset (this_offset)
1088 << LOG2_BITS_PER_UNIT);
1089 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1090 offset += woffset;
1092 break;
1095 case ARRAY_RANGE_REF:
1096 case ARRAY_REF:
1097 /* We recorded the lower bound and the element size. */
1098 if (!poly_int_tree_p (op->op0)
1099 || !poly_int_tree_p (op->op1)
1100 || TREE_CODE (op->op2) != INTEGER_CST)
1101 max_size = -1;
1102 else
1104 poly_offset_int woffset
1105 = wi::sext (wi::to_poly_offset (op->op0)
1106 - wi::to_poly_offset (op->op1),
1107 TYPE_PRECISION (TREE_TYPE (op->op0)));
1108 woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1109 woffset <<= LOG2_BITS_PER_UNIT;
1110 offset += woffset;
1112 break;
1114 case REALPART_EXPR:
1115 break;
1117 case IMAGPART_EXPR:
1118 offset += size;
1119 break;
1121 case VIEW_CONVERT_EXPR:
1122 break;
1124 case STRING_CST:
1125 case INTEGER_CST:
1126 case COMPLEX_CST:
1127 case VECTOR_CST:
1128 case REAL_CST:
1129 case CONSTRUCTOR:
1130 case CONST_DECL:
1131 return false;
1133 default:
1134 return false;
1138 if (base == NULL_TREE)
1139 return false;
1141 ref->ref = NULL_TREE;
1142 ref->base = base;
1143 ref->ref_alias_set = set;
1144 if (base_alias_set != -1)
1145 ref->base_alias_set = base_alias_set;
1146 else
1147 ref->base_alias_set = get_alias_set (base);
1148 /* We discount volatiles from value-numbering elsewhere. */
1149 ref->volatile_p = false;
1151 if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
1153 ref->offset = 0;
1154 ref->size = -1;
1155 ref->max_size = -1;
1156 return true;
1159 if (!offset.to_shwi (&ref->offset))
1161 ref->offset = 0;
1162 ref->max_size = -1;
1163 return true;
1166 if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
1167 ref->max_size = -1;
1169 return true;
1172 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1173 vn_reference_op_s's. */
1175 static void
1176 copy_reference_ops_from_call (gcall *call,
1177 vec<vn_reference_op_s> *result)
1179 vn_reference_op_s temp;
1180 unsigned i;
1181 tree lhs = gimple_call_lhs (call);
1182 int lr;
1184 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1185 different. By adding the lhs here in the vector, we ensure that the
1186 hashcode is different, guaranteeing a different value number. */
1187 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1189 memset (&temp, 0, sizeof (temp));
1190 temp.opcode = MODIFY_EXPR;
1191 temp.type = TREE_TYPE (lhs);
1192 temp.op0 = lhs;
1193 temp.off = -1;
1194 result->safe_push (temp);
1197 /* Copy the type, opcode, function, static chain and EH region, if any. */
1198 memset (&temp, 0, sizeof (temp));
1199 temp.type = gimple_call_return_type (call);
1200 temp.opcode = CALL_EXPR;
1201 temp.op0 = gimple_call_fn (call);
1202 temp.op1 = gimple_call_chain (call);
1203 if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1204 temp.op2 = size_int (lr);
1205 temp.off = -1;
1206 result->safe_push (temp);
1208 /* Copy the call arguments. As they can be references as well,
1209 just chain them together. */
1210 for (i = 0; i < gimple_call_num_args (call); ++i)
1212 tree callarg = gimple_call_arg (call, i);
1213 copy_reference_ops_from_ref (callarg, result);
1217 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1218 *I_P to point to the last element of the replacement. */
1219 static bool
1220 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1221 unsigned int *i_p)
1223 unsigned int i = *i_p;
1224 vn_reference_op_t op = &(*ops)[i];
1225 vn_reference_op_t mem_op = &(*ops)[i - 1];
1226 tree addr_base;
1227 poly_int64 addr_offset = 0;
1229 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1230 from .foo.bar to the preceding MEM_REF offset and replace the
1231 address with &OBJ. */
1232 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1233 &addr_offset);
1234 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1235 if (addr_base != TREE_OPERAND (op->op0, 0))
1237 poly_offset_int off
1238 = (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
1239 SIGNED)
1240 + addr_offset);
1241 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1242 op->op0 = build_fold_addr_expr (addr_base);
1243 if (tree_fits_shwi_p (mem_op->op0))
1244 mem_op->off = tree_to_shwi (mem_op->op0);
1245 else
1246 mem_op->off = -1;
1247 return true;
1249 return false;
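/* Illustrative example (not part of the pass): with the ops describing

     MEM[(int *)&a.b.c, 4]

   the routine above rewrites the ADDR_EXPR operand to plain &a and adds the
   constant byte offset of .b.c to the preceding MEM_REF operand, yielding
   the equivalent MEM[(int *)&a, 4 + <offset of b.c>].  */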
1252 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS, looking
1253 through the ADDR_EXPR or POINTER_PLUS_EXPR definition of the address SSA name. Updates *I_P to point to the last element of the replacement. */
1254 static bool
1255 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1256 unsigned int *i_p)
1258 unsigned int i = *i_p;
1259 vn_reference_op_t op = &(*ops)[i];
1260 vn_reference_op_t mem_op = &(*ops)[i - 1];
1261 gimple *def_stmt;
1262 enum tree_code code;
1263 poly_offset_int off;
1265 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1266 if (!is_gimple_assign (def_stmt))
1267 return false;
1269 code = gimple_assign_rhs_code (def_stmt);
1270 if (code != ADDR_EXPR
1271 && code != POINTER_PLUS_EXPR)
1272 return false;
1274 off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);
1276 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1277 from .foo.bar to the preceding MEM_REF offset and replace the
1278 address with &OBJ. */
1279 if (code == ADDR_EXPR)
1281 tree addr, addr_base;
1282 poly_int64 addr_offset;
1284 addr = gimple_assign_rhs1 (def_stmt);
1285 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1286 &addr_offset);
1287 /* If that didn't work because the address isn't invariant, propagate
1288 the reference tree from the address operation in case the current
1289 dereference isn't offsetted. */
1290 if (!addr_base
1291 && *i_p == ops->length () - 1
1292 && known_eq (off, 0)
1293 /* This makes us disable this transform for PRE where the
1294 reference ops might be also used for code insertion which
1295 is invalid. */
1296 && default_vn_walk_kind == VN_WALKREWRITE)
1298 auto_vec<vn_reference_op_s, 32> tem;
1299 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1300 /* Make sure to preserve TBAA info. The only objects not
1301 wrapped in MEM_REFs that can have their address taken are
1302 STRING_CSTs. */
1303 if (tem.length () >= 2
1304 && tem[tem.length () - 2].opcode == MEM_REF)
1306 vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1307 new_mem_op->op0
1308 = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1309 wi::to_poly_wide (new_mem_op->op0));
1311 else
1312 gcc_assert (tem.last ().opcode == STRING_CST);
1313 ops->pop ();
1314 ops->pop ();
1315 ops->safe_splice (tem);
1316 --*i_p;
1317 return true;
1319 if (!addr_base
1320 || TREE_CODE (addr_base) != MEM_REF
1321 || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
1322 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base, 0))))
1323 return false;
1325 off += addr_offset;
1326 off += mem_ref_offset (addr_base);
1327 op->op0 = TREE_OPERAND (addr_base, 0);
1329 else
1331 tree ptr, ptroff;
1332 ptr = gimple_assign_rhs1 (def_stmt);
1333 ptroff = gimple_assign_rhs2 (def_stmt);
1334 if (TREE_CODE (ptr) != SSA_NAME
1335 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1336 /* Make sure to not endlessly recurse.
1337 See gcc.dg/tree-ssa/20040408-1.c for an example. Can easily
1338 happen when we value-number a PHI to its backedge value. */
1339 || SSA_VAL (ptr) == op->op0
1340 || !poly_int_tree_p (ptroff))
1341 return false;
1343 off += wi::to_poly_offset (ptroff);
1344 op->op0 = ptr;
1347 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1348 if (tree_fits_shwi_p (mem_op->op0))
1349 mem_op->off = tree_to_shwi (mem_op->op0);
1350 else
1351 mem_op->off = -1;
1352 /* ??? Can end up with endless recursion here!?
1353 gcc.c-torture/execute/strcmp-1.c */
1354 if (TREE_CODE (op->op0) == SSA_NAME)
1355 op->op0 = SSA_VAL (op->op0);
1356 if (TREE_CODE (op->op0) != SSA_NAME)
1357 op->opcode = TREE_CODE (op->op0);
1359 /* And recurse. */
1360 if (TREE_CODE (op->op0) == SSA_NAME)
1361 vn_reference_maybe_forwprop_address (ops, i_p);
1362 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1363 vn_reference_fold_indirect (ops, i_p);
1364 return true;
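/* Illustrative example (not part of the pass): given

     p_1 = q_2 + 8;
     ... MEM[(int *)p_1, 4] ...

   the POINTER_PLUS_EXPR case folds the constant 8 into the MEM_REF operand
   and substitutes the address, giving MEM[(int *)q_2, 12]; the routine then
   recurses on the new address.  */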
1367 /* Optimize the reference REF to a constant if possible or return
1368 NULL_TREE if not. */
1370 tree
1371 fully_constant_vn_reference_p (vn_reference_t ref)
1373 vec<vn_reference_op_s> operands = ref->operands;
1374 vn_reference_op_t op;
1376 /* Try to simplify the translated expression if it is
1377 a call to a builtin function with at most two arguments. */
1378 op = &operands[0];
1379 if (op->opcode == CALL_EXPR
1380 && TREE_CODE (op->op0) == ADDR_EXPR
1381 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1382 && fndecl_built_in_p (TREE_OPERAND (op->op0, 0))
1383 && operands.length () >= 2
1384 && operands.length () <= 3)
1386 vn_reference_op_t arg0, arg1 = NULL;
1387 bool anyconst = false;
1388 arg0 = &operands[1];
1389 if (operands.length () > 2)
1390 arg1 = &operands[2];
1391 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1392 || (arg0->opcode == ADDR_EXPR
1393 && is_gimple_min_invariant (arg0->op0)))
1394 anyconst = true;
1395 if (arg1
1396 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1397 || (arg1->opcode == ADDR_EXPR
1398 && is_gimple_min_invariant (arg1->op0))))
1399 anyconst = true;
1400 if (anyconst)
1402 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1403 arg1 ? 2 : 1,
1404 arg0->op0,
1405 arg1 ? arg1->op0 : NULL);
1406 if (folded
1407 && TREE_CODE (folded) == NOP_EXPR)
1408 folded = TREE_OPERAND (folded, 0);
1409 if (folded
1410 && is_gimple_min_invariant (folded))
1411 return folded;
1415 /* Simplify reads from constants or constant initializers. */
1416 else if (BITS_PER_UNIT == 8
1417 && COMPLETE_TYPE_P (ref->type)
1418 && is_gimple_reg_type (ref->type))
1420 poly_int64 off = 0;
1421 HOST_WIDE_INT size;
1422 if (INTEGRAL_TYPE_P (ref->type))
1423 size = TYPE_PRECISION (ref->type);
1424 else if (tree_fits_shwi_p (TYPE_SIZE (ref->type)))
1425 size = tree_to_shwi (TYPE_SIZE (ref->type));
1426 else
1427 return NULL_TREE;
1428 if (size % BITS_PER_UNIT != 0
1429 || size > MAX_BITSIZE_MODE_ANY_MODE)
1430 return NULL_TREE;
1431 size /= BITS_PER_UNIT;
1432 unsigned i;
1433 for (i = 0; i < operands.length (); ++i)
1435 if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1437 ++i;
1438 break;
1440 if (known_eq (operands[i].off, -1))
1441 return NULL_TREE;
1442 off += operands[i].off;
1443 if (operands[i].opcode == MEM_REF)
1445 ++i;
1446 break;
1449 vn_reference_op_t base = &operands[--i];
1450 tree ctor = error_mark_node;
1451 tree decl = NULL_TREE;
1452 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1453 ctor = base->op0;
1454 else if (base->opcode == MEM_REF
1455 && base[1].opcode == ADDR_EXPR
1456 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1457 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
1458 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
1460 decl = TREE_OPERAND (base[1].op0, 0);
1461 if (TREE_CODE (decl) == STRING_CST)
1462 ctor = decl;
1463 else
1464 ctor = ctor_for_folding (decl);
1466 if (ctor == NULL_TREE)
1467 return build_zero_cst (ref->type);
1468 else if (ctor != error_mark_node)
1470 HOST_WIDE_INT const_off;
1471 if (decl)
1473 tree res = fold_ctor_reference (ref->type, ctor,
1474 off * BITS_PER_UNIT,
1475 size * BITS_PER_UNIT, decl);
1476 if (res)
1478 STRIP_USELESS_TYPE_CONVERSION (res);
1479 if (is_gimple_min_invariant (res))
1480 return res;
1483 else if (off.is_constant (&const_off))
1485 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1486 int len = native_encode_expr (ctor, buf, size, const_off);
1487 if (len > 0)
1488 return native_interpret_expr (ref->type, buf, len);
1493 return NULL_TREE;
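/* Illustrative example (not part of the pass): for

     static const int tab[4] = { 1, 2, 3, 4 };
     ... x_1 = tab[2]; ...

   the constant-initializer path above finds ctor_for_folding (tab) and folds
   the load to 3.  Likewise a builtin call with constant arguments, e.g.
   strlen ("foo"), is folded via build_call_expr to 3.  */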
1496 /* Return true if OPS contain a storage order barrier. */
1498 static bool
1499 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1501 vn_reference_op_t op;
1502 unsigned i;
1504 FOR_EACH_VEC_ELT (ops, i, op)
1505 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1506 return true;
1508 return false;
1511 /* Transform any SSA_NAMEs in a vector of vn_reference_op_s
1512 structures into their value numbers. This is done in-place, and
1513 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1514 whether any operands were valueized. */
1516 static vec<vn_reference_op_s>
1517 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything,
1518 bool with_avail = false)
1520 vn_reference_op_t vro;
1521 unsigned int i;
1523 *valueized_anything = false;
1525 FOR_EACH_VEC_ELT (orig, i, vro)
1527 if (vro->opcode == SSA_NAME
1528 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1530 tree tem = with_avail ? vn_valueize (vro->op0) : SSA_VAL (vro->op0);
1531 if (tem != vro->op0)
1533 *valueized_anything = true;
1534 vro->op0 = tem;
1536 /* If it transforms from an SSA_NAME to a constant, update
1537 the opcode. */
1538 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1539 vro->opcode = TREE_CODE (vro->op0);
1541 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1543 tree tem = with_avail ? vn_valueize (vro->op1) : SSA_VAL (vro->op1);
1544 if (tem != vro->op1)
1546 *valueized_anything = true;
1547 vro->op1 = tem;
1550 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1552 tree tem = with_avail ? vn_valueize (vro->op2) : SSA_VAL (vro->op2);
1553 if (tem != vro->op2)
1555 *valueized_anything = true;
1556 vro->op2 = tem;
1559 /* If it transforms from an SSA_NAME to an address, fold with
1560 a preceding indirect reference. */
1561 if (i > 0
1562 && vro->op0
1563 && TREE_CODE (vro->op0) == ADDR_EXPR
1564 && orig[i - 1].opcode == MEM_REF)
1566 if (vn_reference_fold_indirect (&orig, &i))
1567 *valueized_anything = true;
1569 else if (i > 0
1570 && vro->opcode == SSA_NAME
1571 && orig[i - 1].opcode == MEM_REF)
1573 if (vn_reference_maybe_forwprop_address (&orig, &i))
1574 *valueized_anything = true;
1576 /* If it transforms a non-constant ARRAY_REF into a constant
1577 one, adjust the constant offset. */
1578 else if (vro->opcode == ARRAY_REF
1579 && known_eq (vro->off, -1)
1580 && poly_int_tree_p (vro->op0)
1581 && poly_int_tree_p (vro->op1)
1582 && TREE_CODE (vro->op2) == INTEGER_CST)
1584 poly_offset_int off = ((wi::to_poly_offset (vro->op0)
1585 - wi::to_poly_offset (vro->op1))
1586 * wi::to_offset (vro->op2)
1587 * vn_ref_op_align_unit (vro));
1588 off.to_shwi (&vro->off);
1592 return orig;
1595 static vec<vn_reference_op_s>
1596 valueize_refs (vec<vn_reference_op_s> orig)
1598 bool tem;
1599 return valueize_refs_1 (orig, &tem);
1602 static vec<vn_reference_op_s> shared_lookup_references;
1604 /* Create a vector of vn_reference_op_s structures from REF, a
1605 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1606 this function. *VALUEIZED_ANYTHING will specify whether any
1607 operands were valueized. */
1609 static vec<vn_reference_op_s>
1610 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1612 if (!ref)
1613 return vNULL;
1614 shared_lookup_references.truncate (0);
1615 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1616 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1617 valueized_anything);
1618 return shared_lookup_references;
1621 /* Create a vector of vn_reference_op_s structures from CALL, a
1622 call statement. The vector is shared among all callers of
1623 this function. */
1625 static vec<vn_reference_op_s>
1626 valueize_shared_reference_ops_from_call (gcall *call)
1628 if (!call)
1629 return vNULL;
1630 shared_lookup_references.truncate (0);
1631 copy_reference_ops_from_call (call, &shared_lookup_references);
1632 shared_lookup_references = valueize_refs (shared_lookup_references);
1633 return shared_lookup_references;
1636 /* Lookup an SCCVN reference operation VR in the current hash table.
1637 Returns the resulting value number if it exists in the hash table,
1638 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1639 vn_reference_t stored in the hashtable if something is found. */
1641 static tree
1642 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1644 vn_reference_s **slot;
1645 hashval_t hash;
1647 hash = vr->hashcode;
1648 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1649 if (slot)
1651 if (vnresult)
1652 *vnresult = (vn_reference_t)*slot;
1653 return ((vn_reference_t)*slot)->result;
1656 return NULL_TREE;
1659 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1660 with the current VUSE and performs the expression lookup. */
1662 static void *
1663 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1664 unsigned int cnt, void *vr_)
1666 vn_reference_t vr = (vn_reference_t)vr_;
1667 vn_reference_s **slot;
1668 hashval_t hash;
1670 /* This bounds the stmt walks we perform on reference lookups
1671 to O(1) instead of O(N) where N is the number of dominating
1672 stores. */
1673 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1674 return (void *)-1;
1676 if (last_vuse_ptr)
1677 *last_vuse_ptr = vuse;
1679 /* Fixup vuse and hash. */
1680 if (vr->vuse)
1681 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1682 vr->vuse = vuse_ssa_val (vuse);
1683 if (vr->vuse)
1684 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1686 hash = vr->hashcode;
1687 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1688 if (slot)
1689 return *slot;
1691 return NULL;
1694 /* Lookup an existing or insert a new vn_reference entry into the
1695 value table for the VUSE, SET, TYPE, OPERANDS reference which
1696 has the value VALUE which is either a constant or an SSA name. */
1698 static vn_reference_t
1699 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1700 alias_set_type set,
1701 tree type,
1702 vec<vn_reference_op_s,
1703 va_heap> operands,
1704 tree value)
1706 vn_reference_s vr1;
1707 vn_reference_t result;
1708 unsigned value_id;
1709 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1710 vr1.operands = operands;
1711 vr1.type = type;
1712 vr1.set = set;
1713 vr1.hashcode = vn_reference_compute_hash (&vr1);
1714 if (vn_reference_lookup_1 (&vr1, &result))
1715 return result;
1716 if (TREE_CODE (value) == SSA_NAME)
1717 value_id = VN_INFO (value)->value_id;
1718 else
1719 value_id = get_or_alloc_constant_value_id (value);
1720 return vn_reference_insert_pieces (vuse, set, type,
1721 operands.copy (), value, value_id);
1724 /* Return a value-number for RCODE OPS... either by looking up an existing
1725 value-number for the simplified result or by inserting the operation if
1726 INSERT is true. */
1728 static tree
1729 vn_nary_build_or_lookup_1 (gimple_match_op *res_op, bool insert)
1731 tree result = NULL_TREE;
1732 /* We will be creating a value number for
1733 RCODE (OPS...).
1734 So first simplify and lookup this expression to see if it
1735 is already available. */
1736 mprts_hook = vn_lookup_simplify_result;
1737 bool res = false;
1738 switch (TREE_CODE_LENGTH ((tree_code) res_op->code))
1740 case 1:
1741 res = gimple_resimplify1 (NULL, res_op, vn_valueize);
1742 break;
1743 case 2:
1744 res = gimple_resimplify2 (NULL, res_op, vn_valueize);
1745 break;
1746 case 3:
1747 res = gimple_resimplify3 (NULL, res_op, vn_valueize);
1748 break;
1750 mprts_hook = NULL;
1751 gimple *new_stmt = NULL;
1752 if (res
1753 && gimple_simplified_result_is_gimple_val (res_op))
1755 /* The expression is already available. */
1756 result = res_op->ops[0];
1757 /* Valueize it, simplification returns sth in AVAIL only. */
1758 if (TREE_CODE (result) == SSA_NAME)
1759 result = SSA_VAL (result);
1761 else
1763 tree val = vn_lookup_simplify_result (res_op);
1764 if (!val && insert)
1766 gimple_seq stmts = NULL;
1767 result = maybe_push_res_to_seq (res_op, &stmts);
1768 if (result)
1770 gcc_assert (gimple_seq_singleton_p (stmts));
1771 new_stmt = gimple_seq_first_stmt (stmts);
1774 else
1775 /* The expression is already available. */
1776 result = val;
1778 if (new_stmt)
1780 /* The expression is not yet available, value-number lhs to
1781 the new SSA_NAME we created. */
1782 /* Initialize value-number information properly. */
1783 vn_ssa_aux_t result_info = VN_INFO (result);
1784 result_info->valnum = result;
1785 result_info->value_id = get_next_value_id ();
1786 result_info->visited = 1;
1787 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
1788 new_stmt);
1789 result_info->needs_insertion = true;
1790 /* ??? PRE phi-translation inserts NARYs without corresponding
1791 SSA name result. Re-use those but set their result according
1792 to the stmt we just built. */
1793 vn_nary_op_t nary = NULL;
1794 vn_nary_op_lookup_stmt (new_stmt, &nary);
1795 if (nary)
1797 gcc_assert (! nary->predicated_values && nary->u.result == NULL_TREE);
1798 nary->u.result = gimple_assign_lhs (new_stmt);
1800 /* As all "inserted" statements are singleton SCCs, insert
1801 to the valid table. This is strictly needed to
1802 avoid re-generating new value SSA_NAMEs for the same
1803 expression during SCC iteration over and over (the
1804 optimistic table gets cleared after each iteration).
1805 We do not need to insert into the optimistic table, as
1806 lookups there will fall back to the valid table. */
1807 else
1809 unsigned int length = vn_nary_length_from_stmt (new_stmt);
1810 vn_nary_op_t vno1
1811 = alloc_vn_nary_op_noinit (length, &vn_tables_insert_obstack);
1812 vno1->value_id = result_info->value_id;
1813 vno1->length = length;
1814 vno1->predicated_values = 0;
1815 vno1->u.result = result;
1816 init_vn_nary_op_from_stmt (vno1, new_stmt);
1817 vn_nary_op_insert_into (vno1, valid_info->nary, true);
1818 /* Also do not link it into the undo chain. */
1819 last_inserted_nary = vno1->next;
1820 vno1->next = (vn_nary_op_t)(void *)-1;
1822 if (dump_file && (dump_flags & TDF_DETAILS))
1824 fprintf (dump_file, "Inserting name ");
1825 print_generic_expr (dump_file, result);
1826 fprintf (dump_file, " for expression ");
1827 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
1828 fprintf (dump_file, "\n");
1831 return result;
1834 /* Return a value-number for RCODE OPS... either by looking up an existing
1835 value-number for the simplified result or by inserting the operation. */
1837 static tree
1838 vn_nary_build_or_lookup (gimple_match_op *res_op)
1840 return vn_nary_build_or_lookup_1 (res_op, true);
1843 /* Try to simplify the expression RCODE OPS... of type TYPE and return
1844 its value if present. */
1846 tree
1847 vn_nary_simplify (vn_nary_op_t nary)
1849 if (nary->length > gimple_match_op::MAX_NUM_OPS)
1850 return NULL_TREE;
1851 gimple_match_op op (gimple_match_cond::UNCOND, nary->opcode,
1852 nary->type, nary->length);
1853 memcpy (op.ops, nary->op, sizeof (tree) * nary->length);
1854 return vn_nary_build_or_lookup_1 (&op, false);
1857 basic_block vn_context_bb;
1859 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1860 from the statement defining VUSE and if not successful tries to
1861 translate *REFP and VR_ through an aggregate copy at the definition
1862 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1863 of *REF and *VR. If only disambiguation was performed then
1864 *DISAMBIGUATE_ONLY is set to true. */
1866 static void *
1867 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1868 bool *disambiguate_only)
1870 vn_reference_t vr = (vn_reference_t)vr_;
1871 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1872 tree base = ao_ref_base (ref);
1873 HOST_WIDE_INT offseti, maxsizei;
1874 static vec<vn_reference_op_s> lhs_ops;
1875 ao_ref lhs_ref;
1876 bool lhs_ref_ok = false;
1877 poly_int64 copy_size;
1879 /* First try to disambiguate after value-replacing in the definition's LHS. */
1880 if (is_gimple_assign (def_stmt))
1882 tree lhs = gimple_assign_lhs (def_stmt);
1883 bool valueized_anything = false;
1884 /* Avoid re-allocation overhead. */
1885 lhs_ops.truncate (0);
1886 basic_block saved_rpo_bb = vn_context_bb;
1887 vn_context_bb = gimple_bb (def_stmt);
1888 copy_reference_ops_from_ref (lhs, &lhs_ops);
1889 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything, true);
1890 vn_context_bb = saved_rpo_bb;
1891 if (valueized_anything)
1893 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1894 get_alias_set (lhs),
1895 TREE_TYPE (lhs), lhs_ops);
1896 if (lhs_ref_ok
1897 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1899 *disambiguate_only = true;
1900 return NULL;
1903 else
1905 ao_ref_init (&lhs_ref, lhs);
1906 lhs_ref_ok = true;
1909 /* If we reach a clobbering statement try to skip it and see if
1910 we find a VN result with exactly the same value as the
1911 possible clobber. In this case we can ignore the clobber
1912 and return the found value.
1913 Note that we don't need to worry about partial overlapping
1914 accesses as we then can use TBAA to disambiguate against the
1915 clobbering statement when looking up a load (thus the
1916 VN_WALKREWRITE guard). */
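/* E.g. (illustrative only): if the walk meets the store  *p_1 = x_2;  and
   looking up the current load in the memory state just before that store
   already yields x_2, the store does not change the loaded value and can
   be skipped.  */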
1917 if (vn_walk_kind == VN_WALKREWRITE
1918 && is_gimple_reg_type (TREE_TYPE (lhs))
1919 && types_compatible_p (TREE_TYPE (lhs), vr->type))
1921 tree *saved_last_vuse_ptr = last_vuse_ptr;
1922 /* Do not update last_vuse_ptr in vn_reference_lookup_2. */
1923 last_vuse_ptr = NULL;
1924 tree saved_vuse = vr->vuse;
1925 hashval_t saved_hashcode = vr->hashcode;
1926 void *res = vn_reference_lookup_2 (ref,
1927 gimple_vuse (def_stmt), 0, vr);
1928 /* Need to restore vr->vuse and vr->hashcode. */
1929 vr->vuse = saved_vuse;
1930 vr->hashcode = saved_hashcode;
1931 last_vuse_ptr = saved_last_vuse_ptr;
1932 if (res && res != (void *)-1)
1934 vn_reference_t vnresult = (vn_reference_t) res;
1935 if (vnresult->result
1936 && operand_equal_p (vnresult->result,
1937 gimple_assign_rhs1 (def_stmt), 0))
1938 return res;
1942 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1943 && gimple_call_num_args (def_stmt) <= 4)
1945 /* For builtin calls valueize their arguments and call the
1946 alias oracle again. Valueization may improve points-to
1947 info of pointers and constify size and position arguments.
1948 Originally this was motivated by PR61034 which has
1949 conditional calls to free that falsely clobber ref because
1950 of imprecise points-to info of the argument. */
1951 tree oldargs[4];
1952 bool valueized_anything = false;
1953 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1955 oldargs[i] = gimple_call_arg (def_stmt, i);
1956 tree val = vn_valueize (oldargs[i]);
1957 if (val != oldargs[i])
1959 gimple_call_set_arg (def_stmt, i, val);
1960 valueized_anything = true;
1963 if (valueized_anything)
1965 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1966 ref);
1967 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1968 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1969 if (!res)
1971 *disambiguate_only = true;
1972 return NULL;
1977 if (*disambiguate_only)
1978 return (void *)-1;
1980 /* If we cannot constrain the size of the reference we cannot
1981 test if anything kills it. */
1982 if (!ref->max_size_known_p ())
1983 return (void *)-1;
1985 poly_int64 offset = ref->offset;
1986 poly_int64 maxsize = ref->max_size;
1988 /* We can't deduce anything useful from clobbers. */
1989 if (gimple_clobber_p (def_stmt))
1990 return (void *)-1;
1992 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1993 from that definition.
1994 1) Memset. */
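   /* A sketch of the memset case (illustrative only):

	  memset (&a, 0, sizeof (a));
	  ...
	  tem_1 = a.f;   <-- if the read is fully covered by the memset it
			     can be value-numbered to zero (or to the
			     replicated byte value).  */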
1995 if (is_gimple_reg_type (vr->type)
1996 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1997 && (integer_zerop (gimple_call_arg (def_stmt, 1))
1998 || ((TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST
1999 || (INTEGRAL_TYPE_P (vr->type) && known_eq (ref->size, 8)))
2000 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
2001 && offset.is_constant (&offseti)
2002 && offseti % BITS_PER_UNIT == 0))
2003 && poly_int_tree_p (gimple_call_arg (def_stmt, 2))
2004 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2005 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME))
2007 tree base2;
2008 poly_int64 offset2, size2, maxsize2;
2009 bool reverse;
2010 tree ref2 = gimple_call_arg (def_stmt, 0);
2011 if (TREE_CODE (ref2) == SSA_NAME)
2013 ref2 = SSA_VAL (ref2);
2014 if (TREE_CODE (ref2) == SSA_NAME
2015 && (TREE_CODE (base) != MEM_REF
2016 || TREE_OPERAND (base, 0) != ref2))
2018 gimple *def_stmt = SSA_NAME_DEF_STMT (ref2);
2019 if (gimple_assign_single_p (def_stmt)
2020 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2021 ref2 = gimple_assign_rhs1 (def_stmt);
2024 if (TREE_CODE (ref2) == ADDR_EXPR)
2026 ref2 = TREE_OPERAND (ref2, 0);
2027 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
2028 &reverse);
2029 if (!known_size_p (maxsize2)
2030 || !known_eq (maxsize2, size2)
2031 || !operand_equal_p (base, base2, OEP_ADDRESS_OF))
2032 return (void *)-1;
2034 else if (TREE_CODE (ref2) == SSA_NAME)
2036 poly_int64 soff;
2037 if (TREE_CODE (base) != MEM_REF
2038 || !(mem_ref_offset (base) << LOG2_BITS_PER_UNIT).to_shwi (&soff))
2039 return (void *)-1;
2040 offset += soff;
2041 offset2 = 0;
2042 if (TREE_OPERAND (base, 0) != ref2)
2044 gimple *def = SSA_NAME_DEF_STMT (ref2);
2045 if (is_gimple_assign (def)
2046 && gimple_assign_rhs_code (def) == POINTER_PLUS_EXPR
2047 && gimple_assign_rhs1 (def) == TREE_OPERAND (base, 0)
2048 && poly_int_tree_p (gimple_assign_rhs2 (def))
2049 && (wi::to_poly_offset (gimple_assign_rhs2 (def))
2050 << LOG2_BITS_PER_UNIT).to_shwi (&offset2))
2052 ref2 = gimple_assign_rhs1 (def);
2053 if (TREE_CODE (ref2) == SSA_NAME)
2054 ref2 = SSA_VAL (ref2);
2056 else
2057 return (void *)-1;
2060 else
2061 return (void *)-1;
2062 tree len = gimple_call_arg (def_stmt, 2);
2063 if (known_subrange_p (offset, maxsize, offset2,
2064 wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
2066 tree val;
2067 if (integer_zerop (gimple_call_arg (def_stmt, 1)))
2068 val = build_zero_cst (vr->type);
2069 else if (INTEGRAL_TYPE_P (vr->type)
2070 && known_eq (ref->size, 8))
2072 gimple_match_op res_op (gimple_match_cond::UNCOND, NOP_EXPR,
2073 vr->type, gimple_call_arg (def_stmt, 1));
2074 val = vn_nary_build_or_lookup (&res_op);
2075 if (!val
2076 || (TREE_CODE (val) == SSA_NAME
2077 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2078 return (void *)-1;
2080 else
2082 unsigned len = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr->type));
2083 unsigned char *buf = XALLOCAVEC (unsigned char, len);
2084 memset (buf, TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 1)),
2085 len);
2086 val = native_interpret_expr (vr->type, buf, len);
2087 if (!val)
2088 return (void *)-1;
2090 return vn_reference_lookup_or_insert_for_pieces
2091 (vuse, vr->set, vr->type, vr->operands, val);
2095 /* 2) Assignment from an empty CONSTRUCTOR. */
2096 else if (is_gimple_reg_type (vr->type)
2097 && gimple_assign_single_p (def_stmt)
2098 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
2099 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
2101 tree base2;
2102 poly_int64 offset2, size2, maxsize2;
2103 bool reverse;
2104 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2105 &offset2, &size2, &maxsize2, &reverse);
2106 if (known_size_p (maxsize2)
2107 && operand_equal_p (base, base2, 0)
2108 && known_subrange_p (offset, maxsize, offset2, size2))
2110 tree val = build_zero_cst (vr->type);
2111 return vn_reference_lookup_or_insert_for_pieces
2112 (vuse, vr->set, vr->type, vr->operands, val);
2116 /* 3) Assignment from a constant. We can use fold's native encode/interpret
2117 routines to extract the assigned bits. */
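   /* A sketch of this case (illustrative only):

	  d = 1.0;                  <-- store of a constant
	  ...
	  tem_1 = MEM[(int *)&d];   <-- read of some covered bits

      the constant is encoded with native_encode_expr and the read bits are
      re-interpreted in the type of the load.  */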
2118 else if (known_eq (ref->size, maxsize)
2119 && is_gimple_reg_type (vr->type)
2120 && !contains_storage_order_barrier_p (vr->operands)
2121 && gimple_assign_single_p (def_stmt)
2122 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
2123 /* native_encode and native_decode operate on arrays of bytes
2124 and so fundamentally need a compile-time size and offset. */
2125 && maxsize.is_constant (&maxsizei)
2126 && maxsizei % BITS_PER_UNIT == 0
2127 && offset.is_constant (&offseti)
2128 && offseti % BITS_PER_UNIT == 0
2129 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
2130 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2131 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
2133 tree base2;
2134 HOST_WIDE_INT offset2, size2;
2135 bool reverse;
2136 base2 = get_ref_base_and_extent_hwi (gimple_assign_lhs (def_stmt),
2137 &offset2, &size2, &reverse);
2138 if (base2
2139 && !reverse
2140 && size2 % BITS_PER_UNIT == 0
2141 && offset2 % BITS_PER_UNIT == 0
2142 && operand_equal_p (base, base2, 0)
2143 && known_subrange_p (offseti, maxsizei, offset2, size2))
2145 /* We support up to 512-bit values (for V8DFmode). */
2146 unsigned char buffer[64];
2147 int len;
2149 tree rhs = gimple_assign_rhs1 (def_stmt);
2150 if (TREE_CODE (rhs) == SSA_NAME)
2151 rhs = SSA_VAL (rhs);
2152 len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
2153 buffer, sizeof (buffer),
2154 (offseti - offset2) / BITS_PER_UNIT);
2155 if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
2157 tree type = vr->type;
2158 /* Make sure to interpret in a type that has a range
2159 covering the whole access size. */
2160 if (INTEGRAL_TYPE_P (vr->type)
2161 && maxsizei != TYPE_PRECISION (vr->type))
2162 type = build_nonstandard_integer_type (maxsizei,
2163 TYPE_UNSIGNED (type));
2164 tree val = native_interpret_expr (type, buffer,
2165 maxsizei / BITS_PER_UNIT);
2166 /* If we chop off bits because the type's precision doesn't
2167 match the memory access size, this is ok when optimizing
2168 reads but not when called from the DSE code during
2169 elimination. */
2170 if (val
2171 && type != vr->type)
2173 if (! int_fits_type_p (val, vr->type))
2174 val = NULL_TREE;
2175 else
2176 val = fold_convert (vr->type, val);
2179 if (val)
2180 return vn_reference_lookup_or_insert_for_pieces
2181 (vuse, vr->set, vr->type, vr->operands, val);
2186 /* 4) Assignment from an SSA name whose definition we may be able
2187 to access pieces from. */
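   /* A sketch of this case (illustrative only): if a register-type SSA
      name was stored,

	  a = x_1;          <-- x_1 of, say, vector or complex type
	  ...
	  tem_2 = a.part;   <-- read of a piece of it

      the read can be re-expressed as BIT_FIELD_REF <x_1, size, pos>.  */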
2188 else if (known_eq (ref->size, maxsize)
2189 && is_gimple_reg_type (vr->type)
2190 && !contains_storage_order_barrier_p (vr->operands)
2191 && gimple_assign_single_p (def_stmt)
2192 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
2194 tree base2;
2195 poly_int64 offset2, size2, maxsize2;
2196 bool reverse;
2197 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2198 &offset2, &size2, &maxsize2,
2199 &reverse);
2200 if (!reverse
2201 && known_size_p (maxsize2)
2202 && known_eq (maxsize2, size2)
2203 && operand_equal_p (base, base2, 0)
2204 && known_subrange_p (offset, maxsize, offset2, size2)
2205 /* ??? We can't handle bitfield precision extracts without
2206 either using an alternate type for the BIT_FIELD_REF and
2207 then doing a conversion or possibly adjusting the offset
2208 according to endianness. */
2209 && (! INTEGRAL_TYPE_P (vr->type)
2210 || known_eq (ref->size, TYPE_PRECISION (vr->type)))
2211 && multiple_p (ref->size, BITS_PER_UNIT))
2213 gimple_match_op op (gimple_match_cond::UNCOND,
2214 BIT_FIELD_REF, vr->type,
2215 vn_valueize (gimple_assign_rhs1 (def_stmt)),
2216 bitsize_int (ref->size),
2217 bitsize_int (offset - offset2));
2218 tree val = vn_nary_build_or_lookup (&op);
2219 if (val
2220 && (TREE_CODE (val) != SSA_NAME
2221 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2223 vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
2224 (vuse, vr->set, vr->type, vr->operands, val);
2225 return res;
2230 /* 5) For aggregate copies translate the reference through them if
2231 the copy kills ref. */
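   /* A sketch of this case (illustrative only):

	  *p_1 = *q_2;      <-- aggregate copy that kills the ref
	  ...
	  tem_3 = p_1->f;   <-- the lookup is continued as if it were q_2->f.  */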
2232 else if (vn_walk_kind == VN_WALKREWRITE
2233 && gimple_assign_single_p (def_stmt)
2234 && (DECL_P (gimple_assign_rhs1 (def_stmt))
2235 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
2236 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
2238 tree base2;
2239 int i, j, k;
2240 auto_vec<vn_reference_op_s> rhs;
2241 vn_reference_op_t vro;
2242 ao_ref r;
2244 if (!lhs_ref_ok)
2245 return (void *)-1;
2247 /* See if the assignment kills REF. */
2248 base2 = ao_ref_base (&lhs_ref);
2249 if (!lhs_ref.max_size_known_p ()
2250 || (base != base2
2251 && (TREE_CODE (base) != MEM_REF
2252 || TREE_CODE (base2) != MEM_REF
2253 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
2254 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
2255 TREE_OPERAND (base2, 1))))
2256 || !stmt_kills_ref_p (def_stmt, ref))
2257 return (void *)-1;
2259 /* Find the common base of ref and the lhs. lhs_ops already
2260 contains valueized operands for the lhs. */
2261 i = vr->operands.length () - 1;
2262 j = lhs_ops.length () - 1;
2263 while (j >= 0 && i >= 0
2264 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
2266 i--;
2267 j--;
2270 /* ??? The innermost op should always be a MEM_REF and we already
2271 checked that the assignment to the lhs kills vr. Thus for
2272 aggregate copies using char[] types the vn_reference_op_eq
2273 may fail when comparing types for compatibility. But we really
2274 don't care here - further lookups with the rewritten operands
2275 will simply fail if we messed up types too badly. */
2276 poly_int64 extra_off = 0;
2277 if (j == 0 && i >= 0
2278 && lhs_ops[0].opcode == MEM_REF
2279 && maybe_ne (lhs_ops[0].off, -1))
2281 if (known_eq (lhs_ops[0].off, vr->operands[i].off))
2282 i--, j--;
2283 else if (vr->operands[i].opcode == MEM_REF
2284 && maybe_ne (vr->operands[i].off, -1))
2286 extra_off = vr->operands[i].off - lhs_ops[0].off;
2287 i--, j--;
2291 /* i now points to the first additional op.
2292 ??? LHS may not be completely contained in VR, one or more
2293 VIEW_CONVERT_EXPRs could be in its way. We could at least
2294 try handling outermost VIEW_CONVERT_EXPRs. */
2295 if (j != -1)
2296 return (void *)-1;
2298 /* Punt if the additional ops contain a storage order barrier. */
2299 for (k = i; k >= 0; k--)
2301 vro = &vr->operands[k];
2302 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
2303 return (void *)-1;
2306 /* Now re-write REF to be based on the rhs of the assignment. */
2307 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
2309 /* Apply an extra offset to the inner MEM_REF of the RHS. */
2310 if (maybe_ne (extra_off, 0))
2312 if (rhs.length () < 2)
2313 return (void *)-1;
2314 int ix = rhs.length () - 2;
2315 if (rhs[ix].opcode != MEM_REF
2316 || known_eq (rhs[ix].off, -1))
2317 return (void *)-1;
2318 rhs[ix].off += extra_off;
2319 rhs[ix].op0 = int_const_binop (PLUS_EXPR, rhs[ix].op0,
2320 build_int_cst (TREE_TYPE (rhs[ix].op0),
2321 extra_off));
2324 /* We need to prepend vr->operands[0..i] to rhs. */
2325 vec<vn_reference_op_s> old = vr->operands;
2326 if (i + 1 + rhs.length () > vr->operands.length ())
2327 vr->operands.safe_grow (i + 1 + rhs.length ());
2328 else
2329 vr->operands.truncate (i + 1 + rhs.length ());
2330 FOR_EACH_VEC_ELT (rhs, j, vro)
2331 vr->operands[i + 1 + j] = *vro;
2332 vr->operands = valueize_refs (vr->operands);
2333 if (old == shared_lookup_references)
2334 shared_lookup_references = vr->operands;
2335 vr->hashcode = vn_reference_compute_hash (vr);
2337 /* Try folding the new reference to a constant. */
2338 tree val = fully_constant_vn_reference_p (vr);
2339 if (val)
2340 return vn_reference_lookup_or_insert_for_pieces
2341 (vuse, vr->set, vr->type, vr->operands, val);
2343 /* Adjust *ref from the new operands. */
2344 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2345 return (void *)-1;
2346 /* This can happen with bitfields. */
2347 if (maybe_ne (ref->size, r.size))
2348 return (void *)-1;
2349 *ref = r;
2351 /* Do not update last seen VUSE after translating. */
2352 last_vuse_ptr = NULL;
2354 /* Keep looking for the adjusted *REF / VR pair. */
2355 return NULL;
2358 /* 6) For memcpy copies translate the reference through them if
2359 the copy kills ref. */
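   /* A sketch of this case (illustrative only):

	  memcpy (&a, &b, sizeof (a));
	  ...
	  tem_1 = a.x;      <-- the lookup is continued as if it were b.x.  */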
2360 else if (vn_walk_kind == VN_WALKREWRITE
2361 && is_gimple_reg_type (vr->type)
2362 /* ??? Handle BCOPY as well. */
2363 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2364 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2365 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2366 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2367 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2368 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2369 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2370 && poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size))
2372 tree lhs, rhs;
2373 ao_ref r;
2374 poly_int64 rhs_offset, lhs_offset;
2375 vn_reference_op_s op;
2376 poly_uint64 mem_offset;
2377 poly_int64 at, byte_maxsize;
2379 /* Only handle non-variable, addressable refs. */
2380 if (maybe_ne (ref->size, maxsize)
2381 || !multiple_p (offset, BITS_PER_UNIT, &at)
2382 || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
2383 return (void *)-1;
2385 /* Extract a pointer base and an offset for the destination. */
2386 lhs = gimple_call_arg (def_stmt, 0);
2387 lhs_offset = 0;
2388 if (TREE_CODE (lhs) == SSA_NAME)
2390 lhs = vn_valueize (lhs);
2391 if (TREE_CODE (lhs) == SSA_NAME)
2393 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2394 if (gimple_assign_single_p (def_stmt)
2395 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2396 lhs = gimple_assign_rhs1 (def_stmt);
2399 if (TREE_CODE (lhs) == ADDR_EXPR)
2401 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2402 &lhs_offset);
2403 if (!tem)
2404 return (void *)-1;
2405 if (TREE_CODE (tem) == MEM_REF
2406 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2408 lhs = TREE_OPERAND (tem, 0);
2409 if (TREE_CODE (lhs) == SSA_NAME)
2410 lhs = vn_valueize (lhs);
2411 lhs_offset += mem_offset;
2413 else if (DECL_P (tem))
2414 lhs = build_fold_addr_expr (tem);
2415 else
2416 return (void *)-1;
2418 if (TREE_CODE (lhs) != SSA_NAME
2419 && TREE_CODE (lhs) != ADDR_EXPR)
2420 return (void *)-1;
2422 /* Extract a pointer base and an offset for the source. */
2423 rhs = gimple_call_arg (def_stmt, 1);
2424 rhs_offset = 0;
2425 if (TREE_CODE (rhs) == SSA_NAME)
2426 rhs = vn_valueize (rhs);
2427 if (TREE_CODE (rhs) == ADDR_EXPR)
2429 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2430 &rhs_offset);
2431 if (!tem)
2432 return (void *)-1;
2433 if (TREE_CODE (tem) == MEM_REF
2434 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2436 rhs = TREE_OPERAND (tem, 0);
2437 rhs_offset += mem_offset;
2439 else if (DECL_P (tem)
2440 || TREE_CODE (tem) == STRING_CST)
2441 rhs = build_fold_addr_expr (tem);
2442 else
2443 return (void *)-1;
2445 if (TREE_CODE (rhs) != SSA_NAME
2446 && TREE_CODE (rhs) != ADDR_EXPR)
2447 return (void *)-1;
2449 /* The bases of the destination and the reference have to agree. */
2450 if (TREE_CODE (base) == MEM_REF)
2452 if (TREE_OPERAND (base, 0) != lhs
2453 || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
2454 return (void *) -1;
2455 at += mem_offset;
2457 else if (!DECL_P (base)
2458 || TREE_CODE (lhs) != ADDR_EXPR
2459 || TREE_OPERAND (lhs, 0) != base)
2460 return (void *)-1;
2462 /* If the access is completely outside of the memcpy destination
2463 area there is no aliasing. */
2464 if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
2465 return NULL;
2466 /* And the access has to be contained within the memcpy destination. */
2467 if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
2468 return (void *)-1;
2470 /* Make room for 2 operands in the new reference. */
2471 if (vr->operands.length () < 2)
2473 vec<vn_reference_op_s> old = vr->operands;
2474 vr->operands.safe_grow_cleared (2);
2475 if (old == shared_lookup_references)
2476 shared_lookup_references = vr->operands;
2478 else
2479 vr->operands.truncate (2);
2481 /* The looked-through reference is a simple MEM_REF. */
2482 memset (&op, 0, sizeof (op));
2483 op.type = vr->type;
2484 op.opcode = MEM_REF;
2485 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
2486 op.off = at - lhs_offset + rhs_offset;
2487 vr->operands[0] = op;
2488 op.type = TREE_TYPE (rhs);
2489 op.opcode = TREE_CODE (rhs);
2490 op.op0 = rhs;
2491 op.off = -1;
2492 vr->operands[1] = op;
2493 vr->hashcode = vn_reference_compute_hash (vr);
2495 /* Try folding the new reference to a constant. */
2496 tree val = fully_constant_vn_reference_p (vr);
2497 if (val)
2498 return vn_reference_lookup_or_insert_for_pieces
2499 (vuse, vr->set, vr->type, vr->operands, val);
2501 /* Adjust *ref from the new operands. */
2502 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2503 return (void *)-1;
2504 /* This can happen with bitfields. */
2505 if (maybe_ne (ref->size, r.size))
2506 return (void *)-1;
2507 *ref = r;
2509 /* Do not update last seen VUSE after translating. */
2510 last_vuse_ptr = NULL;
2512 /* Keep looking for the adjusted *REF / VR pair. */
2513 return NULL;
2516 /* Bail out and stop walking. */
2517 return (void *)-1;
2520 /* Return a reference op vector from OP that can be used for
2521 vn_reference_lookup_pieces. The caller is responsible for releasing
2522 the vector. */
2524 vec<vn_reference_op_s>
2525 vn_reference_operands_for_lookup (tree op)
2527 bool valueized;
2528 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
2531 /* Lookup a reference operation by its parts, in the current hash table.
2532 Returns the resulting value number if it exists in the hash table,
2533 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2534 vn_reference_t stored in the hashtable if something is found. */
2536 tree
2537 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2538 vec<vn_reference_op_s> operands,
2539 vn_reference_t *vnresult, vn_lookup_kind kind)
2541 struct vn_reference_s vr1;
2542 vn_reference_t tmp;
2543 tree cst;
2545 if (!vnresult)
2546 vnresult = &tmp;
2547 *vnresult = NULL;
2549 vr1.vuse = vuse_ssa_val (vuse);
2550 shared_lookup_references.truncate (0);
2551 shared_lookup_references.safe_grow (operands.length ());
2552 memcpy (shared_lookup_references.address (),
2553 operands.address (),
2554 sizeof (vn_reference_op_s)
2555 * operands.length ());
2556 vr1.operands = operands = shared_lookup_references
2557 = valueize_refs (shared_lookup_references);
2558 vr1.type = type;
2559 vr1.set = set;
2560 vr1.hashcode = vn_reference_compute_hash (&vr1);
2561 if ((cst = fully_constant_vn_reference_p (&vr1)))
2562 return cst;
2564 vn_reference_lookup_1 (&vr1, vnresult);
2565 if (!*vnresult
2566 && kind != VN_NOWALK
2567 && vr1.vuse)
2569 ao_ref r;
2570 vn_walk_kind = kind;
2571 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2572 *vnresult =
2573 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2574 vn_reference_lookup_2,
2575 vn_reference_lookup_3,
2576 vuse_ssa_val, &vr1);
2577 gcc_checking_assert (vr1.operands == shared_lookup_references);
2580 if (*vnresult)
2581 return (*vnresult)->result;
2583 return NULL_TREE;
2586 /* Lookup OP in the current hash table, and return the resulting value
2587 number if it exists in the hash table. Return NULL_TREE if it does
2588 not exist in the hash table or if the result field of the structure
2589 was NULL. VNRESULT will be filled in with the vn_reference_t
2590 stored in the hashtable if one exists. When TBAA_P is false assume
2591 we are looking up a store and treat it as having alias-set zero. */
2593 tree
2594 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2595 vn_reference_t *vnresult, bool tbaa_p)
2597 vec<vn_reference_op_s> operands;
2598 struct vn_reference_s vr1;
2599 tree cst;
2600 bool valueized_anything;
2602 if (vnresult)
2603 *vnresult = NULL;
2605 vr1.vuse = vuse_ssa_val (vuse);
2606 vr1.operands = operands
2607 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
2608 vr1.type = TREE_TYPE (op);
2609 vr1.set = tbaa_p ? get_alias_set (op) : 0;
2610 vr1.hashcode = vn_reference_compute_hash (&vr1);
2611 if ((cst = fully_constant_vn_reference_p (&vr1)))
2612 return cst;
2614 if (kind != VN_NOWALK
2615 && vr1.vuse)
2617 vn_reference_t wvnresult;
2618 ao_ref r;
2619 /* Make sure to use a valueized reference if we valueized anything.
2620 Otherwise preserve the full reference for advanced TBAA. */
2621 if (!valueized_anything
2622 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2623 vr1.operands))
2624 ao_ref_init (&r, op);
2625 if (! tbaa_p)
2626 r.ref_alias_set = r.base_alias_set = 0;
2627 vn_walk_kind = kind;
2628 wvnresult =
2629 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2630 vn_reference_lookup_2,
2631 vn_reference_lookup_3,
2632 vuse_ssa_val, &vr1);
2633 gcc_checking_assert (vr1.operands == shared_lookup_references);
2634 if (wvnresult)
2636 if (vnresult)
2637 *vnresult = wvnresult;
2638 return wvnresult->result;
2641 return NULL_TREE;
2644 return vn_reference_lookup_1 (&vr1, vnresult);
2647 /* Lookup CALL in the current hash table and return the entry in
2648 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2650 void
2651 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2652 vn_reference_t vr)
2654 if (vnresult)
2655 *vnresult = NULL;
2657 tree vuse = gimple_vuse (call);
2659 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2660 vr->operands = valueize_shared_reference_ops_from_call (call);
2661 vr->type = gimple_expr_type (call);
2662 vr->set = 0;
2663 vr->hashcode = vn_reference_compute_hash (vr);
2664 vn_reference_lookup_1 (vr, vnresult);
2667 /* Insert OP into the current hash table with a value number of RESULT. */
2669 static void
2670 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2672 vn_reference_s **slot;
2673 vn_reference_t vr1;
2674 bool tem;
2676 vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
2677 if (TREE_CODE (result) == SSA_NAME)
2678 vr1->value_id = VN_INFO (result)->value_id;
2679 else
2680 vr1->value_id = get_or_alloc_constant_value_id (result);
2681 vr1->vuse = vuse_ssa_val (vuse);
2682 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2683 vr1->type = TREE_TYPE (op);
2684 vr1->set = get_alias_set (op);
2685 vr1->hashcode = vn_reference_compute_hash (vr1);
2686 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2687 vr1->result_vdef = vdef;
2689 slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2690 INSERT);
2692 /* Because IL walking on reference lookup can end up visiting
2693 a def that is only to be visited later in iteration order
2694 when we are about to make an irreducible region reducible,
2695 the def can already have been processed and its ref inserted
2696 by vn_reference_lookup_3. So we cannot assert (!*slot)
2697 but save a lookup if we deal with already inserted refs here. */
2698 if (*slot)
2700 /* We cannot assert that we have the same value either because
2701 when disentangling an irreducible region we may end up visiting
2702 a use before the corresponding def. That's a missed optimization
2703 only though. See gcc.dg/tree-ssa/pr87126.c for example. */
2704 if (dump_file && (dump_flags & TDF_DETAILS)
2705 && !operand_equal_p ((*slot)->result, vr1->result, 0))
2707 fprintf (dump_file, "Keeping old value ");
2708 print_generic_expr (dump_file, (*slot)->result);
2709 fprintf (dump_file, " because of collision\n");
2711 free_reference (vr1);
2712 obstack_free (&vn_tables_obstack, vr1);
2713 return;
2716 *slot = vr1;
2717 vr1->next = last_inserted_ref;
2718 last_inserted_ref = vr1;
2721 /* Insert a reference by its pieces into the current hash table with
2722 a value number of RESULT. Return the resulting reference
2723 structure we created. */
2725 vn_reference_t
2726 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2727 vec<vn_reference_op_s> operands,
2728 tree result, unsigned int value_id)
2731 vn_reference_s **slot;
2732 vn_reference_t vr1;
2734 vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
2735 vr1->value_id = value_id;
2736 vr1->vuse = vuse_ssa_val (vuse);
2737 vr1->operands = valueize_refs (operands);
2738 vr1->type = type;
2739 vr1->set = set;
2740 vr1->hashcode = vn_reference_compute_hash (vr1);
2741 if (result && TREE_CODE (result) == SSA_NAME)
2742 result = SSA_VAL (result);
2743 vr1->result = result;
2745 slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2746 INSERT);
2748 /* At this point we should have all the things inserted that we have
2749 seen before, and we should never try inserting something that
2750 already exists. */
2751 gcc_assert (!*slot);
2753 *slot = vr1;
2754 vr1->next = last_inserted_ref;
2755 last_inserted_ref = vr1;
2756 return vr1;
2759 /* Compute and return the hash value for nary operation VNO1. */
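/* Note that operands are canonicalized while hashing: operands of
   commutative codes are ordered canonically and comparisons may have their
   operands swapped together with the comparison code, so that e.g.
   a_1 < b_2 and b_2 > a_1 receive the same hash.  */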
2761 static hashval_t
2762 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2764 inchash::hash hstate;
2765 unsigned i;
2767 for (i = 0; i < vno1->length; ++i)
2768 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2769 vno1->op[i] = SSA_VAL (vno1->op[i]);
2771 if (((vno1->length == 2
2772 && commutative_tree_code (vno1->opcode))
2773 || (vno1->length == 3
2774 && commutative_ternary_tree_code (vno1->opcode)))
2775 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2776 std::swap (vno1->op[0], vno1->op[1]);
2777 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2778 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2780 std::swap (vno1->op[0], vno1->op[1]);
2781 vno1->opcode = swap_tree_comparison (vno1->opcode);
2784 hstate.add_int (vno1->opcode);
2785 for (i = 0; i < vno1->length; ++i)
2786 inchash::add_expr (vno1->op[i], hstate);
2788 return hstate.end ();
2791 /* Compare nary operations VNO1 and VNO2 and return true if they are
2792 equivalent. */
2794 bool
2795 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2797 unsigned i;
2799 if (vno1->hashcode != vno2->hashcode)
2800 return false;
2802 if (vno1->length != vno2->length)
2803 return false;
2805 if (vno1->opcode != vno2->opcode
2806 || !types_compatible_p (vno1->type, vno2->type))
2807 return false;
2809 for (i = 0; i < vno1->length; ++i)
2810 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2811 return false;
2813 /* BIT_INSERT_EXPR has an implicit operand as the type precision
2814 of op1. Need to check to make sure they are the same. */
2815 if (vno1->opcode == BIT_INSERT_EXPR
2816 && TREE_CODE (vno1->op[1]) == INTEGER_CST
2817 && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
2818 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
2819 return false;
2821 return true;
2824 /* Initialize VNO from the pieces provided. */
2826 static void
2827 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2828 enum tree_code code, tree type, tree *ops)
2830 vno->opcode = code;
2831 vno->length = length;
2832 vno->type = type;
2833 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2836 /* Initialize VNO from OP. */
2838 static void
2839 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2841 unsigned i;
2843 vno->opcode = TREE_CODE (op);
2844 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2845 vno->type = TREE_TYPE (op);
2846 for (i = 0; i < vno->length; ++i)
2847 vno->op[i] = TREE_OPERAND (op, i);
2850 /* Return the number of operands for a vn_nary ops structure from STMT. */
2852 static unsigned int
2853 vn_nary_length_from_stmt (gimple *stmt)
2855 switch (gimple_assign_rhs_code (stmt))
2857 case REALPART_EXPR:
2858 case IMAGPART_EXPR:
2859 case VIEW_CONVERT_EXPR:
2860 return 1;
2862 case BIT_FIELD_REF:
2863 return 3;
2865 case CONSTRUCTOR:
2866 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2868 default:
2869 return gimple_num_ops (stmt) - 1;
2873 /* Initialize VNO from STMT. */
2875 static void
2876 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
2878 unsigned i;
2880 vno->opcode = gimple_assign_rhs_code (stmt);
2881 vno->type = gimple_expr_type (stmt);
2882 switch (vno->opcode)
2884 case REALPART_EXPR:
2885 case IMAGPART_EXPR:
2886 case VIEW_CONVERT_EXPR:
2887 vno->length = 1;
2888 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2889 break;
2891 case BIT_FIELD_REF:
2892 vno->length = 3;
2893 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2894 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2895 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2896 break;
2898 case CONSTRUCTOR:
2899 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2900 for (i = 0; i < vno->length; ++i)
2901 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2902 break;
2904 default:
2905 gcc_checking_assert (!gimple_assign_single_p (stmt));
2906 vno->length = gimple_num_ops (stmt) - 1;
2907 for (i = 0; i < vno->length; ++i)
2908 vno->op[i] = gimple_op (stmt, i + 1);
2912 /* Compute the hashcode for VNO and look for it in the hash table;
2913 return the resulting value number if it exists in the hash table.
2914 Return NULL_TREE if it does not exist in the hash table or if the
2915 result field of the operation is NULL. VNRESULT will contain the
2916 vn_nary_op_t from the hashtable if it exists. */
2918 static tree
2919 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2921 vn_nary_op_s **slot;
2923 if (vnresult)
2924 *vnresult = NULL;
2926 vno->hashcode = vn_nary_op_compute_hash (vno);
2927 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
2928 if (!slot)
2929 return NULL_TREE;
2930 if (vnresult)
2931 *vnresult = *slot;
2932 return (*slot)->predicated_values ? NULL_TREE : (*slot)->u.result;
2935 /* Lookup a n-ary operation by its pieces and return the resulting value
2936 number if it exists in the hash table. Return NULL_TREE if it does
2937 not exist in the hash table or if the result field of the operation
2938 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2939 if it exists. */
2941 tree
2942 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2943 tree type, tree *ops, vn_nary_op_t *vnresult)
2945 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2946 sizeof_vn_nary_op (length));
2947 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2948 return vn_nary_op_lookup_1 (vno1, vnresult);
2951 /* Lookup OP in the current hash table, and return the resulting value
2952 number if it exists in the hash table. Return NULL_TREE if it does
2953 not exist in the hash table or if the result field of the operation
2954 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2955 if it exists. */
2957 tree
2958 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2960 vn_nary_op_t vno1
2961 = XALLOCAVAR (struct vn_nary_op_s,
2962 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2963 init_vn_nary_op_from_op (vno1, op);
2964 return vn_nary_op_lookup_1 (vno1, vnresult);
2967 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2968 value number if it exists in the hash table. Return NULL_TREE if
2969 it does not exist in the hash table. VNRESULT will contain the
2970 vn_nary_op_t from the hashtable if it exists. */
2972 tree
2973 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
2975 vn_nary_op_t vno1
2976 = XALLOCAVAR (struct vn_nary_op_s,
2977 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2978 init_vn_nary_op_from_stmt (vno1, stmt);
2979 return vn_nary_op_lookup_1 (vno1, vnresult);
2982 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2984 static vn_nary_op_t
2985 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2987 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2990 /* Allocate and initialize a vn_nary_op_t on the VN tables
2991 obstack. */
2993 static vn_nary_op_t
2994 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2996 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length, &vn_tables_obstack);
2998 vno1->value_id = value_id;
2999 vno1->length = length;
3000 vno1->predicated_values = 0;
3001 vno1->u.result = result;
3003 return vno1;
3006 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
3007 VNO->HASHCODE first. */
3009 static vn_nary_op_t
3010 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
3011 bool compute_hash)
3013 vn_nary_op_s **slot;
3015 if (compute_hash)
3017 vno->hashcode = vn_nary_op_compute_hash (vno);
3018 gcc_assert (! vno->predicated_values
3019 || (! vno->u.values->next
3020 && vno->u.values->valid_dominated_by_p[0] != EXIT_BLOCK
3021 && vno->u.values->valid_dominated_by_p[1] == EXIT_BLOCK));
3024 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
3025 vno->unwind_to = *slot;
3026 if (*slot)
3028 /* Prefer non-predicated values.
3029 ??? Only if those are constant, otherwise, with constant predicated
3030 value, turn them into predicated values with entry-block validity
3031 (??? but we always find the first valid result currently). */
3032 if ((*slot)->predicated_values
3033 && ! vno->predicated_values)
3035 /* ??? We cannot remove *slot from the unwind stack list.
3036 For the moment we deal with this by skipping not found
3037 entries but this isn't ideal ... */
3038 *slot = vno;
3039 /* ??? Maintain a stack of states we can unwind in
3040 vn_nary_op_s? But how far do we unwind? In reality
3041 we need to push change records somewhere... Or not
3042 unwind vn_nary_op_s and linking them but instead
3043 unwind the results "list", linking that, which also
3044 doesn't move on hashtable resize. */
3045 /* We can also have a ->unwind_to recording *slot there.
3046 That way we can make u.values a fixed size array with
3047 recording the number of entries but of course we then
3048 have always N copies for each unwind_to-state. Or we
3049 make sure to only ever append and each unwinding will
3050 pop off one entry (but how to deal with predicated
3051 replaced with non-predicated here?) */
3052 vno->next = last_inserted_nary;
3053 last_inserted_nary = vno;
3054 return vno;
3056 else if (vno->predicated_values
3057 && ! (*slot)->predicated_values)
3058 return *slot;
3059 else if (vno->predicated_values
3060 && (*slot)->predicated_values)
3062 /* ??? Factor this all into a insert_single_predicated_value
3063 routine. */
3064 gcc_assert (!vno->u.values->next && vno->u.values->n == 1);
3065 basic_block vno_bb
3066 = BASIC_BLOCK_FOR_FN (cfun, vno->u.values->valid_dominated_by_p[0]);
3067 vn_pval *nval = vno->u.values;
3068 vn_pval **next = &vno->u.values;
3069 bool found = false;
3070 for (vn_pval *val = (*slot)->u.values; val; val = val->next)
3072 if (expressions_equal_p (val->result, vno->u.values->result))
3074 found = true;
3075 for (unsigned i = 0; i < val->n; ++i)
3077 basic_block val_bb
3078 = BASIC_BLOCK_FOR_FN (cfun,
3079 val->valid_dominated_by_p[i]);
3080 if (dominated_by_p (CDI_DOMINATORS, vno_bb, val_bb))
3081 /* Value registered with more generic predicate. */
3082 return *slot;
3083 else if (dominated_by_p (CDI_DOMINATORS, val_bb, vno_bb))
3084 /* Shouldn't happen, we insert in RPO order. */
3085 gcc_unreachable ();
3087 /* Append value. */
3088 *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
3089 sizeof (vn_pval)
3090 + val->n * sizeof (int));
3091 (*next)->next = NULL;
3092 (*next)->result = val->result;
3093 (*next)->n = val->n + 1;
3094 memcpy ((*next)->valid_dominated_by_p,
3095 val->valid_dominated_by_p,
3096 val->n * sizeof (int));
3097 (*next)->valid_dominated_by_p[val->n] = vno_bb->index;
3098 next = &(*next)->next;
3099 if (dump_file && (dump_flags & TDF_DETAILS))
3100 fprintf (dump_file, "Appending predicate to value.\n");
3101 continue;
3103 /* Copy other predicated values. */
3104 *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
3105 sizeof (vn_pval)
3106 + (val->n-1) * sizeof (int));
3107 memcpy (*next, val, sizeof (vn_pval) + (val->n-1) * sizeof (int));
3108 (*next)->next = NULL;
3109 next = &(*next)->next;
3111 if (!found)
3112 *next = nval;
3114 *slot = vno;
3115 vno->next = last_inserted_nary;
3116 last_inserted_nary = vno;
3117 return vno;
3120 /* While we do not want to insert things twice it's awkward to
3121 avoid it in the case where visit_nary_op pattern-matches stuff
3122 and ends up simplifying the replacement to itself. We then
3123 get two inserts, one from visit_nary_op and one from
3124 vn_nary_build_or_lookup.
3125 So allow inserts with the same value number. */
3126 if ((*slot)->u.result == vno->u.result)
3127 return *slot;
3130 /* ??? There's also optimistic vs. previous committed state merging
3131 that is problematic for the case of unwinding. */
3133 /* ??? We should return NULL if we do not use 'vno' and have the
3134 caller release it. */
3135 gcc_assert (!*slot);
3137 *slot = vno;
3138 vno->next = last_inserted_nary;
3139 last_inserted_nary = vno;
3140 return vno;
3143 /* Insert an n-ary operation into the current hash table using its
3144 pieces. Return the vn_nary_op_t structure we created and put in
3145 the hashtable. */
3147 vn_nary_op_t
3148 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
3149 tree type, tree *ops,
3150 tree result, unsigned int value_id)
3152 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
3153 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
3154 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
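/* Insert the n-ary operation given by LENGTH, CODE, TYPE and OPS into the
   current hash table as a predicated value, recording RESULT as valid only
   in blocks dominated by the destination of edge PRED_E.  Returns NULL if
   the value cannot be attributed to PRED_E alone (for example when its
   destination has multiple non-backedge predecessors).  Illustrative
   example: after  if (x_1 > 0)  the true edge can record the predicated
   value  x_1 > 0 == true.  */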
3157 static vn_nary_op_t
3158 vn_nary_op_insert_pieces_predicated (unsigned int length, enum tree_code code,
3159 tree type, tree *ops,
3160 tree result, unsigned int value_id,
3161 edge pred_e)
3163 /* ??? Currently tracking BBs. */
3164 if (! single_pred_p (pred_e->dest))
3166 /* Never record for backedges. */
3167 if (pred_e->flags & EDGE_DFS_BACK)
3168 return NULL;
3169 edge_iterator ei;
3170 edge e;
3171 int cnt = 0;
3172 /* Ignore backedges. */
3173 FOR_EACH_EDGE (e, ei, pred_e->dest->preds)
3174 if (! dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
3175 cnt++;
3176 if (cnt != 1)
3177 return NULL;
3179 if (dump_file && (dump_flags & TDF_DETAILS)
3180 /* ??? Fix dumping, but currently we only get comparisons. */
3181 && TREE_CODE_CLASS (code) == tcc_comparison)
3183 fprintf (dump_file, "Recording on edge %d->%d ", pred_e->src->index,
3184 pred_e->dest->index);
3185 print_generic_expr (dump_file, ops[0], TDF_SLIM);
3186 fprintf (dump_file, " %s ", get_tree_code_name (code));
3187 print_generic_expr (dump_file, ops[1], TDF_SLIM);
3188 fprintf (dump_file, " == %s\n",
3189 integer_zerop (result) ? "false" : "true");
3191 vn_nary_op_t vno1 = alloc_vn_nary_op (length, NULL_TREE, value_id);
3192 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
3193 vno1->predicated_values = 1;
3194 vno1->u.values = (vn_pval *) obstack_alloc (&vn_tables_obstack,
3195 sizeof (vn_pval));
3196 vno1->u.values->next = NULL;
3197 vno1->u.values->result = result;
3198 vno1->u.values->n = 1;
3199 vno1->u.values->valid_dominated_by_p[0] = pred_e->dest->index;
3200 vno1->u.values->valid_dominated_by_p[1] = EXIT_BLOCK;
3201 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
3204 static bool
3205 dominated_by_p_w_unex (basic_block bb1, basic_block bb2);
3207 static tree
3208 vn_nary_op_get_predicated_value (vn_nary_op_t vno, basic_block bb)
3210 if (! vno->predicated_values)
3211 return vno->u.result;
3212 for (vn_pval *val = vno->u.values; val; val = val->next)
3213 for (unsigned i = 0; i < val->n; ++i)
3214 if (dominated_by_p_w_unex (bb,
3215 BASIC_BLOCK_FOR_FN
3216 (cfun, val->valid_dominated_by_p[i])))
3217 return val->result;
3218 return NULL_TREE;
3221 /* Insert OP into the current hash table with a value number of
3222 RESULT. Return the vn_nary_op_t structure we created and put in
3223 the hashtable. */
3225 vn_nary_op_t
3226 vn_nary_op_insert (tree op, tree result)
3228 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
3229 vn_nary_op_t vno1;
3231 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
3232 init_vn_nary_op_from_op (vno1, op);
3233 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
3236 /* Insert the rhs of STMT into the current hash table with a value number of
3237 RESULT. */
3239 static vn_nary_op_t
3240 vn_nary_op_insert_stmt (gimple *stmt, tree result)
3242 vn_nary_op_t vno1
3243 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
3244 result, VN_INFO (result)->value_id);
3245 init_vn_nary_op_from_stmt (vno1, stmt);
3246 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
3249 /* Compute a hashcode for PHI operation VP1 and return it. */
3251 static inline hashval_t
3252 vn_phi_compute_hash (vn_phi_t vp1)
3254 inchash::hash hstate (EDGE_COUNT (vp1->block->preds) > 2
3255 ? vp1->block->index : EDGE_COUNT (vp1->block->preds));
3256 tree phi1op;
3257 tree type;
3258 edge e;
3259 edge_iterator ei;
3261 /* If all PHI arguments are constants we need to distinguish
3262 the PHI node via its type. */
3263 type = vp1->type;
3264 hstate.merge_hash (vn_hash_type (type));
3266 FOR_EACH_EDGE (e, ei, vp1->block->preds)
3268 /* Don't hash backedge values; they need to be handled as VN_TOP
3269 for optimistic value-numbering. */
3270 if (e->flags & EDGE_DFS_BACK)
3271 continue;
3273 phi1op = vp1->phiargs[e->dest_idx];
3274 if (phi1op == VN_TOP)
3275 continue;
3276 inchash::add_expr (phi1op, hstate);
3279 return hstate.end ();
3283 /* Return true if COND1 and COND2 represent the same condition; set
3284 *INVERTED_P if one needs to be inverted to make it the same as
3285 the other. */
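/* Illustrative examples: a_1 < b_2 and b_2 > a_1 are the same condition
   (operands swapped), while a_1 < b_2 and a_1 >= b_2 are the same
   condition with *INVERTED_P set.  */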
3287 static bool
3288 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
3289 gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
3291 enum tree_code code1 = gimple_cond_code (cond1);
3292 enum tree_code code2 = gimple_cond_code (cond2);
3294 *inverted_p = false;
3295 if (code1 == code2)
3297 else if (code1 == swap_tree_comparison (code2))
3298 std::swap (lhs2, rhs2);
3299 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
3300 *inverted_p = true;
3301 else if (code1 == invert_tree_comparison
3302 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
3304 std::swap (lhs2, rhs2);
3305 *inverted_p = true;
3307 else
3308 return false;
3310 return ((expressions_equal_p (lhs1, lhs2)
3311 && expressions_equal_p (rhs1, rhs2))
3312 || (commutative_tree_code (code1)
3313 && expressions_equal_p (lhs1, rhs2)
3314 && expressions_equal_p (rhs1, lhs2)));
3317 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
3319 static int
3320 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
3322 if (vp1->hashcode != vp2->hashcode)
3323 return false;
3325 if (vp1->block != vp2->block)
3327 if (EDGE_COUNT (vp1->block->preds) != EDGE_COUNT (vp2->block->preds))
3328 return false;
3330 switch (EDGE_COUNT (vp1->block->preds))
3332 case 1:
3333 /* Single-arg PHIs are just copies. */
3334 break;
3336 case 2:
3338 /* Rule out backedges into the PHI. */
3339 if (vp1->block->loop_father->header == vp1->block
3340 || vp2->block->loop_father->header == vp2->block)
3341 return false;
3343 /* If the PHI nodes do not have compatible types
3344 they are not the same. */
3345 if (!types_compatible_p (vp1->type, vp2->type))
3346 return false;
3348 basic_block idom1
3349 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3350 basic_block idom2
3351 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
3352 /* If the immediate dominators end in switch stmts, multiple
3353 values may end up in the same PHI arg via intermediate
3354 CFG merges. */
3355 if (EDGE_COUNT (idom1->succs) != 2
3356 || EDGE_COUNT (idom2->succs) != 2)
3357 return false;
3359 /* Verify the controlling stmt is the same. */
3360 gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
3361 gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
3362 if (! last1 || ! last2)
3363 return false;
3364 bool inverted_p;
3365 if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
3366 last2, vp2->cclhs, vp2->ccrhs,
3367 &inverted_p))
3368 return false;
3370 /* Get at true/false controlled edges into the PHI. */
3371 edge te1, te2, fe1, fe2;
3372 if (! extract_true_false_controlled_edges (idom1, vp1->block,
3373 &te1, &fe1)
3374 || ! extract_true_false_controlled_edges (idom2, vp2->block,
3375 &te2, &fe2))
3376 return false;
3378 /* Swap edges if the second condition is the inverted of the
3379 first. */
3380 if (inverted_p)
3381 std::swap (te2, fe2);
3383 /* ??? Handle VN_TOP specially. */
3384 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
3385 vp2->phiargs[te2->dest_idx])
3386 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
3387 vp2->phiargs[fe2->dest_idx]))
3388 return false;
3390 return true;
3393 default:
3394 return false;
3398 /* If the PHI nodes do not have compatible types
3399 they are not the same. */
3400 if (!types_compatible_p (vp1->type, vp2->type))
3401 return false;
3403 /* Any phi in the same block will have its arguments in the
3404 same edge order, because of how we store phi nodes. */
3405 for (unsigned i = 0; i < EDGE_COUNT (vp1->block->preds); ++i)
3407 tree phi1op = vp1->phiargs[i];
3408 tree phi2op = vp2->phiargs[i];
3409 if (phi1op == VN_TOP || phi2op == VN_TOP)
3410 continue;
3411 if (!expressions_equal_p (phi1op, phi2op))
3412 return false;
3415 return true;
3418 /* Lookup PHI in the current hash table, and return the resulting
3419 value number if it exists in the hash table. Return NULL_TREE if
3420 it does not exist in the hash table. */
3422 static tree
3423 vn_phi_lookup (gimple *phi, bool backedges_varying_p)
3425 vn_phi_s **slot;
3426 struct vn_phi_s *vp1;
3427 edge e;
3428 edge_iterator ei;
3430 vp1 = XALLOCAVAR (struct vn_phi_s,
3431 sizeof (struct vn_phi_s)
3432 + (gimple_phi_num_args (phi) - 1) * sizeof (tree));
3434 /* Canonicalize the SSA_NAMEs to their value numbers. */
3435 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3437 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3438 if (TREE_CODE (def) == SSA_NAME
3439 && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
3440 def = SSA_VAL (def);
3441 vp1->phiargs[e->dest_idx] = def;
3443 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3444 vp1->block = gimple_bb (phi);
3445 /* Extract values of the controlling condition. */
3446 vp1->cclhs = NULL_TREE;
3447 vp1->ccrhs = NULL_TREE;
3448 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3449 if (EDGE_COUNT (idom1->succs) == 2)
3450 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3452 /* ??? We want to use SSA_VAL here. But possibly not
3453 allow VN_TOP. */
3454 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3455 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3457 vp1->hashcode = vn_phi_compute_hash (vp1);
3458 slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, NO_INSERT);
3459 if (!slot)
3460 return NULL_TREE;
3461 return (*slot)->result;
3464 /* Insert PHI into the current hash table with a value number of
3465 RESULT. */
3467 static vn_phi_t
3468 vn_phi_insert (gimple *phi, tree result, bool backedges_varying_p)
3470 vn_phi_s **slot;
3471 vn_phi_t vp1 = (vn_phi_t) obstack_alloc (&vn_tables_obstack,
3472 sizeof (vn_phi_s)
3473 + ((gimple_phi_num_args (phi) - 1)
3474 * sizeof (tree)));
3475 edge e;
3476 edge_iterator ei;
3478 /* Canonicalize the SSA_NAMEs to their value numbers. */
3479 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3481 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3482 if (TREE_CODE (def) == SSA_NAME
3483 && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
3484 def = SSA_VAL (def);
3485 vp1->phiargs[e->dest_idx] = def;
3487 vp1->value_id = VN_INFO (result)->value_id;
3488 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3489 vp1->block = gimple_bb (phi);
3490 /* Extract values of the controlling condition. */
3491 vp1->cclhs = NULL_TREE;
3492 vp1->ccrhs = NULL_TREE;
3493 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3494 if (EDGE_COUNT (idom1->succs) == 2)
3495 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3497 /* ??? We want to use SSA_VAL here. But possibly not
3498 allow VN_TOP. */
3499 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3500 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3502 vp1->result = result;
3503 vp1->hashcode = vn_phi_compute_hash (vp1);
3505 slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
3506 gcc_assert (!*slot);
3508 *slot = vp1;
3509 vp1->next = last_inserted_phi;
3510 last_inserted_phi = vp1;
3511 return vp1;
3515 /* Return true if BB1 is dominated by BB2 taking into account edges
3516 that are not executable. */
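/* Illustrative example: if the single executable predecessor edge of BB1
   comes from a block dominated by BB2 then BB1 is considered dominated by
   BB2 as well, even if non-executable edges would make the plain dominance
   query fail.  */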
3518 static bool
3519 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
3521 edge_iterator ei;
3522 edge e;
3524 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3525 return true;
3527 /* Before iterating we'd like to know if there exists an
3528 (executable) path from bb2 to bb1 at all; if not we can
3529 directly return false. For now simply iterate once. */
3531 /* Iterate to the single executable bb1 predecessor. */
3532 if (EDGE_COUNT (bb1->preds) > 1)
3534 edge prede = NULL;
3535 FOR_EACH_EDGE (e, ei, bb1->preds)
3536 if (e->flags & EDGE_EXECUTABLE)
3538 if (prede)
3540 prede = NULL;
3541 break;
3543 prede = e;
3545 if (prede)
3547 bb1 = prede->src;
3549 /* Re-do the dominance check with changed bb1. */
3550 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3551 return true;
3555 /* Iterate to the single executable bb2 successor. */
3556 edge succe = NULL;
3557 FOR_EACH_EDGE (e, ei, bb2->succs)
3558 if (e->flags & EDGE_EXECUTABLE)
3560 if (succe)
3562 succe = NULL;
3563 break;
3565 succe = e;
3567 if (succe)
3569 /* Verify the reached block is only reached through succe.
3570 If there is only one edge we can spare us the dominator
3571 check and iterate directly. */
3572 if (EDGE_COUNT (succe->dest->preds) > 1)
3574 FOR_EACH_EDGE (e, ei, succe->dest->preds)
3575 if (e != succe
3576 && (e->flags & EDGE_EXECUTABLE))
3578 succe = NULL;
3579 break;
3582 if (succe)
3584 bb2 = succe->dest;
3586 /* Re-do the dominance check with changed bb2. */
3587 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3588 return true;
3592 /* We could now iterate updating bb1 / bb2. */
3593 return false;
3596 /* Set the value number of FROM to TO, return true if it has changed
3597 as a result. */
3599 static inline bool
3600 set_ssa_val_to (tree from, tree to)
3602 vn_ssa_aux_t from_info = VN_INFO (from);
3603 tree currval = from_info->valnum; // SSA_VAL (from)
3604 poly_int64 toff, coff;
3606 /* The only things we allow as value numbers are ssa_names
3607 and invariants. So assert that here. We don't allow VN_TOP
3608 as visiting a stmt should produce a value-number other than
3609 that.
3610 ??? Still VN_TOP can happen for unreachable code, so force
3611 it to varying in that case. Not all code is prepared to
3612 get VN_TOP on valueization. */
3613 if (to == VN_TOP)
3615 /* ??? When iterating and visiting PHI <undef, backedge-value>
3616 for the first time we rightfully get VN_TOP and we need to
3617 preserve that to optimize for example gcc.dg/tree-ssa/ssa-sccvn-2.c.
3618 With SCCVN we were simply lucky we iterated the other PHI
3619 cycles first and thus visited the backedge-value DEF. */
3620 if (currval == VN_TOP)
3621 goto set_and_exit;
3622 if (dump_file && (dump_flags & TDF_DETAILS))
3623 fprintf (dump_file, "Forcing value number to varying on "
3624 "receiving VN_TOP\n");
3625 to = from;
3628 gcc_checking_assert (to != NULL_TREE
3629 && ((TREE_CODE (to) == SSA_NAME
3630 && (to == from || SSA_VAL (to) == to))
3631 || is_gimple_min_invariant (to)));
3633 if (from != to)
3635 if (currval == from)
3637 if (dump_file && (dump_flags & TDF_DETAILS))
3639 fprintf (dump_file, "Not changing value number of ");
3640 print_generic_expr (dump_file, from);
3641 fprintf (dump_file, " from VARYING to ");
3642 print_generic_expr (dump_file, to);
3643 fprintf (dump_file, "\n");
3645 return false;
3647 else if (currval != VN_TOP
3648 && ! is_gimple_min_invariant (currval)
3649 && ! ssa_undefined_value_p (currval, false)
3650 && is_gimple_min_invariant (to))
3652 if (dump_file && (dump_flags & TDF_DETAILS))
3654 fprintf (dump_file, "Forcing VARYING instead of changing "
3655 "value number of ");
3656 print_generic_expr (dump_file, from);
3657 fprintf (dump_file, " from ");
3658 print_generic_expr (dump_file, currval);
3659 fprintf (dump_file, " (non-constant) to ");
3660 print_generic_expr (dump_file, to);
3661 fprintf (dump_file, " (constant)\n");
3663 to = from;
3665 else if (TREE_CODE (to) == SSA_NAME
3666 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3667 to = from;
3670 set_and_exit:
3671 if (dump_file && (dump_flags & TDF_DETAILS))
3673 fprintf (dump_file, "Setting value number of ");
3674 print_generic_expr (dump_file, from);
3675 fprintf (dump_file, " to ");
3676 print_generic_expr (dump_file, to);
3679 if (currval != to
3680 && !operand_equal_p (currval, to, 0)
3681 /* Different undefined SSA names are not actually different. See
3682 PR82320 for a testcase where we'd otherwise not terminate iteration. */
3683 && !(TREE_CODE (currval) == SSA_NAME
3684 && TREE_CODE (to) == SSA_NAME
3685 && ssa_undefined_value_p (currval, false)
3686 && ssa_undefined_value_p (to, false))
3687 /* ??? For addresses involving volatile objects or types operand_equal_p
3688 does not reliably detect ADDR_EXPRs as equal. We know we are only
3689 getting invariant gimple addresses here, so can use
3690 get_addr_base_and_unit_offset to do this comparison. */
3691 && !(TREE_CODE (currval) == ADDR_EXPR
3692 && TREE_CODE (to) == ADDR_EXPR
3693 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3694 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3695 && known_eq (coff, toff)))
3697 if (dump_file && (dump_flags & TDF_DETAILS))
3698 fprintf (dump_file, " (changed)\n");
3699 from_info->valnum = to;
3700 return true;
3702 if (dump_file && (dump_flags & TDF_DETAILS))
3703 fprintf (dump_file, "\n");
3704 return false;
3707 /* Value number all definitions in STMT to themselves.
3708 Return true if a value number changed. */
3710 static bool
3711 defs_to_varying (gimple *stmt)
3713 bool changed = false;
3714 ssa_op_iter iter;
3715 def_operand_p defp;
3717 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3719 tree def = DEF_FROM_PTR (defp);
3720 changed |= set_ssa_val_to (def, def);
3722 return changed;
3725 /* Visit a copy between LHS and RHS, return true if the value number
3726 changed. */
3728 static bool
3729 visit_copy (tree lhs, tree rhs)
3731 /* Valueize. */
3732 rhs = SSA_VAL (rhs);
3734 return set_ssa_val_to (lhs, rhs);
3737 /* Lookup a value for OP in type WIDE_TYPE that has the same value
3738 as OP has in its own type. */
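/* Illustrative example: for OP of type int and WIDE_TYPE long this returns
   an already value-numbered (long) OP if there is one, the value of tem_1
   if OP was defined as (int) tem_1 with tem_1 of type long, or the widened
   constant if OP is an INTEGER_CST.  */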
3740 static tree
3741 valueized_wider_op (tree wide_type, tree op)
3743 if (TREE_CODE (op) == SSA_NAME)
3744 op = vn_valueize (op);
3746 /* Either we have the op widened available. */
3747 tree ops[3] = {};
3748 ops[0] = op;
3749 tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
3750 wide_type, ops, NULL);
3751 if (tem)
3752 return tem;
3754 /* Or the op is truncated from some existing value. */
3755 if (TREE_CODE (op) == SSA_NAME)
3757 gimple *def = SSA_NAME_DEF_STMT (op);
3758 if (is_gimple_assign (def)
3759 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3761 tem = gimple_assign_rhs1 (def);
3762 if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
3764 if (TREE_CODE (tem) == SSA_NAME)
3765 tem = vn_valueize (tem);
3766 return tem;
3771 /* For constants simply extend it. */
3772 if (TREE_CODE (op) == INTEGER_CST)
3773 return wide_int_to_tree (wide_type, wi::to_wide (op));
3775 return NULL_TREE;
3778 /* Visit a nary operator RHS, value number it, and return true if the
3779 value number of LHS has changed as a result. */
3781 static bool
3782 visit_nary_op (tree lhs, gassign *stmt)
3784 vn_nary_op_t vnresult;
3785 tree result = vn_nary_op_lookup_stmt (stmt, &vnresult);
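/* If the expression itself has no value, see whether a value was recorded
for it as valid on the predicate edges dominating this block. */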
3786 if (! result && vnresult)
3787 result = vn_nary_op_get_predicated_value (vnresult, gimple_bb (stmt));
3788 if (result)
3789 return set_ssa_val_to (lhs, result);
3791 /* Do some special pattern matching for redundancies of operations
3792 in different types. */
3793 enum tree_code code = gimple_assign_rhs_code (stmt);
3794 tree type = TREE_TYPE (lhs);
3795 tree rhs1 = gimple_assign_rhs1 (stmt);
3796 switch (code)
3798 CASE_CONVERT:
3799 /* Match arithmetic done in a different type where we can easily
3800 substitute the result from some earlier sign-changed or widened
3801 operation. */
3802 if (INTEGRAL_TYPE_P (type)
3803 && TREE_CODE (rhs1) == SSA_NAME
3804 /* We only handle sign-changes or zero-extension -> & mask. */
3805 && ((TYPE_UNSIGNED (TREE_TYPE (rhs1))
3806 && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
3807 || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
3809 gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
3810 if (def
3811 && (gimple_assign_rhs_code (def) == PLUS_EXPR
3812 || gimple_assign_rhs_code (def) == MINUS_EXPR
3813 || gimple_assign_rhs_code (def) == MULT_EXPR))
3815 tree ops[3] = {};
3816 /* Either we have the op widened available. */
3817 ops[0] = valueized_wider_op (type,
3818 gimple_assign_rhs1 (def));
3819 if (ops[0])
3820 ops[1] = valueized_wider_op (type,
3821 gimple_assign_rhs2 (def));
3822 if (ops[0] && ops[1])
3824 ops[0] = vn_nary_op_lookup_pieces
3825 (2, gimple_assign_rhs_code (def), type, ops, NULL);
3826 /* We have wider operation available. */
3827 if (ops[0])
3829 unsigned lhs_prec = TYPE_PRECISION (type);
3830 unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
3831 if (lhs_prec == rhs_prec)
3833 gimple_match_op match_op (gimple_match_cond::UNCOND,
3834 NOP_EXPR, type, ops[0]);
3835 result = vn_nary_build_or_lookup (&match_op);
3836 if (result)
3838 bool changed = set_ssa_val_to (lhs, result);
3839 vn_nary_op_insert_stmt (stmt, result);
3840 return changed;
3843 else
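/* Zero-extension case: mask the wider result down to the precision of
the narrower operation, keeping only its low rhs_prec bits (the
zero-extension -> & mask case noted above). */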
3845 tree mask = wide_int_to_tree
3846 (type, wi::mask (rhs_prec, false, lhs_prec));
3847 gimple_match_op match_op (gimple_match_cond::UNCOND,
3848 BIT_AND_EXPR,
3849 TREE_TYPE (lhs),
3850 ops[0], mask);
3851 result = vn_nary_build_or_lookup (&match_op);
3852 if (result)
3854 bool changed = set_ssa_val_to (lhs, result);
3855 vn_nary_op_insert_stmt (stmt, result);
3856 return changed;
3863 default:;
3866 bool changed = set_ssa_val_to (lhs, lhs);
3867 vn_nary_op_insert_stmt (stmt, lhs);
3868 return changed;
3871 /* Visit a call STMT storing into LHS. Return true if the value number
3872 of the LHS has changed as a result. */
3874 static bool
3875 visit_reference_op_call (tree lhs, gcall *stmt)
3877 bool changed = false;
3878 struct vn_reference_s vr1;
3879 vn_reference_t vnresult = NULL;
3880 tree vdef = gimple_vdef (stmt);
3882 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
3883 if (lhs && TREE_CODE (lhs) != SSA_NAME)
3884 lhs = NULL_TREE;
3886 vn_reference_lookup_call (stmt, &vnresult, &vr1);
3887 if (vnresult)
3889 if (vnresult->result_vdef && vdef)
3890 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
3891 else if (vdef)
3892 /* If the call was discovered to be pure or const reflect
3893 that as far as possible. */
3894 changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
3896 if (!vnresult->result && lhs)
3897 vnresult->result = lhs;
3899 if (vnresult->result && lhs)
3900 changed |= set_ssa_val_to (lhs, vnresult->result);
3902 else
3904 vn_reference_t vr2;
3905 vn_reference_s **slot;
3906 tree vdef_val = vdef;
3907 if (vdef)
3909 /* If we value numbered an indirect call's function to
3910 one not clobbering memory, value number its VDEF to its
3911 VUSE. */
3912 tree fn = gimple_call_fn (stmt);
3913 if (fn && TREE_CODE (fn) == SSA_NAME)
3915 fn = SSA_VAL (fn);
3916 if (TREE_CODE (fn) == ADDR_EXPR
3917 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
3918 && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
3919 & (ECF_CONST | ECF_PURE)))
3920 vdef_val = vuse_ssa_val (gimple_vuse (stmt));
3922 changed |= set_ssa_val_to (vdef, vdef_val);
3924 if (lhs)
3925 changed |= set_ssa_val_to (lhs, lhs);
3926 vr2 = XOBNEW (&vn_tables_obstack, vn_reference_s);
3927 vr2->vuse = vr1.vuse;
3928 /* As we are not walking the virtual operand chain we know the
3929 shared_lookup_references are still original so we can re-use
3930 them here. */
3931 vr2->operands = vr1.operands.copy ();
3932 vr2->type = vr1.type;
3933 vr2->set = vr1.set;
3934 vr2->hashcode = vr1.hashcode;
3935 vr2->result = lhs;
3936 vr2->result_vdef = vdef_val;
3937 slot = valid_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3938 INSERT);
3939 gcc_assert (!*slot);
3940 *slot = vr2;
3941 vr2->next = last_inserted_ref;
3942 last_inserted_ref = vr2;
3945 return changed;
3948 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3949 and return true if the value number of the LHS has changed as a result. */
3951 static bool
3952 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
3954 bool changed = false;
3955 tree last_vuse;
3956 tree result;
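/* Have the lookup record the last VUSE it actually used; the insert
below then uses that (possibly older) VUSE so the new table entry
covers as much of the virtual operand chain as possible. */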
3958 last_vuse = gimple_vuse (stmt);
3959 last_vuse_ptr = &last_vuse;
3960 result = vn_reference_lookup (op, gimple_vuse (stmt),
3961 default_vn_walk_kind, NULL, true);
3962 last_vuse_ptr = NULL;
3964 /* We handle type-punning through unions by value-numbering based
3965 on offset and size of the access. Be prepared to handle a
3966 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
3967 if (result
3968 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3970 /* We will be setting the value number of lhs to the value number
3971 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3972 So first simplify and lookup this expression to see if it
3973 is already available. */
3974 gimple_match_op res_op (gimple_match_cond::UNCOND,
3975 VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
3976 result = vn_nary_build_or_lookup (&res_op);
3977 /* When building the conversion fails avoid inserting the reference
3978 again. */
3979 if (!result)
3980 return set_ssa_val_to (lhs, lhs);
3983 if (result)
3984 changed = set_ssa_val_to (lhs, result);
3985 else
3987 changed = set_ssa_val_to (lhs, lhs);
3988 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3991 return changed;
3995 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3996 and return true if the value number of the LHS has changed as a result. */
3998 static bool
3999 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
4001 bool changed = false;
4002 vn_reference_t vnresult = NULL;
4003 tree assign;
4004 bool resultsame = false;
4005 tree vuse = gimple_vuse (stmt);
4006 tree vdef = gimple_vdef (stmt);
4008 if (TREE_CODE (op) == SSA_NAME)
4009 op = SSA_VAL (op);
4011 /* First we want to lookup using the *vuses* from the store and see
4012 if the last store to this location with the same address
4013 had the same value.
4015 The vuses represent the memory state before the store. If the
4016 memory state, address, and value of the store are the same as the
4017 last store to this location, then this store will produce the
4018 same memory state as that store.
4020 In this case the vdef versions for this store are value numbered to those
4021 vuse versions, since they represent the same memory state after
4022 this store.
4024 Otherwise, the vdefs for the store are used when inserting into
4025 the table, since the store generates a new memory state. */
4027 vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
4028 if (vnresult
4029 && vnresult->result)
4031 tree result = vnresult->result;
4032 gcc_checking_assert (TREE_CODE (result) != SSA_NAME
4033 || result == SSA_VAL (result));
4034 resultsame = expressions_equal_p (result, op);
4035 if (resultsame)
4037 /* If the TBAA state isn't compatible for downstream reads
4038 we cannot value-number the VDEFs the same. */
4039 alias_set_type set = get_alias_set (lhs);
4040 if (vnresult->set != set
4041 && ! alias_set_subset_of (set, vnresult->set))
4042 resultsame = false;
4046 if (!resultsame)
4048 /* Only perform the following when being called from PRE
4049 which embeds tail merging. */
4050 if (default_vn_walk_kind == VN_WALK)
4052 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
4053 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
4054 if (vnresult)
4056 VN_INFO (vdef)->visited = true;
4057 return set_ssa_val_to (vdef, vnresult->result_vdef);
4061 if (dump_file && (dump_flags & TDF_DETAILS))
4063 fprintf (dump_file, "No store match\n");
4064 fprintf (dump_file, "Value numbering store ");
4065 print_generic_expr (dump_file, lhs);
4066 fprintf (dump_file, " to ");
4067 print_generic_expr (dump_file, op);
4068 fprintf (dump_file, "\n");
4070 /* Have to set value numbers before insert, since insert is
4071 going to valueize the references in-place. */
4072 if (vdef)
4073 changed |= set_ssa_val_to (vdef, vdef);
4075 /* Do not insert structure copies into the tables. */
4076 if (is_gimple_min_invariant (op)
4077 || is_gimple_reg (op))
4078 vn_reference_insert (lhs, op, vdef, NULL);
4080 /* Only perform the following when being called from PRE
4081 which embeds tail merging. */
4082 if (default_vn_walk_kind == VN_WALK)
4084 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
4085 vn_reference_insert (assign, lhs, vuse, vdef);
4088 else
4090 /* We had a match, so value number the vdef to have the value
4091 number of the vuse it came from. */
4093 if (dump_file && (dump_flags & TDF_DETAILS))
4094 fprintf (dump_file, "Store matched earlier value, "
4095 "value numbering store vdefs to matching vuses.\n");
4097 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
4100 return changed;
4103 /* Visit and value number PHI, return true if the value number
4104 changed. When BACKEDGES_VARYING_P is true then assume all
4105 backedge values are varying. When INSERTED is not NULL then
4106 this is just a lookahead query for a possible iteration; set INSERTED
4107 to true if we'd insert into the hashtable. */
4109 static bool
4110 visit_phi (gimple *phi, bool *inserted, bool backedges_varying_p)
4112 tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
4113 tree backedge_name = NULL_TREE;
4114 tree sameval_base = NULL_TREE;
4115 poly_int64 soff, doff;
4116 unsigned n_executable = 0;
4117 bool allsame = true;
4118 edge_iterator ei;
4119 edge e;
4121 /* TODO: We could check for this in initialization, and replace this
4122 with a gcc_assert. */
4123 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
4124 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
4126 /* We track whether a PHI was CSEd to, to avoid excessive iterations
4127 that would be necessary only because the PHI changed arguments
4128 but not value. */
4129 if (!inserted)
4130 gimple_set_plf (phi, GF_PLF_1, false);
4132 /* See if all non-TOP arguments have the same value. TOP is
4133 equivalent to everything, so we can ignore it. */
4134 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
4135 if (e->flags & EDGE_EXECUTABLE)
4137 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4139 ++n_executable;
4140 if (TREE_CODE (def) == SSA_NAME)
4142 if (e->flags & EDGE_DFS_BACK)
4143 backedge_name = def;
4144 if (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK))
4145 def = SSA_VAL (def);
4147 if (def == VN_TOP)
4149 /* Ignore undefined defs for sameval but record one. */
4150 else if (TREE_CODE (def) == SSA_NAME
4151 && ! virtual_operand_p (def)
4152 && ssa_undefined_value_p (def, false))
4153 seen_undef = def;
4154 else if (sameval == VN_TOP)
4155 sameval = def;
4156 else if (!expressions_equal_p (def, sameval))
4158 /* We know we're arriving only with invariant addresses here,
4159 try harder comparing them. We can do some caching here
4160 which we cannot do in expressions_equal_p. */
4161 if (TREE_CODE (def) == ADDR_EXPR
4162 && TREE_CODE (sameval) == ADDR_EXPR
4163 && sameval_base != (void *)-1)
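/* Cache the base and constant offset of SAMEVAL; (void *)-1 marks it
as not analyzable so we do not retry the decomposition. */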
4165 if (!sameval_base)
4166 sameval_base = get_addr_base_and_unit_offset
4167 (TREE_OPERAND (sameval, 0), &soff);
4168 if (!sameval_base)
4169 sameval_base = (tree)(void *)-1;
4170 else if ((get_addr_base_and_unit_offset
4171 (TREE_OPERAND (def, 0), &doff) == sameval_base)
4172 && known_eq (soff, doff))
4173 continue;
4175 allsame = false;
4176 break;
4180 /* If the value we want to use is the backedge and that wasn't visited
4181 yet drop to VARYING. */
4182 if (backedge_name
4183 && sameval == backedge_name
4184 && !SSA_VISITED (backedge_name))
4185 result = PHI_RESULT (phi);
4186 /* If none of the edges was executable keep the value-number at VN_TOP,
4187 if only a single edge is executable, use its value. */
4188 else if (n_executable <= 1)
4189 result = seen_undef ? seen_undef : sameval;
4190 /* If we saw only undefined values and VN_TOP use one of the
4191 undefined values. */
4192 else if (sameval == VN_TOP)
4193 result = seen_undef ? seen_undef : sameval;
4194 /* First see if it is equivalent to a phi node in this block. We prefer
4195 this as it allows IV elimination - see PRs 66502 and 67167. */
4196 else if ((result = vn_phi_lookup (phi, backedges_varying_p)))
4198 if (!inserted
4199 && TREE_CODE (result) == SSA_NAME
4200 && gimple_code (SSA_NAME_DEF_STMT (result)) == GIMPLE_PHI)
4202 gimple_set_plf (SSA_NAME_DEF_STMT (result), GF_PLF_1, true);
4203 if (dump_file && (dump_flags & TDF_DETAILS))
4205 fprintf (dump_file, "Marking CSEd to PHI node ");
4206 print_gimple_expr (dump_file, SSA_NAME_DEF_STMT (result),
4207 0, TDF_SLIM);
4208 fprintf (dump_file, "\n");
4212 /* If all values are the same use that, unless we've seen undefined
4213 values as well and the value isn't constant.
4214 CCP/copyprop have the same restriction to not remove uninit warnings. */
4215 else if (allsame
4216 && (! seen_undef || is_gimple_min_invariant (sameval)))
4217 result = sameval;
4218 else
4220 result = PHI_RESULT (phi);
4221 /* Only insert PHIs that are varying; for constant value numbers
4222 we would mess up equivalences otherwise, as we are only comparing
4223 the immediate controlling predicates. */
4224 vn_phi_insert (phi, result, backedges_varying_p);
4225 if (inserted)
4226 *inserted = true;
4229 return set_ssa_val_to (PHI_RESULT (phi), result);
4232 /* Try to simplify RHS using equivalences and constant folding. */
4234 static tree
4235 try_to_simplify (gassign *stmt)
4237 enum tree_code code = gimple_assign_rhs_code (stmt);
4238 tree tem;
4240 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
4241 in this case, there is no point in doing extra work. */
4242 if (code == SSA_NAME)
4243 return NULL_TREE;
4245 /* First try constant folding based on our current lattice. */
4246 mprts_hook = vn_lookup_simplify_result;
4247 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
4248 mprts_hook = NULL;
4249 if (tem
4250 && (TREE_CODE (tem) == SSA_NAME
4251 || is_gimple_min_invariant (tem)))
4252 return tem;
4254 return NULL_TREE;
4257 /* Visit and value number STMT, return true if the value number
4258 changed. */
4260 static bool
4261 visit_stmt (gimple *stmt, bool backedges_varying_p = false)
4263 bool changed = false;
4265 if (dump_file && (dump_flags & TDF_DETAILS))
4267 fprintf (dump_file, "Value numbering stmt = ");
4268 print_gimple_stmt (dump_file, stmt, 0);
4271 if (gimple_code (stmt) == GIMPLE_PHI)
4272 changed = visit_phi (stmt, NULL, backedges_varying_p);
4273 else if (gimple_has_volatile_ops (stmt))
4274 changed = defs_to_varying (stmt);
4275 else if (gassign *ass = dyn_cast <gassign *> (stmt))
4277 enum tree_code code = gimple_assign_rhs_code (ass);
4278 tree lhs = gimple_assign_lhs (ass);
4279 tree rhs1 = gimple_assign_rhs1 (ass);
4280 tree simplified;
4282 /* Shortcut for copies. Simplifying copies is pointless,
4283 since we copy the expression and value they represent. */
4284 if (code == SSA_NAME
4285 && TREE_CODE (lhs) == SSA_NAME)
4287 changed = visit_copy (lhs, rhs1);
4288 goto done;
4290 simplified = try_to_simplify (ass);
4291 if (simplified)
4293 if (dump_file && (dump_flags & TDF_DETAILS))
4295 fprintf (dump_file, "RHS ");
4296 print_gimple_expr (dump_file, ass, 0);
4297 fprintf (dump_file, " simplified to ");
4298 print_generic_expr (dump_file, simplified);
4299 fprintf (dump_file, "\n");
4302 /* Setting value numbers to constants will occasionally
4303 screw up phi congruence because constants are not
4304 uniquely associated with a single ssa name that can be
4305 looked up. */
4306 if (simplified
4307 && is_gimple_min_invariant (simplified)
4308 && TREE_CODE (lhs) == SSA_NAME)
4310 changed = set_ssa_val_to (lhs, simplified);
4311 goto done;
4313 else if (simplified
4314 && TREE_CODE (simplified) == SSA_NAME
4315 && TREE_CODE (lhs) == SSA_NAME)
4317 changed = visit_copy (lhs, simplified);
4318 goto done;
4321 if ((TREE_CODE (lhs) == SSA_NAME
4322 /* We can substitute SSA_NAMEs that are live over
4323 abnormal edges with their constant value. */
4324 && !(gimple_assign_copy_p (ass)
4325 && is_gimple_min_invariant (rhs1))
4326 && !(simplified
4327 && is_gimple_min_invariant (simplified))
4328 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4329 /* Stores or copies from SSA_NAMEs that are live over
4330 abnormal edges are a problem. */
4331 || (code == SSA_NAME
4332 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
4333 changed = defs_to_varying (ass);
4334 else if (REFERENCE_CLASS_P (lhs)
4335 || DECL_P (lhs))
4336 changed = visit_reference_op_store (lhs, rhs1, ass);
4337 else if (TREE_CODE (lhs) == SSA_NAME)
4339 if ((gimple_assign_copy_p (ass)
4340 && is_gimple_min_invariant (rhs1))
4341 || (simplified
4342 && is_gimple_min_invariant (simplified)))
4344 if (simplified)
4345 changed = set_ssa_val_to (lhs, simplified);
4346 else
4347 changed = set_ssa_val_to (lhs, rhs1);
4349 else
4351 /* Visit the original statement. */
4352 switch (vn_get_stmt_kind (ass))
4354 case VN_NARY:
4355 changed = visit_nary_op (lhs, ass);
4356 break;
4357 case VN_REFERENCE:
4358 changed = visit_reference_op_load (lhs, rhs1, ass);
4359 break;
4360 default:
4361 changed = defs_to_varying (ass);
4362 break;
4366 else
4367 changed = defs_to_varying (ass);
4369 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
4371 tree lhs = gimple_call_lhs (call_stmt);
4372 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4374 /* Try constant folding based on our current lattice. */
4375 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
4376 vn_valueize);
4377 if (simplified)
4379 if (dump_file && (dump_flags & TDF_DETAILS))
4381 fprintf (dump_file, "call ");
4382 print_gimple_expr (dump_file, call_stmt, 0);
4383 fprintf (dump_file, " simplified to ");
4384 print_generic_expr (dump_file, simplified);
4385 fprintf (dump_file, "\n");
4388 /* Setting value numbers to constants will occasionally
4389 screw up phi congruence because constants are not
4390 uniquely associated with a single ssa name that can be
4391 looked up. */
4392 if (simplified
4393 && is_gimple_min_invariant (simplified))
4395 changed = set_ssa_val_to (lhs, simplified);
4396 if (gimple_vdef (call_stmt))
4397 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4398 SSA_VAL (gimple_vuse (call_stmt)));
4399 goto done;
4401 else if (simplified
4402 && TREE_CODE (simplified) == SSA_NAME)
4404 changed = visit_copy (lhs, simplified);
4405 if (gimple_vdef (call_stmt))
4406 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4407 SSA_VAL (gimple_vuse (call_stmt)));
4408 goto done;
4410 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4412 changed = defs_to_varying (call_stmt);
4413 goto done;
4417 /* Pick up flags from a devirtualization target. */
4418 tree fn = gimple_call_fn (stmt);
4419 int extra_fnflags = 0;
4420 if (fn && TREE_CODE (fn) == SSA_NAME)
4422 fn = SSA_VAL (fn);
4423 if (TREE_CODE (fn) == ADDR_EXPR
4424 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4425 extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
4427 if (!gimple_call_internal_p (call_stmt)
4428 && (/* Calls to the same function with the same vuse
4429 and the same operands do not necessarily return the same
4430 value, unless they're pure or const. */
4431 ((gimple_call_flags (call_stmt) | extra_fnflags)
4432 & (ECF_PURE | ECF_CONST))
4433 /* If calls have a vdef, subsequent calls won't have
4434 the same incoming vuse. So, if 2 calls with vdef have the
4435 same vuse, we know they're not subsequent.
4436 We can value number 2 calls to the same function with the
4437 same vuse and the same operands which are not subsequent
4438 the same, because there is no code in the program that can
4439 compare the 2 values... */
4440 || (gimple_vdef (call_stmt)
4441 /* ... unless the call returns a pointer which does
4442 not alias with anything else. In which case the
4443 information that the values are distinct is encoded
4444 in the IL. */
4445 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
4446 /* Only perform the following when being called from PRE
4447 which embeds tail merging. */
4448 && default_vn_walk_kind == VN_WALK)))
4449 changed = visit_reference_op_call (lhs, call_stmt);
4450 else
4451 changed = defs_to_varying (call_stmt);
4453 else
4454 changed = defs_to_varying (stmt);
4455 done:
4456 return changed;
4460 /* Allocate a value number table. */
4462 static void
4463 allocate_vn_table (vn_tables_t table, unsigned size)
4465 table->phis = new vn_phi_table_type (size);
4466 table->nary = new vn_nary_op_table_type (size);
4467 table->references = new vn_reference_table_type (size);
4470 /* Free a value number table. */
4472 static void
4473 free_vn_table (vn_tables_t table)
4475 /* Walk over elements and release vectors. */
4476 vn_reference_iterator_type hir;
4477 vn_reference_t vr;
4478 FOR_EACH_HASH_TABLE_ELEMENT (*table->references, vr, vn_reference_t, hir)
4479 vr->operands.release ();
4480 delete table->phis;
4481 table->phis = NULL;
4482 delete table->nary;
4483 table->nary = NULL;
4484 delete table->references;
4485 table->references = NULL;
4488 /* Set *ID according to RESULT. */
4490 static void
4491 set_value_id_for_result (tree result, unsigned int *id)
4493 if (result && TREE_CODE (result) == SSA_NAME)
4494 *id = VN_INFO (result)->value_id;
4495 else if (result && is_gimple_min_invariant (result))
4496 *id = get_or_alloc_constant_value_id (result);
4497 else
4498 *id = get_next_value_id ();
4501 /* Set the value ids in the valid hash tables. */
4503 static void
4504 set_hashtable_value_ids (void)
4506 vn_nary_op_iterator_type hin;
4507 vn_phi_iterator_type hip;
4508 vn_reference_iterator_type hir;
4509 vn_nary_op_t vno;
4510 vn_reference_t vr;
4511 vn_phi_t vp;
4513 /* Now set the value ids of the things we had put in the hash
4514 table. */
4516 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4517 if (! vno->predicated_values)
4518 set_value_id_for_result (vno->u.result, &vno->value_id);
4520 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4521 set_value_id_for_result (vp->result, &vp->value_id);
4523 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4524 hir)
4525 set_value_id_for_result (vr->result, &vr->value_id);
4528 /* Return the maximum value id we have ever seen. */
4530 unsigned int
4531 get_max_value_id (void)
4533 return next_value_id;
4536 /* Return the next unique value id. */
4538 unsigned int
4539 get_next_value_id (void)
4541 return next_value_id++;
4545 /* Compare two expressions E1 and E2 and return true if they are equal. */
4547 bool
4548 expressions_equal_p (tree e1, tree e2)
4550 /* The obvious case. */
4551 if (e1 == e2)
4552 return true;
4554 /* If either one is VN_TOP consider them equal. */
4555 if (e1 == VN_TOP || e2 == VN_TOP)
4556 return true;
4558 /* If only one of them is null, they cannot be equal. */
4559 if (!e1 || !e2)
4560 return false;
4562 /* Now perform the actual comparison. */
4563 if (TREE_CODE (e1) == TREE_CODE (e2)
4564 && operand_equal_p (e1, e2, OEP_PURE_SAME))
4565 return true;
4567 return false;
4571 /* Return true if the nary operation NARY may trap. This is a copy
4572 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
4574 bool
4575 vn_nary_may_trap (vn_nary_op_t nary)
4577 tree type;
4578 tree rhs2 = NULL_TREE;
4579 bool honor_nans = false;
4580 bool honor_snans = false;
4581 bool fp_operation = false;
4582 bool honor_trapv = false;
4583 bool handled, ret;
4584 unsigned i;
4586 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
4587 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
4588 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
4590 type = nary->type;
4591 fp_operation = FLOAT_TYPE_P (type);
4592 if (fp_operation)
4594 honor_nans = flag_trapping_math && !flag_finite_math_only;
4595 honor_snans = flag_signaling_nans != 0;
4597 else if (INTEGRAL_TYPE_P (type)
4598 && TYPE_OVERFLOW_TRAPS (type))
4599 honor_trapv = true;
4601 if (nary->length >= 2)
4602 rhs2 = nary->op[1];
4603 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
4604 honor_trapv,
4605 honor_nans, honor_snans, rhs2,
4606 &handled);
4607 if (handled
4608 && ret)
4609 return true;
4611 for (i = 0; i < nary->length; ++i)
4612 if (tree_could_trap_p (nary->op[i]))
4613 return true;
4615 return false;
4619 class eliminate_dom_walker : public dom_walker
4621 public:
4622 eliminate_dom_walker (cdi_direction, bitmap);
4623 ~eliminate_dom_walker ();
4625 virtual edge before_dom_children (basic_block);
4626 virtual void after_dom_children (basic_block);
4628 virtual tree eliminate_avail (basic_block, tree op);
4629 virtual void eliminate_push_avail (basic_block, tree op);
4630 tree eliminate_insert (basic_block, gimple_stmt_iterator *gsi, tree val);
4632 void eliminate_stmt (basic_block, gimple_stmt_iterator *);
4634 unsigned eliminate_cleanup (bool region_p = false);
4636 bool do_pre;
4637 unsigned int el_todo;
4638 unsigned int eliminations;
4639 unsigned int insertions;
4641 /* SSA names that had their defs inserted by PRE if do_pre. */
4642 bitmap inserted_exprs;
4644 /* Blocks with statements that have had their EH properties changed. */
4645 bitmap need_eh_cleanup;
4647 /* Blocks with statements that have had their AB properties changed. */
4648 bitmap need_ab_cleanup;
4650 /* Local state for the eliminate domwalk. */
4651 auto_vec<gimple *> to_remove;
4652 auto_vec<gimple *> to_fixup;
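/* Availability: AVAIL maps SSA value-number versions to the current
leader, AVAIL_STACK records what to restore in after_dom_children. */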
4653 auto_vec<tree> avail;
4654 auto_vec<tree> avail_stack;
4657 eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
4658 bitmap inserted_exprs_)
4659 : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
4660 el_todo (0), eliminations (0), insertions (0),
4661 inserted_exprs (inserted_exprs_)
4663 need_eh_cleanup = BITMAP_ALLOC (NULL);
4664 need_ab_cleanup = BITMAP_ALLOC (NULL);
4667 eliminate_dom_walker::~eliminate_dom_walker ()
4669 BITMAP_FREE (need_eh_cleanup);
4670 BITMAP_FREE (need_ab_cleanup);
4673 /* Return a leader for OP that is available at the current point of the
4674 eliminate domwalk. */
4676 tree
4677 eliminate_dom_walker::eliminate_avail (basic_block, tree op)
4679 tree valnum = VN_INFO (op)->valnum;
4680 if (TREE_CODE (valnum) == SSA_NAME)
4682 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
4683 return valnum;
4684 if (avail.length () > SSA_NAME_VERSION (valnum))
4685 return avail[SSA_NAME_VERSION (valnum)];
4687 else if (is_gimple_min_invariant (valnum))
4688 return valnum;
4689 return NULL_TREE;
4692 /* At the current point of the eliminate domwalk make OP available. */
4694 void
4695 eliminate_dom_walker::eliminate_push_avail (basic_block, tree op)
4697 tree valnum = VN_INFO (op)->valnum;
4698 if (TREE_CODE (valnum) == SSA_NAME)
4700 if (avail.length () <= SSA_NAME_VERSION (valnum))
4701 avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
4702 tree pushop = op;
4703 if (avail[SSA_NAME_VERSION (valnum)])
4704 pushop = avail[SSA_NAME_VERSION (valnum)];
4705 avail_stack.safe_push (pushop);
4706 avail[SSA_NAME_VERSION (valnum)] = op;
4710 /* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
4711 the leader for the expression if insertion was successful. */
4713 tree
4714 eliminate_dom_walker::eliminate_insert (basic_block bb,
4715 gimple_stmt_iterator *gsi, tree val)
4717 /* We can insert a sequence with a single assignment only. */
4718 gimple_seq stmts = VN_INFO (val)->expr;
4719 if (!gimple_seq_singleton_p (stmts))
4720 return NULL_TREE;
4721 gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
4722 if (!stmt
4723 || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
4724 && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
4725 && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
4726 && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
4727 || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
4728 return NULL_TREE;
4730 tree op = gimple_assign_rhs1 (stmt);
4731 if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
4732 || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
4733 op = TREE_OPERAND (op, 0);
4734 tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (bb, op) : op;
4735 if (!leader)
4736 return NULL_TREE;
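/* Re-build the single recorded operation on top of the available leader. */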
4738 tree res;
4739 stmts = NULL;
4740 if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
4741 res = gimple_build (&stmts, BIT_FIELD_REF,
4742 TREE_TYPE (val), leader,
4743 TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
4744 TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
4745 else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
4746 res = gimple_build (&stmts, BIT_AND_EXPR,
4747 TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
4748 else
4749 res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
4750 TREE_TYPE (val), leader);
4751 if (TREE_CODE (res) != SSA_NAME
4752 || SSA_NAME_IS_DEFAULT_DEF (res)
4753 || gimple_bb (SSA_NAME_DEF_STMT (res)))
4755 gimple_seq_discard (stmts);
4757 /* During propagation we have to treat SSA info conservatively
4758 and thus we can end up simplifying the inserted expression
4759 at elimination time to sth not defined in stmts. */
4760 /* But then this is a redundancy we failed to detect. Which means
4761 res now has two values. That doesn't play well with how
4762 we track availability here, so give up. */
4763 if (dump_file && (dump_flags & TDF_DETAILS))
4765 if (TREE_CODE (res) == SSA_NAME)
4766 res = eliminate_avail (bb, res);
4767 if (res)
4769 fprintf (dump_file, "Failed to insert expression for value ");
4770 print_generic_expr (dump_file, val);
4771 fprintf (dump_file, " which is really fully redundant to ");
4772 print_generic_expr (dump_file, res);
4773 fprintf (dump_file, "\n");
4777 return NULL_TREE;
4779 else
4781 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
4782 VN_INFO (res)->valnum = val;
4783 VN_INFO (res)->visited = true;
4786 insertions++;
4787 if (dump_file && (dump_flags & TDF_DETAILS))
4789 fprintf (dump_file, "Inserted ");
4790 print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
4793 return res;
4796 void
4797 eliminate_dom_walker::eliminate_stmt (basic_block b, gimple_stmt_iterator *gsi)
4799 tree sprime = NULL_TREE;
4800 gimple *stmt = gsi_stmt (*gsi);
4801 tree lhs = gimple_get_lhs (stmt);
4802 if (lhs && TREE_CODE (lhs) == SSA_NAME
4803 && !gimple_has_volatile_ops (stmt)
4804 /* See PR43491. Do not replace a global register variable when
4805 it is the RHS of an assignment. Do replace local register
4806 variables since gcc does not guarantee a local variable will
4807 be allocated in a register.
4808 ??? The fix isn't effective here. This should instead
4809 be ensured by not value-numbering them the same but treating
4810 them like volatiles? */
4811 && !(gimple_assign_single_p (stmt)
4812 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
4813 && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
4814 && is_global_var (gimple_assign_rhs1 (stmt)))))
4816 sprime = eliminate_avail (b, lhs);
4817 if (!sprime)
4819 /* If there is no existing usable leader but SCCVN thinks
4820 it has an expression it wants to use as replacement,
4821 insert that. */
4822 tree val = VN_INFO (lhs)->valnum;
4823 if (val != VN_TOP
4824 && TREE_CODE (val) == SSA_NAME
4825 && VN_INFO (val)->needs_insertion
4826 && VN_INFO (val)->expr != NULL
4827 && (sprime = eliminate_insert (b, gsi, val)) != NULL_TREE)
4828 eliminate_push_avail (b, sprime);
4831 /* If this now constitutes a copy, duplicate points-to
4832 and range info appropriately. This is especially
4833 important for inserted code. See tree-ssa-copy.c
4834 for similar code. */
4835 if (sprime
4836 && TREE_CODE (sprime) == SSA_NAME)
4838 basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
4839 if (POINTER_TYPE_P (TREE_TYPE (lhs))
4840 && SSA_NAME_PTR_INFO (lhs)
4841 && ! SSA_NAME_PTR_INFO (sprime))
4843 duplicate_ssa_name_ptr_info (sprime,
4844 SSA_NAME_PTR_INFO (lhs));
4845 if (b != sprime_b)
4846 mark_ptr_info_alignment_unknown
4847 (SSA_NAME_PTR_INFO (sprime));
4849 else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
4850 && SSA_NAME_RANGE_INFO (lhs)
4851 && ! SSA_NAME_RANGE_INFO (sprime)
4852 && b == sprime_b)
4853 duplicate_ssa_name_range_info (sprime,
4854 SSA_NAME_RANGE_TYPE (lhs),
4855 SSA_NAME_RANGE_INFO (lhs));
4858 /* Inhibit the use of an inserted PHI on a loop header when
4859 the address of the memory reference is a simple induction
4860 variable. In other cases the vectorizer won't do anything
4861 anyway (either it's loop invariant or a complicated
4862 expression). */
4863 if (sprime
4864 && TREE_CODE (sprime) == SSA_NAME
4865 && do_pre
4866 && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
4867 && loop_outer (b->loop_father)
4868 && has_zero_uses (sprime)
4869 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
4870 && gimple_assign_load_p (stmt))
4872 gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
4873 basic_block def_bb = gimple_bb (def_stmt);
4874 if (gimple_code (def_stmt) == GIMPLE_PHI
4875 && def_bb->loop_father->header == def_bb)
4877 loop_p loop = def_bb->loop_father;
4878 ssa_op_iter iter;
4879 tree op;
4880 bool found = false;
4881 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
4883 affine_iv iv;
4884 def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
4885 if (def_bb
4886 && flow_bb_inside_loop_p (loop, def_bb)
4887 && simple_iv (loop, loop, op, &iv, true))
4889 found = true;
4890 break;
4893 if (found)
4895 if (dump_file && (dump_flags & TDF_DETAILS))
4897 fprintf (dump_file, "Not replacing ");
4898 print_gimple_expr (dump_file, stmt, 0);
4899 fprintf (dump_file, " with ");
4900 print_generic_expr (dump_file, sprime);
4901 fprintf (dump_file, " which would add a loop"
4902 " carried dependence to loop %d\n",
4903 loop->num);
4905 /* Don't keep sprime available. */
4906 sprime = NULL_TREE;
4911 if (sprime)
4913 /* If we can propagate the value computed for LHS into
4914 all uses don't bother doing anything with this stmt. */
4915 if (may_propagate_copy (lhs, sprime))
4917 /* Mark it for removal. */
4918 to_remove.safe_push (stmt);
4920 /* ??? Don't count copy/constant propagations. */
4921 if (gimple_assign_single_p (stmt)
4922 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
4923 || gimple_assign_rhs1 (stmt) == sprime))
4924 return;
4926 if (dump_file && (dump_flags & TDF_DETAILS))
4928 fprintf (dump_file, "Replaced ");
4929 print_gimple_expr (dump_file, stmt, 0);
4930 fprintf (dump_file, " with ");
4931 print_generic_expr (dump_file, sprime);
4932 fprintf (dump_file, " in all uses of ");
4933 print_gimple_stmt (dump_file, stmt, 0);
4936 eliminations++;
4937 return;
4940 /* If this is an assignment from our leader (which
4941 happens in the case the value-number is a constant)
4942 then there is nothing to do. */
4943 if (gimple_assign_single_p (stmt)
4944 && sprime == gimple_assign_rhs1 (stmt))
4945 return;
4947 /* Else replace its RHS. */
4948 bool can_make_abnormal_goto
4949 = is_gimple_call (stmt)
4950 && stmt_can_make_abnormal_goto (stmt);
4952 if (dump_file && (dump_flags & TDF_DETAILS))
4954 fprintf (dump_file, "Replaced ");
4955 print_gimple_expr (dump_file, stmt, 0);
4956 fprintf (dump_file, " with ");
4957 print_generic_expr (dump_file, sprime);
4958 fprintf (dump_file, " in ");
4959 print_gimple_stmt (dump_file, stmt, 0);
4962 eliminations++;
4963 gimple *orig_stmt = stmt;
4964 if (!useless_type_conversion_p (TREE_TYPE (lhs),
4965 TREE_TYPE (sprime)))
4966 sprime = fold_convert (TREE_TYPE (lhs), sprime);
4967 tree vdef = gimple_vdef (stmt);
4968 tree vuse = gimple_vuse (stmt);
4969 propagate_tree_value_into_stmt (gsi, sprime);
4970 stmt = gsi_stmt (*gsi);
4971 update_stmt (stmt);
4972 /* In case the VDEF on the original stmt was released, value-number
4973 it to the VUSE. This is to make vuse_ssa_val able to skip
4974 released virtual operands. */
4975 if (vdef != gimple_vdef (stmt))
4977 gcc_assert (SSA_NAME_IN_FREE_LIST (vdef));
4978 VN_INFO (vdef)->valnum = vuse;
4981 /* If we removed EH side-effects from the statement, clean
4982 its EH information. */
4983 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
4985 bitmap_set_bit (need_eh_cleanup,
4986 gimple_bb (stmt)->index);
4987 if (dump_file && (dump_flags & TDF_DETAILS))
4988 fprintf (dump_file, " Removed EH side-effects.\n");
4991 /* Likewise for AB side-effects. */
4992 if (can_make_abnormal_goto
4993 && !stmt_can_make_abnormal_goto (stmt))
4995 bitmap_set_bit (need_ab_cleanup,
4996 gimple_bb (stmt)->index);
4997 if (dump_file && (dump_flags & TDF_DETAILS))
4998 fprintf (dump_file, " Removed AB side-effects.\n");
5001 return;
5005 /* If the statement is a scalar store, see if the expression
5006 has the same value number as its rhs. If so, the store is
5007 dead. */
5008 if (gimple_assign_single_p (stmt)
5009 && !gimple_has_volatile_ops (stmt)
5010 && !is_gimple_reg (gimple_assign_lhs (stmt))
5011 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5012 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
5014 tree val;
5015 tree rhs = gimple_assign_rhs1 (stmt);
5016 vn_reference_t vnresult;
5017 val = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_WALKREWRITE,
5018 &vnresult, false);
5019 if (TREE_CODE (rhs) == SSA_NAME)
5020 rhs = VN_INFO (rhs)->valnum;
5021 if (val
5022 && operand_equal_p (val, rhs, 0))
5024 /* We can only remove the later store if the former aliases
5025 at least all accesses the later one does or if the store
5026 was to readonly memory storing the same value. */
5027 alias_set_type set = get_alias_set (lhs);
5028 if (! vnresult
5029 || vnresult->set == set
5030 || alias_set_subset_of (set, vnresult->set))
5032 if (dump_file && (dump_flags & TDF_DETAILS))
5034 fprintf (dump_file, "Deleted redundant store ");
5035 print_gimple_stmt (dump_file, stmt, 0);
5038 /* Queue stmt for removal. */
5039 to_remove.safe_push (stmt);
5040 return;
5045 /* If this is a control statement for which value numbering left edges
5046 unexecuted, force the condition in a way consistent with
5047 that. */
5048 if (gcond *cond = dyn_cast <gcond *> (stmt))
5050 if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
5051 ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
5053 if (dump_file && (dump_flags & TDF_DETAILS))
5055 fprintf (dump_file, "Removing unexecutable edge from ");
5056 print_gimple_stmt (dump_file, stmt, 0);
5058 if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
5059 == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
5060 gimple_cond_make_true (cond);
5061 else
5062 gimple_cond_make_false (cond);
5063 update_stmt (cond);
5064 el_todo |= TODO_cleanup_cfg;
5065 return;
5069 bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
5070 bool was_noreturn = (is_gimple_call (stmt)
5071 && gimple_call_noreturn_p (stmt));
5072 tree vdef = gimple_vdef (stmt);
5073 tree vuse = gimple_vuse (stmt);
5075 /* If we didn't replace the whole stmt (or propagate the result
5076 into all uses), replace all uses on this stmt with their
5077 leaders. */
5078 bool modified = false;
5079 use_operand_p use_p;
5080 ssa_op_iter iter;
5081 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5083 tree use = USE_FROM_PTR (use_p);
5084 /* ??? The call code above leaves stmt operands un-updated. */
5085 if (TREE_CODE (use) != SSA_NAME)
5086 continue;
5087 tree sprime;
5088 if (SSA_NAME_IS_DEFAULT_DEF (use))
5089 /* ??? For default defs BB shouldn't matter, but we have to
5090 solve the inconsistency between rpo eliminate and
5091 dom eliminate avail valueization first. */
5092 sprime = eliminate_avail (b, use);
5093 else
5094 /* Look for sth available at the definition block of the argument.
5095 This avoids inconsistencies between availability there which
5096 decides if the stmt can be removed and availability at the
5097 use site. The SSA property ensures that things available
5098 at the definition are also available at uses. */
5099 sprime = eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (use)), use);
5100 if (sprime && sprime != use
5101 && may_propagate_copy (use, sprime)
5102 /* We substitute into debug stmts to avoid excessive
5103 debug temporaries created by removed stmts, but we need
5104 to avoid doing so for inserted sprimes as we never want
5105 to create debug temporaries for them. */
5106 && (!inserted_exprs
5107 || TREE_CODE (sprime) != SSA_NAME
5108 || !is_gimple_debug (stmt)
5109 || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
5111 propagate_value (use_p, sprime);
5112 modified = true;
5116 /* Fold the stmt if modified, this canonicalizes MEM_REFs we propagated
5117 into which is a requirement for the IPA devirt machinery. */
5118 gimple *old_stmt = stmt;
5119 if (modified)
5121 /* If a formerly non-invariant ADDR_EXPR is turned into an
5122 invariant one it was on a separate stmt. */
5123 if (gimple_assign_single_p (stmt)
5124 && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
5125 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
5126 gimple_stmt_iterator prev = *gsi;
5127 gsi_prev (&prev);
5128 if (fold_stmt (gsi))
5130 /* fold_stmt may have created new stmts in between
5131 the previous stmt and the folded stmt. Mark
5132 all defs created there as varying to not confuse
5133 the SCCVN machinery as we're using that even during
5134 elimination. */
5135 if (gsi_end_p (prev))
5136 prev = gsi_start_bb (b);
5137 else
5138 gsi_next (&prev);
5139 if (gsi_stmt (prev) != gsi_stmt (*gsi))
5142 tree def;
5143 ssa_op_iter dit;
5144 FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
5145 dit, SSA_OP_ALL_DEFS)
5146 /* As existing DEFs may move between stmts
5147 only process new ones. */
5148 if (! has_VN_INFO (def))
5150 VN_INFO (def)->valnum = def;
5151 VN_INFO (def)->visited = true;
5153 if (gsi_stmt (prev) == gsi_stmt (*gsi))
5154 break;
5155 gsi_next (&prev);
5157 while (1);
5159 stmt = gsi_stmt (*gsi);
5160 /* In case we folded the stmt away schedule the NOP for removal. */
5161 if (gimple_nop_p (stmt))
5162 to_remove.safe_push (stmt);
5165 /* Visit indirect calls and turn them into direct calls if
5166 possible using the devirtualization machinery. Do this before
5167 checking for required EH/abnormal/noreturn cleanup as devirt
5168 may expose more of those. */
5169 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
5171 tree fn = gimple_call_fn (call_stmt);
5172 if (fn
5173 && flag_devirtualize
5174 && virtual_method_call_p (fn))
5176 tree otr_type = obj_type_ref_class (fn);
5177 unsigned HOST_WIDE_INT otr_tok
5178 = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
5179 tree instance;
5180 ipa_polymorphic_call_context context (current_function_decl,
5181 fn, stmt, &instance);
5182 context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
5183 otr_type, stmt);
5184 bool final;
5185 vec <cgraph_node *> targets
5186 = possible_polymorphic_call_targets (obj_type_ref_class (fn),
5187 otr_tok, context, &final);
5188 if (dump_file)
5189 dump_possible_polymorphic_call_targets (dump_file,
5190 obj_type_ref_class (fn),
5191 otr_tok, context);
5192 if (final && targets.length () <= 1 && dbg_cnt (devirt))
5194 tree fn;
5195 if (targets.length () == 1)
5196 fn = targets[0]->decl;
5197 else
5198 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5199 if (dump_enabled_p ())
5201 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
5202 "converting indirect call to "
5203 "function %s\n",
5204 lang_hooks.decl_printable_name (fn, 2));
5206 gimple_call_set_fndecl (call_stmt, fn);
5207 /* If changing the call to __builtin_unreachable
5208 or similar noreturn function, adjust gimple_call_fntype
5209 too. */
5210 if (gimple_call_noreturn_p (call_stmt)
5211 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
5212 && TYPE_ARG_TYPES (TREE_TYPE (fn))
5213 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
5214 == void_type_node))
5215 gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
5216 maybe_remove_unused_call_args (cfun, call_stmt);
5217 modified = true;
5222 if (modified)
5224 /* When changing a call into a noreturn call, cfg cleanup
5225 is needed to fix up the noreturn call. */
5226 if (!was_noreturn
5227 && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
5228 to_fixup.safe_push (stmt);
5229 /* When changing a condition or switch into one we know what
5230 edge will be executed, schedule a cfg cleanup. */
5231 if ((gimple_code (stmt) == GIMPLE_COND
5232 && (gimple_cond_true_p (as_a <gcond *> (stmt))
5233 || gimple_cond_false_p (as_a <gcond *> (stmt))))
5234 || (gimple_code (stmt) == GIMPLE_SWITCH
5235 && TREE_CODE (gimple_switch_index
5236 (as_a <gswitch *> (stmt))) == INTEGER_CST))
5237 el_todo |= TODO_cleanup_cfg;
5238 /* If we removed EH side-effects from the statement, clean
5239 its EH information. */
5240 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
5242 bitmap_set_bit (need_eh_cleanup,
5243 gimple_bb (stmt)->index);
5244 if (dump_file && (dump_flags & TDF_DETAILS))
5245 fprintf (dump_file, " Removed EH side-effects.\n");
5247 /* Likewise for AB side-effects. */
5248 if (can_make_abnormal_goto
5249 && !stmt_can_make_abnormal_goto (stmt))
5251 bitmap_set_bit (need_ab_cleanup,
5252 gimple_bb (stmt)->index);
5253 if (dump_file && (dump_flags & TDF_DETAILS))
5254 fprintf (dump_file, " Removed AB side-effects.\n");
5256 update_stmt (stmt);
5257 /* In case the VDEF on the original stmt was released, value-number
5258 it to the VUSE. This is to make vuse_ssa_val able to skip
5259 released virtual operands. */
5260 if (vdef && SSA_NAME_IN_FREE_LIST (vdef))
5261 VN_INFO (vdef)->valnum = vuse;
5264 /* Make new values available - for fully redundant LHS we
5265 continue with the next stmt above and skip this. */
5266 def_operand_p defp;
5267 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
5268 eliminate_push_avail (b, DEF_FROM_PTR (defp));
5271 /* Perform elimination for the basic-block B during the domwalk. */
5273 edge
5274 eliminate_dom_walker::before_dom_children (basic_block b)
5276 /* Mark new bb. */
5277 avail_stack.safe_push (NULL_TREE);
5279 /* Skip unreachable blocks marked unreachable during the SCCVN domwalk. */
5280 if (!(b->flags & BB_EXECUTABLE))
5281 return NULL;
5283 vn_context_bb = b;
5285 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
5287 gphi *phi = gsi.phi ();
5288 tree res = PHI_RESULT (phi);
5290 if (virtual_operand_p (res))
5292 gsi_next (&gsi);
5293 continue;
5296 tree sprime = eliminate_avail (b, res);
5297 if (sprime
5298 && sprime != res)
5300 if (dump_file && (dump_flags & TDF_DETAILS))
5302 fprintf (dump_file, "Replaced redundant PHI node defining ");
5303 print_generic_expr (dump_file, res);
5304 fprintf (dump_file, " with ");
5305 print_generic_expr (dump_file, sprime);
5306 fprintf (dump_file, "\n");
5309 /* If we inserted this PHI node ourself, it's not an elimination. */
5310 if (! inserted_exprs
5311 || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
5312 eliminations++;
5314 /* If we will propagate into all uses don't bother to do
5315 anything. */
5316 if (may_propagate_copy (res, sprime))
5318 /* Mark the PHI for removal. */
5319 to_remove.safe_push (phi);
5320 gsi_next (&gsi);
5321 continue;
5324 remove_phi_node (&gsi, false);
5326 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
5327 sprime = fold_convert (TREE_TYPE (res), sprime);
5328 gimple *stmt = gimple_build_assign (res, sprime);
5329 gimple_stmt_iterator gsi2 = gsi_after_labels (b);
5330 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
5331 continue;
5334 eliminate_push_avail (b, res);
5335 gsi_next (&gsi);
5338 for (gimple_stmt_iterator gsi = gsi_start_bb (b);
5339 !gsi_end_p (gsi);
5340 gsi_next (&gsi))
5341 eliminate_stmt (b, &gsi);
5343 /* Replace destination PHI arguments. */
5344 edge_iterator ei;
5345 edge e;
5346 FOR_EACH_EDGE (e, ei, b->succs)
5347 if (e->flags & EDGE_EXECUTABLE)
5348 for (gphi_iterator gsi = gsi_start_phis (e->dest);
5349 !gsi_end_p (gsi);
5350 gsi_next (&gsi))
5352 gphi *phi = gsi.phi ();
5353 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
5354 tree arg = USE_FROM_PTR (use_p);
5355 if (TREE_CODE (arg) != SSA_NAME
5356 || virtual_operand_p (arg))
5357 continue;
5358 tree sprime = eliminate_avail (b, arg);
5359 if (sprime && may_propagate_copy (arg, sprime))
5360 propagate_value (use_p, sprime);
5363 vn_context_bb = NULL;
5365 return NULL;
5368 /* Make no longer available leaders no longer available. */
5370 void
5371 eliminate_dom_walker::after_dom_children (basic_block)
5373 tree entry;
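/* AVAIL_STACK holds, for each name made available in the just-finished
block, the previous leader (or the name itself when there was none);
a NULL_TREE marker pushed in before_dom_children delimits the block. */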
5374 while ((entry = avail_stack.pop ()) != NULL_TREE)
5376 tree valnum = VN_INFO (entry)->valnum;
5377 tree old = avail[SSA_NAME_VERSION (valnum)];
5378 if (old == entry)
5379 avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
5380 else
5381 avail[SSA_NAME_VERSION (valnum)] = entry;
5385 /* Remove queued stmts and perform delayed cleanups. */
5387 unsigned
5388 eliminate_dom_walker::eliminate_cleanup (bool region_p)
5390 statistics_counter_event (cfun, "Eliminated", eliminations);
5391 statistics_counter_event (cfun, "Insertions", insertions);
5393 /* We cannot remove stmts during BB walk, especially not release SSA
5394 names there as this confuses the VN machinery. The stmts ending
5395 up in to_remove are either stores or simple copies.
5396 Remove stmts in reverse order to make debug stmt creation possible. */
5397 while (!to_remove.is_empty ())
5399 bool do_release_defs = true;
5400 gimple *stmt = to_remove.pop ();
5402 /* When we are value-numbering a region we do not require exit PHIs to
5403 be present so we have to make sure to deal with uses outside of the
5404 region of stmts that we thought are eliminated.
5405 ??? Note we may be confused by uses in dead regions we didn't run
5406 elimination on. Rather than checking individual uses we accept
5407 dead copies to be generated here (gcc.c-torture/execute/20060905-1.c
5408 contains such an example). */
5409 if (region_p)
5411 if (gphi *phi = dyn_cast <gphi *> (stmt))
5413 tree lhs = gimple_phi_result (phi);
5414 if (!has_zero_uses (lhs))
5416 if (dump_file && (dump_flags & TDF_DETAILS))
5417 fprintf (dump_file, "Keeping eliminated stmt live "
5418 "as copy because of out-of-region uses\n");
5419 tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
5420 gimple *copy = gimple_build_assign (lhs, sprime);
5421 gimple_stmt_iterator gsi
5422 = gsi_after_labels (gimple_bb (stmt));
5423 gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
5424 do_release_defs = false;
5427 else if (tree lhs = gimple_get_lhs (stmt))
5428 if (TREE_CODE (lhs) == SSA_NAME
5429 && !has_zero_uses (lhs))
5431 if (dump_file && (dump_flags & TDF_DETAILS))
5432 fprintf (dump_file, "Keeping eliminated stmt live "
5433 "as copy because of out-of-region uses\n");
5434 tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
5435 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5436 if (is_gimple_assign (stmt))
5438 gimple_assign_set_rhs_from_tree (&gsi, sprime);
5439 update_stmt (gsi_stmt (gsi));
5440 continue;
5442 else
5444 gimple *copy = gimple_build_assign (lhs, sprime);
5445 gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
5446 do_release_defs = false;
5451 if (dump_file && (dump_flags & TDF_DETAILS))
5453 fprintf (dump_file, "Removing dead stmt ");
5454 print_gimple_stmt (dump_file, stmt, 0, TDF_NONE);
5457 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5458 if (gimple_code (stmt) == GIMPLE_PHI)
5459 remove_phi_node (&gsi, do_release_defs);
5460 else
5462 basic_block bb = gimple_bb (stmt);
5463 unlink_stmt_vdef (stmt);
5464 if (gsi_remove (&gsi, true))
5465 bitmap_set_bit (need_eh_cleanup, bb->index);
5466 if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
5467 bitmap_set_bit (need_ab_cleanup, bb->index);
5468 if (do_release_defs)
5469 release_defs (stmt);
5472 /* Removing a stmt may expose a forwarder block. */
5473 el_todo |= TODO_cleanup_cfg;
5476 /* Fixup stmts that became noreturn calls. This may require splitting
5477 blocks and thus isn't possible during the dominator walk. Do this
5478 in reverse order so we don't inadvertently remove a stmt we want to
5479 fixup by visiting a dominating now noreturn call first. */
5480 while (!to_fixup.is_empty ())
5482 gimple *stmt = to_fixup.pop ();
5484 if (dump_file && (dump_flags & TDF_DETAILS))
5486 fprintf (dump_file, "Fixing up noreturn call ");
5487 print_gimple_stmt (dump_file, stmt, 0);
5490 if (fixup_noreturn_call (stmt))
5491 el_todo |= TODO_cleanup_cfg;
5494 bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
5495 bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);
5497 if (do_eh_cleanup)
5498 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
5500 if (do_ab_cleanup)
5501 gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
5503 if (do_eh_cleanup || do_ab_cleanup)
5504 el_todo |= TODO_cleanup_cfg;
5506 return el_todo;
5509 /* Eliminate fully redundant computations. */
5511 unsigned
5512 eliminate_with_rpo_vn (bitmap inserted_exprs)
5514 eliminate_dom_walker walker (CDI_DOMINATORS, inserted_exprs);
5516 walker.walk (cfun->cfg->x_entry_block_ptr);
5517 return walker.eliminate_cleanup ();
5520 static unsigned
5521 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
5522 bool iterate, bool eliminate);
5524 void
5525 run_rpo_vn (vn_lookup_kind kind)
5527 default_vn_walk_kind = kind;
5528 do_rpo_vn (cfun, NULL, NULL, true, false);
5530 /* ??? Prune requirement of these. */
5531 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
5532 constant_value_ids = BITMAP_ALLOC (NULL);
5534 /* Initialize the value ids and prune out remaining VN_TOPs
5535 from dead code. */
5536 tree name;
5537 unsigned i;
5538 FOR_EACH_SSA_NAME (i, name, cfun)
5540 vn_ssa_aux_t info = VN_INFO (name);
5541 if (!info->visited
5542 || info->valnum == VN_TOP)
5543 info->valnum = name;
5544 if (info->valnum == name)
5545 info->value_id = get_next_value_id ();
5546 else if (is_gimple_min_invariant (info->valnum))
5547 info->value_id = get_or_alloc_constant_value_id (info->valnum);
5550 /* Propagate value ids from each value's representative SSA name. */
5551 FOR_EACH_SSA_NAME (i, name, cfun)
5553 vn_ssa_aux_t info = VN_INFO (name);
5554 if (TREE_CODE (info->valnum) == SSA_NAME
5555 && info->valnum != name
5556 && info->value_id != VN_INFO (info->valnum)->value_id)
5557 info->value_id = VN_INFO (info->valnum)->value_id;
5560 set_hashtable_value_ids ();
5562 if (dump_file && (dump_flags & TDF_DETAILS))
5564 fprintf (dump_file, "Value numbers:\n");
5565 FOR_EACH_SSA_NAME (i, name, cfun)
5567 if (VN_INFO (name)->visited
5568 && SSA_VAL (name) != name)
5570 print_generic_expr (dump_file, name);
5571 fprintf (dump_file, " = ");
5572 print_generic_expr (dump_file, SSA_VAL (name));
5573 fprintf (dump_file, " (%04d)\n", VN_INFO (name)->value_id);
5579 /* Free VN associated data structures. */
5581 void
5582 free_rpo_vn (void)
5584 free_vn_table (valid_info);
5585 XDELETE (valid_info);
5586 obstack_free (&vn_tables_obstack, NULL);
5587 obstack_free (&vn_tables_insert_obstack, NULL);
5589 vn_ssa_aux_iterator_type it;
5590 vn_ssa_aux_t info;
5591 FOR_EACH_HASH_TABLE_ELEMENT (*vn_ssa_aux_hash, info, vn_ssa_aux_t, it)
5592 if (info->needs_insertion)
5593 release_ssa_name (info->name);
5594 obstack_free (&vn_ssa_aux_obstack, NULL);
5595 delete vn_ssa_aux_hash;
5597 delete constant_to_value_id;
5598 constant_to_value_id = NULL;
5599 BITMAP_FREE (constant_value_ids);
5602 /* Adaptor to the elimination engine using RPO availability. */
5604 class rpo_elim : public eliminate_dom_walker
5606 public:
5607 rpo_elim(basic_block entry_)
5608 : eliminate_dom_walker (CDI_DOMINATORS, NULL), entry (entry_) {}
5609 ~rpo_elim();
5611 virtual tree eliminate_avail (basic_block, tree op);
5613 virtual void eliminate_push_avail (basic_block, tree);
5615 basic_block entry;
5616 /* Instead of having a local availability lattice for each
5617 basic-block and availability at X defined as union of
5618 the local availabilities at X and its dominators we're
5619 turning this upside down and tracking availability per
5620 value, given that values are usually made available at
5621 very few points (at least one).
5622 So we have a value -> vec<location, leader> map where
5623 LOCATION specifies the basic-block in which LEADER is made
5624 available for VALUE. We push to this vector in RPO
5625 order, thus for iteration we can simply pop the last
5626 entries.
5627 LOCATION is the basic-block index and LEADER is its
5628 SSA name version. */
5629 /* ??? We'd like to use auto_vec here with embedded storage
5630 but that doesn't play well until we can provide move
5631 constructors and use std::move on hash-table expansion.
5632 So for now this is a bit more expensive than necessary.
5633 We eventually want to switch to a chaining scheme like
5634 the one for hashtable entries used for unwinding, which would
5635 make it possible to make the vector part of the vn_ssa_aux structure. */
5636 typedef hash_map<tree, vec<std::pair<int, int> > > rpo_avail_t;
5637 rpo_avail_t m_rpo_avail;
5640 /* Global RPO state for access from hooks. */
5641 static rpo_elim *rpo_avail;
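/* A minimal standalone sketch of the per-value availability scheme described
   in the comment of rpo_elim above (illustrative only; plain arrays stand in
   for GCC's hash_map/vec, all names and the fixed size are made up).  In the
   real code there is one such stack per value, keyed by the value in
   m_rpo_avail.  Leaders are pushed in RPO order, so a lookup only walks the
   stack from the newest entry backwards and stops at the first recorded
   location that dominates the query block.  */

struct avail_entry_sketch { int bb_index; int ssa_version; };
struct avail_stack_sketch
{
  struct avail_entry_sketch entries[16];
  unsigned length;
};

/* Record that SSA_VERSION is made available for its value in block
   BB_INDEX.  Calls happen in RPO order, so the newest entry is last.
   Sketch only: no overflow handling for the fixed-size array.  */
static inline void
avail_push_sketch (struct avail_stack_sketch *av, int bb_index, int ssa_version)
{
  av->entries[av->length].bb_index = bb_index;
  av->entries[av->length].ssa_version = ssa_version;
  av->length++;
}

/* Return the SSA name version of a leader valid in block BB_INDEX or -1.
   DOMINATES (a, b) is true when block A dominates block B; it stands in
   for the dominated_by_p_w_unex (b, a) check used by eliminate_avail.  */
static inline int
avail_lookup_sketch (struct avail_stack_sketch *av, int bb_index,
		     bool (*dominates) (int, int))
{
  for (unsigned i = av->length; i-- > 0;)
    if (dominates (av->entries[i].bb_index, bb_index))
      return av->entries[i].ssa_version;
  return -1;
}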
5643 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
5645 static tree
5646 vn_lookup_simplify_result (gimple_match_op *res_op)
5648 if (!res_op->code.is_tree_code ())
5649 return NULL_TREE;
5650 tree *ops = res_op->ops;
5651 unsigned int length = res_op->num_ops;
5652 if (res_op->code == CONSTRUCTOR
5653 /* ??? We arrive here with SCCVN's view of a decomposed CONSTRUCTOR,
5654 while GIMPLE / match-and-simplify see CONSTRUCTOR as a GENERIC tree. */
5655 && TREE_CODE (res_op->ops[0]) == CONSTRUCTOR)
5657 length = CONSTRUCTOR_NELTS (res_op->ops[0]);
5658 ops = XALLOCAVEC (tree, length);
5659 for (unsigned i = 0; i < length; ++i)
5660 ops[i] = CONSTRUCTOR_ELT (res_op->ops[0], i)->value;
5662 vn_nary_op_t vnresult = NULL;
5663 tree res = vn_nary_op_lookup_pieces (length, (tree_code) res_op->code,
5664 res_op->type, ops, &vnresult);
5665 /* If this is used from expression simplification make sure to
5666 return an available expression. */
5667 if (res && TREE_CODE (res) == SSA_NAME && mprts_hook && rpo_avail)
5668 res = rpo_avail->eliminate_avail (vn_context_bb, res);
5669 return res;
5672 rpo_elim::~rpo_elim ()
5674 /* Release the avail vectors. */
5675 for (rpo_avail_t::iterator i = m_rpo_avail.begin ();
5676 i != m_rpo_avail.end (); ++i)
5677 (*i).second.release ();
5680 /* Return a leader for OPs value that is valid at BB. */
5682 tree
5683 rpo_elim::eliminate_avail (basic_block bb, tree op)
5685 bool visited;
5686 tree valnum = SSA_VAL (op, &visited);
5687 /* If we didn't visit OP then it must be defined outside of the
5688 region we process and also dominate it. So it is available. */
5689 if (!visited)
5690 return op;
5691 if (TREE_CODE (valnum) == SSA_NAME)
5693 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
5694 return valnum;
5695 vec<std::pair<int, int> > *av = m_rpo_avail.get (valnum);
5696 if (!av || av->is_empty ())
5697 return NULL_TREE;
5698 int i = av->length () - 1;
5699 if ((*av)[i].first == bb->index)
5700 /* On tramp3d 90% of the cases are here. */
5701 return ssa_name ((*av)[i].second);
5704 basic_block abb = BASIC_BLOCK_FOR_FN (cfun, (*av)[i].first);
5705 /* ??? During elimination we have to use availability at the
5706 definition site of a use we try to replace. This
5707 is required to not run into inconsistencies because
5708 of dominated_by_p_w_unex behavior and removing a definition
5709 while not replacing all uses.
5710 ??? We could try to consistently walk dominators
5711 ignoring non-executable regions. The nearest common
5712 dominator of bb and abb is where we can stop walking. We
5713 may also be able to "pre-compute" (bits of) the next immediate
5714 (non-)dominator during the RPO walk when marking edges as
5715 executable. */
5716 if (dominated_by_p_w_unex (bb, abb))
5718 tree leader = ssa_name ((*av)[i].second);
5719 /* Prevent eliminations that break loop-closed SSA. */
5720 if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
5721 && ! SSA_NAME_IS_DEFAULT_DEF (leader)
5722 && ! flow_bb_inside_loop_p (gimple_bb (SSA_NAME_DEF_STMT
5723 (leader))->loop_father,
5724 bb))
5725 return NULL_TREE;
5726 if (dump_file && (dump_flags & TDF_DETAILS))
5728 print_generic_expr (dump_file, leader);
5729 fprintf (dump_file, " is available for ");
5730 print_generic_expr (dump_file, valnum);
5731 fprintf (dump_file, "\n");
5733 /* On tramp3d 99% of the _remaining_ cases succeed at
5734 the first entry. */
5735 return leader;
5737 /* ??? Can we somehow skip to the immediate dominator
5738 RPO index (bb_to_rpo)? Again, maybe not worth it; on
5739 tramp3d the worst number of elements in the vector is 9. */
5741 while (--i >= 0);
5743 else if (valnum != VN_TOP)
5744 /* valnum satisfies is_gimple_min_invariant. */
5745 return valnum;
5746 return NULL_TREE;
5749 /* Make LEADER a leader for its value at BB. */
5751 void
5752 rpo_elim::eliminate_push_avail (basic_block bb, tree leader)
5754 tree valnum = VN_INFO (leader)->valnum;
5755 if (valnum == VN_TOP)
5756 return;
5757 if (dump_file && (dump_flags & TDF_DETAILS))
5759 fprintf (dump_file, "Making available beyond BB%d ", bb->index);
5760 print_generic_expr (dump_file, leader);
5761 fprintf (dump_file, " for value ");
5762 print_generic_expr (dump_file, valnum);
5763 fprintf (dump_file, "\n");
5765 bool existed;
5766 vec<std::pair<int, int> > &av = m_rpo_avail.get_or_insert (valnum, &existed);
5767 if (!existed)
5769 new (&av) vec<std::pair<int, int> >;
5770 av.reserve_exact (2);
5772 av.safe_push (std::make_pair (bb->index, SSA_NAME_VERSION (leader)));
5775 /* Valueization hook for RPO VN plus required state. */
5777 tree
5778 rpo_vn_valueize (tree name)
5780 if (TREE_CODE (name) == SSA_NAME)
5782 vn_ssa_aux_t val = VN_INFO (name);
5783 if (val)
5785 tree tem = val->valnum;
5786 if (tem != VN_TOP && tem != name)
5788 if (TREE_CODE (tem) != SSA_NAME)
5789 return tem;
5790 /* For all values we only valueize to an available leader
5791 which means we can use SSA name info without restriction. */
5792 tem = rpo_avail->eliminate_avail (vn_context_bb, tem);
5793 if (tem)
5794 return tem;
5798 return name;
5801 /* Insert on PRED_E the predicates derived from CODE OPS being true,
5802 in addition to the inverted condition itself. */
5804 static void
5805 insert_related_predicates_on_edge (enum tree_code code, tree *ops, edge pred_e)
5807 switch (code)
5809 case LT_EXPR:
5810 /* a < b -> a {!,<}= b */
5811 vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
5812 ops, boolean_true_node, 0, pred_e);
5813 vn_nary_op_insert_pieces_predicated (2, LE_EXPR, boolean_type_node,
5814 ops, boolean_true_node, 0, pred_e);
5815 /* a < b -> ! a {>,=} b */
5816 vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
5817 ops, boolean_false_node, 0, pred_e);
5818 vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
5819 ops, boolean_false_node, 0, pred_e);
5820 break;
5821 case GT_EXPR:
5822 /* a > b -> a {!,>}= b */
5823 vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
5824 ops, boolean_true_node, 0, pred_e);
5825 vn_nary_op_insert_pieces_predicated (2, GE_EXPR, boolean_type_node,
5826 ops, boolean_true_node, 0, pred_e);
5827 /* a > b -> ! a {<,=} b */
5828 vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
5829 ops, boolean_false_node, 0, pred_e);
5830 vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
5831 ops, boolean_false_node, 0, pred_e);
5832 break;
5833 case EQ_EXPR:
5834 /* a == b -> ! a {<,>} b */
5835 vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
5836 ops, boolean_false_node, 0, pred_e);
5837 vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
5838 ops, boolean_false_node, 0, pred_e);
5839 break;
5840 case LE_EXPR:
5841 case GE_EXPR:
5842 case NE_EXPR:
5843 /* Nothing besides inverted condition. */
5844 break;
5845 default:;
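/* Hedged illustration with a hypothetical input (not a testcase from the
   tree): on the true edge of "a < b" the function above records a != b and
   a <= b as true and a > b and a == b as false, in addition to the condition
   and its inversion recorded by the caller.  A dominated re-test of one of
   these forms can then be resolved by the predicated lookup done for
   GIMPLE_CONDs in process_bb below.  */

int
predicate_example (int a, int b)
{
  if (a < b)
    {
      if (a <= b)	/* implied by a < b on this path, resolves to true */
	return 1;
    }
  return 0;
}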
5849 /* Main stmt worker for RPO VN, process BB. */
5851 static unsigned
5852 process_bb (rpo_elim &avail, basic_block bb,
5853 bool bb_visited, bool iterate_phis, bool iterate, bool eliminate,
5854 bool do_region, bitmap exit_bbs)
5856 unsigned todo = 0;
5857 edge_iterator ei;
5858 edge e;
5860 vn_context_bb = bb;
5862 /* If we are in loop-closed SSA preserve this state. This is
5863 relevant when called on regions from outside of FRE/PRE. */
5864 bool lc_phi_nodes = false;
5865 if (loops_state_satisfies_p (LOOP_CLOSED_SSA))
5866 FOR_EACH_EDGE (e, ei, bb->preds)
5867 if (e->src->loop_father != e->dest->loop_father
5868 && flow_loop_nested_p (e->dest->loop_father,
5869 e->src->loop_father))
5871 lc_phi_nodes = true;
5872 break;
5875 /* Value-number all defs in the basic-block. */
5876 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
5877 gsi_next (&gsi))
5879 gphi *phi = gsi.phi ();
5880 tree res = PHI_RESULT (phi);
5881 vn_ssa_aux_t res_info = VN_INFO (res);
5882 if (!bb_visited)
5884 gcc_assert (!res_info->visited);
5885 res_info->valnum = VN_TOP;
5886 res_info->visited = true;
5889 /* When not iterating force backedge values to varying. */
5890 visit_stmt (phi, !iterate_phis);
5891 if (virtual_operand_p (res))
5892 continue;
5894 /* Eliminate */
5895 /* The interesting case for the correctness of how we handle
5896 backedges and availability is gcc.dg/tree-ssa/pr22230.c,
5897 and gcc.dg/tree-ssa/ssa-sccvn-2.c for optimization. */
5898 tree val = res_info->valnum;
5899 if (res != val && !iterate && eliminate)
5901 if (tree leader = avail.eliminate_avail (bb, res))
5903 if (leader != res
5904 /* Preserve loop-closed SSA form. */
5905 && (! lc_phi_nodes
5906 || is_gimple_min_invariant (leader)))
5908 if (dump_file && (dump_flags & TDF_DETAILS))
5910 fprintf (dump_file, "Replaced redundant PHI node "
5911 "defining ");
5912 print_generic_expr (dump_file, res);
5913 fprintf (dump_file, " with ");
5914 print_generic_expr (dump_file, leader);
5915 fprintf (dump_file, "\n");
5917 avail.eliminations++;
5919 if (may_propagate_copy (res, leader))
5921 /* Schedule for removal. */
5922 avail.to_remove.safe_push (phi);
5923 continue;
5925 /* ??? Else generate a copy stmt. */
5929 /* Only make defs available that are not already. But make
5930 sure loop-closed SSA PHI node defs are picked up for
5931 downstream uses. */
5932 if (lc_phi_nodes
5933 || res == val
5934 || ! avail.eliminate_avail (bb, res))
5935 avail.eliminate_push_avail (bb, res);
5938 /* For empty BBs mark outgoing edges executable. For non-empty BBs
5939 we do this when processing the last stmt, as we have to do it
5940 before elimination, which otherwise forces GIMPLE_CONDs into
5941 if (1 != 0) style when seeing non-executable edges. */
5942 if (gsi_end_p (gsi_start_bb (bb)))
5944 FOR_EACH_EDGE (e, ei, bb->succs)
5946 if (e->flags & EDGE_EXECUTABLE)
5947 continue;
5948 if (dump_file && (dump_flags & TDF_DETAILS))
5949 fprintf (dump_file,
5950 "marking outgoing edge %d -> %d executable\n",
5951 e->src->index, e->dest->index);
5952 gcc_checking_assert (iterate || !(e->flags & EDGE_DFS_BACK));
5953 e->flags |= EDGE_EXECUTABLE;
5954 e->dest->flags |= BB_EXECUTABLE;
5957 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
5958 !gsi_end_p (gsi); gsi_next (&gsi))
5960 ssa_op_iter i;
5961 tree op;
5962 if (!bb_visited)
5964 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
5966 vn_ssa_aux_t op_info = VN_INFO (op);
5967 gcc_assert (!op_info->visited);
5968 op_info->valnum = VN_TOP;
5969 op_info->visited = true;
5972 /* We somehow have to deal with uses that are not defined
5973 in the processed region. Forcing unvisited uses to
5974 varying here doesn't play well with def-use following during
5975 expression simplification, so we deal with this by checking
5976 the visited flag in SSA_VAL. */
5979 visit_stmt (gsi_stmt (gsi));
5981 gimple *last = gsi_stmt (gsi);
5982 e = NULL;
5983 switch (gimple_code (last))
5985 case GIMPLE_SWITCH:
5986 e = find_taken_edge (bb, vn_valueize (gimple_switch_index
5987 (as_a <gswitch *> (last))));
5988 break;
5989 case GIMPLE_COND:
5991 tree lhs = vn_valueize (gimple_cond_lhs (last));
5992 tree rhs = vn_valueize (gimple_cond_rhs (last));
5993 tree val = gimple_simplify (gimple_cond_code (last),
5994 boolean_type_node, lhs, rhs,
5995 NULL, vn_valueize);
5996 /* If the condition didn't simplify, see if we have recorded
5997 an expression from so-far taken edges. */
5998 if (! val || TREE_CODE (val) != INTEGER_CST)
6000 vn_nary_op_t vnresult;
6001 tree ops[2];
6002 ops[0] = lhs;
6003 ops[1] = rhs;
6004 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (last),
6005 boolean_type_node, ops,
6006 &vnresult);
6007 /* Did we get a predicated value? */
6008 if (! val && vnresult && vnresult->predicated_values)
6010 val = vn_nary_op_get_predicated_value (vnresult, bb);
6011 if (val && dump_file && (dump_flags & TDF_DETAILS))
6013 fprintf (dump_file, "Got predicated value ");
6014 print_generic_expr (dump_file, val, TDF_NONE);
6015 fprintf (dump_file, " for ");
6016 print_gimple_stmt (dump_file, last, TDF_SLIM);
6020 if (val)
6021 e = find_taken_edge (bb, val);
6022 if (! e)
6024 /* If we didn't manage to compute the taken edge then
6025 push predicated expressions for the condition itself
6026 and related conditions to the hashtables. This allows
6027 simplification of redundant conditions which is
6028 important as early cleanup. */
6029 edge true_e, false_e;
6030 extract_true_false_edges_from_block (bb, &true_e, &false_e);
6031 enum tree_code code = gimple_cond_code (last);
6032 enum tree_code icode
6033 = invert_tree_comparison (code, HONOR_NANS (lhs));
6034 tree ops[2];
6035 ops[0] = lhs;
6036 ops[1] = rhs;
6037 if (do_region
6038 && bitmap_bit_p (exit_bbs, true_e->dest->index))
6039 true_e = NULL;
6040 if (do_region
6041 && bitmap_bit_p (exit_bbs, false_e->dest->index))
6042 false_e = NULL;
6043 if (true_e)
6044 vn_nary_op_insert_pieces_predicated
6045 (2, code, boolean_type_node, ops,
6046 boolean_true_node, 0, true_e);
6047 if (false_e)
6048 vn_nary_op_insert_pieces_predicated
6049 (2, code, boolean_type_node, ops,
6050 boolean_false_node, 0, false_e);
6051 if (icode != ERROR_MARK)
6053 if (true_e)
6054 vn_nary_op_insert_pieces_predicated
6055 (2, icode, boolean_type_node, ops,
6056 boolean_false_node, 0, true_e);
6057 if (false_e)
6058 vn_nary_op_insert_pieces_predicated
6059 (2, icode, boolean_type_node, ops,
6060 boolean_true_node, 0, false_e);
6062 /* Relax for non-integers, inverted condition handled
6063 above. */
6064 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
6066 if (true_e)
6067 insert_related_predicates_on_edge (code, ops, true_e);
6068 if (false_e)
6069 insert_related_predicates_on_edge (icode, ops, false_e);
6072 break;
6074 case GIMPLE_GOTO:
6075 e = find_taken_edge (bb, vn_valueize (gimple_goto_dest (last)));
6076 break;
6077 default:
6078 e = NULL;
6080 if (e)
6082 todo = TODO_cleanup_cfg;
6083 if (!(e->flags & EDGE_EXECUTABLE))
6085 if (dump_file && (dump_flags & TDF_DETAILS))
6086 fprintf (dump_file,
6087 "marking known outgoing %sedge %d -> %d executable\n",
6088 e->flags & EDGE_DFS_BACK ? "back-" : "",
6089 e->src->index, e->dest->index);
6090 gcc_checking_assert (iterate || !(e->flags & EDGE_DFS_BACK));
6091 e->flags |= EDGE_EXECUTABLE;
6092 e->dest->flags |= BB_EXECUTABLE;
6095 else if (gsi_one_before_end_p (gsi))
6097 FOR_EACH_EDGE (e, ei, bb->succs)
6099 if (e->flags & EDGE_EXECUTABLE)
6100 continue;
6101 if (dump_file && (dump_flags & TDF_DETAILS))
6102 fprintf (dump_file,
6103 "marking outgoing edge %d -> %d executable\n",
6104 e->src->index, e->dest->index);
6105 gcc_checking_assert (iterate || !(e->flags & EDGE_DFS_BACK));
6106 e->flags |= EDGE_EXECUTABLE;
6107 e->dest->flags |= BB_EXECUTABLE;
6111 /* Eliminate. That also pushes to avail. */
6112 if (eliminate && ! iterate)
6113 avail.eliminate_stmt (bb, &gsi);
6114 else
6115 /* If not eliminating, make available all defs that are not
6116 already available. */
6117 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_DEF)
6118 if (! avail.eliminate_avail (bb, op))
6119 avail.eliminate_push_avail (bb, op);
6122 /* Eliminate in destination PHI arguments. Always substitute in dest
6123 PHIs, even for non-executable edges. This handles region
6124 exit PHIs. */
6125 if (!iterate && eliminate)
6126 FOR_EACH_EDGE (e, ei, bb->succs)
6127 for (gphi_iterator gsi = gsi_start_phis (e->dest);
6128 !gsi_end_p (gsi); gsi_next (&gsi))
6130 gphi *phi = gsi.phi ();
6131 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
6132 tree arg = USE_FROM_PTR (use_p);
6133 if (TREE_CODE (arg) != SSA_NAME
6134 || virtual_operand_p (arg))
6135 continue;
6136 tree sprime;
6137 if (SSA_NAME_IS_DEFAULT_DEF (arg))
6139 sprime = SSA_VAL (arg);
6140 gcc_assert (TREE_CODE (sprime) != SSA_NAME
6141 || SSA_NAME_IS_DEFAULT_DEF (sprime));
6143 else
6144 /* Look for something available at the definition block of the argument.
6145 This avoids inconsistencies between availability there which
6146 decides if the stmt can be removed and availability at the
6147 use site. The SSA property ensures that things available
6148 at the definition are also available at uses. */
6149 sprime = avail.eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (arg)),
6150 arg);
6151 if (sprime
6152 && sprime != arg
6153 && may_propagate_copy (arg, sprime))
6154 propagate_value (use_p, sprime);
6157 vn_context_bb = NULL;
6158 return todo;
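/* Hedged illustration with a hypothetical input: when the controlling
   condition of a block valueizes to a constant, process_bb marks only the
   taken outgoing edge executable.  Blocks that never become executable are
   skipped by the RPO walk and their PHI arguments are ignored, so for the
   input below the else arm is never value-numbered and r effectively has
   the single value 1.  */

int
taken_edge_example (int x)
{
  int r;
  if (x == x)	/* simplifies to true, only the true edge becomes executable */
    r = 1;
  else
    r = 2;	/* unreachable as far as VN is concerned */
  return r;
}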
6161 /* Unwind state per basic-block. */
6163 struct unwind_state
6165 /* Times this block has been visited. */
6166 unsigned visited;
6167 /* Whether to handle this as an iteration point or whether to treat
6168 incoming backedge PHI values as varying. */
6169 bool iterate;
6170 void *ob_top;
6171 vn_reference_t ref_top;
6172 vn_phi_t phi_top;
6173 vn_nary_op_t nary_top;
6176 /* Unwind the RPO VN state for iteration. */
6178 static void
6179 do_unwind (unwind_state *to, int rpo_idx, rpo_elim &avail, int *bb_to_rpo)
6181 gcc_assert (to->iterate);
6182 for (; last_inserted_nary != to->nary_top;
6183 last_inserted_nary = last_inserted_nary->next)
6185 vn_nary_op_t *slot;
6186 slot = valid_info->nary->find_slot_with_hash
6187 (last_inserted_nary, last_inserted_nary->hashcode, NO_INSERT);
6188 /* Predication causes the need to restore previous state. */
6189 if ((*slot)->unwind_to)
6190 *slot = (*slot)->unwind_to;
6191 else
6192 valid_info->nary->clear_slot (slot);
6194 for (; last_inserted_phi != to->phi_top;
6195 last_inserted_phi = last_inserted_phi->next)
6197 vn_phi_t *slot;
6198 slot = valid_info->phis->find_slot_with_hash
6199 (last_inserted_phi, last_inserted_phi->hashcode, NO_INSERT);
6200 valid_info->phis->clear_slot (slot);
6202 for (; last_inserted_ref != to->ref_top;
6203 last_inserted_ref = last_inserted_ref->next)
6205 vn_reference_t *slot;
6206 slot = valid_info->references->find_slot_with_hash
6207 (last_inserted_ref, last_inserted_ref->hashcode, NO_INSERT);
6208 (*slot)->operands.release ();
6209 valid_info->references->clear_slot (slot);
6211 obstack_free (&vn_tables_obstack, to->ob_top);
6213 /* Prune [rpo_idx, ] from avail. */
6214 /* ??? This is O(number-of-values-in-region) which is
6215 O(region-size) rather than O(iteration-piece). */
6216 for (rpo_elim::rpo_avail_t::iterator i
6217 = avail.m_rpo_avail.begin ();
6218 i != avail.m_rpo_avail.end (); ++i)
6220 while (! (*i).second.is_empty ())
6222 if (bb_to_rpo[(*i).second.last ().first] < rpo_idx)
6223 break;
6224 (*i).second.pop ();
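/* A minimal standalone sketch of how the iteration driver in do_rpo_vn
   below uses this unwinding (illustrative only; the callback-based helper
   is made up for the example).  Blocks are processed in RPO order; when
   processing a block changes a PHI at the destination of one of its
   executable backedges, the VN state is unwound to that destination's RPO
   index and processing restarts from there.  */

static inline void
rpo_iterate_sketch (int n_blocks,
		    /* Process the block at RPO index IDX and return the
		       smallest RPO index of a backedge destination whose
		       PHIs changed, or -1 if none did.  */
		    int (*process_block) (int idx),
		    /* Drop all VN state recorded at or after RPO index IDX,
		       like do_unwind above.  */
		    void (*unwind_to) (int idx))
{
  int idx = 0;
  while (idx < n_blocks)
    {
      int iterate_to = process_block (idx);
      if (iterate_to != -1)
	{
	  unwind_to (iterate_to);
	  /* Restart iteration at the farthest-back changed PHI block.  */
	  idx = iterate_to;
	}
      else
	idx++;
    }
}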
6229 /* Do VN on a SEME region specified by ENTRY and EXIT_BBS in FN.
6230 If ITERATE is true then treat backedges optimistically as not
6231 executed and iterate. If ELIMINATE is true then perform
6232 elimination, otherwise leave that to the caller. */
6234 static unsigned
6235 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
6236 bool iterate, bool eliminate)
6238 unsigned todo = 0;
6240 /* We currently do not support region-based iteration when
6241 elimination is requested. */
6242 gcc_assert (!entry || !iterate || !eliminate);
6243 /* When iterating we need loop info up-to-date. */
6244 gcc_assert (!iterate || !loops_state_satisfies_p (LOOPS_NEED_FIXUP));
6246 bool do_region = entry != NULL;
6247 if (!do_region)
6249 entry = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fn));
6250 exit_bbs = BITMAP_ALLOC (NULL);
6251 bitmap_set_bit (exit_bbs, EXIT_BLOCK);
6254 int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS);
6255 int n = rev_post_order_and_mark_dfs_back_seme (fn, entry, exit_bbs,
6256 iterate, rpo);
6257 /* rev_post_order_and_mark_dfs_back_seme fills RPO in reverse order. */
6258 for (int i = 0; i < n / 2; ++i)
6259 std::swap (rpo[i], rpo[n-i-1]);
6261 if (!do_region)
6262 BITMAP_FREE (exit_bbs);
6264 int *bb_to_rpo = XNEWVEC (int, last_basic_block_for_fn (fn));
6265 for (int i = 0; i < n; ++i)
6266 bb_to_rpo[rpo[i]] = i;
6268 unwind_state *rpo_state = XNEWVEC (unwind_state, n);
6270 rpo_elim avail (entry->dest);
6271 rpo_avail = &avail;
6273 /* Verify we have no extra entries into the region. */
6274 if (flag_checking && do_region)
6276 auto_bb_flag bb_in_region (fn);
6277 for (int i = 0; i < n; ++i)
6279 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6280 bb->flags |= bb_in_region;
6282 /* We can't merge the first two loops because we cannot rely
6283 on EDGE_DFS_BACK for edges not within the region. But if
6284 we decide to always have the bb_in_region flag we can
6285 do the checking during the RPO walk itself (but then it's
6286 also easy to handle MEME conservatively). */
6287 for (int i = 0; i < n; ++i)
6289 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6290 edge e;
6291 edge_iterator ei;
6292 FOR_EACH_EDGE (e, ei, bb->preds)
6293 gcc_assert (e == entry || (e->src->flags & bb_in_region));
6295 for (int i = 0; i < n; ++i)
6297 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6298 bb->flags &= ~bb_in_region;
6302 /* Create the VN state. For the initial size of the various hashtables
6303 use a heuristic based on region size and number of SSA names. */
6304 unsigned region_size = (((unsigned HOST_WIDE_INT)n * num_ssa_names)
6305 / (n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS));
6306 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
6308 vn_ssa_aux_hash = new hash_table <vn_ssa_aux_hasher> (region_size * 2);
6309 gcc_obstack_init (&vn_ssa_aux_obstack);
6311 gcc_obstack_init (&vn_tables_obstack);
6312 gcc_obstack_init (&vn_tables_insert_obstack);
6313 valid_info = XCNEW (struct vn_tables_s);
6314 allocate_vn_table (valid_info, region_size);
6315 last_inserted_ref = NULL;
6316 last_inserted_phi = NULL;
6317 last_inserted_nary = NULL;
6319 vn_valueize = rpo_vn_valueize;
6321 /* Initialize the unwind state and edge/BB executable state. */
6322 for (int i = 0; i < n; ++i)
6324 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6325 rpo_state[i].visited = 0;
6326 bb->flags &= ~BB_EXECUTABLE;
6327 bool has_backedges = false;
6328 edge e;
6329 edge_iterator ei;
6330 FOR_EACH_EDGE (e, ei, bb->preds)
6332 if (e->flags & EDGE_DFS_BACK)
6333 has_backedges = true;
6334 if (! iterate && (e->flags & EDGE_DFS_BACK))
6336 e->flags |= EDGE_EXECUTABLE;
6337 /* ??? Strictly speaking we only need to unconditionally
6338 process a block when it is in an irreducible region,
6339 thus when it may be reachable via the backedge only. */
6340 bb->flags |= BB_EXECUTABLE;
6342 else
6343 e->flags &= ~EDGE_EXECUTABLE;
6345 rpo_state[i].iterate = iterate && has_backedges;
6347 entry->flags |= EDGE_EXECUTABLE;
6348 entry->dest->flags |= BB_EXECUTABLE;
6350 /* As a heuristic to improve compile-time we handle only the N innermost
6351 loops and the outermost one optimistically. */
6352 if (iterate)
6354 loop_p loop;
6355 unsigned max_depth = PARAM_VALUE (PARAM_RPO_VN_MAX_LOOP_DEPTH);
6356 FOR_EACH_LOOP (loop, LI_ONLY_INNERMOST)
6357 if (loop_depth (loop) > max_depth)
6358 for (unsigned i = 2;
6359 i < loop_depth (loop) - max_depth; ++i)
6361 basic_block header = superloop_at_depth (loop, i)->header;
6362 bool non_latch_backedge = false;
6363 edge e;
6364 edge_iterator ei;
6365 FOR_EACH_EDGE (e, ei, header->preds)
6366 if (e->flags & EDGE_DFS_BACK)
6368 e->flags |= EDGE_EXECUTABLE;
6369 e->dest->flags |= BB_EXECUTABLE;
6370 /* There can be a non-latch backedge into the header
6371 which is part of an outer irreducible region. We
6372 cannot avoid iterating this block then. */
6373 if (!dominated_by_p (CDI_DOMINATORS,
6374 e->src, e->dest))
6376 if (dump_file && (dump_flags & TDF_DETAILS))
6377 fprintf (dump_file, "non-latch backedge %d -> %d "
6378 "forces iteration of loop %d\n",
6379 e->src->index, e->dest->index, loop->num);
6380 non_latch_backedge = true;
6383 rpo_state[bb_to_rpo[header->index]].iterate = non_latch_backedge;
6387 /* Go and process all blocks, iterating as necessary. */
6388 int idx = 0;
6389 uint64_t nblk = 0;
6392 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
6394 /* If the block has incoming backedges remember unwind state. This
6395 is required even for non-executable blocks since in irreducible
6396 regions we might reach them via the backedge and re-start iterating
6397 from there.
6398 Note we can individually mark blocks with incoming backedges as
6399 not iterating, in which case we handle their PHIs conservatively. We do that
6400 heuristically to reduce compile-time for degenerate cases. */
6401 if (rpo_state[idx].iterate)
6403 rpo_state[idx].ob_top = obstack_alloc (&vn_tables_obstack, 0);
6404 rpo_state[idx].ref_top = last_inserted_ref;
6405 rpo_state[idx].phi_top = last_inserted_phi;
6406 rpo_state[idx].nary_top = last_inserted_nary;
6409 if (!(bb->flags & BB_EXECUTABLE))
6411 if (dump_file && (dump_flags & TDF_DETAILS))
6412 fprintf (dump_file, "Block %d: BB%d found not executable\n",
6413 idx, bb->index);
6414 idx++;
6415 continue;
6418 if (dump_file && (dump_flags & TDF_DETAILS))
6419 fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
6420 nblk++;
6421 todo |= process_bb (avail, bb,
6422 rpo_state[idx].visited != 0,
6423 rpo_state[idx].iterate,
6424 iterate, eliminate, do_region, exit_bbs);
6425 rpo_state[idx].visited++;
6427 if (iterate)
6429 /* Check whether changed values flow over executable outgoing backedges
6430 and whether those change destination PHI values (that's the thing we
6431 can easily verify). Reduce over all such edges to the farthest
6432 away PHI. */
6433 int iterate_to = -1;
6434 edge_iterator ei;
6435 edge e;
6436 FOR_EACH_EDGE (e, ei, bb->succs)
6437 if ((e->flags & (EDGE_DFS_BACK|EDGE_EXECUTABLE))
6438 == (EDGE_DFS_BACK|EDGE_EXECUTABLE)
6439 && rpo_state[bb_to_rpo[e->dest->index]].iterate)
6441 if (dump_file && (dump_flags & TDF_DETAILS))
6442 fprintf (dump_file, "Looking for changed values of backedge "
6443 "%d->%d destination PHIs\n",
6444 e->src->index, e->dest->index);
6445 vn_context_bb = e->dest;
6446 gphi_iterator gsi;
6447 for (gsi = gsi_start_phis (e->dest);
6448 !gsi_end_p (gsi); gsi_next (&gsi))
6450 bool inserted = false;
6451 /* While we'd ideally just iterate on value changes
6452 we CSE PHIs and do that even across basic-block
6453 boundaries. So even hashtable state changes can
6454 be important (which is roughly equivalent to
6455 PHI argument value changes). To not excessively
6456 iterate because of that we track whether a PHI
6457 was CSEd to with GF_PLF_1. */
6458 bool phival_changed;
6459 if ((phival_changed = visit_phi (gsi.phi (),
6460 &inserted, false))
6461 || (inserted && gimple_plf (gsi.phi (), GF_PLF_1)))
6463 if (!phival_changed
6464 && dump_file && (dump_flags & TDF_DETAILS))
6465 fprintf (dump_file, "PHI was CSEd and hashtable "
6466 "state (changed)\n");
6467 int destidx = bb_to_rpo[e->dest->index];
6468 if (iterate_to == -1
6469 || destidx < iterate_to)
6470 iterate_to = destidx;
6471 break;
6474 vn_context_bb = NULL;
6476 if (iterate_to != -1)
6478 do_unwind (&rpo_state[iterate_to], iterate_to,
6479 avail, bb_to_rpo);
6480 idx = iterate_to;
6481 if (dump_file && (dump_flags & TDF_DETAILS))
6482 fprintf (dump_file, "Iterating to %d BB%d\n",
6483 iterate_to, rpo[iterate_to]);
6484 continue;
6488 idx++;
6490 while (idx < n);
6492 /* Collect and report statistics when a statistics or dump file is active. */
6493 int nex = 0;
6494 unsigned max_visited = 1;
6495 for (int i = 0; i < n; ++i)
6497 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6498 if (bb->flags & BB_EXECUTABLE)
6499 nex++;
6500 statistics_histogram_event (cfun, "RPO block visited times",
6501 rpo_state[i].visited);
6502 if (rpo_state[i].visited > max_visited)
6503 max_visited = rpo_state[i].visited;
6505 unsigned nvalues = 0, navail = 0;
6506 for (rpo_elim::rpo_avail_t::iterator i = avail.m_rpo_avail.begin ();
6507 i != avail.m_rpo_avail.end (); ++i)
6509 nvalues++;
6510 navail += (*i).second.length ();
6512 statistics_counter_event (cfun, "RPO blocks", n);
6513 statistics_counter_event (cfun, "RPO blocks visited", nblk);
6514 statistics_counter_event (cfun, "RPO blocks executable", nex);
6515 statistics_histogram_event (cfun, "RPO iterations", 10*nblk / nex);
6516 statistics_histogram_event (cfun, "RPO num values", nvalues);
6517 statistics_histogram_event (cfun, "RPO num avail", navail);
6518 statistics_histogram_event (cfun, "RPO num lattice",
6519 vn_ssa_aux_hash->elements ());
6520 if (dump_file && (dump_flags & (TDF_DETAILS|TDF_STATS)))
6522 fprintf (dump_file, "RPO iteration over %d blocks visited %" PRIu64
6523 " blocks in total discovering %d executable blocks iterating "
6524 "%d.%d times, a block was visited max. %u times\n",
6525 n, nblk, nex,
6526 (int)((10*nblk / nex)/10), (int)((10*nblk / nex)%10),
6527 max_visited);
6528 fprintf (dump_file, "RPO tracked %d values available at %d locations "
6529 "and %" PRIu64 " lattice elements\n",
6530 nvalues, navail, (uint64_t) vn_ssa_aux_hash->elements ());
6533 if (eliminate)
6535 /* When !iterate we already performed elimination during the RPO
6536 walk. */
6537 if (iterate)
6539 /* Elimination for region-based VN needs to be done within the
6540 RPO walk. */
6541 gcc_assert (! do_region);
6542 /* Note we can't use avail.walk here because that gets confused
6543 by the existing availability and it will be less efficient
6544 as well. */
6545 todo |= eliminate_with_rpo_vn (NULL);
6547 else
6548 todo |= avail.eliminate_cleanup (do_region);
6551 vn_valueize = NULL;
6552 rpo_avail = NULL;
6554 XDELETEVEC (bb_to_rpo);
6555 XDELETEVEC (rpo);
6557 return todo;
6560 /* Region-based entry for RPO VN. Performs value-numbering and elimination
6561 on the SEME region specified by ENTRY and EXIT_BBS. */
6563 unsigned
6564 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs)
6566 default_vn_walk_kind = VN_WALKREWRITE;
6567 unsigned todo = do_rpo_vn (fn, entry, exit_bbs, false, true);
6568 free_rpo_vn ();
6569 return todo;
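/* Hedged sketch of a hypothetical region-based caller (no such caller lives
   in this file; the helper and its use of the loop utilities are assumptions
   made for illustration and assume loop preheaders are available).  A caller
   collects the indices of the blocks the region exits to in EXIT_BBS and
   passes the single entry edge; elimination happens during the walk since
   region-based VN does not iterate.  */

static unsigned
rpo_vn_loop_body_sketch (struct loop *loop)
{
  bitmap exit_bbs = BITMAP_ALLOC (NULL);
  vec<edge> exits = get_loop_exit_edges (loop);
  edge e;
  unsigned i;
  FOR_EACH_VEC_ELT (exits, i, e)
    bitmap_set_bit (exit_bbs, e->dest->index);
  /* Value-number and eliminate on the SEME region formed by the loop body.  */
  unsigned todo = do_rpo_vn (cfun, loop_preheader_edge (loop), exit_bbs);
  exits.release ();
  BITMAP_FREE (exit_bbs);
  return todo;
}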
6573 namespace {
6575 const pass_data pass_data_fre =
6577 GIMPLE_PASS, /* type */
6578 "fre", /* name */
6579 OPTGROUP_NONE, /* optinfo_flags */
6580 TV_TREE_FRE, /* tv_id */
6581 ( PROP_cfg | PROP_ssa ), /* properties_required */
6582 0, /* properties_provided */
6583 0, /* properties_destroyed */
6584 0, /* todo_flags_start */
6585 0, /* todo_flags_finish */
6588 class pass_fre : public gimple_opt_pass
6590 public:
6591 pass_fre (gcc::context *ctxt)
6592 : gimple_opt_pass (pass_data_fre, ctxt)
6595 /* opt_pass methods: */
6596 opt_pass * clone () { return new pass_fre (m_ctxt); }
6597 virtual bool gate (function *) { return flag_tree_fre != 0; }
6598 virtual unsigned int execute (function *);
6600 }; // class pass_fre
6602 unsigned int
6603 pass_fre::execute (function *fun)
6605 unsigned todo = 0;
6607 /* At -O[1g] use the cheap non-iterating mode. */
6608 calculate_dominance_info (CDI_DOMINATORS);
6609 if (optimize > 1)
6610 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
6612 default_vn_walk_kind = VN_WALKREWRITE;
6613 todo = do_rpo_vn (fun, NULL, NULL, optimize > 1, true);
6614 free_rpo_vn ();
6616 if (optimize > 1)
6617 loop_optimizer_finalize ();
6619 return todo;
6622 } // anon namespace
6624 gimple_opt_pass *
6625 make_pass_fre (gcc::context *ctxt)
6627 return new pass_fre (ctxt);
6630 #undef BB_EXECUTABLE