gcc/tree-ssa-sccvn.c
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2018 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "ssa.h"
29 #include "expmed.h"
30 #include "insn-config.h"
31 #include "memmodel.h"
32 #include "emit-rtl.h"
33 #include "cgraph.h"
34 #include "gimple-pretty-print.h"
35 #include "alias.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "cfganal.h"
39 #include "tree-inline.h"
40 #include "internal-fn.h"
41 #include "gimple-fold.h"
42 #include "tree-eh.h"
43 #include "gimplify.h"
44 #include "flags.h"
45 #include "dojump.h"
46 #include "explow.h"
47 #include "calls.h"
48 #include "varasm.h"
49 #include "stmt.h"
50 #include "expr.h"
51 #include "tree-dfa.h"
52 #include "tree-ssa.h"
53 #include "dumpfile.h"
54 #include "cfgloop.h"
55 #include "params.h"
56 #include "tree-ssa-propagate.h"
57 #include "tree-cfg.h"
58 #include "domwalk.h"
59 #include "gimple-iterator.h"
60 #include "gimple-match.h"
61 #include "stringpool.h"
62 #include "attribs.h"
63 #include "tree-pass.h"
64 #include "statistics.h"
65 #include "langhooks.h"
66 #include "ipa-utils.h"
67 #include "dbgcnt.h"
68 #include "tree-cfgcleanup.h"
69 #include "tree-ssa-loop.h"
70 #include "tree-scalar-evolution.h"
71 #include "tree-ssa-sccvn.h"
73 /* This algorithm is based on the SCC algorithm presented by Keith
74 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
75 (http://citeseer.ist.psu.edu/41805.html). In
76 straight line code, it is equivalent to a regular hash based value
77 numbering that is performed in reverse postorder.
79 For code with cycles, there are two alternatives, both of which
80 require keeping the hashtables separate from the actual list of
81 value numbers for SSA names.
83 1. Iterate value numbering in an RPO walk of the blocks, removing
84 all the entries from the hashtable after each iteration (but
85 keeping the SSA name->value number mapping between iterations).
86 Iterate until it does not change.
88 2. Perform value numbering as part of an SCC walk on the SSA graph,
89 iterating only the cycles in the SSA graph until they do not change
90 (using a separate, optimistic hashtable for value numbering the SCC
91 operands).
93 The second is not just faster in practice (because most SSA graph
94 cycles do not involve all the variables in the graph), it also has
95 some nice properties.
97 One of these nice properties is that when we pop an SCC off the
98 stack, we are guaranteed to have processed all the operands coming from
99 *outside of that SCC*, so we do not need to do anything special to
100 ensure they have value numbers.
102 Another nice property is that the SCC walk is done as part of a DFS
103 of the SSA graph, which makes it easy to perform combining and
104 simplifying operations at the same time.
106 The code below is deliberately written in a way that makes it easy
107 to separate the SCC walk from the other work it does.
109 In order to propagate constants through the code, we track which
110 expressions contain constants, and use those while folding. In
111 theory, we could also track expressions whose value numbers are
112 replaced, in case we end up folding based on expression
113 identities.
115 In order to value number memory, we assign value numbers to vuses.
116 This enables us to note that, for example, stores to the same
117 address of the same value from the same starting memory states are
118 equivalent.
119 TODO:
121 1. We can iterate only the changing portions of the SCC's, but
122 I have not seen an SCC big enough for this to be a win.
123 2. If you differentiate between phi nodes for loops and phi nodes
124 for if-then-else, you can properly consider phi nodes in different
125 blocks for equivalence.
126 3. We could value number vuses in more cases, particularly, whole
127 structure copies.
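/* As a small illustration of the optimistic SCC iteration described above
   (the SSA names here are made up for the example), consider the cycle

     # i_1 = PHI <0(preheader), i_2(latch)>
     i_2 = i_1 + 0;

   Processing the SCC {i_1, i_2} with the optimistic table first assumes
   i_1 has the value 0 (its only non-VN_TOP incoming value), which lets
   i_2 simplify to 0 as well; the next iteration confirms the assumption,
   so both names end up value-numbered to the constant 0.  */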
130 /* There's no BB_EXECUTABLE but we can use BB_VISITED. */
131 #define BB_EXECUTABLE BB_VISITED
133 static tree *last_vuse_ptr;
134 static vn_lookup_kind vn_walk_kind;
135 static vn_lookup_kind default_vn_walk_kind;
136 bitmap const_parms;
138 /* vn_nary_op hashtable helpers. */
140 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
142 typedef vn_nary_op_s *compare_type;
143 static inline hashval_t hash (const vn_nary_op_s *);
144 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
147 /* Return the computed hashcode for nary operation P1. */
149 inline hashval_t
150 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
152 return vno1->hashcode;
155 /* Compare nary operations P1 and P2 and return true if they are
156 equivalent. */
158 inline bool
159 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
161 return vno1 == vno2 || vn_nary_op_eq (vno1, vno2);
164 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
165 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
168 /* vn_phi hashtable helpers. */
170 static int
171 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
173 struct vn_phi_hasher : nofree_ptr_hash <vn_phi_s>
175 static inline hashval_t hash (const vn_phi_s *);
176 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
179 /* Return the computed hashcode for phi operation P1. */
181 inline hashval_t
182 vn_phi_hasher::hash (const vn_phi_s *vp1)
184 return vp1->hashcode;
187 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
189 inline bool
190 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
192 return vp1 == vp2 || vn_phi_eq (vp1, vp2);
195 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
196 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
199 /* Compare two reference operands P1 and P2 for equality. Return true if
200 they are equal, and false otherwise. */
202 static int
203 vn_reference_op_eq (const void *p1, const void *p2)
205 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
206 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
208 return (vro1->opcode == vro2->opcode
209 /* We do not care for differences in type qualification. */
210 && (vro1->type == vro2->type
211 || (vro1->type && vro2->type
212 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
213 TYPE_MAIN_VARIANT (vro2->type))))
214 && expressions_equal_p (vro1->op0, vro2->op0)
215 && expressions_equal_p (vro1->op1, vro2->op1)
216 && expressions_equal_p (vro1->op2, vro2->op2));
219 /* Free a reference operation structure VP. */
221 static inline void
222 free_reference (vn_reference_s *vr)
224 vr->operands.release ();
228 /* vn_reference hashtable helpers. */
230 struct vn_reference_hasher : nofree_ptr_hash <vn_reference_s>
232 static inline hashval_t hash (const vn_reference_s *);
233 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
236 /* Return the hashcode for a given reference operation P1. */
238 inline hashval_t
239 vn_reference_hasher::hash (const vn_reference_s *vr1)
241 return vr1->hashcode;
244 inline bool
245 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
247 return v == c || vn_reference_eq (v, c);
250 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
251 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
254 /* The set of VN hashtables. */
256 typedef struct vn_tables_s
258 vn_nary_op_table_type *nary;
259 vn_phi_table_type *phis;
260 vn_reference_table_type *references;
261 } *vn_tables_t;
264 /* vn_constant hashtable helpers. */
266 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
268 static inline hashval_t hash (const vn_constant_s *);
269 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
272 /* Hash table hash function for vn_constant_t. */
274 inline hashval_t
275 vn_constant_hasher::hash (const vn_constant_s *vc1)
277 return vc1->hashcode;
280 /* Hash table equality function for vn_constant_t. */
282 inline bool
283 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
285 if (vc1->hashcode != vc2->hashcode)
286 return false;
288 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
291 static hash_table<vn_constant_hasher> *constant_to_value_id;
292 static bitmap constant_value_ids;
295 /* Obstack we allocate the vn-tables elements from. */
296 static obstack vn_tables_obstack;
297 /* Special obstack we never unwind. */
298 static obstack vn_tables_insert_obstack;
300 static vn_reference_t last_inserted_ref;
301 static vn_phi_t last_inserted_phi;
302 static vn_nary_op_t last_inserted_nary;
304 /* Valid hashtables storing information we have proven to be
305 correct. */
306 static vn_tables_t valid_info;
309 /* Valueization hook. Valueize NAME if it is an SSA name, otherwise
310 just return it. */
311 tree (*vn_valueize) (tree);
314 /* This represents the top of the VN lattice, which is the universal
315 value. */
317 tree VN_TOP;
319 /* Unique counter for our value ids. */
321 static unsigned int next_value_id;
324 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
325 are allocated on an obstack for locality reasons, and to free them
326 without looping over the vec. */
328 struct vn_ssa_aux_hasher : typed_noop_remove <vn_ssa_aux_t>
330 typedef vn_ssa_aux_t value_type;
331 typedef tree compare_type;
332 static inline hashval_t hash (const value_type &);
333 static inline bool equal (const value_type &, const compare_type &);
334 static inline void mark_deleted (value_type &) {}
335 static inline void mark_empty (value_type &e) { e = NULL; }
336 static inline bool is_deleted (value_type &) { return false; }
337 static inline bool is_empty (value_type &e) { return e == NULL; }
340 hashval_t
341 vn_ssa_aux_hasher::hash (const value_type &entry)
343 return SSA_NAME_VERSION (entry->name);
346 bool
347 vn_ssa_aux_hasher::equal (const value_type &entry, const compare_type &name)
349 return name == entry->name;
352 static hash_table<vn_ssa_aux_hasher> *vn_ssa_aux_hash;
353 typedef hash_table<vn_ssa_aux_hasher>::iterator vn_ssa_aux_iterator_type;
354 static struct obstack vn_ssa_aux_obstack;
356 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *, tree);
357 static unsigned int vn_nary_length_from_stmt (gimple *);
358 static vn_nary_op_t alloc_vn_nary_op_noinit (unsigned int, obstack *);
359 static vn_nary_op_t vn_nary_op_insert_into (vn_nary_op_t,
360 vn_nary_op_table_type *, bool);
361 static void init_vn_nary_op_from_stmt (vn_nary_op_t, gimple *);
362 static void init_vn_nary_op_from_pieces (vn_nary_op_t, unsigned int,
363 enum tree_code, tree, tree *);
364 static tree vn_lookup_simplify_result (gimple_match_op *);
366 /* Return whether there is value numbering information for a given SSA name. */
368 bool
369 has_VN_INFO (tree name)
371 return vn_ssa_aux_hash->find_with_hash (name, SSA_NAME_VERSION (name));
374 vn_ssa_aux_t
375 VN_INFO (tree name)
377 vn_ssa_aux_t *res
378 = vn_ssa_aux_hash->find_slot_with_hash (name, SSA_NAME_VERSION (name),
379 INSERT);
380 if (*res != NULL)
381 return *res;
383 vn_ssa_aux_t newinfo = *res = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
384 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
385 newinfo->name = name;
386 newinfo->valnum = VN_TOP;
387 /* We are using the visited flag to handle uses with defs not within the
388 region being value-numbered. */
389 newinfo->visited = false;
391 /* Given we create the VN_INFOs on-demand now we have to do initialization
392 different than VN_TOP here. */
393 if (SSA_NAME_IS_DEFAULT_DEF (name))
394 switch (TREE_CODE (SSA_NAME_VAR (name)))
396 case VAR_DECL:
397 /* All undefined vars are VARYING. */
398 newinfo->valnum = name;
399 newinfo->visited = true;
400 break;
402 case PARM_DECL:
403 /* Parameters are VARYING but we can record a condition
404 if we know it is a non-NULL pointer. */
405 newinfo->visited = true;
406 newinfo->valnum = name;
407 if (POINTER_TYPE_P (TREE_TYPE (name))
408 && nonnull_arg_p (SSA_NAME_VAR (name)))
410 tree ops[2];
411 ops[0] = name;
412 ops[1] = build_int_cst (TREE_TYPE (name), 0);
413 vn_nary_op_t nary;
414 /* Allocate from non-unwinding stack. */
415 nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
416 init_vn_nary_op_from_pieces (nary, 2, NE_EXPR,
417 boolean_type_node, ops);
418 nary->predicated_values = 0;
419 nary->u.result = boolean_true_node;
420 vn_nary_op_insert_into (nary, valid_info->nary, true);
421 gcc_assert (nary->unwind_to == NULL);
422 /* Also do not link it into the undo chain. */
423 last_inserted_nary = nary->next;
424 nary->next = (vn_nary_op_t)(void *)-1;
425 nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
426 init_vn_nary_op_from_pieces (nary, 2, EQ_EXPR,
427 boolean_type_node, ops);
428 nary->predicated_values = 0;
429 nary->u.result = boolean_false_node;
430 vn_nary_op_insert_into (nary, valid_info->nary, true);
431 gcc_assert (nary->unwind_to == NULL);
432 last_inserted_nary = nary->next;
433 nary->next = (vn_nary_op_t)(void *)-1;
434 if (dump_file && (dump_flags & TDF_DETAILS))
436 fprintf (dump_file, "Recording ");
437 print_generic_expr (dump_file, name, TDF_SLIM);
438 fprintf (dump_file, " != 0\n");
441 break;
443 case RESULT_DECL:
444 /* If the result is passed by invisible reference the default
445 def is initialized, otherwise it's uninitialized. Still
446 undefined is varying. */
447 newinfo->visited = true;
448 newinfo->valnum = name;
449 break;
451 default:
452 gcc_unreachable ();
454 return newinfo;
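/* Illustrative note: for a pointer parameter that nonnull_arg_p proves
   non-NULL, the code above records two non-predicated nary entries,
   "name != 0 -> true" and "name == 0 -> false", directly in the valid
   table and keeps them out of the undo chain, so later lookups of such
   comparisons against the default definition fold immediately.  */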
457 /* Return the SSA value of X. */
459 inline tree
460 SSA_VAL (tree x)
462 vn_ssa_aux_t tem = vn_ssa_aux_hash->find_with_hash (x, SSA_NAME_VERSION (x));
463 return tem && tem->visited ? tem->valnum : x;
466 /* Return the SSA value of the VUSE x, supporting released VDEFs
467 during elimination which will value-number the VDEF to the
468 associated VUSE (but not substitute in the whole lattice). */
470 static inline tree
471 vuse_ssa_val (tree x)
473 if (!x)
474 return NULL_TREE;
478 tree tem = SSA_VAL (x);
479 /* stmt walking can walk over a backedge and reach code we didn't
480 value-number yet. */
481 if (tem == VN_TOP)
482 return x;
483 x = tem;
485 while (SSA_NAME_IN_FREE_LIST (x));
487 return x;
491 /* Return the vn_kind the expression computed by the stmt should be
492 associated with. */
494 enum vn_kind
495 vn_get_stmt_kind (gimple *stmt)
497 switch (gimple_code (stmt))
499 case GIMPLE_CALL:
500 return VN_REFERENCE;
501 case GIMPLE_PHI:
502 return VN_PHI;
503 case GIMPLE_ASSIGN:
505 enum tree_code code = gimple_assign_rhs_code (stmt);
506 tree rhs1 = gimple_assign_rhs1 (stmt);
507 switch (get_gimple_rhs_class (code))
509 case GIMPLE_UNARY_RHS:
510 case GIMPLE_BINARY_RHS:
511 case GIMPLE_TERNARY_RHS:
512 return VN_NARY;
513 case GIMPLE_SINGLE_RHS:
514 switch (TREE_CODE_CLASS (code))
516 case tcc_reference:
517 /* VOP-less references can go through unary case. */
518 if ((code == REALPART_EXPR
519 || code == IMAGPART_EXPR
520 || code == VIEW_CONVERT_EXPR
521 || code == BIT_FIELD_REF)
522 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
523 return VN_NARY;
525 /* Fallthrough. */
526 case tcc_declaration:
527 return VN_REFERENCE;
529 case tcc_constant:
530 return VN_CONSTANT;
532 default:
533 if (code == ADDR_EXPR)
534 return (is_gimple_min_invariant (rhs1)
535 ? VN_CONSTANT : VN_REFERENCE);
536 else if (code == CONSTRUCTOR)
537 return VN_NARY;
538 return VN_NONE;
540 default:
541 return VN_NONE;
544 default:
545 return VN_NONE;
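/* A few illustrative classifications (hypothetical GIMPLE): "x_1 = y_2 + 1"
   and "x_1 = CONSTRUCTOR <...>" are VN_NARY, "x_1 = MEM[p_2]" and a call
   "x_1 = foo ()" are VN_REFERENCE, "x_1 = PHI <a_2, b_3>" is VN_PHI, and a
   VOP-less "x_1 = VIEW_CONVERT_EXPR<int>(y_2)" takes the unary VN_NARY
   path.  */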
549 /* Lookup a value id for CONSTANT and return it. If it does not
550 exist returns 0. */
552 unsigned int
553 get_constant_value_id (tree constant)
555 vn_constant_s **slot;
556 struct vn_constant_s vc;
558 vc.hashcode = vn_hash_constant_with_type (constant);
559 vc.constant = constant;
560 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
561 if (slot)
562 return (*slot)->value_id;
563 return 0;
566 /* Lookup a value id for CONSTANT, and if it does not exist, create a
567 new one and return it. If it does exist, return it. */
569 unsigned int
570 get_or_alloc_constant_value_id (tree constant)
572 vn_constant_s **slot;
573 struct vn_constant_s vc;
574 vn_constant_t vcp;
576 /* If the hashtable isn't initialized we're not running from PRE and thus
577 do not need value-ids. */
578 if (!constant_to_value_id)
579 return 0;
581 vc.hashcode = vn_hash_constant_with_type (constant);
582 vc.constant = constant;
583 slot = constant_to_value_id->find_slot (&vc, INSERT);
584 if (*slot)
585 return (*slot)->value_id;
587 vcp = XNEW (struct vn_constant_s);
588 vcp->hashcode = vc.hashcode;
589 vcp->constant = constant;
590 vcp->value_id = get_next_value_id ();
591 *slot = vcp;
592 bitmap_set_bit (constant_value_ids, vcp->value_id);
593 return vcp->value_id;
596 /* Return true if V is a value id for a constant. */
598 bool
599 value_id_constant_p (unsigned int v)
601 return bitmap_bit_p (constant_value_ids, v);
604 /* Compute the hash for a reference operand VRO1. */
606 static void
607 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
609 hstate.add_int (vro1->opcode);
610 if (vro1->op0)
611 inchash::add_expr (vro1->op0, hstate);
612 if (vro1->op1)
613 inchash::add_expr (vro1->op1, hstate);
614 if (vro1->op2)
615 inchash::add_expr (vro1->op2, hstate);
618 /* Compute a hash for the reference operation VR1 and return it. */
620 static hashval_t
621 vn_reference_compute_hash (const vn_reference_t vr1)
623 inchash::hash hstate;
624 hashval_t result;
625 int i;
626 vn_reference_op_t vro;
627 poly_int64 off = -1;
628 bool deref = false;
630 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
632 if (vro->opcode == MEM_REF)
633 deref = true;
634 else if (vro->opcode != ADDR_EXPR)
635 deref = false;
636 if (maybe_ne (vro->off, -1))
638 if (known_eq (off, -1))
639 off = 0;
640 off += vro->off;
642 else
644 if (maybe_ne (off, -1)
645 && maybe_ne (off, 0))
646 hstate.add_poly_int (off);
647 off = -1;
648 if (deref
649 && vro->opcode == ADDR_EXPR)
651 if (vro->op0)
653 tree op = TREE_OPERAND (vro->op0, 0);
654 hstate.add_int (TREE_CODE (op));
655 inchash::add_expr (op, hstate);
658 else
659 vn_reference_op_compute_hash (vro, hstate);
662 result = hstate.end ();
663 /* ??? We would ICE later if we hash instead of adding that in. */
664 if (vr1->vuse)
665 result += SSA_NAME_VERSION (vr1->vuse);
667 return result;
670 /* Return true if reference operations VR1 and VR2 are equivalent. This
671 means they have the same set of operands and vuses. */
673 bool
674 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
676 unsigned i, j;
678 /* Early out if this is not a hash collision. */
679 if (vr1->hashcode != vr2->hashcode)
680 return false;
682 /* The VOP needs to be the same. */
683 if (vr1->vuse != vr2->vuse)
684 return false;
686 /* If the operands are the same we are done. */
687 if (vr1->operands == vr2->operands)
688 return true;
690 if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
691 return false;
693 if (INTEGRAL_TYPE_P (vr1->type)
694 && INTEGRAL_TYPE_P (vr2->type))
696 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
697 return false;
699 else if (INTEGRAL_TYPE_P (vr1->type)
700 && (TYPE_PRECISION (vr1->type)
701 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
702 return false;
703 else if (INTEGRAL_TYPE_P (vr2->type)
704 && (TYPE_PRECISION (vr2->type)
705 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
706 return false;
708 i = 0;
709 j = 0;
712 poly_int64 off1 = 0, off2 = 0;
713 vn_reference_op_t vro1, vro2;
714 vn_reference_op_s tem1, tem2;
715 bool deref1 = false, deref2 = false;
716 for (; vr1->operands.iterate (i, &vro1); i++)
718 if (vro1->opcode == MEM_REF)
719 deref1 = true;
720 /* Do not look through a storage order barrier. */
721 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
722 return false;
723 if (known_eq (vro1->off, -1))
724 break;
725 off1 += vro1->off;
727 for (; vr2->operands.iterate (j, &vro2); j++)
729 if (vro2->opcode == MEM_REF)
730 deref2 = true;
731 /* Do not look through a storage order barrier. */
732 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
733 return false;
734 if (known_eq (vro2->off, -1))
735 break;
736 off2 += vro2->off;
738 if (maybe_ne (off1, off2))
739 return false;
740 if (deref1 && vro1->opcode == ADDR_EXPR)
742 memset (&tem1, 0, sizeof (tem1));
743 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
744 tem1.type = TREE_TYPE (tem1.op0);
745 tem1.opcode = TREE_CODE (tem1.op0);
746 vro1 = &tem1;
747 deref1 = false;
749 if (deref2 && vro2->opcode == ADDR_EXPR)
751 memset (&tem2, 0, sizeof (tem2));
752 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
753 tem2.type = TREE_TYPE (tem2.op0);
754 tem2.opcode = TREE_CODE (tem2.op0);
755 vro2 = &tem2;
756 deref2 = false;
758 if (deref1 != deref2)
759 return false;
760 if (!vn_reference_op_eq (vro1, vro2))
761 return false;
762 ++j;
763 ++i;
765 while (vr1->operands.length () != i
766 || vr2->operands.length () != j);
768 return true;
771 /* Copy the operations present in load/store REF into RESULT, a vector of
772 vn_reference_op_s's. */
774 static void
775 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
777 if (TREE_CODE (ref) == TARGET_MEM_REF)
779 vn_reference_op_s temp;
781 result->reserve (3);
783 memset (&temp, 0, sizeof (temp));
784 temp.type = TREE_TYPE (ref);
785 temp.opcode = TREE_CODE (ref);
786 temp.op0 = TMR_INDEX (ref);
787 temp.op1 = TMR_STEP (ref);
788 temp.op2 = TMR_OFFSET (ref);
789 temp.off = -1;
790 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
791 temp.base = MR_DEPENDENCE_BASE (ref);
792 result->quick_push (temp);
794 memset (&temp, 0, sizeof (temp));
795 temp.type = NULL_TREE;
796 temp.opcode = ERROR_MARK;
797 temp.op0 = TMR_INDEX2 (ref);
798 temp.off = -1;
799 result->quick_push (temp);
801 memset (&temp, 0, sizeof (temp));
802 temp.type = NULL_TREE;
803 temp.opcode = TREE_CODE (TMR_BASE (ref));
804 temp.op0 = TMR_BASE (ref);
805 temp.off = -1;
806 result->quick_push (temp);
807 return;
810 /* For non-calls, store the information that makes up the address. */
811 tree orig = ref;
812 while (ref)
814 vn_reference_op_s temp;
816 memset (&temp, 0, sizeof (temp));
817 temp.type = TREE_TYPE (ref);
818 temp.opcode = TREE_CODE (ref);
819 temp.off = -1;
821 switch (temp.opcode)
823 case MODIFY_EXPR:
824 temp.op0 = TREE_OPERAND (ref, 1);
825 break;
826 case WITH_SIZE_EXPR:
827 temp.op0 = TREE_OPERAND (ref, 1);
828 temp.off = 0;
829 break;
830 case MEM_REF:
831 /* The base address gets its own vn_reference_op_s structure. */
832 temp.op0 = TREE_OPERAND (ref, 1);
833 if (!mem_ref_offset (ref).to_shwi (&temp.off))
834 temp.off = -1;
835 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
836 temp.base = MR_DEPENDENCE_BASE (ref);
837 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
838 break;
839 case BIT_FIELD_REF:
840 /* Record bits, position and storage order. */
841 temp.op0 = TREE_OPERAND (ref, 1);
842 temp.op1 = TREE_OPERAND (ref, 2);
843 if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
844 temp.off = -1;
845 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
846 break;
847 case COMPONENT_REF:
848 /* The field decl is enough to unambiguously specify the field,
849 a matching type is not necessary and a mismatching type
850 is always a spurious difference. */
851 temp.type = NULL_TREE;
852 temp.op0 = TREE_OPERAND (ref, 1);
853 temp.op1 = TREE_OPERAND (ref, 2);
855 tree this_offset = component_ref_field_offset (ref);
856 if (this_offset
857 && poly_int_tree_p (this_offset))
859 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
860 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
862 poly_offset_int off
863 = (wi::to_poly_offset (this_offset)
864 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
 865                 /* Prohibit value-numbering zero offset components
866 of addresses the same before the pass folding
867 __builtin_object_size had a chance to run
868 (checking cfun->after_inlining does the
869 trick here). */
870 if (TREE_CODE (orig) != ADDR_EXPR
871 || maybe_ne (off, 0)
872 || cfun->after_inlining)
873 off.to_shwi (&temp.off);
877 break;
878 case ARRAY_RANGE_REF:
879 case ARRAY_REF:
881 tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
882 /* Record index as operand. */
883 temp.op0 = TREE_OPERAND (ref, 1);
884 /* Always record lower bounds and element size. */
885 temp.op1 = array_ref_low_bound (ref);
886 /* But record element size in units of the type alignment. */
887 temp.op2 = TREE_OPERAND (ref, 3);
888 temp.align = eltype->type_common.align;
889 if (! temp.op2)
890 temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
891 size_int (TYPE_ALIGN_UNIT (eltype)));
892 if (poly_int_tree_p (temp.op0)
893 && poly_int_tree_p (temp.op1)
894 && TREE_CODE (temp.op2) == INTEGER_CST)
896 poly_offset_int off = ((wi::to_poly_offset (temp.op0)
897 - wi::to_poly_offset (temp.op1))
898 * wi::to_offset (temp.op2)
899 * vn_ref_op_align_unit (&temp));
900 off.to_shwi (&temp.off);
903 break;
904 case VAR_DECL:
905 if (DECL_HARD_REGISTER (ref))
907 temp.op0 = ref;
908 break;
910 /* Fallthru. */
911 case PARM_DECL:
912 case CONST_DECL:
913 case RESULT_DECL:
914 /* Canonicalize decls to MEM[&decl] which is what we end up with
915 when valueizing MEM[ptr] with ptr = &decl. */
916 temp.opcode = MEM_REF;
917 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
918 temp.off = 0;
919 result->safe_push (temp);
920 temp.opcode = ADDR_EXPR;
921 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
922 temp.type = TREE_TYPE (temp.op0);
923 temp.off = -1;
924 break;
925 case STRING_CST:
926 case INTEGER_CST:
927 case COMPLEX_CST:
928 case VECTOR_CST:
929 case REAL_CST:
930 case FIXED_CST:
931 case CONSTRUCTOR:
932 case SSA_NAME:
933 temp.op0 = ref;
934 break;
935 case ADDR_EXPR:
936 if (is_gimple_min_invariant (ref))
938 temp.op0 = ref;
939 break;
941 break;
942 /* These are only interesting for their operands, their
943 existence, and their type. They will never be the last
 944          ref in the chain of references (i.e. they require an
945 operand), so we don't have to put anything
946 for op* as it will be handled by the iteration */
947 case REALPART_EXPR:
948 temp.off = 0;
949 break;
950 case VIEW_CONVERT_EXPR:
951 temp.off = 0;
952 temp.reverse = storage_order_barrier_p (ref);
953 break;
954 case IMAGPART_EXPR:
955 /* This is only interesting for its constant offset. */
956 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
957 break;
958 default:
959 gcc_unreachable ();
961 result->safe_push (temp);
963 if (REFERENCE_CLASS_P (ref)
964 || TREE_CODE (ref) == MODIFY_EXPR
965 || TREE_CODE (ref) == WITH_SIZE_EXPR
966 || (TREE_CODE (ref) == ADDR_EXPR
967 && !is_gimple_min_invariant (ref)))
968 ref = TREE_OPERAND (ref, 0);
969 else
970 ref = NULL_TREE;
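/* Illustration of the decl canonicalization above: a direct load from a
   variable "a" is recorded as the operand pair { MEM_REF, op0 = 0 } followed
   by { ADDR_EXPR, op0 = &a }, i.e. as MEM[&a + 0], so it hashes and compares
   the same as a valueized MEM[p_1] with p_1 = &a.  */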
 974 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
975 operands in *OPS, the reference alias set SET and the reference type TYPE.
976 Return true if something useful was produced. */
978 bool
979 ao_ref_init_from_vn_reference (ao_ref *ref,
980 alias_set_type set, tree type,
981 vec<vn_reference_op_s> ops)
983 vn_reference_op_t op;
984 unsigned i;
985 tree base = NULL_TREE;
986 tree *op0_p = &base;
987 poly_offset_int offset = 0;
988 poly_offset_int max_size;
989 poly_offset_int size = -1;
990 tree size_tree = NULL_TREE;
991 alias_set_type base_alias_set = -1;
993 /* First get the final access size from just the outermost expression. */
994 op = &ops[0];
995 if (op->opcode == COMPONENT_REF)
996 size_tree = DECL_SIZE (op->op0);
997 else if (op->opcode == BIT_FIELD_REF)
998 size_tree = op->op0;
999 else
1001 machine_mode mode = TYPE_MODE (type);
1002 if (mode == BLKmode)
1003 size_tree = TYPE_SIZE (type);
1004 else
1005 size = GET_MODE_BITSIZE (mode);
1007 if (size_tree != NULL_TREE
1008 && poly_int_tree_p (size_tree))
1009 size = wi::to_poly_offset (size_tree);
1011 /* Initially, maxsize is the same as the accessed element size.
1012 In the following it will only grow (or become -1). */
1013 max_size = size;
1015 /* Compute cumulative bit-offset for nested component-refs and array-refs,
1016 and find the ultimate containing object. */
1017 FOR_EACH_VEC_ELT (ops, i, op)
1019 switch (op->opcode)
1021 /* These may be in the reference ops, but we cannot do anything
1022 sensible with them here. */
1023 case ADDR_EXPR:
1024 /* Apart from ADDR_EXPR arguments to MEM_REF. */
1025 if (base != NULL_TREE
1026 && TREE_CODE (base) == MEM_REF
1027 && op->op0
1028 && DECL_P (TREE_OPERAND (op->op0, 0)))
1030 vn_reference_op_t pop = &ops[i-1];
1031 base = TREE_OPERAND (op->op0, 0);
1032 if (known_eq (pop->off, -1))
1034 max_size = -1;
1035 offset = 0;
1037 else
1038 offset += pop->off * BITS_PER_UNIT;
1039 op0_p = NULL;
1040 break;
1042 /* Fallthru. */
1043 case CALL_EXPR:
1044 return false;
1046 /* Record the base objects. */
1047 case MEM_REF:
1048 base_alias_set = get_deref_alias_set (op->op0);
1049 *op0_p = build2 (MEM_REF, op->type,
1050 NULL_TREE, op->op0);
1051 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
1052 MR_DEPENDENCE_BASE (*op0_p) = op->base;
1053 op0_p = &TREE_OPERAND (*op0_p, 0);
1054 break;
1056 case VAR_DECL:
1057 case PARM_DECL:
1058 case RESULT_DECL:
1059 case SSA_NAME:
1060 *op0_p = op->op0;
1061 op0_p = NULL;
1062 break;
1064 /* And now the usual component-reference style ops. */
1065 case BIT_FIELD_REF:
1066 offset += wi::to_poly_offset (op->op1);
1067 break;
1069 case COMPONENT_REF:
1071 tree field = op->op0;
1072 /* We do not have a complete COMPONENT_REF tree here so we
1073 cannot use component_ref_field_offset. Do the interesting
1074 parts manually. */
1075 tree this_offset = DECL_FIELD_OFFSET (field);
1077 if (op->op1 || !poly_int_tree_p (this_offset))
1078 max_size = -1;
1079 else
1081 poly_offset_int woffset = (wi::to_poly_offset (this_offset)
1082 << LOG2_BITS_PER_UNIT);
1083 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1084 offset += woffset;
1086 break;
1089 case ARRAY_RANGE_REF:
1090 case ARRAY_REF:
1091 /* We recorded the lower bound and the element size. */
1092 if (!poly_int_tree_p (op->op0)
1093 || !poly_int_tree_p (op->op1)
1094 || TREE_CODE (op->op2) != INTEGER_CST)
1095 max_size = -1;
1096 else
1098 poly_offset_int woffset
1099 = wi::sext (wi::to_poly_offset (op->op0)
1100 - wi::to_poly_offset (op->op1),
1101 TYPE_PRECISION (TREE_TYPE (op->op0)));
1102 woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1103 woffset <<= LOG2_BITS_PER_UNIT;
1104 offset += woffset;
1106 break;
1108 case REALPART_EXPR:
1109 break;
1111 case IMAGPART_EXPR:
1112 offset += size;
1113 break;
1115 case VIEW_CONVERT_EXPR:
1116 break;
1118 case STRING_CST:
1119 case INTEGER_CST:
1120 case COMPLEX_CST:
1121 case VECTOR_CST:
1122 case REAL_CST:
1123 case CONSTRUCTOR:
1124 case CONST_DECL:
1125 return false;
1127 default:
1128 return false;
1132 if (base == NULL_TREE)
1133 return false;
1135 ref->ref = NULL_TREE;
1136 ref->base = base;
1137 ref->ref_alias_set = set;
1138 if (base_alias_set != -1)
1139 ref->base_alias_set = base_alias_set;
1140 else
1141 ref->base_alias_set = get_alias_set (base);
1142 /* We discount volatiles from value-numbering elsewhere. */
1143 ref->volatile_p = false;
1145 if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
1147 ref->offset = 0;
1148 ref->size = -1;
1149 ref->max_size = -1;
1150 return true;
1153 if (!offset.to_shwi (&ref->offset))
1155 ref->offset = 0;
1156 ref->max_size = -1;
1157 return true;
1160 if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
1161 ref->max_size = -1;
1163 return true;
1166 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1167 vn_reference_op_s's. */
1169 static void
1170 copy_reference_ops_from_call (gcall *call,
1171 vec<vn_reference_op_s> *result)
1173 vn_reference_op_s temp;
1174 unsigned i;
1175 tree lhs = gimple_call_lhs (call);
1176 int lr;
1178 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1179 different. By adding the lhs here in the vector, we ensure that the
1180 hashcode is different, guaranteeing a different value number. */
1181 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1183 memset (&temp, 0, sizeof (temp));
1184 temp.opcode = MODIFY_EXPR;
1185 temp.type = TREE_TYPE (lhs);
1186 temp.op0 = lhs;
1187 temp.off = -1;
1188 result->safe_push (temp);
1191 /* Copy the type, opcode, function, static chain and EH region, if any. */
1192 memset (&temp, 0, sizeof (temp));
1193 temp.type = gimple_call_return_type (call);
1194 temp.opcode = CALL_EXPR;
1195 temp.op0 = gimple_call_fn (call);
1196 temp.op1 = gimple_call_chain (call);
1197 if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1198 temp.op2 = size_int (lr);
1199 temp.off = -1;
1200 result->safe_push (temp);
1202 /* Copy the call arguments. As they can be references as well,
1203 just chain them together. */
1204 for (i = 0; i < gimple_call_num_args (call); ++i)
1206 tree callarg = gimple_call_arg (call, i);
1207 copy_reference_ops_from_ref (callarg, result);
1211 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1212 *I_P to point to the last element of the replacement. */
1213 static bool
1214 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1215 unsigned int *i_p)
1217 unsigned int i = *i_p;
1218 vn_reference_op_t op = &(*ops)[i];
1219 vn_reference_op_t mem_op = &(*ops)[i - 1];
1220 tree addr_base;
1221 poly_int64 addr_offset = 0;
1223 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1224 from .foo.bar to the preceding MEM_REF offset and replace the
1225 address with &OBJ. */
1226 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
1227 &addr_offset);
1228 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1229 if (addr_base != TREE_OPERAND (op->op0, 0))
1231 poly_offset_int off
1232 = (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
1233 SIGNED)
1234 + addr_offset);
1235 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1236 op->op0 = build_fold_addr_expr (addr_base);
1237 if (tree_fits_shwi_p (mem_op->op0))
1238 mem_op->off = tree_to_shwi (mem_op->op0);
1239 else
1240 mem_op->off = -1;
1241 return true;
1243 return false;
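/* Example of the folding above (made-up names): for operands representing
   MEM[&obj.foo + 8] the ADDR_EXPR is rewritten to &obj and the unit offset
   of .foo is added to the preceding MEM_REF's constant offset, leaving
   MEM[&obj + (8 + offsetof foo)].  */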
1246 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1247 *I_P to point to the last element of the replacement. */
1248 static bool
1249 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1250 unsigned int *i_p)
1252 unsigned int i = *i_p;
1253 vn_reference_op_t op = &(*ops)[i];
1254 vn_reference_op_t mem_op = &(*ops)[i - 1];
1255 gimple *def_stmt;
1256 enum tree_code code;
1257 poly_offset_int off;
1259 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1260 if (!is_gimple_assign (def_stmt))
1261 return false;
1263 code = gimple_assign_rhs_code (def_stmt);
1264 if (code != ADDR_EXPR
1265 && code != POINTER_PLUS_EXPR)
1266 return false;
1268 off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);
1270 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1271 from .foo.bar to the preceding MEM_REF offset and replace the
1272 address with &OBJ. */
1273 if (code == ADDR_EXPR)
1275 tree addr, addr_base;
1276 poly_int64 addr_offset;
1278 addr = gimple_assign_rhs1 (def_stmt);
1279 addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
1280 &addr_offset);
1281       /* If that didn't work because the address isn't invariant, propagate
1282 the reference tree from the address operation in case the current
1283 dereference isn't offsetted. */
1284 if (!addr_base
1285 && *i_p == ops->length () - 1
1286 && known_eq (off, 0)
1287 /* This makes us disable this transform for PRE where the
1288 reference ops might be also used for code insertion which
1289 is invalid. */
1290 && default_vn_walk_kind == VN_WALKREWRITE)
1292 auto_vec<vn_reference_op_s, 32> tem;
1293 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1294 /* Make sure to preserve TBAA info. The only objects not
1295 wrapped in MEM_REFs that can have their address taken are
1296 STRING_CSTs. */
1297 if (tem.length () >= 2
1298 && tem[tem.length () - 2].opcode == MEM_REF)
1300 vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1301 new_mem_op->op0
1302 = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1303 wi::to_poly_wide (new_mem_op->op0));
1305 else
1306 gcc_assert (tem.last ().opcode == STRING_CST);
1307 ops->pop ();
1308 ops->pop ();
1309 ops->safe_splice (tem);
1310 --*i_p;
1311 return true;
1313 if (!addr_base
1314 || TREE_CODE (addr_base) != MEM_REF
1315 || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
1316 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base, 0))))
1317 return false;
1319 off += addr_offset;
1320 off += mem_ref_offset (addr_base);
1321 op->op0 = TREE_OPERAND (addr_base, 0);
1323 else
1325 tree ptr, ptroff;
1326 ptr = gimple_assign_rhs1 (def_stmt);
1327 ptroff = gimple_assign_rhs2 (def_stmt);
1328 if (TREE_CODE (ptr) != SSA_NAME
1329 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1330 /* Make sure to not endlessly recurse.
1331 See gcc.dg/tree-ssa/20040408-1.c for an example. Can easily
1332 happen when we value-number a PHI to its backedge value. */
1333 || SSA_VAL (ptr) == op->op0
1334 || !poly_int_tree_p (ptroff))
1335 return false;
1337 off += wi::to_poly_offset (ptroff);
1338 op->op0 = ptr;
1341 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1342 if (tree_fits_shwi_p (mem_op->op0))
1343 mem_op->off = tree_to_shwi (mem_op->op0);
1344 else
1345 mem_op->off = -1;
1346 /* ??? Can end up with endless recursion here!?
1347 gcc.c-torture/execute/strcmp-1.c */
1348 if (TREE_CODE (op->op0) == SSA_NAME)
1349 op->op0 = SSA_VAL (op->op0);
1350 if (TREE_CODE (op->op0) != SSA_NAME)
1351 op->opcode = TREE_CODE (op->op0);
1353 /* And recurse. */
1354 if (TREE_CODE (op->op0) == SSA_NAME)
1355 vn_reference_maybe_forwprop_address (ops, i_p);
1356 else if (TREE_CODE (op->op0) == ADDR_EXPR)
1357 vn_reference_fold_indirect (ops, i_p);
1358 return true;
1361 /* Optimize the reference REF to a constant if possible or return
1362 NULL_TREE if not. */
1364 tree
1365 fully_constant_vn_reference_p (vn_reference_t ref)
1367 vec<vn_reference_op_s> operands = ref->operands;
1368 vn_reference_op_t op;
1370 /* Try to simplify the translated expression if it is
1371 a call to a builtin function with at most two arguments. */
1372 op = &operands[0];
1373 if (op->opcode == CALL_EXPR
1374 && TREE_CODE (op->op0) == ADDR_EXPR
1375 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1376 && fndecl_built_in_p (TREE_OPERAND (op->op0, 0))
1377 && operands.length () >= 2
1378 && operands.length () <= 3)
1380 vn_reference_op_t arg0, arg1 = NULL;
1381 bool anyconst = false;
1382 arg0 = &operands[1];
1383 if (operands.length () > 2)
1384 arg1 = &operands[2];
1385 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1386 || (arg0->opcode == ADDR_EXPR
1387 && is_gimple_min_invariant (arg0->op0)))
1388 anyconst = true;
1389 if (arg1
1390 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1391 || (arg1->opcode == ADDR_EXPR
1392 && is_gimple_min_invariant (arg1->op0))))
1393 anyconst = true;
1394 if (anyconst)
1396 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1397 arg1 ? 2 : 1,
1398 arg0->op0,
1399 arg1 ? arg1->op0 : NULL);
1400 if (folded
1401 && TREE_CODE (folded) == NOP_EXPR)
1402 folded = TREE_OPERAND (folded, 0);
1403 if (folded
1404 && is_gimple_min_invariant (folded))
1405 return folded;
1409 /* Simplify reads from constants or constant initializers. */
1410 else if (BITS_PER_UNIT == 8
1411 && COMPLETE_TYPE_P (ref->type)
1412 && is_gimple_reg_type (ref->type)
1413 && (!INTEGRAL_TYPE_P (ref->type)
1414 || TYPE_PRECISION (ref->type) % BITS_PER_UNIT == 0))
1416 poly_int64 off = 0;
1417 HOST_WIDE_INT size;
1418 if (INTEGRAL_TYPE_P (ref->type))
1419 size = TYPE_PRECISION (ref->type);
1420 else
1421 size = tree_to_shwi (TYPE_SIZE (ref->type));
1422 if (size % BITS_PER_UNIT != 0
1423 || size > MAX_BITSIZE_MODE_ANY_MODE)
1424 return NULL_TREE;
1425 size /= BITS_PER_UNIT;
1426 unsigned i;
1427 for (i = 0; i < operands.length (); ++i)
1429 if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1431 ++i;
1432 break;
1434 if (known_eq (operands[i].off, -1))
1435 return NULL_TREE;
1436 off += operands[i].off;
1437 if (operands[i].opcode == MEM_REF)
1439 ++i;
1440 break;
1443 vn_reference_op_t base = &operands[--i];
1444 tree ctor = error_mark_node;
1445 tree decl = NULL_TREE;
1446 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1447 ctor = base->op0;
1448 else if (base->opcode == MEM_REF
1449 && base[1].opcode == ADDR_EXPR
1450 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1451 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
1452 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
1454 decl = TREE_OPERAND (base[1].op0, 0);
1455 if (TREE_CODE (decl) == STRING_CST)
1456 ctor = decl;
1457 else
1458 ctor = ctor_for_folding (decl);
1460 if (ctor == NULL_TREE)
1461 return build_zero_cst (ref->type);
1462 else if (ctor != error_mark_node)
1464 HOST_WIDE_INT const_off;
1465 if (decl)
1467 tree res = fold_ctor_reference (ref->type, ctor,
1468 off * BITS_PER_UNIT,
1469 size * BITS_PER_UNIT, decl);
1470 if (res)
1472 STRIP_USELESS_TYPE_CONVERSION (res);
1473 if (is_gimple_min_invariant (res))
1474 return res;
1477 else if (off.is_constant (&const_off))
1479 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1480 int len = native_encode_expr (ctor, buf, size, const_off);
1481 if (len > 0)
1482 return native_interpret_expr (ref->type, buf, len);
1487 return NULL_TREE;
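/* Illustrative cases handled above: a call such as "__builtin_sqrt (4.0)"
   with constant arguments folds via build_call_expr, and a load at a
   constant offset from a readonly variable like
   "static const int tab[2] = {1, 2}" folds through its initializer
   (ctor_for_folding / fold_ctor_reference) to the constant element.  */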
1490 /* Return true if OPS contain a storage order barrier. */
1492 static bool
1493 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1495 vn_reference_op_t op;
1496 unsigned i;
1498 FOR_EACH_VEC_ELT (ops, i, op)
1499 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1500 return true;
1502 return false;
1505 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1506 structures into their value numbers. This is done in-place, and
1507 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1508 whether any operands were valueized. */
1510 static vec<vn_reference_op_s>
1511 valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything,
1512 bool with_avail = false)
1514 vn_reference_op_t vro;
1515 unsigned int i;
1517 *valueized_anything = false;
1519 FOR_EACH_VEC_ELT (orig, i, vro)
1521 if (vro->opcode == SSA_NAME
1522 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1524 tree tem = with_avail ? vn_valueize (vro->op0) : SSA_VAL (vro->op0);
1525 if (tem != vro->op0)
1527 *valueized_anything = true;
1528 vro->op0 = tem;
1530 /* If it transforms from an SSA_NAME to a constant, update
1531 the opcode. */
1532 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1533 vro->opcode = TREE_CODE (vro->op0);
1535 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1537 tree tem = with_avail ? vn_valueize (vro->op1) : SSA_VAL (vro->op1);
1538 if (tem != vro->op1)
1540 *valueized_anything = true;
1541 vro->op1 = tem;
1544 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1546 tree tem = with_avail ? vn_valueize (vro->op2) : SSA_VAL (vro->op2);
1547 if (tem != vro->op2)
1549 *valueized_anything = true;
1550 vro->op2 = tem;
1553 /* If it transforms from an SSA_NAME to an address, fold with
1554 a preceding indirect reference. */
1555 if (i > 0
1556 && vro->op0
1557 && TREE_CODE (vro->op0) == ADDR_EXPR
1558 && orig[i - 1].opcode == MEM_REF)
1560 if (vn_reference_fold_indirect (&orig, &i))
1561 *valueized_anything = true;
1563 else if (i > 0
1564 && vro->opcode == SSA_NAME
1565 && orig[i - 1].opcode == MEM_REF)
1567 if (vn_reference_maybe_forwprop_address (&orig, &i))
1568 *valueized_anything = true;
1570 /* If it transforms a non-constant ARRAY_REF into a constant
1571 one, adjust the constant offset. */
1572 else if (vro->opcode == ARRAY_REF
1573 && known_eq (vro->off, -1)
1574 && poly_int_tree_p (vro->op0)
1575 && poly_int_tree_p (vro->op1)
1576 && TREE_CODE (vro->op2) == INTEGER_CST)
1578 poly_offset_int off = ((wi::to_poly_offset (vro->op0)
1579 - wi::to_poly_offset (vro->op1))
1580 * wi::to_offset (vro->op2)
1581 * vn_ref_op_align_unit (vro));
1582 off.to_shwi (&vro->off);
1586 return orig;
1589 static vec<vn_reference_op_s>
1590 valueize_refs (vec<vn_reference_op_s> orig)
1592 bool tem;
1593 return valueize_refs_1 (orig, &tem);
1596 static vec<vn_reference_op_s> shared_lookup_references;
1598 /* Create a vector of vn_reference_op_s structures from REF, a
1599 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1600 this function. *VALUEIZED_ANYTHING will specify whether any
1601 operands were valueized. */
1603 static vec<vn_reference_op_s>
1604 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1606 if (!ref)
1607 return vNULL;
1608 shared_lookup_references.truncate (0);
1609 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1610 shared_lookup_references = valueize_refs_1 (shared_lookup_references,
1611 valueized_anything);
1612 return shared_lookup_references;
1615 /* Create a vector of vn_reference_op_s structures from CALL, a
1616 call statement. The vector is shared among all callers of
1617 this function. */
1619 static vec<vn_reference_op_s>
1620 valueize_shared_reference_ops_from_call (gcall *call)
1622 if (!call)
1623 return vNULL;
1624 shared_lookup_references.truncate (0);
1625 copy_reference_ops_from_call (call, &shared_lookup_references);
1626 shared_lookup_references = valueize_refs (shared_lookup_references);
1627 return shared_lookup_references;
1630 /* Lookup a SCCVN reference operation VR in the current hash table.
1631 Returns the resulting value number if it exists in the hash table,
1632 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1633 vn_reference_t stored in the hashtable if something is found. */
1635 static tree
1636 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1638 vn_reference_s **slot;
1639 hashval_t hash;
1641 hash = vr->hashcode;
1642 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1643 if (slot)
1645 if (vnresult)
1646 *vnresult = (vn_reference_t)*slot;
1647 return ((vn_reference_t)*slot)->result;
1650 return NULL_TREE;
1653 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
1654 with the current VUSE and performs the expression lookup. */
1656 static void *
1657 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
1658 unsigned int cnt, void *vr_)
1660 vn_reference_t vr = (vn_reference_t)vr_;
1661 vn_reference_s **slot;
1662 hashval_t hash;
1664 /* This bounds the stmt walks we perform on reference lookups
1665 to O(1) instead of O(N) where N is the number of dominating
1666 stores. */
1667 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
1668 return (void *)-1;
1670 if (last_vuse_ptr)
1671 *last_vuse_ptr = vuse;
1673 /* Fixup vuse and hash. */
1674 if (vr->vuse)
1675 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
1676 vr->vuse = vuse_ssa_val (vuse);
1677 if (vr->vuse)
1678 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
1680 hash = vr->hashcode;
1681 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1682 if (slot)
1683 return *slot;
1685 return NULL;
1688 /* Lookup an existing or insert a new vn_reference entry into the
1689    value table for the VUSE, SET, TYPE, OPERANDS reference that
1690    has the value VALUE, which is either a constant or an SSA name.  */
1692 static vn_reference_t
1693 vn_reference_lookup_or_insert_for_pieces (tree vuse,
1694 alias_set_type set,
1695 tree type,
1696 vec<vn_reference_op_s,
1697 va_heap> operands,
1698 tree value)
1700 vn_reference_s vr1;
1701 vn_reference_t result;
1702 unsigned value_id;
1703 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1704 vr1.operands = operands;
1705 vr1.type = type;
1706 vr1.set = set;
1707 vr1.hashcode = vn_reference_compute_hash (&vr1);
1708 if (vn_reference_lookup_1 (&vr1, &result))
1709 return result;
1710 if (TREE_CODE (value) == SSA_NAME)
1711 value_id = VN_INFO (value)->value_id;
1712 else
1713 value_id = get_or_alloc_constant_value_id (value);
1714 return vn_reference_insert_pieces (vuse, set, type,
1715 operands.copy (), value, value_id);
1718 /* Return a value-number for RCODE OPS... either by looking up an existing
1719 value-number for the simplified result or by inserting the operation if
1720 INSERT is true. */
1722 static tree
1723 vn_nary_build_or_lookup_1 (gimple_match_op *res_op, bool insert)
1725 tree result = NULL_TREE;
1726 /* We will be creating a value number for
1727 RCODE (OPS...).
1728 So first simplify and lookup this expression to see if it
1729 is already available. */
1730 mprts_hook = vn_lookup_simplify_result;
1731 bool res = false;
1732 switch (TREE_CODE_LENGTH ((tree_code) res_op->code))
1734 case 1:
1735 res = gimple_resimplify1 (NULL, res_op, vn_valueize);
1736 break;
1737 case 2:
1738 res = gimple_resimplify2 (NULL, res_op, vn_valueize);
1739 break;
1740 case 3:
1741 res = gimple_resimplify3 (NULL, res_op, vn_valueize);
1742 break;
1744 mprts_hook = NULL;
1745 gimple *new_stmt = NULL;
1746 if (res
1747 && gimple_simplified_result_is_gimple_val (res_op))
1748 /* The expression is already available. */
1749 result = res_op->ops[0];
1750 else
1752 tree val = vn_lookup_simplify_result (res_op);
1753 if (!val && insert)
1755 gimple_seq stmts = NULL;
1756 result = maybe_push_res_to_seq (res_op, &stmts);
1757 if (result)
1759 gcc_assert (gimple_seq_singleton_p (stmts));
1760 new_stmt = gimple_seq_first_stmt (stmts);
1763 else
1764 /* The expression is already available. */
1765 result = val;
1767 if (new_stmt)
1769 /* The expression is not yet available, value-number lhs to
1770 the new SSA_NAME we created. */
1771 /* Initialize value-number information properly. */
1772 VN_INFO (result)->valnum = result;
1773 VN_INFO (result)->value_id = get_next_value_id ();
1774 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
1775 new_stmt);
1776 VN_INFO (result)->needs_insertion = true;
1777 /* ??? PRE phi-translation inserts NARYs without corresponding
1778 SSA name result. Re-use those but set their result according
1779 to the stmt we just built. */
1780 vn_nary_op_t nary = NULL;
1781 vn_nary_op_lookup_stmt (new_stmt, &nary);
1782 if (nary)
1784 gcc_assert (! nary->predicated_values && nary->u.result == NULL_TREE);
1785 nary->u.result = gimple_assign_lhs (new_stmt);
1787 /* As all "inserted" statements are singleton SCCs, insert
1788 to the valid table. This is strictly needed to
1789 avoid re-generating new value SSA_NAMEs for the same
1790 expression during SCC iteration over and over (the
1791 optimistic table gets cleared after each iteration).
1792 We do not need to insert into the optimistic table, as
1793 lookups there will fall back to the valid table. */
1794 else
1796 unsigned int length = vn_nary_length_from_stmt (new_stmt);
1797 vn_nary_op_t vno1
1798 = alloc_vn_nary_op_noinit (length, &vn_tables_insert_obstack);
1799 vno1->value_id = VN_INFO (result)->value_id;
1800 vno1->length = length;
1801 vno1->predicated_values = 0;
1802 vno1->u.result = result;
1803 init_vn_nary_op_from_stmt (vno1, new_stmt);
1804 vn_nary_op_insert_into (vno1, valid_info->nary, true);
1805 /* Also do not link it into the undo chain. */
1806 last_inserted_nary = vno1->next;
1807 vno1->next = (vn_nary_op_t)(void *)-1;
1809 if (dump_file && (dump_flags & TDF_DETAILS))
1811 fprintf (dump_file, "Inserting name ");
1812 print_generic_expr (dump_file, result);
1813 fprintf (dump_file, " for expression ");
1814 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
1815 fprintf (dump_file, "\n");
1818 return result;
1821 /* Return a value-number for RCODE OPS... either by looking up an existing
1822 value-number for the simplified result or by inserting the operation. */
1824 static tree
1825 vn_nary_build_or_lookup (gimple_match_op *res_op)
1827 return vn_nary_build_or_lookup_1 (res_op, true);
1830 /* Try to simplify the expression RCODE OPS... of type TYPE and return
1831 its value if present. */
1833 tree
1834 vn_nary_simplify (vn_nary_op_t nary)
1836 if (nary->length > gimple_match_op::MAX_NUM_OPS)
1837 return NULL_TREE;
1838 gimple_match_op op (gimple_match_cond::UNCOND, nary->opcode,
1839 nary->type, nary->length);
1840 memcpy (op.ops, nary->op, sizeof (tree) * nary->length);
1841 return vn_nary_build_or_lookup_1 (&op, false);
1844 basic_block vn_context_bb;
1846 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1847 from the statement defining VUSE and if not successful tries to
1848 translate *REFP and VR_ through an aggregate copy at the definition
1849 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1850 of *REF and *VR. If only disambiguation was performed then
1851 *DISAMBIGUATE_ONLY is set to true. */
1853 static void *
1854 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_,
1855 bool *disambiguate_only)
1857 vn_reference_t vr = (vn_reference_t)vr_;
1858 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
1859 tree base = ao_ref_base (ref);
1860 HOST_WIDE_INT offseti, maxsizei;
1861 static vec<vn_reference_op_s> lhs_ops;
1862 ao_ref lhs_ref;
1863 bool lhs_ref_ok = false;
1864 poly_int64 copy_size;
1866 /* If the reference is based on a parameter that was determined as
1867 pointing to readonly memory it doesn't change. */
1868 if (TREE_CODE (base) == MEM_REF
1869 && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
1870 && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
1871 && bitmap_bit_p (const_parms,
1872 SSA_NAME_VERSION (TREE_OPERAND (base, 0))))
1874 *disambiguate_only = true;
1875 return NULL;
1878   /* First try to disambiguate after value-replacing in the definition's LHS.  */
1879 if (is_gimple_assign (def_stmt))
1881 tree lhs = gimple_assign_lhs (def_stmt);
1882 bool valueized_anything = false;
1883 /* Avoid re-allocation overhead. */
1884 lhs_ops.truncate (0);
1885 basic_block saved_rpo_bb = vn_context_bb;
1886 vn_context_bb = gimple_bb (def_stmt);
1887 copy_reference_ops_from_ref (lhs, &lhs_ops);
1888 lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything, true);
1889 vn_context_bb = saved_rpo_bb;
1890 if (valueized_anything)
1892 lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
1893 get_alias_set (lhs),
1894 TREE_TYPE (lhs), lhs_ops);
1895 if (lhs_ref_ok
1896 && !refs_may_alias_p_1 (ref, &lhs_ref, true))
1898 *disambiguate_only = true;
1899 return NULL;
1902 else
1904 ao_ref_init (&lhs_ref, lhs);
1905 lhs_ref_ok = true;
1908 /* If we reach a clobbering statement try to skip it and see if
1909 we find a VN result with exactly the same value as the
1910 possible clobber. In this case we can ignore the clobber
1911 and return the found value.
1912 Note that we don't need to worry about partial overlapping
1913 accesses as we then can use TBAA to disambiguate against the
1914 clobbering statement when looking up a load (thus the
1915 VN_WALKREWRITE guard). */
1916 if (vn_walk_kind == VN_WALKREWRITE
1917 && is_gimple_reg_type (TREE_TYPE (lhs))
1918 && types_compatible_p (TREE_TYPE (lhs), vr->type))
1920 tree *saved_last_vuse_ptr = last_vuse_ptr;
1921 /* Do not update last_vuse_ptr in vn_reference_lookup_2. */
1922 last_vuse_ptr = NULL;
1923 tree saved_vuse = vr->vuse;
1924 hashval_t saved_hashcode = vr->hashcode;
1925 void *res = vn_reference_lookup_2 (ref,
1926 gimple_vuse (def_stmt), 0, vr);
1927 /* Need to restore vr->vuse and vr->hashcode. */
1928 vr->vuse = saved_vuse;
1929 vr->hashcode = saved_hashcode;
1930 last_vuse_ptr = saved_last_vuse_ptr;
1931 if (res && res != (void *)-1)
1933 vn_reference_t vnresult = (vn_reference_t) res;
1934 if (vnresult->result
1935 && operand_equal_p (vnresult->result,
1936 gimple_assign_rhs1 (def_stmt), 0))
1937 return res;
1941 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
1942 && gimple_call_num_args (def_stmt) <= 4)
1944 /* For builtin calls valueize the arguments and query the
1945 alias oracle again. Valueization may improve points-to
1946 info of pointers and constify size and position arguments.
1947 Originally this was motivated by PR61034 which has
1948 conditional calls to free falsely clobbering ref because
1949 of imprecise points-to info of the argument. */
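   /* Illustrative example (added commentary, not part of the original
      sources; names are made up).  With imprecise points-to info a guarded
      call can falsely appear to clobber an unrelated object:

	x_1 = b.i;
	if (cond_2)
	  free (p_3);	<- after valueizing p_3 the oracle may prove the
	y_4 = b.i;	   call cannot clobber b, keeping the load redundant.  */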
1950 tree oldargs[4];
1951 bool valueized_anything = false;
1952 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1954 oldargs[i] = gimple_call_arg (def_stmt, i);
1955 tree val = vn_valueize (oldargs[i]);
1956 if (val != oldargs[i])
1958 gimple_call_set_arg (def_stmt, i, val);
1959 valueized_anything = true;
1962 if (valueized_anything)
1964 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
1965 ref);
1966 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
1967 gimple_call_set_arg (def_stmt, i, oldargs[i]);
1968 if (!res)
1970 *disambiguate_only = true;
1971 return NULL;
1976 if (*disambiguate_only)
1977 return (void *)-1;
1979 /* If we cannot constrain the size of the reference we cannot
1980 test if anything kills it. */
1981 if (!ref->max_size_known_p ())
1982 return (void *)-1;
1984 poly_int64 offset = ref->offset;
1985 poly_int64 maxsize = ref->max_size;
1987 /* We can't deduce anything useful from clobbers. */
1988 if (gimple_clobber_p (def_stmt))
1989 return (void *)-1;
1991 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1992 from that definition.
1993 1) Memset. */
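   /* Illustrative example (added commentary, not part of the original
      sources; names are made up) for case 1):

	__builtin_memset (&a, 0, sizeof (a));
	x_1 = a.i;	<- a load fully covered by the memset is value
			   numbered to 0 (for a non-zero, constant byte
			   value the replicated bytes are re-interpreted
			   in the type of the load instead).  */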
1994 if (is_gimple_reg_type (vr->type)
1995 && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
1996 && (integer_zerop (gimple_call_arg (def_stmt, 1))
1997 || ((TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST
1998 || (INTEGRAL_TYPE_P (vr->type) && known_eq (ref->size, 8)))
1999 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
2000 && offset.is_constant (&offseti)
2001 && offseti % BITS_PER_UNIT == 0))
2002 && poly_int_tree_p (gimple_call_arg (def_stmt, 2))
2003 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2004 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME))
2006 tree base2;
2007 poly_int64 offset2, size2, maxsize2;
2008 bool reverse;
2009 tree ref2 = gimple_call_arg (def_stmt, 0);
2010 if (TREE_CODE (ref2) == SSA_NAME)
2012 ref2 = SSA_VAL (ref2);
2013 if (TREE_CODE (ref2) == SSA_NAME
2014 && (TREE_CODE (base) != MEM_REF
2015 || TREE_OPERAND (base, 0) != ref2))
2017 gimple *def_stmt = SSA_NAME_DEF_STMT (ref2);
2018 if (gimple_assign_single_p (def_stmt)
2019 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2020 ref2 = gimple_assign_rhs1 (def_stmt);
2023 if (TREE_CODE (ref2) == ADDR_EXPR)
2025 ref2 = TREE_OPERAND (ref2, 0);
2026 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
2027 &reverse);
2028 if (!known_size_p (maxsize2)
2029 || !known_eq (maxsize2, size2)
2030 || !operand_equal_p (base, base2, OEP_ADDRESS_OF))
2031 return (void *)-1;
2033 else if (TREE_CODE (ref2) == SSA_NAME)
2035 poly_int64 soff;
2036 if (TREE_CODE (base) != MEM_REF
2037 || !(mem_ref_offset (base) << LOG2_BITS_PER_UNIT).to_shwi (&soff))
2038 return (void *)-1;
2039 offset += soff;
2040 offset2 = 0;
2041 if (TREE_OPERAND (base, 0) != ref2)
2043 gimple *def = SSA_NAME_DEF_STMT (ref2);
2044 if (is_gimple_assign (def)
2045 && gimple_assign_rhs_code (def) == POINTER_PLUS_EXPR
2046 && gimple_assign_rhs1 (def) == TREE_OPERAND (base, 0)
2047 && poly_int_tree_p (gimple_assign_rhs2 (def))
2048 && (wi::to_poly_offset (gimple_assign_rhs2 (def))
2049 << LOG2_BITS_PER_UNIT).to_shwi (&offset2))
2051 ref2 = gimple_assign_rhs1 (def);
2052 if (TREE_CODE (ref2) == SSA_NAME)
2053 ref2 = SSA_VAL (ref2);
2055 else
2056 return (void *)-1;
2059 else
2060 return (void *)-1;
2061 tree len = gimple_call_arg (def_stmt, 2);
2062 if (known_subrange_p (offset, maxsize, offset2,
2063 wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
2065 tree val;
2066 if (integer_zerop (gimple_call_arg (def_stmt, 1)))
2067 val = build_zero_cst (vr->type);
2068 else if (INTEGRAL_TYPE_P (vr->type)
2069 && known_eq (ref->size, 8))
2071 gimple_match_op res_op (gimple_match_cond::UNCOND, NOP_EXPR,
2072 vr->type, gimple_call_arg (def_stmt, 1));
2073 val = vn_nary_build_or_lookup (&res_op);
2074 if (!val
2075 || (TREE_CODE (val) == SSA_NAME
2076 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2077 return (void *)-1;
2079 else
2081 unsigned len = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr->type));
2082 unsigned char *buf = XALLOCAVEC (unsigned char, len);
2083 memset (buf, TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 1)),
2084 len);
2085 val = native_interpret_expr (vr->type, buf, len);
2086 if (!val)
2087 return (void *)-1;
2089 return vn_reference_lookup_or_insert_for_pieces
2090 (vuse, vr->set, vr->type, vr->operands, val);
2094 /* 2) Assignment from an empty CONSTRUCTOR. */
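   /* Illustrative example (added commentary, not part of the original
      sources; names are made up) for case 2):

	a = {};		<- aggregate cleared by an empty CONSTRUCTOR
	x_1 = a.i;	<- a covered load is value numbered to the zero
			   constant of its type.  */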
2095 else if (is_gimple_reg_type (vr->type)
2096 && gimple_assign_single_p (def_stmt)
2097 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
2098 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
2100 tree base2;
2101 poly_int64 offset2, size2, maxsize2;
2102 bool reverse;
2103 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2104 &offset2, &size2, &maxsize2, &reverse);
2105 if (known_size_p (maxsize2)
2106 && operand_equal_p (base, base2, 0)
2107 && known_subrange_p (offset, maxsize, offset2, size2))
2109 tree val = build_zero_cst (vr->type);
2110 return vn_reference_lookup_or_insert_for_pieces
2111 (vuse, vr->set, vr->type, vr->operands, val);
2115 /* 3) Assignment from a constant. We can use fold's native encode/interpret
2116 routines to extract the assigned bits. */
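   /* Illustrative example (added commentary, not part of the original
      sources; names are made up) for case 3):

	u.l = 0x100000002;	<- constant store of 8 bytes
	x_1 = u.i[1];		<- 4-byte load; the stored constant is byte
				   encoded with native_encode_expr and the
				   covered bytes re-interpreted as an int.  */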
2117 else if (known_eq (ref->size, maxsize)
2118 && is_gimple_reg_type (vr->type)
2119 && !contains_storage_order_barrier_p (vr->operands)
2120 && gimple_assign_single_p (def_stmt)
2121 && CHAR_BIT == 8 && BITS_PER_UNIT == 8
2122 /* native_encode and native_interpret operate on arrays of bytes
2123 and so fundamentally need a compile-time size and offset. */
2124 && maxsize.is_constant (&maxsizei)
2125 && maxsizei % BITS_PER_UNIT == 0
2126 && offset.is_constant (&offseti)
2127 && offseti % BITS_PER_UNIT == 0
2128 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
2129 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2130 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
2132 tree base2;
2133 HOST_WIDE_INT offset2, size2;
2134 bool reverse;
2135 base2 = get_ref_base_and_extent_hwi (gimple_assign_lhs (def_stmt),
2136 &offset2, &size2, &reverse);
2137 if (base2
2138 && !reverse
2139 && size2 % BITS_PER_UNIT == 0
2140 && offset2 % BITS_PER_UNIT == 0
2141 && operand_equal_p (base, base2, 0)
2142 && known_subrange_p (offseti, maxsizei, offset2, size2))
2144 /* We support up to 512-bit values (for V8DFmode). */
2145 unsigned char buffer[64];
2146 int len;
2148 tree rhs = gimple_assign_rhs1 (def_stmt);
2149 if (TREE_CODE (rhs) == SSA_NAME)
2150 rhs = SSA_VAL (rhs);
2151 len = native_encode_expr (rhs,
2152 buffer, sizeof (buffer),
2153 (offseti - offset2) / BITS_PER_UNIT);
2154 if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
2156 tree type = vr->type;
2157 /* Make sure to interpret in a type that has a range
2158 covering the whole access size. */
2159 if (INTEGRAL_TYPE_P (vr->type)
2160 && maxsizei != TYPE_PRECISION (vr->type))
2161 type = build_nonstandard_integer_type (maxsizei,
2162 TYPE_UNSIGNED (type));
2163 tree val = native_interpret_expr (type, buffer,
2164 maxsizei / BITS_PER_UNIT);
2165 /* If we chop off bits because the type's precision doesn't
2166 match the memory access size, this is ok when optimizing
2167 reads but not when called from the DSE code during
2168 elimination. */
2169 if (val
2170 && type != vr->type)
2172 if (! int_fits_type_p (val, vr->type))
2173 val = NULL_TREE;
2174 else
2175 val = fold_convert (vr->type, val);
2178 if (val)
2179 return vn_reference_lookup_or_insert_for_pieces
2180 (vuse, vr->set, vr->type, vr->operands, val);
2185 /* 4) Assignment from an SSA name whose definition we may be able
2186 to access pieces from. */
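   /* Illustrative example (added commentary, not part of the original
      sources; names are made up) for case 4):

	d.v = v_1;	<- whole-object store from an SSA name
	x_2 = d.v[1];	<- the covered load is rewritten to
			   BIT_FIELD_REF <v_1, 32, 32> and looked up or
			   built as a new statement.  */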
2187 else if (known_eq (ref->size, maxsize)
2188 && is_gimple_reg_type (vr->type)
2189 && !contains_storage_order_barrier_p (vr->operands)
2190 && gimple_assign_single_p (def_stmt)
2191 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
2193 tree base2;
2194 poly_int64 offset2, size2, maxsize2;
2195 bool reverse;
2196 base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
2197 &offset2, &size2, &maxsize2,
2198 &reverse);
2199 if (!reverse
2200 && known_size_p (maxsize2)
2201 && known_eq (maxsize2, size2)
2202 && operand_equal_p (base, base2, 0)
2203 && known_subrange_p (offset, maxsize, offset2, size2)
2204 /* ??? We can't handle bitfield precision extracts without
2205 either using an alternate type for the BIT_FIELD_REF and
2206 then doing a conversion or possibly adjusting the offset
2207 according to endianness. */
2208 && (! INTEGRAL_TYPE_P (vr->type)
2209 || known_eq (ref->size, TYPE_PRECISION (vr->type)))
2210 && multiple_p (ref->size, BITS_PER_UNIT))
2212 gimple_match_op op (gimple_match_cond::UNCOND,
2213 BIT_FIELD_REF, vr->type,
2214 vn_valueize (gimple_assign_rhs1 (def_stmt)),
2215 bitsize_int (ref->size),
2216 bitsize_int (offset - offset2));
2217 tree val = vn_nary_build_or_lookup (&op);
2218 if (val
2219 && (TREE_CODE (val) != SSA_NAME
2220 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2222 vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
2223 (vuse, vr->set, vr->type, vr->operands, val);
2224 return res;
2229 /* 5) For aggregate copies translate the reference through them if
2230 the copy kills ref. */
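   /* Illustrative example (added commentary, not part of the original
      sources; names are made up) for case 5):

	a = b;		<- aggregate copy that kills the looked-up ref
	x_1 = a.s.i;	<- the lookup is translated to b.s.i and the walk
			   continues, so earlier stores to b can be reused.  */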
2231 else if (vn_walk_kind == VN_WALKREWRITE
2232 && gimple_assign_single_p (def_stmt)
2233 && (DECL_P (gimple_assign_rhs1 (def_stmt))
2234 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
2235 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
2237 tree base2;
2238 int i, j, k;
2239 auto_vec<vn_reference_op_s> rhs;
2240 vn_reference_op_t vro;
2241 ao_ref r;
2243 if (!lhs_ref_ok)
2244 return (void *)-1;
2246 /* See if the assignment kills REF. */
2247 base2 = ao_ref_base (&lhs_ref);
2248 if (!lhs_ref.max_size_known_p ()
2249 || (base != base2
2250 && (TREE_CODE (base) != MEM_REF
2251 || TREE_CODE (base2) != MEM_REF
2252 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
2253 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
2254 TREE_OPERAND (base2, 1))))
2255 || !stmt_kills_ref_p (def_stmt, ref))
2256 return (void *)-1;
2258 /* Find the common base of ref and the lhs. lhs_ops already
2259 contains valueized operands for the lhs. */
2260 i = vr->operands.length () - 1;
2261 j = lhs_ops.length () - 1;
2262 while (j >= 0 && i >= 0
2263 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
2265 i--;
2266 j--;
2269 /* ??? The innermost op should always be a MEM_REF and we already
2270 checked that the assignment to the lhs kills vr. Thus for
2271 aggregate copies using char[] types the vn_reference_op_eq
2272 may fail when comparing types for compatibility. But we really
2273 don't care here - further lookups with the rewritten operands
2274 will simply fail if we messed up types too badly. */
2275 poly_int64 extra_off = 0;
2276 if (j == 0 && i >= 0
2277 && lhs_ops[0].opcode == MEM_REF
2278 && maybe_ne (lhs_ops[0].off, -1))
2280 if (known_eq (lhs_ops[0].off, vr->operands[i].off))
2281 i--, j--;
2282 else if (vr->operands[i].opcode == MEM_REF
2283 && maybe_ne (vr->operands[i].off, -1))
2285 extra_off = vr->operands[i].off - lhs_ops[0].off;
2286 i--, j--;
2290 /* i now points to the first additional op.
2291 ??? LHS may not be completely contained in VR, one or more
2292 VIEW_CONVERT_EXPRs could be in its way. We could at least
2293 try handling outermost VIEW_CONVERT_EXPRs. */
2294 if (j != -1)
2295 return (void *)-1;
2297 /* Punt if the additional ops contain a storage order barrier. */
2298 for (k = i; k >= 0; k--)
2300 vro = &vr->operands[k];
2301 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
2302 return (void *)-1;
2305 /* Now re-write REF to be based on the rhs of the assignment. */
2306 copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
2308 /* Apply an extra offset to the inner MEM_REF of the RHS. */
2309 if (maybe_ne (extra_off, 0))
2311 if (rhs.length () < 2)
2312 return (void *)-1;
2313 int ix = rhs.length () - 2;
2314 if (rhs[ix].opcode != MEM_REF
2315 || known_eq (rhs[ix].off, -1))
2316 return (void *)-1;
2317 rhs[ix].off += extra_off;
2318 rhs[ix].op0 = int_const_binop (PLUS_EXPR, rhs[ix].op0,
2319 build_int_cst (TREE_TYPE (rhs[ix].op0),
2320 extra_off));
2323 /* We need to prepend vr->operands[0..i] to rhs. */
2324 vec<vn_reference_op_s> old = vr->operands;
2325 if (i + 1 + rhs.length () > vr->operands.length ())
2326 vr->operands.safe_grow (i + 1 + rhs.length ());
2327 else
2328 vr->operands.truncate (i + 1 + rhs.length ());
2329 FOR_EACH_VEC_ELT (rhs, j, vro)
2330 vr->operands[i + 1 + j] = *vro;
2331 vr->operands = valueize_refs (vr->operands);
2332 if (old == shared_lookup_references)
2333 shared_lookup_references = vr->operands;
2334 vr->hashcode = vn_reference_compute_hash (vr);
2336 /* Try folding the new reference to a constant. */
2337 tree val = fully_constant_vn_reference_p (vr);
2338 if (val)
2339 return vn_reference_lookup_or_insert_for_pieces
2340 (vuse, vr->set, vr->type, vr->operands, val);
2342 /* Adjust *ref from the new operands. */
2343 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2344 return (void *)-1;
2345 /* This can happen with bitfields. */
2346 if (maybe_ne (ref->size, r.size))
2347 return (void *)-1;
2348 *ref = r;
2350 /* Do not update last seen VUSE after translating. */
2351 last_vuse_ptr = NULL;
2353 /* Keep looking for the adjusted *REF / VR pair. */
2354 return NULL;
2357 /* 6) For memcpy copies translate the reference through them if
2358 the copy kills ref. */
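   /* Illustrative example (added commentary, not part of the original
      sources; names are made up) for case 6):

	__builtin_memcpy (&a, &b, 16);
	x_1 = a.i;	<- if the access is contained in the copied range
			   the lookup continues from the corresponding
			   MEM_REF based on &b instead.  */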
2359 else if (vn_walk_kind == VN_WALKREWRITE
2360 && is_gimple_reg_type (vr->type)
2361 /* ??? Handle BCOPY as well. */
2362 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
2363 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
2364 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
2365 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2366 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
2367 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
2368 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
2369 && poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size))
2371 tree lhs, rhs;
2372 ao_ref r;
2373 poly_int64 rhs_offset, lhs_offset;
2374 vn_reference_op_s op;
2375 poly_uint64 mem_offset;
2376 poly_int64 at, byte_maxsize;
2378 /* Only handle non-variable, addressable refs. */
2379 if (maybe_ne (ref->size, maxsize)
2380 || !multiple_p (offset, BITS_PER_UNIT, &at)
2381 || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
2382 return (void *)-1;
2384 /* Extract a pointer base and an offset for the destination. */
2385 lhs = gimple_call_arg (def_stmt, 0);
2386 lhs_offset = 0;
2387 if (TREE_CODE (lhs) == SSA_NAME)
2389 lhs = vn_valueize (lhs);
2390 if (TREE_CODE (lhs) == SSA_NAME)
2392 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
2393 if (gimple_assign_single_p (def_stmt)
2394 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2395 lhs = gimple_assign_rhs1 (def_stmt);
2398 if (TREE_CODE (lhs) == ADDR_EXPR)
2400 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
2401 &lhs_offset);
2402 if (!tem)
2403 return (void *)-1;
2404 if (TREE_CODE (tem) == MEM_REF
2405 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2407 lhs = TREE_OPERAND (tem, 0);
2408 if (TREE_CODE (lhs) == SSA_NAME)
2409 lhs = vn_valueize (lhs);
2410 lhs_offset += mem_offset;
2412 else if (DECL_P (tem))
2413 lhs = build_fold_addr_expr (tem);
2414 else
2415 return (void *)-1;
2417 if (TREE_CODE (lhs) != SSA_NAME
2418 && TREE_CODE (lhs) != ADDR_EXPR)
2419 return (void *)-1;
2421 /* Extract a pointer base and an offset for the source. */
2422 rhs = gimple_call_arg (def_stmt, 1);
2423 rhs_offset = 0;
2424 if (TREE_CODE (rhs) == SSA_NAME)
2425 rhs = vn_valueize (rhs);
2426 if (TREE_CODE (rhs) == ADDR_EXPR)
2428 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
2429 &rhs_offset);
2430 if (!tem)
2431 return (void *)-1;
2432 if (TREE_CODE (tem) == MEM_REF
2433 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
2435 rhs = TREE_OPERAND (tem, 0);
2436 rhs_offset += mem_offset;
2438 else if (DECL_P (tem)
2439 || TREE_CODE (tem) == STRING_CST)
2440 rhs = build_fold_addr_expr (tem);
2441 else
2442 return (void *)-1;
2444 if (TREE_CODE (rhs) != SSA_NAME
2445 && TREE_CODE (rhs) != ADDR_EXPR)
2446 return (void *)-1;
2448 /* The bases of the destination and the reference have to agree. */
2449 if (TREE_CODE (base) == MEM_REF)
2451 if (TREE_OPERAND (base, 0) != lhs
2452 || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
2453 return (void *) -1;
2454 at += mem_offset;
2456 else if (!DECL_P (base)
2457 || TREE_CODE (lhs) != ADDR_EXPR
2458 || TREE_OPERAND (lhs, 0) != base)
2459 return (void *)-1;
2461 /* If the access is completely outside of the memcpy destination
2462 area there is no aliasing. */
2463 if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
2464 return NULL;
2465 /* And the access has to be contained within the memcpy destination. */
2466 if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
2467 return (void *)-1;
2469 /* Make room for 2 operands in the new reference. */
2470 if (vr->operands.length () < 2)
2472 vec<vn_reference_op_s> old = vr->operands;
2473 vr->operands.safe_grow_cleared (2);
2474 if (old == shared_lookup_references)
2475 shared_lookup_references = vr->operands;
2477 else
2478 vr->operands.truncate (2);
2480 /* The looked-through reference is a simple MEM_REF. */
2481 memset (&op, 0, sizeof (op));
2482 op.type = vr->type;
2483 op.opcode = MEM_REF;
2484 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
2485 op.off = at - lhs_offset + rhs_offset;
2486 vr->operands[0] = op;
2487 op.type = TREE_TYPE (rhs);
2488 op.opcode = TREE_CODE (rhs);
2489 op.op0 = rhs;
2490 op.off = -1;
2491 vr->operands[1] = op;
2492 vr->hashcode = vn_reference_compute_hash (vr);
2494 /* Try folding the new reference to a constant. */
2495 tree val = fully_constant_vn_reference_p (vr);
2496 if (val)
2497 return vn_reference_lookup_or_insert_for_pieces
2498 (vuse, vr->set, vr->type, vr->operands, val);
2500 /* Adjust *ref from the new operands. */
2501 if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
2502 return (void *)-1;
2503 /* This can happen with bitfields. */
2504 if (maybe_ne (ref->size, r.size))
2505 return (void *)-1;
2506 *ref = r;
2508 /* Do not update last seen VUSE after translating. */
2509 last_vuse_ptr = NULL;
2511 /* Keep looking for the adjusted *REF / VR pair. */
2512 return NULL;
2515 /* Bail out and stop walking. */
2516 return (void *)-1;
2519 /* Return a reference op vector from OP that can be used for
2520 vn_reference_lookup_pieces. The caller is responsible for releasing
2521 the vector. */
2523 vec<vn_reference_op_s>
2524 vn_reference_operands_for_lookup (tree op)
2526 bool valueized;
2527 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
2530 /* Lookup a reference operation by its parts, in the current hash table.
2531 Returns the resulting value number if it exists in the hash table,
2532 NULL_TREE otherwise. VNRESULT will be filled in with the actual
2533 vn_reference_t stored in the hashtable if something is found. */
2535 tree
2536 vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
2537 vec<vn_reference_op_s> operands,
2538 vn_reference_t *vnresult, vn_lookup_kind kind)
2540 struct vn_reference_s vr1;
2541 vn_reference_t tmp;
2542 tree cst;
2544 if (!vnresult)
2545 vnresult = &tmp;
2546 *vnresult = NULL;
2548 vr1.vuse = vuse_ssa_val (vuse);
2549 shared_lookup_references.truncate (0);
2550 shared_lookup_references.safe_grow (operands.length ());
2551 memcpy (shared_lookup_references.address (),
2552 operands.address (),
2553 sizeof (vn_reference_op_s)
2554 * operands.length ());
2555 vr1.operands = operands = shared_lookup_references
2556 = valueize_refs (shared_lookup_references);
2557 vr1.type = type;
2558 vr1.set = set;
2559 vr1.hashcode = vn_reference_compute_hash (&vr1);
2560 if ((cst = fully_constant_vn_reference_p (&vr1)))
2561 return cst;
2563 vn_reference_lookup_1 (&vr1, vnresult);
2564 if (!*vnresult
2565 && kind != VN_NOWALK
2566 && vr1.vuse)
2568 ao_ref r;
2569 vn_walk_kind = kind;
2570 if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
2571 *vnresult =
2572 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2573 vn_reference_lookup_2,
2574 vn_reference_lookup_3,
2575 vuse_ssa_val, &vr1);
2576 gcc_checking_assert (vr1.operands == shared_lookup_references);
2579 if (*vnresult)
2580 return (*vnresult)->result;
2582 return NULL_TREE;
2585 /* Lookup OP in the current hash table, and return the resulting value
2586 number if it exists in the hash table. Return NULL_TREE if it does
2587 not exist in the hash table or if the result field of the structure
2588 was NULL. VNRESULT will be filled in with the vn_reference_t
2589 stored in the hashtable if one exists. When TBAA_P is false assume
2590 we are looking up a store and treat it as having alias-set zero. */
2592 tree
2593 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
2594 vn_reference_t *vnresult, bool tbaa_p)
2596 vec<vn_reference_op_s> operands;
2597 struct vn_reference_s vr1;
2598 tree cst;
2599 bool valuezied_anything;
2601 if (vnresult)
2602 *vnresult = NULL;
2604 vr1.vuse = vuse_ssa_val (vuse);
2605 vr1.operands = operands
2606 = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
2607 vr1.type = TREE_TYPE (op);
2608 vr1.set = tbaa_p ? get_alias_set (op) : 0;
2609 vr1.hashcode = vn_reference_compute_hash (&vr1);
2610 if ((cst = fully_constant_vn_reference_p (&vr1)))
2611 return cst;
2613 if (kind != VN_NOWALK
2614 && vr1.vuse)
2616 vn_reference_t wvnresult;
2617 ao_ref r;
2618 /* Make sure to use a valueized reference if we valueized anything.
2619 Otherwise preserve the full reference for advanced TBAA. */
2620 if (!valuezied_anything
2621 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
2622 vr1.operands))
2623 ao_ref_init (&r, op);
2624 if (! tbaa_p)
2625 r.ref_alias_set = r.base_alias_set = 0;
2626 vn_walk_kind = kind;
2627 wvnresult =
2628 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
2629 vn_reference_lookup_2,
2630 vn_reference_lookup_3,
2631 vuse_ssa_val, &vr1);
2632 gcc_checking_assert (vr1.operands == shared_lookup_references);
2633 if (wvnresult)
2635 if (vnresult)
2636 *vnresult = wvnresult;
2637 return wvnresult->result;
2640 return NULL_TREE;
2643 return vn_reference_lookup_1 (&vr1, vnresult);
2646 /* Lookup CALL in the current hash table and return the entry in
2647 *VNRESULT if found. Populates *VR for the hashtable lookup. */
2649 void
2650 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
2651 vn_reference_t vr)
2653 if (vnresult)
2654 *vnresult = NULL;
2656 tree vuse = gimple_vuse (call);
2658 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2659 vr->operands = valueize_shared_reference_ops_from_call (call);
2660 vr->type = gimple_expr_type (call);
2661 vr->set = 0;
2662 vr->hashcode = vn_reference_compute_hash (vr);
2663 vn_reference_lookup_1 (vr, vnresult);
2666 /* Insert OP into the current hash table with a value number of RESULT. */
2668 static void
2669 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2671 vn_reference_s **slot;
2672 vn_reference_t vr1;
2673 bool tem;
2675 vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
2676 if (TREE_CODE (result) == SSA_NAME)
2677 vr1->value_id = VN_INFO (result)->value_id;
2678 else
2679 vr1->value_id = get_or_alloc_constant_value_id (result);
2680 vr1->vuse = vuse_ssa_val (vuse);
2681 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
2682 vr1->type = TREE_TYPE (op);
2683 vr1->set = get_alias_set (op);
2684 vr1->hashcode = vn_reference_compute_hash (vr1);
2685 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2686 vr1->result_vdef = vdef;
2688 slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2689 INSERT);
2691 /* Because IL walking on reference lookup can end up visiting
2692 a def that is only to be visited later in iteration order
2693 when we are about to make an irreducible region reducible,
2694 the def can effectively have been processed and its ref already
2695 inserted by vn_reference_lookup_3. So we cannot assert (!*slot)
2696 but we save a lookup if we deal with already inserted refs here. */
2697 if (*slot)
2699 gcc_assert (operand_equal_p ((*slot)->result, vr1->result, 0));
2700 free_reference (vr1);
2701 obstack_free (&vn_tables_obstack, vr1);
2702 return;
2705 *slot = vr1;
2706 vr1->next = last_inserted_ref;
2707 last_inserted_ref = vr1;
2710 /* Insert a reference by its pieces into the current hash table with
2711 a value number of RESULT. Return the resulting reference
2712 structure we created. */
2714 vn_reference_t
2715 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2716 vec<vn_reference_op_s> operands,
2717 tree result, unsigned int value_id)
2720 vn_reference_s **slot;
2721 vn_reference_t vr1;
2723 vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
2724 vr1->value_id = value_id;
2725 vr1->vuse = vuse_ssa_val (vuse);
2726 vr1->operands = valueize_refs (operands);
2727 vr1->type = type;
2728 vr1->set = set;
2729 vr1->hashcode = vn_reference_compute_hash (vr1);
2730 if (result && TREE_CODE (result) == SSA_NAME)
2731 result = SSA_VAL (result);
2732 vr1->result = result;
2734 slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
2735 INSERT);
2737 /* At this point we should have all the things inserted that we have
2738 seen before, and we should never try inserting something that
2739 already exists. */
2740 gcc_assert (!*slot);
2742 *slot = vr1;
2743 vr1->next = last_inserted_ref;
2744 last_inserted_ref = vr1;
2745 return vr1;
2748 /* Compute and return the hash value for nary operation VNO1. */
2750 static hashval_t
2751 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2753 inchash::hash hstate;
2754 unsigned i;
2756 for (i = 0; i < vno1->length; ++i)
2757 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2758 vno1->op[i] = SSA_VAL (vno1->op[i]);
2760 if (((vno1->length == 2
2761 && commutative_tree_code (vno1->opcode))
2762 || (vno1->length == 3
2763 && commutative_ternary_tree_code (vno1->opcode)))
2764 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2765 std::swap (vno1->op[0], vno1->op[1]);
2766 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
2767 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
2769 std::swap (vno1->op[0], vno1->op[1]);
2770 vno1->opcode = swap_tree_comparison (vno1->opcode);
2773 hstate.add_int (vno1->opcode);
2774 for (i = 0; i < vno1->length; ++i)
2775 inchash::add_expr (vno1->op[i], hstate);
2777 return hstate.end ();
2780 /* Compare nary operations VNO1 and VNO2 and return true if they are
2781 equivalent. */
2783 bool
2784 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
2786 unsigned i;
2788 if (vno1->hashcode != vno2->hashcode)
2789 return false;
2791 if (vno1->length != vno2->length)
2792 return false;
2794 if (vno1->opcode != vno2->opcode
2795 || !types_compatible_p (vno1->type, vno2->type))
2796 return false;
2798 for (i = 0; i < vno1->length; ++i)
2799 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2800 return false;
2802 /* BIT_INSERT_EXPR has an implicit operand, the type precision
2803 of op1. Need to check to make sure they are the same. */
2804 if (vno1->opcode == BIT_INSERT_EXPR
2805 && TREE_CODE (vno1->op[1]) == INTEGER_CST
2806 && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
2807 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
2808 return false;
2810 return true;
2813 /* Initialize VNO from the pieces provided. */
2815 static void
2816 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2817 enum tree_code code, tree type, tree *ops)
2819 vno->opcode = code;
2820 vno->length = length;
2821 vno->type = type;
2822 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2825 /* Initialize VNO from OP. */
2827 static void
2828 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2830 unsigned i;
2832 vno->opcode = TREE_CODE (op);
2833 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2834 vno->type = TREE_TYPE (op);
2835 for (i = 0; i < vno->length; ++i)
2836 vno->op[i] = TREE_OPERAND (op, i);
2839 /* Return the number of operands for a vn_nary ops structure from STMT. */
2841 static unsigned int
2842 vn_nary_length_from_stmt (gimple *stmt)
2844 switch (gimple_assign_rhs_code (stmt))
2846 case REALPART_EXPR:
2847 case IMAGPART_EXPR:
2848 case VIEW_CONVERT_EXPR:
2849 return 1;
2851 case BIT_FIELD_REF:
2852 return 3;
2854 case CONSTRUCTOR:
2855 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2857 default:
2858 return gimple_num_ops (stmt) - 1;
2862 /* Initialize VNO from STMT. */
2864 static void
2865 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
2867 unsigned i;
2869 vno->opcode = gimple_assign_rhs_code (stmt);
2870 vno->type = gimple_expr_type (stmt);
2871 switch (vno->opcode)
2873 case REALPART_EXPR:
2874 case IMAGPART_EXPR:
2875 case VIEW_CONVERT_EXPR:
2876 vno->length = 1;
2877 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2878 break;
2880 case BIT_FIELD_REF:
2881 vno->length = 3;
2882 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2883 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2884 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2885 break;
2887 case CONSTRUCTOR:
2888 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2889 for (i = 0; i < vno->length; ++i)
2890 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2891 break;
2893 default:
2894 gcc_checking_assert (!gimple_assign_single_p (stmt));
2895 vno->length = gimple_num_ops (stmt) - 1;
2896 for (i = 0; i < vno->length; ++i)
2897 vno->op[i] = gimple_op (stmt, i + 1);
2901 /* Compute the hashcode for VNO and look for it in the hash table;
2902 return the resulting value number if it exists in the hash table.
2903 Return NULL_TREE if it does not exist in the hash table or if the
2904 result field of the operation is NULL. VNRESULT will contain the
2905 vn_nary_op_t from the hashtable if it exists. */
2907 static tree
2908 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2910 vn_nary_op_s **slot;
2912 if (vnresult)
2913 *vnresult = NULL;
2915 vno->hashcode = vn_nary_op_compute_hash (vno);
2916 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
2917 if (!slot)
2918 return NULL_TREE;
2919 if (vnresult)
2920 *vnresult = *slot;
2921 return (*slot)->predicated_values ? NULL_TREE : (*slot)->u.result;
2924 /* Lookup a n-ary operation by its pieces and return the resulting value
2925 number if it exists in the hash table. Return NULL_TREE if it does
2926 not exist in the hash table or if the result field of the operation
2927 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2928 if it exists. */
2930 tree
2931 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2932 tree type, tree *ops, vn_nary_op_t *vnresult)
2934 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2935 sizeof_vn_nary_op (length));
2936 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2937 return vn_nary_op_lookup_1 (vno1, vnresult);
2940 /* Lookup OP in the current hash table, and return the resulting value
2941 number if it exists in the hash table. Return NULL_TREE if it does
2942 not exist in the hash table or if the result field of the operation
2943 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2944 if it exists. */
2946 tree
2947 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2949 vn_nary_op_t vno1
2950 = XALLOCAVAR (struct vn_nary_op_s,
2951 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2952 init_vn_nary_op_from_op (vno1, op);
2953 return vn_nary_op_lookup_1 (vno1, vnresult);
2956 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2957 value number if it exists in the hash table. Return NULL_TREE if
2958 it does not exist in the hash table. VNRESULT will contain the
2959 vn_nary_op_t from the hashtable if it exists. */
2961 tree
2962 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
2964 vn_nary_op_t vno1
2965 = XALLOCAVAR (struct vn_nary_op_s,
2966 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2967 init_vn_nary_op_from_stmt (vno1, stmt);
2968 return vn_nary_op_lookup_1 (vno1, vnresult);
2971 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2973 static vn_nary_op_t
2974 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2976 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2979 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2980 obstack. */
2982 static vn_nary_op_t
2983 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2985 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length, &vn_tables_obstack);
2987 vno1->value_id = value_id;
2988 vno1->length = length;
2989 vno1->predicated_values = 0;
2990 vno1->u.result = result;
2992 return vno1;
2995 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2996 VNO->HASHCODE first. */
2998 static vn_nary_op_t
2999 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
3000 bool compute_hash)
3002 vn_nary_op_s **slot;
3004 if (compute_hash)
3006 vno->hashcode = vn_nary_op_compute_hash (vno);
3007 gcc_assert (! vno->predicated_values
3008 || (! vno->u.values->next
3009 && vno->u.values->valid_dominated_by_p[0] != EXIT_BLOCK
3010 && vno->u.values->valid_dominated_by_p[1] == EXIT_BLOCK));
3013 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
3014 vno->unwind_to = *slot;
3015 if (*slot)
3017 /* Prefer non-predicated values.
3018 ??? Only if those are constant, otherwise, with constant predicated
3019 value, turn them into predicated values with entry-block validity
3020 (??? but we always find the first valid result currently). */
3021 if ((*slot)->predicated_values
3022 && ! vno->predicated_values)
3024 /* ??? We cannot remove *slot from the unwind stack list.
3025 For the moment we deal with this by skipping not found
3026 entries but this isn't ideal ... */
3027 *slot = vno;
3028 /* ??? Maintain a stack of states we can unwind in
3029 vn_nary_op_s? But how far do we unwind? In reality
3030 we need to push change records somewhere... Or not
3031 unwind vn_nary_op_s and linking them but instead
3032 unwind the results "list", linking that, which also
3033 doesn't move on hashtable resize. */
3034 /* We can also have a ->unwind_to recording *slot there.
3035 That way we can make u.values a fixed size array with
3036 recording the number of entries but of course we then
3037 have always N copies for each unwind_to-state. Or we
3038 make sure to only ever append and each unwinding will
3039 pop off one entry (but how to deal with predicated
3040 replaced with non-predicated here?) */
3041 vno->next = last_inserted_nary;
3042 last_inserted_nary = vno;
3043 return vno;
3045 else if (vno->predicated_values
3046 && ! (*slot)->predicated_values)
3047 return *slot;
3048 else if (vno->predicated_values
3049 && (*slot)->predicated_values)
3051 /* ??? Factor this all into an insert_single_predicated_value
3052 routine. */
3053 gcc_assert (!vno->u.values->next && vno->u.values->n == 1);
3054 basic_block vno_bb
3055 = BASIC_BLOCK_FOR_FN (cfun, vno->u.values->valid_dominated_by_p[0]);
3056 vn_pval *nval = vno->u.values;
3057 vn_pval **next = &vno->u.values;
3058 bool found = false;
3059 for (vn_pval *val = (*slot)->u.values; val; val = val->next)
3061 if (expressions_equal_p (val->result, vno->u.values->result))
3063 found = true;
3064 for (unsigned i = 0; i < val->n; ++i)
3066 basic_block val_bb
3067 = BASIC_BLOCK_FOR_FN (cfun,
3068 val->valid_dominated_by_p[i]);
3069 if (dominated_by_p (CDI_DOMINATORS, vno_bb, val_bb))
3070 /* Value registered with more generic predicate. */
3071 return *slot;
3072 else if (dominated_by_p (CDI_DOMINATORS, val_bb, vno_bb))
3073 /* Shouldn't happen, we insert in RPO order. */
3074 gcc_unreachable ();
3076 /* Append value. */
3077 *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
3078 sizeof (vn_pval)
3079 + val->n * sizeof (int));
3080 (*next)->next = NULL;
3081 (*next)->result = val->result;
3082 (*next)->n = val->n + 1;
3083 memcpy ((*next)->valid_dominated_by_p,
3084 val->valid_dominated_by_p,
3085 val->n * sizeof (int));
3086 (*next)->valid_dominated_by_p[val->n] = vno_bb->index;
3087 next = &(*next)->next;
3088 if (dump_file && (dump_flags & TDF_DETAILS))
3089 fprintf (dump_file, "Appending predicate to value.\n");
3090 continue;
3092 /* Copy other predicated values. */
3093 *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
3094 sizeof (vn_pval)
3095 + (val->n-1) * sizeof (int));
3096 memcpy (*next, val, sizeof (vn_pval) + (val->n-1) * sizeof (int));
3097 (*next)->next = NULL;
3098 next = &(*next)->next;
3100 if (!found)
3101 *next = nval;
3103 *slot = vno;
3104 vno->next = last_inserted_nary;
3105 last_inserted_nary = vno;
3106 return vno;
3109 /* While we do not want to insert things twice it's awkward to
3110 avoid it in the case where visit_nary_op pattern-matches stuff
3111 and ends up simplifying the replacement to itself. We then
3112 get two inserts, one from visit_nary_op and one from
3113 vn_nary_build_or_lookup.
3114 So allow inserts with the same value number. */
3115 if ((*slot)->u.result == vno->u.result)
3116 return *slot;
3119 /* ??? There's also optimistic vs. previous committed state merging
3120 that is problematic for the case of unwinding. */
3122 /* ??? We should return NULL if we do not use 'vno' and have the
3123 caller release it. */
3124 gcc_assert (!*slot);
3126 *slot = vno;
3127 vno->next = last_inserted_nary;
3128 last_inserted_nary = vno;
3129 return vno;
3132 /* Insert an n-ary operation into the current hash table using its
3133 pieces. Return the vn_nary_op_t structure we created and put in
3134 the hashtable. */
3136 vn_nary_op_t
3137 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
3138 tree type, tree *ops,
3139 tree result, unsigned int value_id)
3141 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
3142 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
3143 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
3146 static vn_nary_op_t
3147 vn_nary_op_insert_pieces_predicated (unsigned int length, enum tree_code code,
3148 tree type, tree *ops,
3149 tree result, unsigned int value_id,
3150 edge pred_e)
3152 /* ??? Currently tracking BBs. */
3153 if (! single_pred_p (pred_e->dest))
3155 /* Never record for backedges. */
3156 if (pred_e->flags & EDGE_DFS_BACK)
3157 return NULL;
3158 edge_iterator ei;
3159 edge e;
3160 int cnt = 0;
3161 /* Ignore backedges. */
3162 FOR_EACH_EDGE (e, ei, pred_e->dest->preds)
3163 if (! dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
3164 cnt++;
3165 if (cnt != 1)
3166 return NULL;
3168 if (dump_file && (dump_flags & TDF_DETAILS)
3169 /* ??? Fix dumping, but currently we only get comparisons. */
3170 && TREE_CODE_CLASS (code) == tcc_comparison)
3172 fprintf (dump_file, "Recording on edge %d->%d ", pred_e->src->index,
3173 pred_e->dest->index);
3174 print_generic_expr (dump_file, ops[0], TDF_SLIM);
3175 fprintf (dump_file, " %s ", get_tree_code_name (code));
3176 print_generic_expr (dump_file, ops[1], TDF_SLIM);
3177 fprintf (dump_file, " == %s\n",
3178 integer_zerop (result) ? "false" : "true");
3180 vn_nary_op_t vno1 = alloc_vn_nary_op (length, NULL_TREE, value_id);
3181 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
3182 vno1->predicated_values = 1;
3183 vno1->u.values = (vn_pval *) obstack_alloc (&vn_tables_obstack,
3184 sizeof (vn_pval));
3185 vno1->u.values->next = NULL;
3186 vno1->u.values->result = result;
3187 vno1->u.values->n = 1;
3188 vno1->u.values->valid_dominated_by_p[0] = pred_e->dest->index;
3189 vno1->u.values->valid_dominated_by_p[1] = EXIT_BLOCK;
3190 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
3193 static bool
3194 dominated_by_p_w_unex (basic_block bb1, basic_block bb2);
3196 static tree
3197 vn_nary_op_get_predicated_value (vn_nary_op_t vno, basic_block bb)
3199 if (! vno->predicated_values)
3200 return vno->u.result;
3201 for (vn_pval *val = vno->u.values; val; val = val->next)
3202 for (unsigned i = 0; i < val->n; ++i)
3203 if (dominated_by_p_w_unex (bb,
3204 BASIC_BLOCK_FOR_FN
3205 (cfun, val->valid_dominated_by_p[i])))
3206 return val->result;
3207 return NULL_TREE;
3210 /* Insert OP into the current hash table with a value number of
3211 RESULT. Return the vn_nary_op_t structure we created and put in
3212 the hashtable. */
3214 vn_nary_op_t
3215 vn_nary_op_insert (tree op, tree result)
3217 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
3218 vn_nary_op_t vno1;
3220 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
3221 init_vn_nary_op_from_op (vno1, op);
3222 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
3225 /* Insert the rhs of STMT into the current hash table with a value number of
3226 RESULT. */
3228 static vn_nary_op_t
3229 vn_nary_op_insert_stmt (gimple *stmt, tree result)
3231 vn_nary_op_t vno1
3232 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
3233 result, VN_INFO (result)->value_id);
3234 init_vn_nary_op_from_stmt (vno1, stmt);
3235 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
3238 /* Compute a hashcode for PHI operation VP1 and return it. */
3240 static inline hashval_t
3241 vn_phi_compute_hash (vn_phi_t vp1)
3243 inchash::hash hstate (EDGE_COUNT (vp1->block->preds) > 2
3244 ? vp1->block->index : EDGE_COUNT (vp1->block->preds));
3245 tree phi1op;
3246 tree type;
3247 edge e;
3248 edge_iterator ei;
3250 /* If all PHI arguments are constants we need to distinguish
3251 the PHI node via its type. */
3252 type = vp1->type;
3253 hstate.merge_hash (vn_hash_type (type));
3255 FOR_EACH_EDGE (e, ei, vp1->block->preds)
3257 /* Don't hash backedge values, they need to be handled as VN_TOP
3258 for optimistic value-numbering. */
3259 if (e->flags & EDGE_DFS_BACK)
3260 continue;
3262 phi1op = vp1->phiargs[e->dest_idx];
3263 if (phi1op == VN_TOP)
3264 continue;
3265 inchash::add_expr (phi1op, hstate);
3268 return hstate.end ();
3272 /* Return true if COND1 and COND2 represent the same condition, set
3273 *INVERTED_P if one needs to be inverted to make it the same as
3274 the other. */
3276 static bool
3277 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
3278 gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
3280 enum tree_code code1 = gimple_cond_code (cond1);
3281 enum tree_code code2 = gimple_cond_code (cond2);
3283 *inverted_p = false;
3284 if (code1 == code2)
3286 else if (code1 == swap_tree_comparison (code2))
3287 std::swap (lhs2, rhs2);
3288 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
3289 *inverted_p = true;
3290 else if (code1 == invert_tree_comparison
3291 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
3293 std::swap (lhs2, rhs2);
3294 *inverted_p = true;
3296 else
3297 return false;
3299 return ((expressions_equal_p (lhs1, lhs2)
3300 && expressions_equal_p (rhs1, rhs2))
3301 || (commutative_tree_code (code1)
3302 && expressions_equal_p (lhs1, rhs2)
3303 && expressions_equal_p (rhs1, lhs2)));
3306 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
3308 static int
3309 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
3311 if (vp1->hashcode != vp2->hashcode)
3312 return false;
3314 if (vp1->block != vp2->block)
3316 if (EDGE_COUNT (vp1->block->preds) != EDGE_COUNT (vp2->block->preds))
3317 return false;
3319 switch (EDGE_COUNT (vp1->block->preds))
3321 case 1:
3322 /* Single-arg PHIs are just copies. */
3323 break;
3325 case 2:
3327 /* Rule out backedges into the PHI. */
3328 if (vp1->block->loop_father->header == vp1->block
3329 || vp2->block->loop_father->header == vp2->block)
3330 return false;
3332 /* If the PHI nodes do not have compatible types
3333 they are not the same. */
3334 if (!types_compatible_p (vp1->type, vp2->type))
3335 return false;
3337 basic_block idom1
3338 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3339 basic_block idom2
3340 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
3341 /* If the immediate dominators end in switch stmts, multiple
3342 values may end up in the same PHI arg via intermediate
3343 CFG merges. */
3344 if (EDGE_COUNT (idom1->succs) != 2
3345 || EDGE_COUNT (idom2->succs) != 2)
3346 return false;
3348 /* Verify the controlling stmt is the same. */
3349 gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
3350 gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
3351 if (! last1 || ! last2)
3352 return false;
3353 bool inverted_p;
3354 if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
3355 last2, vp2->cclhs, vp2->ccrhs,
3356 &inverted_p))
3357 return false;
3359 /* Get at true/false controlled edges into the PHI. */
3360 edge te1, te2, fe1, fe2;
3361 if (! extract_true_false_controlled_edges (idom1, vp1->block,
3362 &te1, &fe1)
3363 || ! extract_true_false_controlled_edges (idom2, vp2->block,
3364 &te2, &fe2))
3365 return false;
3367 /* Swap edges if the second condition is the inverse of the
3368 first. */
3369 if (inverted_p)
3370 std::swap (te2, fe2);
3372 /* ??? Handle VN_TOP specially. */
3373 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
3374 vp2->phiargs[te2->dest_idx])
3375 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
3376 vp2->phiargs[fe2->dest_idx]))
3377 return false;
3379 return true;
3382 default:
3383 return false;
3387 /* If the PHI nodes do not have compatible types
3388 they are not the same. */
3389 if (!types_compatible_p (vp1->type, vp2->type))
3390 return false;
3392 /* Any phi in the same block will have its arguments in the
3393 same edge order, because of how we store phi nodes. */
3394 for (unsigned i = 0; i < EDGE_COUNT (vp1->block->preds); ++i)
3396 tree phi1op = vp1->phiargs[i];
3397 tree phi2op = vp2->phiargs[i];
3398 if (phi1op == VN_TOP || phi2op == VN_TOP)
3399 continue;
3400 if (!expressions_equal_p (phi1op, phi2op))
3401 return false;
3404 return true;
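/* Illustrative example (added commentary, not part of the original sources;
   names are made up).  The two-predecessor case above allows CSEing PHIs
   from different blocks that merge the same values under the same
   (possibly inverted) controlling condition:

	if (a_1 > 0) ...	x_2 = PHI <b_3, c_4>
	...
	if (a_1 > 0) ...	y_5 = PHI <b_3, c_4>

   y_5 receives the same value number as x_2 because the controlling
   conditions match and the arguments agree on the true and false edges.  */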
3407 /* Lookup PHI in the current hash table, and return the resulting
3408 value number if it exists in the hash table. Return NULL_TREE if
3409 it does not exist in the hash table. */
3411 static tree
3412 vn_phi_lookup (gimple *phi, bool backedges_varying_p)
3414 vn_phi_s **slot;
3415 struct vn_phi_s *vp1;
3416 edge e;
3417 edge_iterator ei;
3419 vp1 = XALLOCAVAR (struct vn_phi_s,
3420 sizeof (struct vn_phi_s)
3421 + (gimple_phi_num_args (phi) - 1) * sizeof (tree));
3423 /* Canonicalize the SSA_NAMEs to their value numbers. */
3424 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3426 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3427 if (TREE_CODE (def) == SSA_NAME
3428 && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
3429 def = SSA_VAL (def);
3430 vp1->phiargs[e->dest_idx] = def;
3432 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3433 vp1->block = gimple_bb (phi);
3434 /* Extract values of the controlling condition. */
3435 vp1->cclhs = NULL_TREE;
3436 vp1->ccrhs = NULL_TREE;
3437 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3438 if (EDGE_COUNT (idom1->succs) == 2)
3439 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3441 /* ??? We want to use SSA_VAL here. But possibly not
3442 allow VN_TOP. */
3443 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3444 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3446 vp1->hashcode = vn_phi_compute_hash (vp1);
3447 slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, NO_INSERT);
3448 if (!slot)
3449 return NULL_TREE;
3450 return (*slot)->result;
3453 /* Insert PHI into the current hash table with a value number of
3454 RESULT. */
3456 static vn_phi_t
3457 vn_phi_insert (gimple *phi, tree result, bool backedges_varying_p)
3459 vn_phi_s **slot;
3460 vn_phi_t vp1 = (vn_phi_t) obstack_alloc (&vn_tables_obstack,
3461 sizeof (vn_phi_s)
3462 + ((gimple_phi_num_args (phi) - 1)
3463 * sizeof (tree)));
3464 edge e;
3465 edge_iterator ei;
3467 /* Canonicalize the SSA_NAMEs to their value numbers. */
3468 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
3470 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
3471 if (TREE_CODE (def) == SSA_NAME
3472 && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
3473 def = SSA_VAL (def);
3474 vp1->phiargs[e->dest_idx] = def;
3476 vp1->value_id = VN_INFO (result)->value_id;
3477 vp1->type = TREE_TYPE (gimple_phi_result (phi));
3478 vp1->block = gimple_bb (phi);
3479 /* Extract values of the controlling condition. */
3480 vp1->cclhs = NULL_TREE;
3481 vp1->ccrhs = NULL_TREE;
3482 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
3483 if (EDGE_COUNT (idom1->succs) == 2)
3484 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
3486 /* ??? We want to use SSA_VAL here. But possibly not
3487 allow VN_TOP. */
3488 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
3489 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
3491 vp1->result = result;
3492 vp1->hashcode = vn_phi_compute_hash (vp1);
3494 slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
3495 gcc_assert (!*slot);
3497 *slot = vp1;
3498 vp1->next = last_inserted_phi;
3499 last_inserted_phi = vp1;
3500 return vp1;
3504 /* Return true if BB1 is dominated by BB2 taking into account edges
3505 that are not executable. */
3507 static bool
3508 dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
3510 edge_iterator ei;
3511 edge e;
3513 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3514 return true;
3516 /* Before iterating we'd like to know if there exists an
3517 (executable) path from bb2 to bb1 at all; if not we can
3518 directly return false. For now simply iterate once. */
3520 /* Iterate to the single executable bb1 predecessor. */
3521 if (EDGE_COUNT (bb1->preds) > 1)
3523 edge prede = NULL;
3524 FOR_EACH_EDGE (e, ei, bb1->preds)
3525 if (e->flags & EDGE_EXECUTABLE)
3527 if (prede)
3529 prede = NULL;
3530 break;
3532 prede = e;
3534 if (prede)
3536 bb1 = prede->src;
3538 /* Re-do the dominance check with changed bb1. */
3539 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3540 return true;
3544 /* Iterate to the single executable bb2 successor. */
3545 edge succe = NULL;
3546 FOR_EACH_EDGE (e, ei, bb2->succs)
3547 if (e->flags & EDGE_EXECUTABLE)
3549 if (succe)
3551 succe = NULL;
3552 break;
3554 succe = e;
3556 if (succe)
3558 /* Verify the reached block is only reached through succe.
3559 If there is only one edge we can spare us the dominator
3560 check and iterate directly. */
3561 if (EDGE_COUNT (succe->dest->preds) > 1)
3563 FOR_EACH_EDGE (e, ei, succe->dest->preds)
3564 if (e != succe
3565 && (e->flags & EDGE_EXECUTABLE))
3567 succe = NULL;
3568 break;
3571 if (succe)
3573 bb2 = succe->dest;
3575 /* Re-do the dominance check with changed bb2. */
3576 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
3577 return true;
3581 /* We could now iterate updating bb1 / bb2. */
3582 return false;
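/* Illustrative example (added commentary, not part of the original sources).
   With non-executable edges the query can succeed where plain dominance
   fails:

	     bb3
	    /   \
	  bb4    bb5
	    \   /	<- the bb5->bb6 edge is not EDGE_EXECUTABLE
	     bb6

   dominated_by_p_w_unex (bb6, bb4) is true: bb6 is not dominated by bb4 in
   the CFG, but its only executable predecessor edge comes from bb4, so the
   walk iterates bb6 to bb4 and the plain dominance check then succeeds.  */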
3585 /* Set the value number of FROM to TO, return true if it has changed
3586 as a result. */
3588 static inline bool
3589 set_ssa_val_to (tree from, tree to)
3591 vn_ssa_aux_t from_info = VN_INFO (from);
3592 tree currval = from_info->valnum; // SSA_VAL (from)
3593 poly_int64 toff, coff;
3595 /* The only things we allow as value numbers are ssa_names
3596 and invariants. So assert that here. We don't allow VN_TOP
3597 as visiting a stmt should produce a value-number other than
3598 that.
3599 ??? Still VN_TOP can happen for unreachable code, so force
3600 it to varying in that case. Not all code is prepared to
3601 get VN_TOP on valueization. */
3602 if (to == VN_TOP)
3604 /* ??? When iterating and visiting PHI <undef, backedge-value>
3605 for the first time we rightfully get VN_TOP and we need to
3606 preserve that to optimize for example gcc.dg/tree-ssa/ssa-sccvn-2.c.
3607 With SCCVN we were simply lucky we iterated the other PHI
3608 cycles first and thus visited the backedge-value DEF. */
3609 if (currval == VN_TOP)
3610 goto set_and_exit;
3611 if (dump_file && (dump_flags & TDF_DETAILS))
3612 fprintf (dump_file, "Forcing value number to varying on "
3613 "receiving VN_TOP\n");
3614 to = from;
3617 gcc_checking_assert (to != NULL_TREE
3618 && ((TREE_CODE (to) == SSA_NAME
3619 && (to == from || SSA_VAL (to) == to))
3620 || is_gimple_min_invariant (to)));
3622 if (from != to)
3624 if (currval == from)
3626 if (dump_file && (dump_flags & TDF_DETAILS))
3628 fprintf (dump_file, "Not changing value number of ");
3629 print_generic_expr (dump_file, from);
3630 fprintf (dump_file, " from VARYING to ");
3631 print_generic_expr (dump_file, to);
3632 fprintf (dump_file, "\n");
3634 return false;
3636 else if (currval != VN_TOP
3637 && ! is_gimple_min_invariant (currval)
3638 && ! ssa_undefined_value_p (currval, false)
3639 && is_gimple_min_invariant (to))
3641 if (dump_file && (dump_flags & TDF_DETAILS))
3643 fprintf (dump_file, "Forcing VARYING instead of changing "
3644 "value number of ");
3645 print_generic_expr (dump_file, from);
3646 fprintf (dump_file, " from ");
3647 print_generic_expr (dump_file, currval);
3648 fprintf (dump_file, " (non-constant) to ");
3649 print_generic_expr (dump_file, to);
3650 fprintf (dump_file, " (constant)\n");
3652 to = from;
3654 else if (TREE_CODE (to) == SSA_NAME
3655 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
3656 to = from;
3659 set_and_exit:
3660 if (dump_file && (dump_flags & TDF_DETAILS))
3662 fprintf (dump_file, "Setting value number of ");
3663 print_generic_expr (dump_file, from);
3664 fprintf (dump_file, " to ");
3665 print_generic_expr (dump_file, to);
3668 if (currval != to
3669 && !operand_equal_p (currval, to, 0)
3670 /* Different undefined SSA names are not actually different. See
3671 PR82320 for a testcase where we'd otherwise not terminate iteration. */
3672 && !(TREE_CODE (currval) == SSA_NAME
3673 && TREE_CODE (to) == SSA_NAME
3674 && ssa_undefined_value_p (currval, false)
3675 && ssa_undefined_value_p (to, false))
3676 /* ??? For addresses involving volatile objects or types operand_equal_p
3677 does not reliably detect ADDR_EXPRs as equal. We know we are only
3678 getting invariant gimple addresses here, so can use
3679 get_addr_base_and_unit_offset to do this comparison. */
3680 && !(TREE_CODE (currval) == ADDR_EXPR
3681 && TREE_CODE (to) == ADDR_EXPR
3682 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
3683 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
3684 && known_eq (coff, toff)))
3686 if (dump_file && (dump_flags & TDF_DETAILS))
3687 fprintf (dump_file, " (changed)\n");
3688 from_info->valnum = to;
3689 return true;
3691 if (dump_file && (dump_flags & TDF_DETAILS))
3692 fprintf (dump_file, "\n");
3693 return false;
3696 /* Set the value number of all definitions in STMT to themselves.
3697 Return true if a value number changed. */
3699 static bool
3700 defs_to_varying (gimple *stmt)
3702 bool changed = false;
3703 ssa_op_iter iter;
3704 def_operand_p defp;
3706 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
3708 tree def = DEF_FROM_PTR (defp);
3709 changed |= set_ssa_val_to (def, def);
3711 return changed;
3714 /* Visit a copy between LHS and RHS, return true if the value number
3715 changed. */
3717 static bool
3718 visit_copy (tree lhs, tree rhs)
3720 /* Valueize. */
3721 rhs = SSA_VAL (rhs);
3723 return set_ssa_val_to (lhs, rhs);
3726 /* Lookup a value for OP in the wider type WIDE_TYPE that has the same
3727 value as OP has in the type of OP. */
3729 static tree
3730 valueized_wider_op (tree wide_type, tree op)
3732 if (TREE_CODE (op) == SSA_NAME)
3733 op = vn_valueize (op);
3735 /* Either we have the op widened available. */
3736 tree ops[3] = {};
3737 ops[0] = op;
3738 tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
3739 wide_type, ops, NULL);
3740 if (tem)
3741 return tem;
3743 /* Or the op is truncated from some existing value. */
3744 if (TREE_CODE (op) == SSA_NAME)
3746 gimple *def = SSA_NAME_DEF_STMT (op);
3747 if (is_gimple_assign (def)
3748 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
3750 tem = gimple_assign_rhs1 (def);
3751 if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
3753 if (TREE_CODE (tem) == SSA_NAME)
3754 tem = vn_valueize (tem);
3755 return tem;
3760 /* For constants simply extend it. */
3761 if (TREE_CODE (op) == INTEGER_CST)
3762 return wide_int_to_tree (wide_type, wi::to_wide (op));
3764 return NULL_TREE;
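/* Added illustration (not part of the original source): visit_nary_op below
   uses valueized_wider_op to detect conversions of narrow arithmetic that
   can reuse an already value numbered wider operation, roughly for input
   like

     unsigned short a, b, d;
     int c = (int) a + (int) b;
     d = a + b;
     int e = d;

   where the zero-extension E can be value numbered to the available wider
   addition C masked with 0xffff (a BIT_AND_EXPR); a pure sign-change uses
   a plain conversion of the wider result instead.  */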
3767 /* Visit a nary operator RHS, value number it, and return true if the
3768 value number of LHS has changed as a result. */
3770 static bool
3771 visit_nary_op (tree lhs, gassign *stmt)
3773 vn_nary_op_t vnresult;
3774 tree result = vn_nary_op_lookup_stmt (stmt, &vnresult);
3775 if (! result && vnresult)
3776 result = vn_nary_op_get_predicated_value (vnresult, gimple_bb (stmt));
3777 if (result)
3778 return set_ssa_val_to (lhs, result);
3780 /* Do some special pattern matching for redundancies of operations
3781 in different types. */
3782 enum tree_code code = gimple_assign_rhs_code (stmt);
3783 tree type = TREE_TYPE (lhs);
3784 tree rhs1 = gimple_assign_rhs1 (stmt);
3785 switch (code)
3787 CASE_CONVERT:
3788 /* Match arithmetic done in a different type where we can easily
3789 substitute the result from some earlier sign-changed or widened
3790 operation. */
3791 if (INTEGRAL_TYPE_P (type)
3792 && TREE_CODE (rhs1) == SSA_NAME
3793 /* We only handle sign-changes or zero-extension -> & mask. */
3794 && ((TYPE_UNSIGNED (TREE_TYPE (rhs1))
3795 && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
3796 || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
3798 gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
3799 if (def
3800 && (gimple_assign_rhs_code (def) == PLUS_EXPR
3801 || gimple_assign_rhs_code (def) == MINUS_EXPR
3802 || gimple_assign_rhs_code (def) == MULT_EXPR))
3804 tree ops[3] = {};
3805 /* Either we have the op widened available. */
3806 ops[0] = valueized_wider_op (type,
3807 gimple_assign_rhs1 (def));
3808 if (ops[0])
3809 ops[1] = valueized_wider_op (type,
3810 gimple_assign_rhs2 (def));
3811 if (ops[0] && ops[1])
3813 ops[0] = vn_nary_op_lookup_pieces
3814 (2, gimple_assign_rhs_code (def), type, ops, NULL);
3815 /* We have the wider operation available. */
3816 if (ops[0])
3818 unsigned lhs_prec = TYPE_PRECISION (type);
3819 unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
3820 if (lhs_prec == rhs_prec)
3822 gimple_match_op match_op (gimple_match_cond::UNCOND,
3823 NOP_EXPR, type, ops[0]);
3824 result = vn_nary_build_or_lookup (&match_op);
3825 if (result)
3827 bool changed = set_ssa_val_to (lhs, result);
3828 vn_nary_op_insert_stmt (stmt, result);
3829 return changed;
3832 else
3834 tree mask = wide_int_to_tree
3835 (type, wi::mask (rhs_prec, false, lhs_prec));
3836 gimple_match_op match_op (gimple_match_cond::UNCOND,
3837 BIT_AND_EXPR,
3838 TREE_TYPE (lhs),
3839 ops[0], mask);
3840 result = vn_nary_build_or_lookup (&match_op);
3841 if (result)
3843 bool changed = set_ssa_val_to (lhs, result);
3844 vn_nary_op_insert_stmt (stmt, result);
3845 return changed;
3852 default:;
3855 bool changed = set_ssa_val_to (lhs, lhs);
3856 vn_nary_op_insert_stmt (stmt, lhs);
3857 return changed;
3860 /* Visit a call STMT storing into LHS. Return true if the value number
3861 of the LHS has changed as a result. */
3863 static bool
3864 visit_reference_op_call (tree lhs, gcall *stmt)
3866 bool changed = false;
3867 struct vn_reference_s vr1;
3868 vn_reference_t vnresult = NULL;
3869 tree vdef = gimple_vdef (stmt);
3871 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
3872 if (lhs && TREE_CODE (lhs) != SSA_NAME)
3873 lhs = NULL_TREE;
3875 vn_reference_lookup_call (stmt, &vnresult, &vr1);
3876 if (vnresult)
3878 if (vnresult->result_vdef && vdef)
3879 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
3880 else if (vdef)
3881 /* If the call was discovered to be pure or const reflect
3882 that as far as possible. */
3883 changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
3885 if (!vnresult->result && lhs)
3886 vnresult->result = lhs;
3888 if (vnresult->result && lhs)
3889 changed |= set_ssa_val_to (lhs, vnresult->result);
3891 else
3893 vn_reference_t vr2;
3894 vn_reference_s **slot;
3895 tree vdef_val = vdef;
3896 if (vdef)
3898 /* If we value numbered an indirect call's function to
3899 one not clobbering memory, value number its VDEF to its
3900 VUSE. */
3901 tree fn = gimple_call_fn (stmt);
3902 if (fn && TREE_CODE (fn) == SSA_NAME)
3904 fn = SSA_VAL (fn);
3905 if (TREE_CODE (fn) == ADDR_EXPR
3906 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
3907 && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
3908 & (ECF_CONST | ECF_PURE)))
3909 vdef_val = vuse_ssa_val (gimple_vuse (stmt));
3911 changed |= set_ssa_val_to (vdef, vdef_val);
3913 if (lhs)
3914 changed |= set_ssa_val_to (lhs, lhs);
3915 vr2 = XOBNEW (&vn_tables_obstack, vn_reference_s);
3916 vr2->vuse = vr1.vuse;
3917 /* As we are not walking the virtual operand chain we know the
3918 shared_lookup_references are still original so we can re-use
3919 them here. */
3920 vr2->operands = vr1.operands.copy ();
3921 vr2->type = vr1.type;
3922 vr2->set = vr1.set;
3923 vr2->hashcode = vr1.hashcode;
3924 vr2->result = lhs;
3925 vr2->result_vdef = vdef_val;
3926 slot = valid_info->references->find_slot_with_hash (vr2, vr2->hashcode,
3927 INSERT);
3928 gcc_assert (!*slot);
3929 *slot = vr2;
3930 vr2->next = last_inserted_ref;
3931 last_inserted_ref = vr2;
3934 return changed;
3937 /* Visit a load from a reference operator RHS, part of STMT, value number it,
3938 and return true if the value number of the LHS has changed as a result. */
3940 static bool
3941 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
3943 bool changed = false;
3944 tree last_vuse;
3945 tree result;
3947 last_vuse = gimple_vuse (stmt);
3948 last_vuse_ptr = &last_vuse;
3949 result = vn_reference_lookup (op, gimple_vuse (stmt),
3950 default_vn_walk_kind, NULL, true);
3951 last_vuse_ptr = NULL;
3953 /* We handle type-punning through unions by value-numbering based
3954 on offset and size of the access. Be prepared to handle a
3955 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
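/* Added illustration (not part of the original source): a union type pun
   such as

     union { int i; float f; } u;
     u.i = x_1;
     ... = u.f;

   finds the stored int value here because the lookup works on offset and
   size of the access; the type mismatch is handled by value numbering the
   load to VIEW_CONVERT_EXPR <float> (x_1).  */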
3956 if (result
3957 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
3959 /* We will be setting the value number of lhs to the value number
3960 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
3961 So first simplify and lookup this expression to see if it
3962 is already available. */
3963 gimple_match_op res_op (gimple_match_cond::UNCOND,
3964 VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
3965 result = vn_nary_build_or_lookup (&res_op);
3966 /* When building the conversion fails avoid inserting the reference
3967 again. */
3968 if (!result)
3969 return set_ssa_val_to (lhs, lhs);
3972 if (result)
3973 changed = set_ssa_val_to (lhs, result);
3974 else
3976 changed = set_ssa_val_to (lhs, lhs);
3977 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
3980 return changed;
3984 /* Visit a store to a reference operator LHS, part of STMT, value number it,
3985 and return true if the value number of the LHS has changed as a result. */
3987 static bool
3988 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
3990 bool changed = false;
3991 vn_reference_t vnresult = NULL;
3992 tree assign;
3993 bool resultsame = false;
3994 tree vuse = gimple_vuse (stmt);
3995 tree vdef = gimple_vdef (stmt);
3997 if (TREE_CODE (op) == SSA_NAME)
3998 op = SSA_VAL (op);
4000 /* First we want to lookup using the *vuses* from the store and see
4001 if the last store to this location with the same address
4002 had the same value.
4004 The vuses represent the memory state before the store. If the
4005 memory state, address, and value of the store are the same as the
4006 last store to this location, then this store will produce the
4007 same memory state as that store.
4009 In this case the vdef versions for this store are value numbered to those
4010 vuse versions, since they represent the same memory state after
4011 this store.
4013 Otherwise, the vdefs for the store are used when inserting into
4014 the table, since the store generates a new memory state. */
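/* Added illustration (not part of the original source): for

     *p_1 = x_2;   with VUSE .MEM_3 and VDEF .MEM_4
     ...
     *p_1 = x_2;   with VUSE .MEM_4 and VDEF .MEM_5

   the second store sees the first one storing the same value to the same
   location, so .MEM_5 is value numbered to .MEM_4 and the eliminator can
   later remove the redundant store.  */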
4016 vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
4017 if (vnresult
4018 && vnresult->result)
4020 tree result = vnresult->result;
4021 gcc_checking_assert (TREE_CODE (result) != SSA_NAME
4022 || result == SSA_VAL (result));
4023 resultsame = expressions_equal_p (result, op);
4024 if (resultsame)
4026 /* If the TBAA state isn't compatible for downstream reads
4027 we cannot value-number the VDEFs the same. */
4028 alias_set_type set = get_alias_set (lhs);
4029 if (vnresult->set != set
4030 && ! alias_set_subset_of (set, vnresult->set))
4031 resultsame = false;
4035 if (!resultsame)
4037 /* Only perform the following when being called from PRE
4038 which embeds tail merging. */
4039 if (default_vn_walk_kind == VN_WALK)
4041 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
4042 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
4043 if (vnresult)
4045 VN_INFO (vdef)->visited = true;
4046 return set_ssa_val_to (vdef, vnresult->result_vdef);
4050 if (dump_file && (dump_flags & TDF_DETAILS))
4052 fprintf (dump_file, "No store match\n");
4053 fprintf (dump_file, "Value numbering store ");
4054 print_generic_expr (dump_file, lhs);
4055 fprintf (dump_file, " to ");
4056 print_generic_expr (dump_file, op);
4057 fprintf (dump_file, "\n");
4059 /* Have to set value numbers before insert, since insert is
4060 going to valueize the references in-place. */
4061 if (vdef)
4062 changed |= set_ssa_val_to (vdef, vdef);
4064 /* Do not insert structure copies into the tables. */
4065 if (is_gimple_min_invariant (op)
4066 || is_gimple_reg (op))
4067 vn_reference_insert (lhs, op, vdef, NULL);
4069 /* Only perform the following when being called from PRE
4070 which embeds tail merging. */
4071 if (default_vn_walk_kind == VN_WALK)
4073 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
4074 vn_reference_insert (assign, lhs, vuse, vdef);
4077 else
4079 /* We had a match, so value number the vdef to have the value
4080 number of the vuse it came from. */
4082 if (dump_file && (dump_flags & TDF_DETAILS))
4083 fprintf (dump_file, "Store matched earlier value, "
4084 "value numbering store vdefs to matching vuses.\n");
4086 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
4089 return changed;
4092 /* Visit and value number PHI, return true if the value number
4093 changed. When BACKEDGES_VARYING_P is true then assume all
4094 backedge values are varying. When INSERTED is not NULL then
4095 this is just an ahead query for a possible iteration, set INSERTED
4096 to true if we'd insert into the hashtable. */
4098 static bool
4099 visit_phi (gimple *phi, bool *inserted, bool backedges_varying_p)
4101 tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
4102 tree sameval_base = NULL_TREE;
4103 poly_int64 soff, doff;
4104 unsigned n_executable = 0;
4105 bool allsame = true;
4106 edge_iterator ei;
4107 edge e;
4109 /* TODO: We could check for this in initialization, and replace this
4110 with a gcc_assert. */
4111 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
4112 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
4114 /* We track whether a PHI was CSEd to in order to avoid excessive iterations
4115 that would be necessary only because the PHI changed arguments
4116 but not value. */
4117 if (!inserted)
4118 gimple_set_plf (phi, GF_PLF_1, false);
4120 /* See if all non-TOP arguments have the same value. TOP is
4121 equivalent to everything, so we can ignore it. */
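/* Added illustration (not part of the original source): for
   x_4 = PHI <x_1(2), x_1(3), x_2(4)> with the edge from block 4 found not
   executable, x_4 gets the value of x_1; when that edge is executable but
   x_2 is undefined, x_4 can still get x_1's value provided that value is
   a constant (see the seen_undef handling below).  */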
4122 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
4123 if (e->flags & EDGE_EXECUTABLE)
4125 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4127 ++n_executable;
4128 if (TREE_CODE (def) == SSA_NAME
4129 && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
4130 def = SSA_VAL (def);
4131 if (def == VN_TOP)
4133 /* Ignore undefined defs for sameval but record one. */
4134 else if (TREE_CODE (def) == SSA_NAME
4135 && ! virtual_operand_p (def)
4136 && ssa_undefined_value_p (def, false))
4137 seen_undef = def;
4138 else if (sameval == VN_TOP)
4139 sameval = def;
4140 else if (!expressions_equal_p (def, sameval))
4142 /* We know we're arriving only with invariant addresses here,
4143 try harder comparing them. We can do some caching here
4144 which we cannot do in expressions_equal_p. */
4145 if (TREE_CODE (def) == ADDR_EXPR
4146 && TREE_CODE (sameval) == ADDR_EXPR
4147 && sameval_base != (void *)-1)
4149 if (!sameval_base)
4150 sameval_base = get_addr_base_and_unit_offset
4151 (TREE_OPERAND (sameval, 0), &soff);
4152 if (!sameval_base)
4153 sameval_base = (tree)(void *)-1;
4154 else if ((get_addr_base_and_unit_offset
4155 (TREE_OPERAND (def, 0), &doff) == sameval_base)
4156 && known_eq (soff, doff))
4157 continue;
4159 allsame = false;
4160 break;
4165 /* If none of the edges was executable keep the value-number at VN_TOP,
4166 if only a single edge is executable use its value. */
4167 if (n_executable <= 1)
4168 result = seen_undef ? seen_undef : sameval;
4169 /* If we saw only undefined values and VN_TOP use one of the
4170 undefined values. */
4171 else if (sameval == VN_TOP)
4172 result = seen_undef ? seen_undef : sameval;
4173 /* First see if it is equivalent to a phi node in this block. We prefer
4174 this as it allows IV elimination - see PRs 66502 and 67167. */
4175 else if ((result = vn_phi_lookup (phi, backedges_varying_p)))
4177 if (!inserted
4178 && TREE_CODE (result) == SSA_NAME
4179 && gimple_code (SSA_NAME_DEF_STMT (result)) == GIMPLE_PHI)
4181 gimple_set_plf (SSA_NAME_DEF_STMT (result), GF_PLF_1, true);
4182 if (dump_file && (dump_flags & TDF_DETAILS))
4184 fprintf (dump_file, "Marking CSEd to PHI node ");
4185 print_gimple_expr (dump_file, SSA_NAME_DEF_STMT (result),
4186 0, TDF_SLIM);
4187 fprintf (dump_file, "\n");
4191 /* If all values are the same use that, unless we've seen undefined
4192 values as well and the value isn't constant.
4193 CCP/copyprop have the same restriction to not remove uninit warnings. */
4194 else if (allsame
4195 && (! seen_undef || is_gimple_min_invariant (sameval)))
4196 result = sameval;
4197 else
4199 result = PHI_RESULT (phi);
4200 /* Only insert PHIs that are varying; for constant value numbers
4201 we mess up equivalences otherwise as we are only comparing
4202 the immediate controlling predicates. */
4203 vn_phi_insert (phi, result, backedges_varying_p);
4204 if (inserted)
4205 *inserted = true;
4208 return set_ssa_val_to (PHI_RESULT (phi), result);
4211 /* Try to simplify RHS using equivalences and constant folding. */
4213 static tree
4214 try_to_simplify (gassign *stmt)
4216 enum tree_code code = gimple_assign_rhs_code (stmt);
4217 tree tem;
4219 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
4220 in this case, there is no point in doing extra work. */
4221 if (code == SSA_NAME)
4222 return NULL_TREE;
4224 /* First try constant folding based on our current lattice. */
4225 mprts_hook = vn_lookup_simplify_result;
4226 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
4227 mprts_hook = NULL;
4228 if (tem
4229 && (TREE_CODE (tem) == SSA_NAME
4230 || is_gimple_min_invariant (tem)))
4231 return tem;
4233 return NULL_TREE;
4236 /* Visit and value number STMT, return true if the value number
4237 changed. */
4239 static bool
4240 visit_stmt (gimple *stmt, bool backedges_varying_p = false)
4242 bool changed = false;
4244 if (dump_file && (dump_flags & TDF_DETAILS))
4246 fprintf (dump_file, "Value numbering stmt = ");
4247 print_gimple_stmt (dump_file, stmt, 0);
4250 if (gimple_code (stmt) == GIMPLE_PHI)
4251 changed = visit_phi (stmt, NULL, backedges_varying_p);
4252 else if (gimple_has_volatile_ops (stmt))
4253 changed = defs_to_varying (stmt);
4254 else if (gassign *ass = dyn_cast <gassign *> (stmt))
4256 enum tree_code code = gimple_assign_rhs_code (ass);
4257 tree lhs = gimple_assign_lhs (ass);
4258 tree rhs1 = gimple_assign_rhs1 (ass);
4259 tree simplified;
4261 /* Shortcut for copies. Simplifying copies is pointless,
4262 since we copy the expression and value they represent. */
4263 if (code == SSA_NAME
4264 && TREE_CODE (lhs) == SSA_NAME)
4266 changed = visit_copy (lhs, rhs1);
4267 goto done;
4269 simplified = try_to_simplify (ass);
4270 if (simplified)
4272 if (dump_file && (dump_flags & TDF_DETAILS))
4274 fprintf (dump_file, "RHS ");
4275 print_gimple_expr (dump_file, ass, 0);
4276 fprintf (dump_file, " simplified to ");
4277 print_generic_expr (dump_file, simplified);
4278 fprintf (dump_file, "\n");
4281 /* Setting value numbers to constants will occasionally
4282 screw up phi congruence because constants are not
4283 uniquely associated with a single ssa name that can be
4284 looked up. */
4285 if (simplified
4286 && is_gimple_min_invariant (simplified)
4287 && TREE_CODE (lhs) == SSA_NAME)
4289 changed = set_ssa_val_to (lhs, simplified);
4290 goto done;
4292 else if (simplified
4293 && TREE_CODE (simplified) == SSA_NAME
4294 && TREE_CODE (lhs) == SSA_NAME)
4296 changed = visit_copy (lhs, simplified);
4297 goto done;
4300 if ((TREE_CODE (lhs) == SSA_NAME
4301 /* We can substitute SSA_NAMEs that are live over
4302 abnormal edges with their constant value. */
4303 && !(gimple_assign_copy_p (ass)
4304 && is_gimple_min_invariant (rhs1))
4305 && !(simplified
4306 && is_gimple_min_invariant (simplified))
4307 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4308 /* Stores or copies from SSA_NAMEs that are live over
4309 abnormal edges are a problem. */
4310 || (code == SSA_NAME
4311 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
4312 changed = defs_to_varying (ass);
4313 else if (REFERENCE_CLASS_P (lhs)
4314 || DECL_P (lhs))
4315 changed = visit_reference_op_store (lhs, rhs1, ass);
4316 else if (TREE_CODE (lhs) == SSA_NAME)
4318 if ((gimple_assign_copy_p (ass)
4319 && is_gimple_min_invariant (rhs1))
4320 || (simplified
4321 && is_gimple_min_invariant (simplified)))
4323 if (simplified)
4324 changed = set_ssa_val_to (lhs, simplified);
4325 else
4326 changed = set_ssa_val_to (lhs, rhs1);
4328 else
4330 /* Visit the original statement. */
4331 switch (vn_get_stmt_kind (ass))
4333 case VN_NARY:
4334 changed = visit_nary_op (lhs, ass);
4335 break;
4336 case VN_REFERENCE:
4337 changed = visit_reference_op_load (lhs, rhs1, ass);
4338 break;
4339 default:
4340 changed = defs_to_varying (ass);
4341 break;
4345 else
4346 changed = defs_to_varying (ass);
4348 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
4350 tree lhs = gimple_call_lhs (call_stmt);
4351 if (lhs && TREE_CODE (lhs) == SSA_NAME)
4353 /* Try constant folding based on our current lattice. */
4354 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
4355 vn_valueize);
4356 if (simplified)
4358 if (dump_file && (dump_flags & TDF_DETAILS))
4360 fprintf (dump_file, "call ");
4361 print_gimple_expr (dump_file, call_stmt, 0);
4362 fprintf (dump_file, " simplified to ");
4363 print_generic_expr (dump_file, simplified);
4364 fprintf (dump_file, "\n");
4367 /* Setting value numbers to constants will occasionally
4368 screw up phi congruence because constants are not
4369 uniquely associated with a single ssa name that can be
4370 looked up. */
4371 if (simplified
4372 && is_gimple_min_invariant (simplified))
4374 changed = set_ssa_val_to (lhs, simplified);
4375 if (gimple_vdef (call_stmt))
4376 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4377 SSA_VAL (gimple_vuse (call_stmt)));
4378 goto done;
4380 else if (simplified
4381 && TREE_CODE (simplified) == SSA_NAME)
4383 changed = visit_copy (lhs, simplified);
4384 if (gimple_vdef (call_stmt))
4385 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
4386 SSA_VAL (gimple_vuse (call_stmt)));
4387 goto done;
4389 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
4391 changed = defs_to_varying (call_stmt);
4392 goto done;
4396 /* Pick up flags from a devirtualization target. */
4397 tree fn = gimple_call_fn (stmt);
4398 int extra_fnflags = 0;
4399 if (fn && TREE_CODE (fn) == SSA_NAME)
4401 fn = SSA_VAL (fn);
4402 if (TREE_CODE (fn) == ADDR_EXPR
4403 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
4404 extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
4406 if (!gimple_call_internal_p (call_stmt)
4407 && (/* Calls to the same function with the same vuse
4408 and the same operands do not necessarily return the same
4409 value, unless they're pure or const. */
4410 ((gimple_call_flags (call_stmt) | extra_fnflags)
4411 & (ECF_PURE | ECF_CONST))
4412 /* If calls have a vdef, subsequent calls won't have
4413 the same incoming vuse. So, if 2 calls with vdef have the
4414 same vuse, we know they're not subsequent.
4415 We can value number 2 calls to the same function with the
4416 same vuse and the same operands which are not subsequent
4417 the same, because there is no code in the program that can
4418 compare the 2 values... */
4419 || (gimple_vdef (call_stmt)
4420 /* ... unless the call returns a pointer which does
4421 not alias with anything else. In which case the
4422 information that the values are distinct is encoded
4423 in the IL. */
4424 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
4425 /* Only perform the following when being called from PRE
4426 which embeds tail merging. */
4427 && default_vn_walk_kind == VN_WALK)))
4428 changed = visit_reference_op_call (lhs, call_stmt);
4429 else
4430 changed = defs_to_varying (call_stmt);
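/* Added illustration (not part of the original source): two calls to a
   pure function with the same arguments and the same VUSE receive the
   same value number through visit_reference_op_call, whereas two malloc
   calls with identical arguments do not, since their ERF_NOALIAS return
   guarantee already encodes that the results are distinct.  */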
4432 else
4433 changed = defs_to_varying (stmt);
4434 done:
4435 return changed;
4439 /* Allocate a value number table. */
4441 static void
4442 allocate_vn_table (vn_tables_t table, unsigned size)
4444 table->phis = new vn_phi_table_type (size);
4445 table->nary = new vn_nary_op_table_type (size);
4446 table->references = new vn_reference_table_type (size);
4449 /* Free a value number table. */
4451 static void
4452 free_vn_table (vn_tables_t table)
4454 /* Walk over elements and release vectors. */
4455 vn_reference_iterator_type hir;
4456 vn_reference_t vr;
4457 FOR_EACH_HASH_TABLE_ELEMENT (*table->references, vr, vn_reference_t, hir)
4458 vr->operands.release ();
4459 delete table->phis;
4460 table->phis = NULL;
4461 delete table->nary;
4462 table->nary = NULL;
4463 delete table->references;
4464 table->references = NULL;
4467 /* Set *ID according to RESULT. */
4469 static void
4470 set_value_id_for_result (tree result, unsigned int *id)
4472 if (result && TREE_CODE (result) == SSA_NAME)
4473 *id = VN_INFO (result)->value_id;
4474 else if (result && is_gimple_min_invariant (result))
4475 *id = get_or_alloc_constant_value_id (result);
4476 else
4477 *id = get_next_value_id ();
4480 /* Set the value ids in the valid hash tables. */
4482 static void
4483 set_hashtable_value_ids (void)
4485 vn_nary_op_iterator_type hin;
4486 vn_phi_iterator_type hip;
4487 vn_reference_iterator_type hir;
4488 vn_nary_op_t vno;
4489 vn_reference_t vr;
4490 vn_phi_t vp;
4492 /* Now set the value ids of the things we had put in the hash
4493 table. */
4495 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
4496 if (! vno->predicated_values)
4497 set_value_id_for_result (vno->u.result, &vno->value_id);
4499 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
4500 set_value_id_for_result (vp->result, &vp->value_id);
4502 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
4503 hir)
4504 set_value_id_for_result (vr->result, &vr->value_id);
4508 /* Allocate and initialize CONST_PARAMS, a bitmap of parameter default defs
4509 we know point to readonly memory. */
4511 static void
4512 init_const_parms ()
4514 /* Collect pointers we know point to readonly memory. */
4515 const_parms = BITMAP_ALLOC (NULL);
4516 tree fnspec = lookup_attribute ("fn spec",
4517 TYPE_ATTRIBUTES (TREE_TYPE (cfun->decl)));
4518 if (fnspec)
4520 fnspec = TREE_VALUE (TREE_VALUE (fnspec));
4521 unsigned i = 1;
4522 for (tree arg = DECL_ARGUMENTS (cfun->decl);
4523 arg; arg = DECL_CHAIN (arg), ++i)
4525 if (i >= (unsigned) TREE_STRING_LENGTH (fnspec))
4526 break;
4527 if (TREE_STRING_POINTER (fnspec)[i] == 'R'
4528 || TREE_STRING_POINTER (fnspec)[i] == 'r')
4530 tree name = ssa_default_def (cfun, arg);
4531 if (name)
4532 bitmap_set_bit (const_parms, SSA_NAME_VERSION (name));
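/* Added note (not part of the original source): in the "fn spec" string
   character 0 describes the return value and character I describes
   argument I, so an 'R' or 'r' at an argument's position is what records
   that argument's default definition in CONST_PARMS above.  */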
4538 /* Return the maximum value id we have ever seen. */
4540 unsigned int
4541 get_max_value_id (void)
4543 return next_value_id;
4546 /* Return the next unique value id. */
4548 unsigned int
4549 get_next_value_id (void)
4551 return next_value_id++;
4555 /* Compare two expressions E1 and E2 and return true if they are equal. */
4557 bool
4558 expressions_equal_p (tree e1, tree e2)
4560 /* The obvious case. */
4561 if (e1 == e2)
4562 return true;
4564 /* If either one is VN_TOP consider them equal. */
4565 if (e1 == VN_TOP || e2 == VN_TOP)
4566 return true;
4568 /* If only one of them is null, they cannot be equal. */
4569 if (!e1 || !e2)
4570 return false;
4572 /* Now perform the actual comparison. */
4573 if (TREE_CODE (e1) == TREE_CODE (e2)
4574 && operand_equal_p (e1, e2, OEP_PURE_SAME))
4575 return true;
4577 return false;
4581 /* Return true if the nary operation NARY may trap. This is a copy
4582 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
4584 bool
4585 vn_nary_may_trap (vn_nary_op_t nary)
4587 tree type;
4588 tree rhs2 = NULL_TREE;
4589 bool honor_nans = false;
4590 bool honor_snans = false;
4591 bool fp_operation = false;
4592 bool honor_trapv = false;
4593 bool handled, ret;
4594 unsigned i;
4596 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
4597 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
4598 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
4600 type = nary->type;
4601 fp_operation = FLOAT_TYPE_P (type);
4602 if (fp_operation)
4604 honor_nans = flag_trapping_math && !flag_finite_math_only;
4605 honor_snans = flag_signaling_nans != 0;
4607 else if (INTEGRAL_TYPE_P (type)
4608 && TYPE_OVERFLOW_TRAPS (type))
4609 honor_trapv = true;
4611 if (nary->length >= 2)
4612 rhs2 = nary->op[1];
4613 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
4614 honor_trapv,
4615 honor_nans, honor_snans, rhs2,
4616 &handled);
4617 if (handled
4618 && ret)
4619 return true;
4621 for (i = 0; i < nary->length; ++i)
4622 if (tree_could_trap_p (nary->op[i]))
4623 return true;
4625 return false;
4629 class eliminate_dom_walker : public dom_walker
4631 public:
4632 eliminate_dom_walker (cdi_direction, bitmap);
4633 ~eliminate_dom_walker ();
4635 virtual edge before_dom_children (basic_block);
4636 virtual void after_dom_children (basic_block);
4638 virtual tree eliminate_avail (basic_block, tree op);
4639 virtual void eliminate_push_avail (basic_block, tree op);
4640 tree eliminate_insert (basic_block, gimple_stmt_iterator *gsi, tree val);
4642 void eliminate_stmt (basic_block, gimple_stmt_iterator *);
4644 unsigned eliminate_cleanup (bool region_p = false);
4646 bool do_pre;
4647 unsigned int el_todo;
4648 unsigned int eliminations;
4649 unsigned int insertions;
4651 /* SSA names that had their defs inserted by PRE if do_pre. */
4652 bitmap inserted_exprs;
4654 /* Blocks with statements that have had their EH properties changed. */
4655 bitmap need_eh_cleanup;
4657 /* Blocks with statements that have had their AB properties changed. */
4658 bitmap need_ab_cleanup;
4660 /* Local state for the eliminate domwalk. */
4661 auto_vec<gimple *> to_remove;
4662 auto_vec<gimple *> to_fixup;
4663 auto_vec<tree> avail;
4664 auto_vec<tree> avail_stack;
4667 eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
4668 bitmap inserted_exprs_)
4669 : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
4670 el_todo (0), eliminations (0), insertions (0),
4671 inserted_exprs (inserted_exprs_)
4673 need_eh_cleanup = BITMAP_ALLOC (NULL);
4674 need_ab_cleanup = BITMAP_ALLOC (NULL);
4677 eliminate_dom_walker::~eliminate_dom_walker ()
4679 BITMAP_FREE (need_eh_cleanup);
4680 BITMAP_FREE (need_ab_cleanup);
4683 /* Return a leader for OP that is available at the current point of the
4684 eliminate domwalk. */
4686 tree
4687 eliminate_dom_walker::eliminate_avail (basic_block, tree op)
4689 tree valnum = VN_INFO (op)->valnum;
4690 if (TREE_CODE (valnum) == SSA_NAME)
4692 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
4693 return valnum;
4694 if (avail.length () > SSA_NAME_VERSION (valnum))
4695 return avail[SSA_NAME_VERSION (valnum)];
4697 else if (is_gimple_min_invariant (valnum))
4698 return valnum;
4699 return NULL_TREE;
4702 /* At the current point of the eliminate domwalk make OP available. */
4704 void
4705 eliminate_dom_walker::eliminate_push_avail (basic_block, tree op)
4707 tree valnum = VN_INFO (op)->valnum;
4708 if (TREE_CODE (valnum) == SSA_NAME)
4710 if (avail.length () <= SSA_NAME_VERSION (valnum))
4711 avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
4712 tree pushop = op;
4713 if (avail[SSA_NAME_VERSION (valnum)])
4714 pushop = avail[SSA_NAME_VERSION (valnum)];
4715 avail_stack.safe_push (pushop);
4716 avail[SSA_NAME_VERSION (valnum)] = op;
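/* Added note (not part of the original source): the entry pushed to
   AVAIL_STACK is the previous leader for the value, or OP itself when
   there was none, which is what lets after_dom_children restore the
   earlier availability state when leaving this block's subtree.  */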
4720 /* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
4721 the leader for the expression if insertion was successful. */
4723 tree
4724 eliminate_dom_walker::eliminate_insert (basic_block bb,
4725 gimple_stmt_iterator *gsi, tree val)
4727 /* We can insert a sequence with a single assignment only. */
4728 gimple_seq stmts = VN_INFO (val)->expr;
4729 if (!gimple_seq_singleton_p (stmts))
4730 return NULL_TREE;
4731 gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
4732 if (!stmt
4733 || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
4734 && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
4735 && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
4736 && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
4737 || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
4738 return NULL_TREE;
4740 tree op = gimple_assign_rhs1 (stmt);
4741 if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
4742 || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
4743 op = TREE_OPERAND (op, 0);
4744 tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (bb, op) : op;
4745 if (!leader)
4746 return NULL_TREE;
4748 tree res;
4749 stmts = NULL;
4750 if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
4751 res = gimple_build (&stmts, BIT_FIELD_REF,
4752 TREE_TYPE (val), leader,
4753 TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
4754 TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
4755 else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
4756 res = gimple_build (&stmts, BIT_AND_EXPR,
4757 TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
4758 else
4759 res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
4760 TREE_TYPE (val), leader);
4761 if (TREE_CODE (res) != SSA_NAME
4762 || SSA_NAME_IS_DEFAULT_DEF (res)
4763 || gimple_bb (SSA_NAME_DEF_STMT (res)))
4765 gimple_seq_discard (stmts);
4767 /* During propagation we have to treat SSA info conservatively
4768 and thus we can end up simplifying the inserted expression
4769 at elimination time to something not defined in stmts. */
4770 /* But then this is a redundancy we failed to detect. Which means
4771 res now has two values. That doesn't play well with how
4772 we track availability here, so give up. */
4773 if (dump_file && (dump_flags & TDF_DETAILS))
4775 if (TREE_CODE (res) == SSA_NAME)
4776 res = eliminate_avail (bb, res);
4777 if (res)
4779 fprintf (dump_file, "Failed to insert expression for value ");
4780 print_generic_expr (dump_file, val);
4781 fprintf (dump_file, " which is really fully redundant to ");
4782 print_generic_expr (dump_file, res);
4783 fprintf (dump_file, "\n");
4787 return NULL_TREE;
4789 else
4791 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
4792 VN_INFO (res)->valnum = val;
4793 VN_INFO (res)->visited = true;
4796 insertions++;
4797 if (dump_file && (dump_flags & TDF_DETAILS))
4799 fprintf (dump_file, "Inserted ");
4800 print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
4803 return res;
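/* Added illustration (not part of the original source, names are made up):
   if SCCVN recorded that value _7 is computable as (int) x_2 but no leader
   for _7 is available at the use, the conversion is re-materialized here
   from the current leader of x_2 and the freshly inserted name becomes
   usable as the leader for that value.  */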
4806 void
4807 eliminate_dom_walker::eliminate_stmt (basic_block b, gimple_stmt_iterator *gsi)
4809 tree sprime = NULL_TREE;
4810 gimple *stmt = gsi_stmt (*gsi);
4811 tree lhs = gimple_get_lhs (stmt);
4812 if (lhs && TREE_CODE (lhs) == SSA_NAME
4813 && !gimple_has_volatile_ops (stmt)
4814 /* See PR43491. Do not replace a global register variable when
4815 it is the RHS of an assignment. Do replace local register
4816 variables since gcc does not guarantee a local variable will
4817 be allocated in a register.
4818 ??? The fix isn't effective here. This should instead
4819 be ensured by not value-numbering them the same but treating
4820 them like volatiles? */
4821 && !(gimple_assign_single_p (stmt)
4822 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
4823 && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
4824 && is_global_var (gimple_assign_rhs1 (stmt)))))
4826 sprime = eliminate_avail (b, lhs);
4827 if (!sprime)
4829 /* If there is no existing usable leader but SCCVN thinks
4830 it has an expression it wants to use as replacement,
4831 insert that. */
4832 tree val = VN_INFO (lhs)->valnum;
4833 if (val != VN_TOP
4834 && TREE_CODE (val) == SSA_NAME
4835 && VN_INFO (val)->needs_insertion
4836 && VN_INFO (val)->expr != NULL
4837 && (sprime = eliminate_insert (b, gsi, val)) != NULL_TREE)
4838 eliminate_push_avail (b, sprime);
4841 /* If this now constitutes a copy, duplicate points-to
4842 and range info appropriately. This is especially
4843 important for inserted code. See tree-ssa-copy.c
4844 for similar code. */
4845 if (sprime
4846 && TREE_CODE (sprime) == SSA_NAME)
4848 basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
4849 if (POINTER_TYPE_P (TREE_TYPE (lhs))
4850 && SSA_NAME_PTR_INFO (lhs)
4851 && ! SSA_NAME_PTR_INFO (sprime))
4853 duplicate_ssa_name_ptr_info (sprime,
4854 SSA_NAME_PTR_INFO (lhs));
4855 if (b != sprime_b)
4856 mark_ptr_info_alignment_unknown
4857 (SSA_NAME_PTR_INFO (sprime));
4859 else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
4860 && SSA_NAME_RANGE_INFO (lhs)
4861 && ! SSA_NAME_RANGE_INFO (sprime)
4862 && b == sprime_b)
4863 duplicate_ssa_name_range_info (sprime,
4864 SSA_NAME_RANGE_TYPE (lhs),
4865 SSA_NAME_RANGE_INFO (lhs));
4868 /* Inhibit the use of an inserted PHI on a loop header when
4869 the address of the memory reference is a simple induction
4870 variable. In other cases the vectorizer won't do anything
4871 anyway (either it's loop invariant or a complicated
4872 expression). */
4873 if (sprime
4874 && TREE_CODE (sprime) == SSA_NAME
4875 && do_pre
4876 && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
4877 && loop_outer (b->loop_father)
4878 && has_zero_uses (sprime)
4879 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
4880 && gimple_assign_load_p (stmt))
4882 gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
4883 basic_block def_bb = gimple_bb (def_stmt);
4884 if (gimple_code (def_stmt) == GIMPLE_PHI
4885 && def_bb->loop_father->header == def_bb)
4887 loop_p loop = def_bb->loop_father;
4888 ssa_op_iter iter;
4889 tree op;
4890 bool found = false;
4891 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
4893 affine_iv iv;
4894 def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
4895 if (def_bb
4896 && flow_bb_inside_loop_p (loop, def_bb)
4897 && simple_iv (loop, loop, op, &iv, true))
4899 found = true;
4900 break;
4903 if (found)
4905 if (dump_file && (dump_flags & TDF_DETAILS))
4907 fprintf (dump_file, "Not replacing ");
4908 print_gimple_expr (dump_file, stmt, 0);
4909 fprintf (dump_file, " with ");
4910 print_generic_expr (dump_file, sprime);
4911 fprintf (dump_file, " which would add a loop"
4912 " carried dependence to loop %d\n",
4913 loop->num);
4915 /* Don't keep sprime available. */
4916 sprime = NULL_TREE;
4921 if (sprime)
4923 /* If we can propagate the value computed for LHS into
4924 all uses don't bother doing anything with this stmt. */
4925 if (may_propagate_copy (lhs, sprime))
4927 /* Mark it for removal. */
4928 to_remove.safe_push (stmt);
4930 /* ??? Don't count copy/constant propagations. */
4931 if (gimple_assign_single_p (stmt)
4932 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
4933 || gimple_assign_rhs1 (stmt) == sprime))
4934 return;
4936 if (dump_file && (dump_flags & TDF_DETAILS))
4938 fprintf (dump_file, "Replaced ");
4939 print_gimple_expr (dump_file, stmt, 0);
4940 fprintf (dump_file, " with ");
4941 print_generic_expr (dump_file, sprime);
4942 fprintf (dump_file, " in all uses of ");
4943 print_gimple_stmt (dump_file, stmt, 0);
4946 eliminations++;
4947 return;
4950 /* If this is an assignment from our leader (which
4951 happens in the case the value-number is a constant)
4952 then there is nothing to do. */
4953 if (gimple_assign_single_p (stmt)
4954 && sprime == gimple_assign_rhs1 (stmt))
4955 return;
4957 /* Else replace its RHS. */
4958 bool can_make_abnormal_goto
4959 = is_gimple_call (stmt)
4960 && stmt_can_make_abnormal_goto (stmt);
4962 if (dump_file && (dump_flags & TDF_DETAILS))
4964 fprintf (dump_file, "Replaced ");
4965 print_gimple_expr (dump_file, stmt, 0);
4966 fprintf (dump_file, " with ");
4967 print_generic_expr (dump_file, sprime);
4968 fprintf (dump_file, " in ");
4969 print_gimple_stmt (dump_file, stmt, 0);
4972 eliminations++;
4973 gimple *orig_stmt = stmt;
4974 if (!useless_type_conversion_p (TREE_TYPE (lhs),
4975 TREE_TYPE (sprime)))
4976 sprime = fold_convert (TREE_TYPE (lhs), sprime);
4977 tree vdef = gimple_vdef (stmt);
4978 tree vuse = gimple_vuse (stmt);
4979 propagate_tree_value_into_stmt (gsi, sprime);
4980 stmt = gsi_stmt (*gsi);
4981 update_stmt (stmt);
4982 if (vdef != gimple_vdef (stmt))
4983 VN_INFO (vdef)->valnum = vuse;
4985 /* If we removed EH side-effects from the statement, clean
4986 its EH information. */
4987 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
4989 bitmap_set_bit (need_eh_cleanup,
4990 gimple_bb (stmt)->index);
4991 if (dump_file && (dump_flags & TDF_DETAILS))
4992 fprintf (dump_file, " Removed EH side-effects.\n");
4995 /* Likewise for AB side-effects. */
4996 if (can_make_abnormal_goto
4997 && !stmt_can_make_abnormal_goto (stmt))
4999 bitmap_set_bit (need_ab_cleanup,
5000 gimple_bb (stmt)->index);
5001 if (dump_file && (dump_flags & TDF_DETAILS))
5002 fprintf (dump_file, " Removed AB side-effects.\n");
5005 return;
5009 /* If the statement is a scalar store, see if the expression
5010 has the same value number as its rhs. If so, the store is
5011 dead. */
5012 if (gimple_assign_single_p (stmt)
5013 && !gimple_has_volatile_ops (stmt)
5014 && !is_gimple_reg (gimple_assign_lhs (stmt))
5015 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
5016 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
5018 tree val;
5019 tree rhs = gimple_assign_rhs1 (stmt);
5020 vn_reference_t vnresult;
5021 val = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_WALKREWRITE,
5022 &vnresult, false);
5023 if (TREE_CODE (rhs) == SSA_NAME)
5024 rhs = VN_INFO (rhs)->valnum;
5025 if (val
5026 && operand_equal_p (val, rhs, 0))
5028 /* We can only remove the later store if the former aliases
5029 at least all accesses the later one does or if the store
5030 was to readonly memory storing the same value. */
5031 alias_set_type set = get_alias_set (lhs);
5032 if (! vnresult
5033 || vnresult->set == set
5034 || alias_set_subset_of (set, vnresult->set))
5036 if (dump_file && (dump_flags & TDF_DETAILS))
5038 fprintf (dump_file, "Deleted redundant store ");
5039 print_gimple_stmt (dump_file, stmt, 0);
5042 /* Queue stmt for removal. */
5043 to_remove.safe_push (stmt);
5044 return;
5049 /* If this is a control statement for which value numbering left some
5050 edges unexecuted, force the condition in a way consistent with
5051 that. */
5052 if (gcond *cond = dyn_cast <gcond *> (stmt))
5054 if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
5055 ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
5057 if (dump_file && (dump_flags & TDF_DETAILS))
5059 fprintf (dump_file, "Removing unexecutable edge from ");
5060 print_gimple_stmt (dump_file, stmt, 0);
5062 if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
5063 == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
5064 gimple_cond_make_true (cond);
5065 else
5066 gimple_cond_make_false (cond);
5067 update_stmt (cond);
5068 el_todo |= TODO_cleanup_cfg;
5069 return;
5073 bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
5074 bool was_noreturn = (is_gimple_call (stmt)
5075 && gimple_call_noreturn_p (stmt));
5076 tree vdef = gimple_vdef (stmt);
5077 tree vuse = gimple_vuse (stmt);
5079 /* If we didn't replace the whole stmt (or propagate the result
5080 into all uses), replace all uses on this stmt with their
5081 leaders. */
5082 bool modified = false;
5083 use_operand_p use_p;
5084 ssa_op_iter iter;
5085 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
5087 tree use = USE_FROM_PTR (use_p);
5088 /* ??? The call code above leaves stmt operands un-updated. */
5089 if (TREE_CODE (use) != SSA_NAME)
5090 continue;
5091 tree sprime;
5092 if (SSA_NAME_IS_DEFAULT_DEF (use))
5093 /* ??? For default defs BB shouldn't matter, but we have to
5094 solve the inconsistency between rpo eliminate and
5095 dom eliminate avail valueization first. */
5096 sprime = eliminate_avail (b, use);
5097 else
5098 /* Look for something available at the definition block of the argument.
5099 This avoids inconsistencies between availability there which
5100 decides if the stmt can be removed and availability at the
5101 use site. The SSA property ensures that things available
5102 at the definition are also available at uses. */
5103 sprime = eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (use)), use);
5104 if (sprime && sprime != use
5105 && may_propagate_copy (use, sprime)
5106 /* We substitute into debug stmts to avoid excessive
5107 debug temporaries created by removed stmts, but we need
5108 to avoid doing so for inserted sprimes as we never want
5109 to create debug temporaries for them. */
5110 && (!inserted_exprs
5111 || TREE_CODE (sprime) != SSA_NAME
5112 || !is_gimple_debug (stmt)
5113 || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
5115 propagate_value (use_p, sprime);
5116 modified = true;
5120 /* Fold the stmt if modified; this canonicalizes MEM_REFs we propagated
5121 into, which is a requirement for the IPA devirt machinery. */
5122 gimple *old_stmt = stmt;
5123 if (modified)
5125 /* If a formerly non-invariant ADDR_EXPR is turned into an
5126 invariant one it was on a separate stmt. */
5127 if (gimple_assign_single_p (stmt)
5128 && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
5129 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
5130 gimple_stmt_iterator prev = *gsi;
5131 gsi_prev (&prev);
5132 if (fold_stmt (gsi))
5134 /* fold_stmt may have created new stmts in between
5135 the previous stmt and the folded stmt. Mark
5136 all defs created there as varying to not confuse
5137 the SCCVN machinery as we're using that even during
5138 elimination. */
5139 if (gsi_end_p (prev))
5140 prev = gsi_start_bb (b);
5141 else
5142 gsi_next (&prev);
5143 if (gsi_stmt (prev) != gsi_stmt (*gsi))
5146 tree def;
5147 ssa_op_iter dit;
5148 FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
5149 dit, SSA_OP_ALL_DEFS)
5150 /* As existing DEFs may move between stmts
5151 only process new ones. */
5152 if (! has_VN_INFO (def))
5154 VN_INFO (def)->valnum = def;
5155 VN_INFO (def)->visited = true;
5157 if (gsi_stmt (prev) == gsi_stmt (*gsi))
5158 break;
5159 gsi_next (&prev);
5161 while (1);
5163 stmt = gsi_stmt (*gsi);
5164 /* In case we folded the stmt away schedule the NOP for removal. */
5165 if (gimple_nop_p (stmt))
5166 to_remove.safe_push (stmt);
5169 /* Visit indirect calls and turn them into direct calls if
5170 possible using the devirtualization machinery. Do this before
5171 checking for required EH/abnormal/noreturn cleanup as devirt
5172 may expose more of those. */
5173 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
5175 tree fn = gimple_call_fn (call_stmt);
5176 if (fn
5177 && flag_devirtualize
5178 && virtual_method_call_p (fn))
5180 tree otr_type = obj_type_ref_class (fn);
5181 unsigned HOST_WIDE_INT otr_tok
5182 = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
5183 tree instance;
5184 ipa_polymorphic_call_context context (current_function_decl,
5185 fn, stmt, &instance);
5186 context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
5187 otr_type, stmt);
5188 bool final;
5189 vec <cgraph_node *> targets
5190 = possible_polymorphic_call_targets (obj_type_ref_class (fn),
5191 otr_tok, context, &final);
5192 if (dump_file)
5193 dump_possible_polymorphic_call_targets (dump_file,
5194 obj_type_ref_class (fn),
5195 otr_tok, context);
5196 if (final && targets.length () <= 1 && dbg_cnt (devirt))
5198 tree fn;
5199 if (targets.length () == 1)
5200 fn = targets[0]->decl;
5201 else
5202 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
5203 if (dump_enabled_p ())
5205 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
5206 "converting indirect call to "
5207 "function %s\n",
5208 lang_hooks.decl_printable_name (fn, 2));
5210 gimple_call_set_fndecl (call_stmt, fn);
5211 /* If changing the call to __builtin_unreachable
5212 or similar noreturn function, adjust gimple_call_fntype
5213 too. */
5214 if (gimple_call_noreturn_p (call_stmt)
5215 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
5216 && TYPE_ARG_TYPES (TREE_TYPE (fn))
5217 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
5218 == void_type_node))
5219 gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
5220 maybe_remove_unused_call_args (cfun, call_stmt);
5221 modified = true;
5226 if (modified)
5228 /* When changing a call into a noreturn call, cfg cleanup
5229 is needed to fix up the noreturn call. */
5230 if (!was_noreturn
5231 && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
5232 to_fixup.safe_push (stmt);
5233 /* When changing a condition or switch into one we know what
5234 edge will be executed, schedule a cfg cleanup. */
5235 if ((gimple_code (stmt) == GIMPLE_COND
5236 && (gimple_cond_true_p (as_a <gcond *> (stmt))
5237 || gimple_cond_false_p (as_a <gcond *> (stmt))))
5238 || (gimple_code (stmt) == GIMPLE_SWITCH
5239 && TREE_CODE (gimple_switch_index
5240 (as_a <gswitch *> (stmt))) == INTEGER_CST))
5241 el_todo |= TODO_cleanup_cfg;
5242 /* If we removed EH side-effects from the statement, clean
5243 its EH information. */
5244 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
5246 bitmap_set_bit (need_eh_cleanup,
5247 gimple_bb (stmt)->index);
5248 if (dump_file && (dump_flags & TDF_DETAILS))
5249 fprintf (dump_file, " Removed EH side-effects.\n");
5251 /* Likewise for AB side-effects. */
5252 if (can_make_abnormal_goto
5253 && !stmt_can_make_abnormal_goto (stmt))
5255 bitmap_set_bit (need_ab_cleanup,
5256 gimple_bb (stmt)->index);
5257 if (dump_file && (dump_flags & TDF_DETAILS))
5258 fprintf (dump_file, " Removed AB side-effects.\n");
5260 update_stmt (stmt);
5261 if (vdef != gimple_vdef (stmt))
5262 VN_INFO (vdef)->valnum = vuse;
5265 /* Make new values available - for fully redundant LHS we
5266 continue with the next stmt above and skip this. */
5267 def_operand_p defp;
5268 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
5269 eliminate_push_avail (b, DEF_FROM_PTR (defp));
5272 /* Perform elimination for the basic-block B during the domwalk. */
5274 edge
5275 eliminate_dom_walker::before_dom_children (basic_block b)
5277 /* Mark new bb. */
5278 avail_stack.safe_push (NULL_TREE);
5281 /* Skip blocks marked unreachable during the SCCVN domwalk. */
5281 if (!(b->flags & BB_EXECUTABLE))
5282 return NULL;
5284 vn_context_bb = b;
5286 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
5288 gphi *phi = gsi.phi ();
5289 tree res = PHI_RESULT (phi);
5291 if (virtual_operand_p (res))
5293 gsi_next (&gsi);
5294 continue;
5297 tree sprime = eliminate_avail (b, res);
5298 if (sprime
5299 && sprime != res)
5301 if (dump_file && (dump_flags & TDF_DETAILS))
5303 fprintf (dump_file, "Replaced redundant PHI node defining ");
5304 print_generic_expr (dump_file, res);
5305 fprintf (dump_file, " with ");
5306 print_generic_expr (dump_file, sprime);
5307 fprintf (dump_file, "\n");
5310 /* If we inserted this PHI node ourselves, it's not an elimination. */
5311 if (! inserted_exprs
5312 || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
5313 eliminations++;
5315 /* If we will propagate into all uses don't bother to do
5316 anything. */
5317 if (may_propagate_copy (res, sprime))
5319 /* Mark the PHI for removal. */
5320 to_remove.safe_push (phi);
5321 gsi_next (&gsi);
5322 continue;
5325 remove_phi_node (&gsi, false);
5327 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
5328 sprime = fold_convert (TREE_TYPE (res), sprime);
5329 gimple *stmt = gimple_build_assign (res, sprime);
5330 gimple_stmt_iterator gsi2 = gsi_after_labels (b);
5331 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
5332 continue;
5335 eliminate_push_avail (b, res);
5336 gsi_next (&gsi);
5339 for (gimple_stmt_iterator gsi = gsi_start_bb (b);
5340 !gsi_end_p (gsi);
5341 gsi_next (&gsi))
5342 eliminate_stmt (b, &gsi);
5344 /* Replace destination PHI arguments. */
5345 edge_iterator ei;
5346 edge e;
5347 FOR_EACH_EDGE (e, ei, b->succs)
5348 if (e->flags & EDGE_EXECUTABLE)
5349 for (gphi_iterator gsi = gsi_start_phis (e->dest);
5350 !gsi_end_p (gsi);
5351 gsi_next (&gsi))
5353 gphi *phi = gsi.phi ();
5354 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
5355 tree arg = USE_FROM_PTR (use_p);
5356 if (TREE_CODE (arg) != SSA_NAME
5357 || virtual_operand_p (arg))
5358 continue;
5359 tree sprime = eliminate_avail (b, arg);
5360 if (sprime && may_propagate_copy (arg, sprime))
5361 propagate_value (use_p, sprime);
5364 vn_context_bb = NULL;
5366 return NULL;
5369 /* Pop the availability entries pushed for this block, making leaders that are no longer valid here unavailable again. */
5371 void
5372 eliminate_dom_walker::after_dom_children (basic_block)
5374 tree entry;
5375 while ((entry = avail_stack.pop ()) != NULL_TREE)
5377 tree valnum = VN_INFO (entry)->valnum;
5378 tree old = avail[SSA_NAME_VERSION (valnum)];
5379 if (old == entry)
5380 avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
5381 else
5382 avail[SSA_NAME_VERSION (valnum)] = entry;
5386 /* Remove queued stmts and perform delayed cleanups. */
5388 unsigned
5389 eliminate_dom_walker::eliminate_cleanup (bool region_p)
5391 statistics_counter_event (cfun, "Eliminated", eliminations);
5392 statistics_counter_event (cfun, "Insertions", insertions);
5394 /* We cannot remove stmts during BB walk, especially not release SSA
5395 names there as this confuses the VN machinery. The stmts ending
5396 up in to_remove are either stores or simple copies.
5397 Remove stmts in reverse order to make debug stmt creation possible. */
5398 while (!to_remove.is_empty ())
5400 bool do_release_defs = true;
5401 gimple *stmt = to_remove.pop ();
5403 /* When we are value-numbering a region we do not require exit PHIs to
5404 be present so we have to make sure to deal with uses outside of the
5405 region of stmts that we thought are eliminated.
5406 ??? Note we may be confused by uses in dead regions we didn't run
5407 elimination on. Rather than checking individual uses we accept
5408 dead copies to be generated here (gcc.c-torture/execute/20060905-1.c
5409 contains such an example). */
5410 if (region_p)
5412 if (gphi *phi = dyn_cast <gphi *> (stmt))
5414 tree lhs = gimple_phi_result (phi);
5415 if (!has_zero_uses (lhs))
5417 if (dump_file && (dump_flags & TDF_DETAILS))
5418 fprintf (dump_file, "Keeping eliminated stmt live "
5419 "as copy because of out-of-region uses\n");
5420 tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
5421 gimple *copy = gimple_build_assign (lhs, sprime);
5422 gimple_stmt_iterator gsi
5423 = gsi_after_labels (gimple_bb (stmt));
5424 gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
5425 do_release_defs = false;
5428 else if (tree lhs = gimple_get_lhs (stmt))
5429 if (TREE_CODE (lhs) == SSA_NAME
5430 && !has_zero_uses (lhs))
5432 if (dump_file && (dump_flags & TDF_DETAILS))
5433 fprintf (dump_file, "Keeping eliminated stmt live "
5434 "as copy because of out-of-region uses\n");
5435 tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
5436 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5437 if (is_gimple_assign (stmt))
5439 gimple_assign_set_rhs_from_tree (&gsi, sprime);
5440 update_stmt (gsi_stmt (gsi));
5441 continue;
5443 else
5445 gimple *copy = gimple_build_assign (lhs, sprime);
5446 gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
5447 do_release_defs = false;
5452 if (dump_file && (dump_flags & TDF_DETAILS))
5454 fprintf (dump_file, "Removing dead stmt ");
5455 print_gimple_stmt (dump_file, stmt, 0, TDF_NONE);
5458 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
5459 if (gimple_code (stmt) == GIMPLE_PHI)
5460 remove_phi_node (&gsi, do_release_defs);
5461 else
5463 basic_block bb = gimple_bb (stmt);
5464 unlink_stmt_vdef (stmt);
5465 if (gsi_remove (&gsi, true))
5466 bitmap_set_bit (need_eh_cleanup, bb->index);
5467 if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
5468 bitmap_set_bit (need_ab_cleanup, bb->index);
5469 if (do_release_defs)
5470 release_defs (stmt);
5473 /* Removing a stmt may expose a forwarder block. */
5474 el_todo |= TODO_cleanup_cfg;
5477 /* Fixup stmts that became noreturn calls. This may require splitting
5478 blocks and thus isn't possible during the dominator walk. Do this
5479 in reverse order so we don't inadvertently remove a stmt we want to
5480 fixup by visiting a dominating now noreturn call first. */
5481 while (!to_fixup.is_empty ())
5483 gimple *stmt = to_fixup.pop ();
5485 if (dump_file && (dump_flags & TDF_DETAILS))
5487 fprintf (dump_file, "Fixing up noreturn call ");
5488 print_gimple_stmt (dump_file, stmt, 0);
5491 if (fixup_noreturn_call (stmt))
5492 el_todo |= TODO_cleanup_cfg;
5495 bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
5496 bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);
5498 if (do_eh_cleanup)
5499 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
5501 if (do_ab_cleanup)
5502 gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
5504 if (do_eh_cleanup || do_ab_cleanup)
5505 el_todo |= TODO_cleanup_cfg;
5507 return el_todo;
5510 /* Eliminate fully redundant computations. */
5512 unsigned
5513 eliminate_with_rpo_vn (bitmap inserted_exprs)
5515 eliminate_dom_walker walker (CDI_DOMINATORS, inserted_exprs);
5517 walker.walk (cfun->cfg->x_entry_block_ptr);
5518 return walker.eliminate_cleanup ();
5521 static unsigned
5522 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
5523 bool iterate, bool eliminate);
5525 void
5526 run_rpo_vn (vn_lookup_kind kind)
5528 default_vn_walk_kind = kind;
5529 do_rpo_vn (cfun, NULL, NULL, true, false);
5531 /* ??? Prune requirement of these. */
5532 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
5533 constant_value_ids = BITMAP_ALLOC (NULL);
5535 /* Initialize the value ids and prune out remaining VN_TOPs
5536 from dead code. */
5537 tree name;
5538 unsigned i;
5539 FOR_EACH_SSA_NAME (i, name, cfun)
5541 vn_ssa_aux_t info = VN_INFO (name);
5542 if (!info->visited
5543 || info->valnum == VN_TOP)
5544 info->valnum = name;
5545 if (info->valnum == name)
5546 info->value_id = get_next_value_id ();
5547 else if (is_gimple_min_invariant (info->valnum))
5548 info->value_id = get_or_alloc_constant_value_id (info->valnum);
5551 /* Propagate. */
5552 FOR_EACH_SSA_NAME (i, name, cfun)
5554 vn_ssa_aux_t info = VN_INFO (name);
5555 if (TREE_CODE (info->valnum) == SSA_NAME
5556 && info->valnum != name
5557 && info->value_id != VN_INFO (info->valnum)->value_id)
5558 info->value_id = VN_INFO (info->valnum)->value_id;
5561 set_hashtable_value_ids ();
5563 if (dump_file && (dump_flags & TDF_DETAILS))
5565 fprintf (dump_file, "Value numbers:\n");
5566 FOR_EACH_SSA_NAME (i, name, cfun)
5568 if (VN_INFO (name)->visited
5569 && SSA_VAL (name) != name)
5571 print_generic_expr (dump_file, name);
5572 fprintf (dump_file, " = ");
5573 print_generic_expr (dump_file, SSA_VAL (name));
5574 fprintf (dump_file, " (%04d)\n", VN_INFO (name)->value_id);
5580 /* Free VN associated data structures. */
5582 void
5583 free_rpo_vn (void)
5585 free_vn_table (valid_info);
5586 XDELETE (valid_info);
5587 obstack_free (&vn_tables_obstack, NULL);
5588 obstack_free (&vn_tables_insert_obstack, NULL);
5590 BITMAP_FREE (const_parms);
5592 vn_ssa_aux_iterator_type it;
5593 vn_ssa_aux_t info;
5594 FOR_EACH_HASH_TABLE_ELEMENT (*vn_ssa_aux_hash, info, vn_ssa_aux_t, it)
5595 if (info->needs_insertion)
5596 release_ssa_name (info->name);
5597 obstack_free (&vn_ssa_aux_obstack, NULL);
5598 delete vn_ssa_aux_hash;
5600 delete constant_to_value_id;
5601 constant_to_value_id = NULL;
5602 BITMAP_FREE (constant_value_ids);
5605 /* Adaptor to the elimination engine using RPO availability. */
5607 class rpo_elim : public eliminate_dom_walker
5609 public:
5610 rpo_elim(basic_block entry_)
5611 : eliminate_dom_walker (CDI_DOMINATORS, NULL), entry (entry_) {}
5612 ~rpo_elim();
5614 virtual tree eliminate_avail (basic_block, tree op);
5616 virtual void eliminate_push_avail (basic_block, tree);
5618 basic_block entry;
5619 /* Instead of having a local availability lattice for each
5620 basic-block and availability at X defined as union of
5621 the local availabilities at X and its dominators we're
5622 turning this upside down and track availability per
5623 value given values are usually made available at very
5624 few points (at least one).
5625 So we have a value -> vec<location, leader> map where
5626 LOCATION specifies the basic-block in which LEADER is made
5627 available for VALUE. We push to this vector in RPO
5628 order, so for iteration we can simply pop the last
5629 entries.
5630 LOCATION is the basic-block index and LEADER is its
5631 SSA name version. */
5632 /* ??? We'd like to use auto_vec here with embedded storage
5633 but that doesn't play well until we can provide move
5634 constructors and use std::move on hash-table expansion.
5635 So for now this is a bit more expensive than necessary.
5636 We eventually want to switch to a chaining scheme like the one
5637 used for hashtable entries for unwinding, which would make it
5638 possible to embed the vector in the vn_ssa_aux structure. */
5639 typedef hash_map<tree, vec<std::pair<int, int> > > rpo_avail_t;
5640 rpo_avail_t m_rpo_avail;
5643 /* Global RPO state for access from hooks. */
5644 static rpo_elim *rpo_avail;
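/* Editor's illustration only (not GCC code; all names below are hypothetical):
   a minimal, self-contained sketch of the per-value availability scheme
   described in the rpo_elim class above.  Each value maps to a stack of
   (block index, leader version) pairs pushed in RPO order, so unwinding for
   iteration only pops trailing entries.  */
#if 0
#include <unordered_map>
#include <utility>
#include <vector>

struct avail_sketch
{
  /* value id -> (basic-block index, leader SSA version), newest last.  */
  std::unordered_map<int, std::vector<std::pair<int, int> > > avail;

  /* Record that LEADER_VERSION makes VALUE available from BB_INDEX on.  */
  void push (int value, int bb_index, int leader_version)
  {
    avail[value].push_back (std::make_pair (bb_index, leader_version));
  }

  /* Drop every entry pushed at or after RPO index RPO_IDX, mirroring the
     availability pruning done when iteration restarts at an earlier block.  */
  void unwind_to (int rpo_idx, const std::vector<int> &bb_to_rpo)
  {
    for (auto &kv : avail)
      while (!kv.second.empty ()
             && bb_to_rpo[kv.second.back ().first] >= rpo_idx)
        kv.second.pop_back ();
  }
};
#endif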
5646 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
5648 static tree
5649 vn_lookup_simplify_result (gimple_match_op *res_op)
5651 if (!res_op->code.is_tree_code ())
5652 return NULL_TREE;
5653 tree *ops = res_op->ops;
5654 unsigned int length = res_op->num_ops;
5655 if (res_op->code == CONSTRUCTOR
5656 /* ??? We arrive here both with SCCVN's view (a decomposed CONSTRUCTOR)
5657 and with GIMPLE's match-and-simplify view (CONSTRUCTOR as a GENERIC tree). */
5658 && TREE_CODE (res_op->ops[0]) == CONSTRUCTOR)
5660 length = CONSTRUCTOR_NELTS (res_op->ops[0]);
5661 ops = XALLOCAVEC (tree, length);
5662 for (unsigned i = 0; i < length; ++i)
5663 ops[i] = CONSTRUCTOR_ELT (res_op->ops[0], i)->value;
5665 vn_nary_op_t vnresult = NULL;
5666 tree res = vn_nary_op_lookup_pieces (length, (tree_code) res_op->code,
5667 res_op->type, ops, &vnresult);
5668 /* If this is used from expression simplification make sure to
5669 return an available expression. */
5670 if (res && mprts_hook && rpo_avail)
5671 res = rpo_avail->eliminate_avail (vn_context_bb, res);
5672 return res;
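/* For example, when match-and-simplify would otherwise have to materialize
   a new statement computing b_2 + c_3, this hook lets it reuse an SSA name
   already value-numbered to that expression, provided an available leader
   exists at vn_context_bb. */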
5675 rpo_elim::~rpo_elim ()
5677 /* Release the avail vectors. */
5678 for (rpo_avail_t::iterator i = m_rpo_avail.begin ();
5679 i != m_rpo_avail.end (); ++i)
5680 (*i).second.release ();
5683 /* Return a leader for OPs value that is valid at BB. */
5685 tree
5686 rpo_elim::eliminate_avail (basic_block bb, tree op)
5688 tree valnum = SSA_VAL (op);
5689 if (TREE_CODE (valnum) == SSA_NAME)
5691 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
5692 return valnum;
5693 vec<std::pair<int, int> > *av = m_rpo_avail.get (valnum);
5694 if (!av || av->is_empty ())
5695 return NULL_TREE;
5696 int i = av->length () - 1;
5697 if ((*av)[i].first == bb->index)
5698 /* On tramp3d 90% of the cases are here. */
5699 return ssa_name ((*av)[i].second);
5702 basic_block abb = BASIC_BLOCK_FOR_FN (cfun, (*av)[i].first);
5703 /* ??? During elimination we have to use availability at the
5704 definition site of a use we try to replace. This
5705 is required to not run into inconsistencies because
5706 of dominated_by_p_w_unex behavior and removing a definition
5707 while not replacing all uses.
5708 ??? We could try to consistently walk dominators
5709 ignoring non-executable regions. The nearest common
5710 dominator of bb and abb is where we can stop walking. We
5711 may also be able to "pre-compute" (bits of) the next immediate
5712 (non-)dominator during the RPO walk when marking edges as
5713 executable. */
5714 if (dominated_by_p_w_unex (bb, abb))
5716 tree leader = ssa_name ((*av)[i].second);
5717 /* Prevent eliminations that break loop-closed SSA. */
5718 if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
5719 && ! SSA_NAME_IS_DEFAULT_DEF (leader)
5720 && ! flow_bb_inside_loop_p (gimple_bb (SSA_NAME_DEF_STMT
5721 (leader))->loop_father,
5722 bb))
5723 return NULL_TREE;
5724 if (dump_file && (dump_flags & TDF_DETAILS))
5726 print_generic_expr (dump_file, leader);
5727 fprintf (dump_file, " is available for ");
5728 print_generic_expr (dump_file, valnum);
5729 fprintf (dump_file, "\n");
5731 /* On tramp3d 99% of the _remaining_ cases succeed at
5732 the first entry. */
5733 return leader;
5735 /* ??? Can we somehow skip to the immediate dominator
5736 RPO index (bb_to_rpo)? Again, maybe not worth, on
5737 tramp3d the worst number of elements in the vector is 9. */
5739 while (--i >= 0);
5741 else if (valnum != VN_TOP)
5742 /* valnum satisfies is_gimple_min_invariant. */
5743 return valnum;
5744 return NULL_TREE;
5747 /* Make LEADER a leader for its value at BB. */
5749 void
5750 rpo_elim::eliminate_push_avail (basic_block bb, tree leader)
5752 tree valnum = VN_INFO (leader)->valnum;
5753 if (valnum == VN_TOP)
5754 return;
5755 if (dump_file && (dump_flags & TDF_DETAILS))
5757 fprintf (dump_file, "Making available beyond BB%d ", bb->index);
5758 print_generic_expr (dump_file, leader);
5759 fprintf (dump_file, " for value ");
5760 print_generic_expr (dump_file, valnum);
5761 fprintf (dump_file, "\n");
5763 bool existed;
5764 vec<std::pair<int, int> > &av = m_rpo_avail.get_or_insert (valnum, &existed);
5765 if (!existed)
5767 new (&av) vec<std::pair<int, int> >;
5768 av.reserve_exact (2);
5770 av.safe_push (std::make_pair (bb->index, SSA_NAME_VERSION (leader)));
5773 /* Valueization hook for RPO VN plus required state. */
5775 tree
5776 rpo_vn_valueize (tree name)
5778 if (TREE_CODE (name) == SSA_NAME)
5780 vn_ssa_aux_t val = VN_INFO (name);
5781 /* For region-based VN this makes walk_non_aliased_vuses stop walking
5782 when we are about to look at a def outside of the region. */
5783 if (SSA_NAME_IS_VIRTUAL_OPERAND (name)
5784 && !val->visited)
5785 return NULL_TREE;
5786 if (val)
5788 tree tem = val->valnum;
5789 if (tem != VN_TOP && tem != name)
5791 if (TREE_CODE (tem) != SSA_NAME)
5792 return tem;
5793 /* For all values we only valueize to an available leader
5794 which means we can use SSA name info without restriction. */
5795 tem = rpo_avail->eliminate_avail (vn_context_bb, tem);
5796 if (tem)
5797 return tem;
5801 return name;
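/* For example, valueizing x_3 whose valnum is the name x_1 returns the
   leader for x_1 that is available in vn_context_bb if there is one, a
   constant valnum is returned directly, and otherwise x_3 itself is kept,
   so expression simplification only ever sees available operands. */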
5804 /* Insert on PRED_E predicates derived from CODE OPS being true besides the
5805 inverted condition. */
5807 static void
5808 insert_related_predicates_on_edge (enum tree_code code, tree *ops, edge pred_e)
5810 switch (code)
5812 case LT_EXPR:
5813 /* a < b -> a {!,<}= b */
5814 vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
5815 ops, boolean_true_node, 0, pred_e);
5816 vn_nary_op_insert_pieces_predicated (2, LE_EXPR, boolean_type_node,
5817 ops, boolean_true_node, 0, pred_e);
5818 /* a < b -> ! a {>,=} b */
5819 vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
5820 ops, boolean_false_node, 0, pred_e);
5821 vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
5822 ops, boolean_false_node, 0, pred_e);
5823 break;
5824 case GT_EXPR:
5825 /* a > b -> a {!,>}= b */
5826 vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
5827 ops, boolean_true_node, 0, pred_e);
5828 vn_nary_op_insert_pieces_predicated (2, GE_EXPR, boolean_type_node,
5829 ops, boolean_true_node, 0, pred_e);
5830 /* a > b -> ! a {<,=} b */
5831 vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
5832 ops, boolean_false_node, 0, pred_e);
5833 vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
5834 ops, boolean_false_node, 0, pred_e);
5835 break;
5836 case EQ_EXPR:
5837 /* a == b -> ! a {<,>} b */
5838 vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
5839 ops, boolean_false_node, 0, pred_e);
5840 vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
5841 ops, boolean_false_node, 0, pred_e);
5842 break;
5843 case LE_EXPR:
5844 case GE_EXPR:
5845 case NE_EXPR:
5846 /* Nothing besides inverted condition. */
5847 break;
5848 default:;
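/* For example, on the edge where a < b is known to be true the caller
   records a < b == true and a >= b == false, and the LT_EXPR case above
   additionally records a != b == true, a <= b == true, a > b == false and
   a == b == false, so redundant conditions reached through that edge can
   be simplified directly from the hashtable. */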
5852 /* Main stmt worker for RPO VN, process BB. */
5854 static unsigned
5855 process_bb (rpo_elim &avail, basic_block bb,
5856 bool bb_visited, bool iterate_phis, bool iterate, bool eliminate,
5857 bool do_region, bitmap exit_bbs)
5859 unsigned todo = 0;
5860 edge_iterator ei;
5861 edge e;
5863 vn_context_bb = bb;
5865 /* If we are in loop-closed SSA preserve this state. This is
5866 relevant when called on regions from outside of FRE/PRE. */
5867 bool lc_phi_nodes = false;
5868 if (loops_state_satisfies_p (LOOP_CLOSED_SSA))
5869 FOR_EACH_EDGE (e, ei, bb->preds)
5870 if (e->src->loop_father != e->dest->loop_father
5871 && flow_loop_nested_p (e->dest->loop_father,
5872 e->src->loop_father))
5874 lc_phi_nodes = true;
5875 break;
5878 /* Value-number all defs in the basic-block. */
5879 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
5880 gsi_next (&gsi))
5882 gphi *phi = gsi.phi ();
5883 tree res = PHI_RESULT (phi);
5884 vn_ssa_aux_t res_info = VN_INFO (res);
5885 if (!bb_visited)
5887 gcc_assert (!res_info->visited);
5888 res_info->valnum = VN_TOP;
5889 res_info->visited = true;
5892 /* When not iterating force backedge values to varying. */
5893 visit_stmt (phi, !iterate_phis);
5894 if (virtual_operand_p (res))
5895 continue;
5897 /* Eliminate */
5898 /* The interesting case is gcc.dg/tree-ssa/pr22230.c for correctness
5899 how we handle backedges and availability.
5900 And gcc.dg/tree-ssa/ssa-sccvn-2.c for optimization. */
5901 tree val = res_info->valnum;
5902 if (res != val && !iterate && eliminate)
5904 if (tree leader = avail.eliminate_avail (bb, res))
5906 if (leader != res
5907 /* Preserve loop-closed SSA form. */
5908 && (! lc_phi_nodes
5909 || is_gimple_min_invariant (leader)))
5911 if (dump_file && (dump_flags & TDF_DETAILS))
5913 fprintf (dump_file, "Replaced redundant PHI node "
5914 "defining ");
5915 print_generic_expr (dump_file, res);
5916 fprintf (dump_file, " with ");
5917 print_generic_expr (dump_file, leader);
5918 fprintf (dump_file, "\n");
5920 avail.eliminations++;
5922 if (may_propagate_copy (res, leader))
5924 /* Schedule for removal. */
5925 avail.to_remove.safe_push (phi);
5926 continue;
5928 /* ??? Else generate a copy stmt. */
5932 /* Only make defs available that not already are. But make
5933 sure loop-closed SSA PHI node defs are picked up for
5934 downstream uses. */
5935 if (lc_phi_nodes
5936 || res == val
5937 || ! avail.eliminate_avail (bb, res))
5938 avail.eliminate_push_avail (bb, res);
5941 /* For empty BBs mark outgoing edges executable. For non-empty BBs
5942 we do this when processing the last stmt as we have to do this
5943 before elimination which otherwise forces GIMPLE_CONDs to
5944 if (1 != 0) style when seeing non-executable edges. */
5945 if (gsi_end_p (gsi_start_bb (bb)))
5947 FOR_EACH_EDGE (e, ei, bb->succs)
5949 if (e->flags & EDGE_EXECUTABLE)
5950 continue;
5951 if (dump_file && (dump_flags & TDF_DETAILS))
5952 fprintf (dump_file,
5953 "marking outgoing edge %d -> %d executable\n",
5954 e->src->index, e->dest->index);
5955 gcc_checking_assert (iterate || !(e->flags & EDGE_DFS_BACK));
5956 e->flags |= EDGE_EXECUTABLE;
5957 e->dest->flags |= BB_EXECUTABLE;
5960 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
5961 !gsi_end_p (gsi); gsi_next (&gsi))
5963 ssa_op_iter i;
5964 tree op;
5965 if (!bb_visited)
5967 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
5969 vn_ssa_aux_t op_info = VN_INFO (op);
5970 gcc_assert (!op_info->visited);
5971 op_info->valnum = VN_TOP;
5972 op_info->visited = true;
5975 /* We somehow have to deal with uses that are not defined
5976 in the processed region. Forcing unvisited uses to
5977 varying here doesn't play well with def-use following during
5978 expression simplification, so we deal with this by checking
5979 the visited flag in SSA_VAL. */
5982 visit_stmt (gsi_stmt (gsi));
5984 gimple *last = gsi_stmt (gsi);
5985 e = NULL;
5986 switch (gimple_code (last))
5988 case GIMPLE_SWITCH:
5989 e = find_taken_edge (bb, vn_valueize (gimple_switch_index
5990 (as_a <gswitch *> (last))));
5991 break;
5992 case GIMPLE_COND:
5994 tree lhs = vn_valueize (gimple_cond_lhs (last));
5995 tree rhs = vn_valueize (gimple_cond_rhs (last));
5996 tree val = gimple_simplify (gimple_cond_code (last),
5997 boolean_type_node, lhs, rhs,
5998 NULL, vn_valueize);
5999 /* If the condition didn't simplify, see if we have recorded
6000 an expression from the edges taken so far. */
6001 if (! val || TREE_CODE (val) != INTEGER_CST)
6003 vn_nary_op_t vnresult;
6004 tree ops[2];
6005 ops[0] = lhs;
6006 ops[1] = rhs;
6007 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (last),
6008 boolean_type_node, ops,
6009 &vnresult);
6010 /* Did we get a predicated value? */
6011 if (! val && vnresult && vnresult->predicated_values)
6013 val = vn_nary_op_get_predicated_value (vnresult, bb);
6014 if (val && dump_file && (dump_flags & TDF_DETAILS))
6016 fprintf (dump_file, "Got predicated value ");
6017 print_generic_expr (dump_file, val, TDF_NONE);
6018 fprintf (dump_file, " for ");
6019 print_gimple_stmt (dump_file, last, TDF_SLIM);
6023 if (val)
6024 e = find_taken_edge (bb, val);
6025 if (! e)
6027 /* If we didn't manage to compute the taken edge then
6028 push predicated expressions for the condition itself
6029 and related conditions to the hashtables. This allows
6030 simplification of redundant conditions which is
6031 important as early cleanup. */
6032 edge true_e, false_e;
6033 extract_true_false_edges_from_block (bb, &true_e, &false_e);
6034 enum tree_code code = gimple_cond_code (last);
6035 enum tree_code icode
6036 = invert_tree_comparison (code, HONOR_NANS (lhs));
6037 tree ops[2];
6038 ops[0] = lhs;
6039 ops[1] = rhs;
6040 if (do_region
6041 && bitmap_bit_p (exit_bbs, true_e->dest->index))
6042 true_e = NULL;
6043 if (do_region
6044 && bitmap_bit_p (exit_bbs, false_e->dest->index))
6045 false_e = NULL;
6046 if (true_e)
6047 vn_nary_op_insert_pieces_predicated
6048 (2, code, boolean_type_node, ops,
6049 boolean_true_node, 0, true_e);
6050 if (false_e)
6051 vn_nary_op_insert_pieces_predicated
6052 (2, code, boolean_type_node, ops,
6053 boolean_false_node, 0, false_e);
6054 if (icode != ERROR_MARK)
6056 if (true_e)
6057 vn_nary_op_insert_pieces_predicated
6058 (2, icode, boolean_type_node, ops,
6059 boolean_false_node, 0, true_e);
6060 if (false_e)
6061 vn_nary_op_insert_pieces_predicated
6062 (2, icode, boolean_type_node, ops,
6063 boolean_true_node, 0, false_e);
6065 /* Relax for non-integers, inverted condition handled
6066 above. */
6067 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
6069 if (true_e)
6070 insert_related_predicates_on_edge (code, ops, true_e);
6071 if (false_e)
6072 insert_related_predicates_on_edge (icode, ops, false_e);
6075 break;
6077 case GIMPLE_GOTO:
6078 e = find_taken_edge (bb, vn_valueize (gimple_goto_dest (last)));
6079 break;
6080 default:
6081 e = NULL;
6083 if (e)
6085 todo = TODO_cleanup_cfg;
6086 if (!(e->flags & EDGE_EXECUTABLE))
6088 if (dump_file && (dump_flags & TDF_DETAILS))
6089 fprintf (dump_file,
6090 "marking known outgoing %sedge %d -> %d executable\n",
6091 e->flags & EDGE_DFS_BACK ? "back-" : "",
6092 e->src->index, e->dest->index);
6093 gcc_checking_assert (iterate || !(e->flags & EDGE_DFS_BACK));
6094 e->flags |= EDGE_EXECUTABLE;
6095 e->dest->flags |= BB_EXECUTABLE;
6098 else if (gsi_one_before_end_p (gsi))
6100 FOR_EACH_EDGE (e, ei, bb->succs)
6102 if (e->flags & EDGE_EXECUTABLE)
6103 continue;
6104 if (dump_file && (dump_flags & TDF_DETAILS))
6105 fprintf (dump_file,
6106 "marking outgoing edge %d -> %d executable\n",
6107 e->src->index, e->dest->index);
6108 gcc_checking_assert (iterate || !(e->flags & EDGE_DFS_BACK));
6109 e->flags |= EDGE_EXECUTABLE;
6110 e->dest->flags |= BB_EXECUTABLE;
6114 /* Eliminate. That also pushes to avail. */
6115 if (eliminate && ! iterate)
6116 avail.eliminate_stmt (bb, &gsi);
6117 else
6118 /* If not eliminating, make all not already available defs
6119 available. */
6120 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_DEF)
6121 if (! avail.eliminate_avail (bb, op))
6122 avail.eliminate_push_avail (bb, op);
6125 /* Eliminate in destination PHI arguments. Always substitute in dest
6126 PHIs, even for non-executable edges. This handles region
6127 exits PHIs. */
6128 if (!iterate && eliminate)
6129 FOR_EACH_EDGE (e, ei, bb->succs)
6130 for (gphi_iterator gsi = gsi_start_phis (e->dest);
6131 !gsi_end_p (gsi); gsi_next (&gsi))
6133 gphi *phi = gsi.phi ();
6134 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
6135 tree arg = USE_FROM_PTR (use_p);
6136 if (TREE_CODE (arg) != SSA_NAME
6137 || virtual_operand_p (arg))
6138 continue;
6139 tree sprime;
6140 if (SSA_NAME_IS_DEFAULT_DEF (arg))
6142 sprime = SSA_VAL (arg);
6143 gcc_assert (TREE_CODE (sprime) != SSA_NAME
6144 || SSA_NAME_IS_DEFAULT_DEF (sprime));
6146 else
6147 /* Look for sth available at the definition block of the argument.
6148 This avoids inconsistencies between availability there which
6149 decides if the stmt can be removed and availability at the
6150 use site. The SSA property ensures that things available
6151 at the definition are also available at uses. */
6152 sprime = avail.eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (arg)),
6153 arg);
6154 if (sprime
6155 && sprime != arg
6156 && may_propagate_copy (arg, sprime))
6157 propagate_value (use_p, sprime);
6160 vn_context_bb = NULL;
6161 return todo;
6164 /* Unwind state per basic-block. */
6166 struct unwind_state
6168 /* Times this block has been visited. */
6169 unsigned visited;
6170 /* Whether to handle this as iteration point or whether to treat
6171 incoming backedge PHI values as varying. */
6172 bool iterate;
6173 void *ob_top;
6174 vn_reference_t ref_top;
6175 vn_phi_t phi_top;
6176 vn_nary_op_t nary_top;
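/* Editor's illustration only (not GCC code; names are hypothetical): the
   members above act as watermarks.  Newly created hashtable entries are
   threaded onto a "last inserted" chain and allocations go onto an obstack,
   so restoring the state of an earlier RPO point means walking the chain
   down to the recorded head, removing those entries, and freeing the
   obstack back to the recorded top.  A minimal sketch of that pattern:  */
#if 0
#include <unordered_set>

struct entry
{
  entry *next;   /* Older entry, in insertion order.  */
};

struct table_sketch
{
  std::unordered_set<entry *> table;
  entry *last_inserted = nullptr;

  void insert (entry *e)
  {
    e->next = last_inserted;
    last_inserted = e;
    table.insert (e);
  }

  /* TOP is the value of last_inserted recorded before processing the
     block we unwind to; everything inserted after that is removed.  */
  void unwind_to (entry *top)
  {
    for (; last_inserted != top; last_inserted = last_inserted->next)
      table.erase (last_inserted);
  }
};
#endif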
6179 /* Unwind the RPO VN state for iteration. */
6181 static void
6182 do_unwind (unwind_state *to, int rpo_idx, rpo_elim &avail, int *bb_to_rpo)
6184 gcc_assert (to->iterate);
6185 for (; last_inserted_nary != to->nary_top;
6186 last_inserted_nary = last_inserted_nary->next)
6188 vn_nary_op_t *slot;
6189 slot = valid_info->nary->find_slot_with_hash
6190 (last_inserted_nary, last_inserted_nary->hashcode, NO_INSERT);
6191 /* Predication causes the need to restore previous state. */
6192 if ((*slot)->unwind_to)
6193 *slot = (*slot)->unwind_to;
6194 else
6195 valid_info->nary->clear_slot (slot);
6197 for (; last_inserted_phi != to->phi_top;
6198 last_inserted_phi = last_inserted_phi->next)
6200 vn_phi_t *slot;
6201 slot = valid_info->phis->find_slot_with_hash
6202 (last_inserted_phi, last_inserted_phi->hashcode, NO_INSERT);
6203 valid_info->phis->clear_slot (slot);
6205 for (; last_inserted_ref != to->ref_top;
6206 last_inserted_ref = last_inserted_ref->next)
6208 vn_reference_t *slot;
6209 slot = valid_info->references->find_slot_with_hash
6210 (last_inserted_ref, last_inserted_ref->hashcode, NO_INSERT);
6211 (*slot)->operands.release ();
6212 valid_info->references->clear_slot (slot);
6214 obstack_free (&vn_tables_obstack, to->ob_top);
6216 /* Prune [rpo_idx, ] from avail. */
6217 /* ??? This is O(number-of-values-in-region) which is
6218 O(region-size) rather than O(iteration-piece). */
6219 for (rpo_elim::rpo_avail_t::iterator i
6220 = avail.m_rpo_avail.begin ();
6221 i != avail.m_rpo_avail.end (); ++i)
6223 while (! (*i).second.is_empty ())
6225 if (bb_to_rpo[(*i).second.last ().first] < rpo_idx)
6226 break;
6227 (*i).second.pop ();
6232 /* Do VN on a SEME region specified by ENTRY and EXIT_BBS in FN.
6233 If ITERATE is true then treat backedges optimistically as not
6234 executed and iterate. If ELIMINATE is true then perform
6235 elimination, otherwise leave that to the caller. */
6237 static unsigned
6238 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
6239 bool iterate, bool eliminate)
6241 unsigned todo = 0;
6243 /* We currently do not support region-based iteration when
6244 elimination is requested. */
6245 gcc_assert (!entry || !iterate || !eliminate);
6246 /* When iterating we need loop info up-to-date. */
6247 gcc_assert (!iterate || !loops_state_satisfies_p (LOOPS_NEED_FIXUP));
6249 bool do_region = entry != NULL;
6250 if (!do_region)
6252 entry = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fn));
6253 exit_bbs = BITMAP_ALLOC (NULL);
6254 bitmap_set_bit (exit_bbs, EXIT_BLOCK);
6257 int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS);
6258 int n = rev_post_order_and_mark_dfs_back_seme (fn, entry, exit_bbs,
6259 iterate, rpo);
6260 /* rev_post_order_and_mark_dfs_back_seme fills RPO in reverse order. */
6261 for (int i = 0; i < n / 2; ++i)
6262 std::swap (rpo[i], rpo[n-i-1]);
6264 if (!do_region)
6265 BITMAP_FREE (exit_bbs);
6267 int *bb_to_rpo = XNEWVEC (int, last_basic_block_for_fn (fn));
6268 for (int i = 0; i < n; ++i)
6269 bb_to_rpo[rpo[i]] = i;
6271 unwind_state *rpo_state = XNEWVEC (unwind_state, n);
6273 rpo_elim avail (entry->dest);
6274 rpo_avail = &avail;
6276 /* Verify we have no extra entries into the region. */
6277 if (flag_checking && do_region)
6279 auto_bb_flag bb_in_region (fn);
6280 for (int i = 0; i < n; ++i)
6282 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6283 bb->flags |= bb_in_region;
6285 /* We can't merge the first two loops because we cannot rely
6286 on EDGE_DFS_BACK for edges not within the region. But if
6287 we decide to always have the bb_in_region flag we can
6288 do the checking during the RPO walk itself (but then it's
6289 also easy to handle MEME conservatively). */
6290 for (int i = 0; i < n; ++i)
6292 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6293 edge e;
6294 edge_iterator ei;
6295 FOR_EACH_EDGE (e, ei, bb->preds)
6296 gcc_assert (e == entry || (e->src->flags & bb_in_region));
6298 for (int i = 0; i < n; ++i)
6300 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6301 bb->flags &= ~bb_in_region;
6305 /* Create the VN state. For the initial size of the various hashtables
6306 use a heuristic based on region size and number of SSA names. */
6307 unsigned region_size = (((unsigned HOST_WIDE_INT)n * num_ssa_names)
6308 / (n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS));
6309 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
6310 init_const_parms ();
6312 vn_ssa_aux_hash = new hash_table <vn_ssa_aux_hasher> (region_size * 2);
6313 gcc_obstack_init (&vn_ssa_aux_obstack);
6315 gcc_obstack_init (&vn_tables_obstack);
6316 gcc_obstack_init (&vn_tables_insert_obstack);
6317 valid_info = XCNEW (struct vn_tables_s);
6318 allocate_vn_table (valid_info, region_size);
6319 last_inserted_ref = NULL;
6320 last_inserted_phi = NULL;
6321 last_inserted_nary = NULL;
6323 vn_valueize = rpo_vn_valueize;
6325 /* Initialize the unwind state and edge/BB executable state. */
6326 for (int i = 0; i < n; ++i)
6328 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6329 rpo_state[i].visited = 0;
6330 bool has_backedges = false;
6331 edge e;
6332 edge_iterator ei;
6333 FOR_EACH_EDGE (e, ei, bb->preds)
6335 if (e->flags & EDGE_DFS_BACK)
6336 has_backedges = true;
6337 if (! iterate && (e->flags & EDGE_DFS_BACK))
6338 e->flags |= EDGE_EXECUTABLE;
6339 else
6340 e->flags &= ~EDGE_EXECUTABLE;
6342 rpo_state[i].iterate = iterate && has_backedges;
6343 bb->flags &= ~BB_EXECUTABLE;
6345 entry->flags |= EDGE_EXECUTABLE;
6346 entry->dest->flags |= BB_EXECUTABLE;
6348 /* As a heuristic to improve compile-time we handle only the N innermost
6349 loops and the outermost one optimistically. */
6350 if (iterate)
6352 loop_p loop;
6353 unsigned max_depth = PARAM_VALUE (PARAM_RPO_VN_MAX_LOOP_DEPTH);
6354 FOR_EACH_LOOP (loop, LI_ONLY_INNERMOST)
6355 if (loop_depth (loop) > max_depth)
6356 for (unsigned i = 2;
6357 i < loop_depth (loop) - max_depth; ++i)
6359 basic_block header = superloop_at_depth (loop, i)->header;
6360 rpo_state[bb_to_rpo[header->index]].iterate = false;
6361 edge e;
6362 edge_iterator ei;
6363 FOR_EACH_EDGE (e, ei, header->preds)
6364 if (e->flags & EDGE_DFS_BACK)
6365 e->flags |= EDGE_EXECUTABLE;
6369 /* Go and process all blocks, iterating as necessary. */
6370 int idx = 0;
6371 uint64_t nblk = 0;
6374 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
6376 /* If the block has incoming backedges remember unwind state. This
6377 is required even for non-executable blocks since in irreducible
6378 regions we might reach them via the backedge and re-start iterating
6379 from there.
6380 Note we can individually mark blocks with incoming backedges to
6381 not iterate where we then handle PHIs conservatively. We do that
6382 heuristically to reduce compile-time for degenerate cases. */
6383 if (rpo_state[idx].iterate)
6385 rpo_state[idx].ob_top = obstack_alloc (&vn_tables_obstack, 0);
6386 rpo_state[idx].ref_top = last_inserted_ref;
6387 rpo_state[idx].phi_top = last_inserted_phi;
6388 rpo_state[idx].nary_top = last_inserted_nary;
6391 if (!(bb->flags & BB_EXECUTABLE))
6393 if (dump_file && (dump_flags & TDF_DETAILS))
6394 fprintf (dump_file, "Block %d: BB%d found not executable\n",
6395 idx, bb->index);
6396 idx++;
6397 continue;
6400 if (dump_file && (dump_flags & TDF_DETAILS))
6401 fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
6402 nblk++;
6403 todo |= process_bb (avail, bb,
6404 rpo_state[idx].visited != 0,
6405 rpo_state[idx].iterate,
6406 iterate, eliminate, do_region, exit_bbs);
6407 rpo_state[idx].visited++;
6409 if (iterate)
6411 /* Verify if changed values flow over executable outgoing backedges
6412 and those change destination PHI values (that's the thing we
6413 can easily verify). Reduce over all such edges to the farthest
6414 away PHI. */
6415 int iterate_to = -1;
6416 edge_iterator ei;
6417 edge e;
6418 FOR_EACH_EDGE (e, ei, bb->succs)
6419 if ((e->flags & (EDGE_DFS_BACK|EDGE_EXECUTABLE))
6420 == (EDGE_DFS_BACK|EDGE_EXECUTABLE)
6421 && rpo_state[bb_to_rpo[e->dest->index]].iterate)
6423 if (dump_file && (dump_flags & TDF_DETAILS))
6424 fprintf (dump_file, "Looking for changed values of backedge "
6425 "%d->%d destination PHIs\n",
6426 e->src->index, e->dest->index);
6427 vn_context_bb = e->dest;
6428 gphi_iterator gsi;
6429 for (gsi = gsi_start_phis (e->dest);
6430 !gsi_end_p (gsi); gsi_next (&gsi))
6432 bool inserted = false;
6433 /* While we'd ideally just iterate on value changes
6434 we CSE PHIs and do that even across basic-block
6435 boundaries. So even hashtable state changes can
6436 be important (which is roughly equivalent to
6437 PHI argument value changes). To not excessively
6438 iterate because of that we track whether a PHI
6439 was CSEd to with GF_PLF_1. */
6440 bool phival_changed;
6441 if ((phival_changed = visit_phi (gsi.phi (),
6442 &inserted, false))
6443 || (inserted && gimple_plf (gsi.phi (), GF_PLF_1)))
6445 if (!phival_changed
6446 && dump_file && (dump_flags & TDF_DETAILS))
6447 fprintf (dump_file, "PHI was CSEd and hashtable "
6448 "state (changed)\n");
6449 int destidx = bb_to_rpo[e->dest->index];
6450 if (iterate_to == -1
6451 || destidx < iterate_to)
6452 iterate_to = destidx;
6453 break;
6456 vn_context_bb = NULL;
6458 if (iterate_to != -1)
6460 do_unwind (&rpo_state[iterate_to], iterate_to,
6461 avail, bb_to_rpo);
6462 idx = iterate_to;
6463 if (dump_file && (dump_flags & TDF_DETAILS))
6464 fprintf (dump_file, "Iterating to %d BB%d\n",
6465 iterate_to, rpo[iterate_to]);
6466 continue;
6470 idx++;
6472 while (idx < n);
6474 /* Report statistics if a dump file or statistics collection is active. */
6475 int nex = 0;
6476 unsigned max_visited = 1;
6477 for (int i = 0; i < n; ++i)
6479 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
6480 if (bb->flags & BB_EXECUTABLE)
6481 nex++;
6482 statistics_histogram_event (cfun, "RPO block visited times",
6483 rpo_state[i].visited);
6484 if (rpo_state[i].visited > max_visited)
6485 max_visited = rpo_state[i].visited;
6487 unsigned nvalues = 0, navail = 0;
6488 for (rpo_elim::rpo_avail_t::iterator i = avail.m_rpo_avail.begin ();
6489 i != avail.m_rpo_avail.end (); ++i)
6491 nvalues++;
6492 navail += (*i).second.length ();
6494 statistics_counter_event (cfun, "RPO blocks", n);
6495 statistics_counter_event (cfun, "RPO blocks visited", nblk);
6496 statistics_counter_event (cfun, "RPO blocks executable", nex);
6497 statistics_histogram_event (cfun, "RPO iterations", 10*nblk / nex);
6498 statistics_histogram_event (cfun, "RPO num values", nvalues);
6499 statistics_histogram_event (cfun, "RPO num avail", navail);
6500 statistics_histogram_event (cfun, "RPO num lattice",
6501 vn_ssa_aux_hash->elements ());
6502 if (dump_file && (dump_flags & (TDF_DETAILS|TDF_STATS)))
6504 fprintf (dump_file, "RPO iteration over %d blocks visited %" PRIu64
6505 " blocks in total discovering %d executable blocks iterating "
6506 "%d.%d times, a block was visited max. %u times\n",
6507 n, nblk, nex,
6508 (int)((10*nblk / nex)/10), (int)((10*nblk / nex)%10),
6509 max_visited);
6510 fprintf (dump_file, "RPO tracked %d values available at %d locations "
6511 "and %" PRIu64 " lattice elements\n",
6512 nvalues, navail, (uint64_t) vn_ssa_aux_hash->elements ());
6515 if (eliminate)
6517 /* When !iterate we already performed elimination during the RPO
6518 walk. */
6519 if (iterate)
6521 /* Elimination for region-based VN needs to be done within the
6522 RPO walk. */
6523 gcc_assert (! do_region);
6524 /* Note we can't use avail.walk here because that gets confused
6525 by the existing availability and it will be less efficient
6526 as well. */
6527 todo |= eliminate_with_rpo_vn (NULL);
6529 else
6530 todo |= avail.eliminate_cleanup (do_region);
6533 vn_valueize = NULL;
6534 rpo_avail = NULL;
6536 XDELETEVEC (bb_to_rpo);
6537 XDELETEVEC (rpo);
6539 return todo;
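/* For example, the FRE pass below calls this with a NULL entry edge to
   process the whole function, iterating only when optimize > 1, while the
   region-based entry point that follows passes an explicit entry edge and
   exit-block set with iterate == false and eliminate == true. */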
6542 /* Region-based entry for RPO VN. Performs value-numbering and elimination
6543 on the SEME region specified by ENTRY and EXIT_BBS. */
6545 unsigned
6546 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs)
6548 default_vn_walk_kind = VN_WALKREWRITE;
6549 unsigned todo = do_rpo_vn (fn, entry, exit_bbs, false, true);
6550 free_rpo_vn ();
6551 return todo;
6555 namespace {
6557 const pass_data pass_data_fre =
6559 GIMPLE_PASS, /* type */
6560 "fre", /* name */
6561 OPTGROUP_NONE, /* optinfo_flags */
6562 TV_TREE_FRE, /* tv_id */
6563 ( PROP_cfg | PROP_ssa ), /* properties_required */
6564 0, /* properties_provided */
6565 0, /* properties_destroyed */
6566 0, /* todo_flags_start */
6567 0, /* todo_flags_finish */
6570 class pass_fre : public gimple_opt_pass
6572 public:
6573 pass_fre (gcc::context *ctxt)
6574 : gimple_opt_pass (pass_data_fre, ctxt)
6577 /* opt_pass methods: */
6578 opt_pass * clone () { return new pass_fre (m_ctxt); }
6579 virtual bool gate (function *) { return flag_tree_fre != 0; }
6580 virtual unsigned int execute (function *);
6582 }; // class pass_fre
6584 unsigned int
6585 pass_fre::execute (function *fun)
6587 unsigned todo = 0;
6589 /* At -O[1g] use the cheap non-iterating mode. */
6590 calculate_dominance_info (CDI_DOMINATORS);
6591 if (optimize > 1)
6592 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
6594 default_vn_walk_kind = VN_WALKREWRITE;
6595 todo = do_rpo_vn (fun, NULL, NULL, optimize > 1, true);
6596 free_rpo_vn ();
6598 if (optimize > 1)
6599 loop_optimizer_finalize ();
6601 return todo;
6604 } // anon namespace
6606 gimple_opt_pass *
6607 make_pass_fre (gcc::context *ctxt)
6609 return new pass_fre (ctxt);
6612 #undef BB_EXECUTABLE