/* SCC value numbering for trees
   Copyright (C) 2006-2018 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "coretypes.h"
#include "insn-config.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "tree-inline.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-ssa-propagate.h"
#include "gimple-iterator.h"
#include "gimple-match.h"
#include "stringpool.h"
#include "tree-pass.h"
#include "statistics.h"
#include "langhooks.h"
#include "ipa-utils.h"
#include "tree-cfgcleanup.h"
#include "tree-ssa-loop.h"
#include "tree-scalar-evolution.h"
#include "tree-ssa-sccvn.h"

/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
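
/* For example, in straight-line GIMPLE such as

     x_1 = a_2 + b_3;
     y_4 = a_2 + b_3;

   a hash-based walk in reverse postorder gives x_1 and y_4 the same
   value number because their right-hand sides hash and compare equal,
   so later uses of y_4 can be replaced by x_1.  (The SSA names above
   are made up purely for illustration.)  */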

/* There's no BB_EXECUTABLE but we can use BB_VISITED.  */
#define BB_EXECUTABLE BB_VISITED

static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;

/* vn_nary_op hashtable helpers.  */

struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
{
  typedef vn_nary_op_s *compare_type;
  static inline hashval_t hash (const vn_nary_op_s *);
  static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
};

/* Return the computed hashcode for nary operation P1.  */

inline hashval_t
vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
{
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

inline bool
vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
{
  return vno1 == vno2 || vn_nary_op_eq (vno1, vno2);
}

typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;

/* vn_phi hashtable helpers.  */

static bool
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);

struct vn_phi_hasher : nofree_ptr_hash <vn_phi_s>
{
  static inline hashval_t hash (const vn_phi_s *);
  static inline bool equal (const vn_phi_s *, const vn_phi_s *);
};

/* Return the computed hashcode for phi operation P1.  */

inline hashval_t
vn_phi_hasher::hash (const vn_phi_s *vp1)
{
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

inline bool
vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
{
  return vp1 == vp2 || vn_phi_eq (vp1, vp2);
}

typedef hash_table<vn_phi_hasher> vn_phi_table_type;
typedef vn_phi_table_type::iterator vn_phi_iterator_type;

/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static bool
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
	  /* We do not care for differences in type qualification.  */
	  && (vro1->type == vro2->type
	      || (vro1->type && vro2->type
		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
					 TYPE_MAIN_VARIANT (vro2->type))))
	  && expressions_equal_p (vro1->op0, vro2->op0)
	  && expressions_equal_p (vro1->op1, vro2->op1)
	  && expressions_equal_p (vro1->op2, vro2->op2));
}
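
/* E.g. two COMPONENT_REF operands whose types are "const int" and
   plain "int" compare equal here: TYPE_MAIN_VARIANT strips the
   qualifiers and types_compatible_p then accepts the pair, so
   qualification alone never distinguishes two reference operands.  */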

/* Free a reference operation structure VP.  */

static inline void
free_reference (vn_reference_s *vr)
{
  vr->operands.release ();
}

/* vn_reference hashtable helpers.  */

struct vn_reference_hasher : nofree_ptr_hash <vn_reference_s>
{
  static inline hashval_t hash (const vn_reference_s *);
  static inline bool equal (const vn_reference_s *, const vn_reference_s *);
};

/* Return the hashcode for a given reference operation P1.  */

inline hashval_t
vn_reference_hasher::hash (const vn_reference_s *vr1)
{
  return vr1->hashcode;
}

inline bool
vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
{
  return v == c || vn_reference_eq (v, c);
}

typedef hash_table<vn_reference_hasher> vn_reference_table_type;
typedef vn_reference_table_type::iterator vn_reference_iterator_type;

/* The set of VN hashtables.  */

typedef struct vn_tables_s
{
  vn_nary_op_table_type *nary;
  vn_phi_table_type *phis;
  vn_reference_table_type *references;
} *vn_tables_t;

/* vn_constant hashtable helpers.  */

struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
{
  static inline hashval_t hash (const vn_constant_s *);
  static inline bool equal (const vn_constant_s *, const vn_constant_s *);
};

/* Hash table hash function for vn_constant_t.  */

inline hashval_t
vn_constant_hasher::hash (const vn_constant_s *vc1)
{
  return vc1->hashcode;
}

/* Hash table equality function for vn_constant_t.  */

inline bool
vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
{
  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

static hash_table<vn_constant_hasher> *constant_to_value_id;
static bitmap constant_value_ids;

/* Obstack we allocate the vn-tables elements from.  */
static obstack vn_tables_obstack;
/* Special obstack we never unwind.  */
static obstack vn_tables_insert_obstack;

static vn_reference_t last_inserted_ref;
static vn_phi_t last_inserted_phi;
static vn_nary_op_t last_inserted_nary;

/* Valid hashtables storing information we have proven to be
   correct.  */
static vn_tables_t valid_info;

/* Valueization hook.  Valueize NAME if it is an SSA name, otherwise
   just return it.  */
tree (*vn_valueize) (tree);

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

struct vn_ssa_aux_hasher : typed_noop_remove <vn_ssa_aux_t>
{
  typedef vn_ssa_aux_t value_type;
  typedef tree compare_type;
  static inline hashval_t hash (const value_type &);
  static inline bool equal (const value_type &, const compare_type &);
  static inline void mark_deleted (value_type &) {}
  static inline void mark_empty (value_type &e) { e = NULL; }
  static inline bool is_deleted (value_type &) { return false; }
  static inline bool is_empty (value_type &e) { return e == NULL; }
};

hashval_t
vn_ssa_aux_hasher::hash (const value_type &entry)
{
  return SSA_NAME_VERSION (entry->name);
}

bool
vn_ssa_aux_hasher::equal (const value_type &entry, const compare_type &name)
{
  return name == entry->name;
}

static hash_table<vn_ssa_aux_hasher> *vn_ssa_aux_hash;
typedef hash_table<vn_ssa_aux_hasher>::iterator vn_ssa_aux_iterator_type;
static struct obstack vn_ssa_aux_obstack;

static vn_nary_op_t vn_nary_op_insert_stmt (gimple *, tree);
static unsigned int vn_nary_length_from_stmt (gimple *);
static vn_nary_op_t alloc_vn_nary_op_noinit (unsigned int, obstack *);
static vn_nary_op_t vn_nary_op_insert_into (vn_nary_op_t,
					    vn_nary_op_table_type *, bool);
static void init_vn_nary_op_from_stmt (vn_nary_op_t, gimple *);
static void init_vn_nary_op_from_pieces (vn_nary_op_t, unsigned int,
					 enum tree_code, tree, tree *);
static tree vn_lookup_simplify_result (gimple_match_op *);

/* Return whether there is value numbering information for a given SSA name.  */

bool
has_VN_INFO (tree name)
{
  return vn_ssa_aux_hash->find_with_hash (name, SSA_NAME_VERSION (name));
}

/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t *res
    = vn_ssa_aux_hash->find_slot_with_hash (name, SSA_NAME_VERSION (name),
					    INSERT);
  if (*res != NULL)
    return *res;

  vn_ssa_aux_t newinfo = *res = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  newinfo->name = name;
  newinfo->valnum = VN_TOP;
  /* We are using the visited flag to handle uses with defs not within the
     region being value-numbered.  */
  newinfo->visited = false;

  /* Given we create the VN_INFOs on-demand now we have to do initialization
     different than VN_TOP here.  */
  if (SSA_NAME_IS_DEFAULT_DEF (name))
    switch (TREE_CODE (SSA_NAME_VAR (name)))
      {
      case VAR_DECL:
	/* All undefined vars are VARYING.  */
	newinfo->valnum = name;
	newinfo->visited = true;
	break;

      case PARM_DECL:
	/* Parameters are VARYING but we can record a condition
	   if we know it is a non-NULL pointer.  */
	newinfo->visited = true;
	newinfo->valnum = name;
	if (POINTER_TYPE_P (TREE_TYPE (name))
	    && nonnull_arg_p (SSA_NAME_VAR (name)))
	  {
	    tree ops[2];
	    ops[0] = name;
	    ops[1] = build_int_cst (TREE_TYPE (name), 0);
	    vn_nary_op_t nary;
	    /* Allocate from non-unwinding stack.  */
	    nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
	    init_vn_nary_op_from_pieces (nary, 2, NE_EXPR,
					 boolean_type_node, ops);
	    nary->predicated_values = 0;
	    nary->u.result = boolean_true_node;
	    vn_nary_op_insert_into (nary, valid_info->nary, true);
	    gcc_assert (nary->unwind_to == NULL);
	    /* Also do not link it into the undo chain.  */
	    last_inserted_nary = nary->next;
	    nary->next = (vn_nary_op_t)(void *)-1;
	    nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
	    init_vn_nary_op_from_pieces (nary, 2, EQ_EXPR,
					 boolean_type_node, ops);
	    nary->predicated_values = 0;
	    nary->u.result = boolean_false_node;
	    vn_nary_op_insert_into (nary, valid_info->nary, true);
	    gcc_assert (nary->unwind_to == NULL);
	    last_inserted_nary = nary->next;
	    nary->next = (vn_nary_op_t)(void *)-1;
	    if (dump_file && (dump_flags & TDF_DETAILS))
	      {
		fprintf (dump_file, "Recording ");
		print_generic_expr (dump_file, name, TDF_SLIM);
		fprintf (dump_file, " != 0\n");
	      }
	  }
	break;

      case RESULT_DECL:
	/* If the result is passed by invisible reference the default
	   def is initialized, otherwise it's uninitialized.  Still
	   undefined is varying.  */
	newinfo->visited = true;
	newinfo->valnum = name;
	break;

      default:
	gcc_unreachable ();
      }
  return newinfo;
}

/* Return the SSA value of X.  */

tree
SSA_VAL (tree x)
{
  vn_ssa_aux_t tem = vn_ssa_aux_hash->find_with_hash (x, SSA_NAME_VERSION (x));
  return tem && tem->visited ? tem->valnum : x;
}
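
/* E.g. once x_2 = 1 + 0 has been visited and value-numbered,
   SSA_VAL (x_2) returns the constant 1; for a name that has not been
   visited (or has no aux entry at all) the name itself is returned.
   (x_2 is made up for illustration.)  */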

/* Return the SSA value of the VUSE x, supporting released VDEFs
   during elimination which will value-number the VDEF to the
   associated VUSE (but not substitute in the whole lattice).  */

static inline tree
vuse_ssa_val (tree x)
{
  if (!x)
    return NULL_TREE;

  do
    {
      if (SSA_NAME_IS_DEFAULT_DEF (x))
	return x;
      vn_ssa_aux_t tem
	= vn_ssa_aux_hash->find_with_hash (x, SSA_NAME_VERSION (x));
      /* For region-based VN this makes walk_non_aliased_vuses stop walking
	 when we are about to look at a def outside of the region.  */
      if (!tem || !tem->visited)
	return NULL_TREE;
      gcc_assert (tem->valnum != VN_TOP);
      x = tem->valnum;
    }
  while (SSA_NAME_IN_FREE_LIST (x));

  return x;
}

/* Return the vn_kind the expression computed by the stmt should be
   associated with.  */

enum vn_kind
vn_get_stmt_kind (gimple *stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      return VN_REFERENCE;
    case GIMPLE_PHI:
      return VN_PHI;
    case GIMPLE_ASSIGN:
      {
	enum tree_code code = gimple_assign_rhs_code (stmt);
	tree rhs1 = gimple_assign_rhs1 (stmt);
	switch (get_gimple_rhs_class (code))
	  {
	  case GIMPLE_UNARY_RHS:
	  case GIMPLE_BINARY_RHS:
	  case GIMPLE_TERNARY_RHS:
	    return VN_NARY;
	  case GIMPLE_SINGLE_RHS:
	    switch (TREE_CODE_CLASS (code))
	      {
	      case tcc_reference:
		/* VOP-less references can go through unary case.  */
		if ((code == REALPART_EXPR
		     || code == IMAGPART_EXPR
		     || code == VIEW_CONVERT_EXPR
		     || code == BIT_FIELD_REF)
		    && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
		  return VN_NARY;

		/* Fallthrough.  */
	      case tcc_declaration:
		return VN_REFERENCE;

	      case tcc_constant:
		return VN_CONSTANT;

	      default:
		if (code == ADDR_EXPR)
		  return (is_gimple_min_invariant (rhs1)
			  ? VN_CONSTANT : VN_REFERENCE);
		else if (code == CONSTRUCTOR)
		  return VN_NARY;
		return VN_NONE;
	      }
	  default:
	    return VN_NONE;
	  }
      }
    default:
      return VN_NONE;
    }
}

/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
  if (slot)
    return (*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  /* If the hashtable isn't initialized we're not running from PRE and thus
     do not need value-ids.  */
  if (!constant_to_value_id)
    return 0;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, INSERT);
  if (*slot)
    return (*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}
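
/* Usage sketch (hypothetical constant): the first call to
   get_or_alloc_constant_value_id (build_int_cst (integer_type_node, 42))
   allocates a fresh id and records it in constant_value_ids; later
   calls with an equal constant return the same id, so
   value_id_constant_p is true exactly for ids handed out this way.  */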

/* Compute the hash for a reference operand VRO1.  */

static void
vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
{
  hstate.add_int (vro1->opcode);
  if (vro1->op0)
    inchash::add_expr (vro1->op0, hstate);
  if (vro1->op1)
    inchash::add_expr (vro1->op1, hstate);
  if (vro1->op2)
    inchash::add_expr (vro1->op2, hstate);
}

/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  inchash::hash hstate;
  hashval_t result;
  int i;
  vn_reference_op_t vro;
  poly_int64 off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
	deref = true;
      else if (vro->opcode != ADDR_EXPR)
	deref = false;
      if (maybe_ne (vro->off, -1))
	{
	  if (known_eq (off, -1))
	    off = 0;
	  off += vro->off;
	}
      else
	{
	  if (maybe_ne (off, -1)
	      && maybe_ne (off, 0))
	    hstate.add_poly_int (off);
	  off = -1;
	  if (deref
	      && vro->opcode == ADDR_EXPR)
	    {
	      if (vro->op0)
		{
		  tree op = TREE_OPERAND (vro->op0, 0);
		  hstate.add_int (TREE_CODE (op));
		  inchash::add_expr (op, hstate);
		}
	    }
	  else
	    vn_reference_op_compute_hash (vro, hstate);
	}
    }
  result = hstate.end ();
  /* ??? We would ICE later if we hash instead of adding that in.  */
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
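
/* Note the vuse is *added* to the operand hash rather than mixed in.
   vn_reference_lookup_2 below relies on that when it walks to another
   VUSE:

     vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
     vr->vuse = vuse_ssa_val (vuse);
     vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

   i.e. the hashcode can be adjusted for the new VUSE without
   rehashing all the operands.  */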

/* Return true if reference operations VR1 and VR2 are equivalent.  This
   means they have the same set of operands and vuses.  */

bool
vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
{
  unsigned i, j;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
	return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
	   && (TYPE_PRECISION (vr1->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
	   && (TYPE_PRECISION (vr2->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      poly_int64 off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; vr1->operands.iterate (i, &vro1); i++)
	{
	  if (vro1->opcode == MEM_REF)
	    deref1 = true;
	  /* Do not look through a storage order barrier.  */
	  else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
	    return false;
	  if (known_eq (vro1->off, -1))
	    break;
	  off1 += vro1->off;
	}
      for (; vr2->operands.iterate (j, &vro2); j++)
	{
	  if (vro2->opcode == MEM_REF)
	    deref2 = true;
	  /* Do not look through a storage order barrier.  */
	  else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
	    return false;
	  if (known_eq (vro2->off, -1))
	    break;
	  off2 += vro2->off;
	}
      if (maybe_ne (off1, off2))
	return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
	{
	  memset (&tem1, 0, sizeof (tem1));
	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
	  tem1.type = TREE_TYPE (tem1.op0);
	  tem1.opcode = TREE_CODE (tem1.op0);
	  vro1 = &tem1;
	  deref1 = false;
	}
      if (deref2 && vro2->opcode == ADDR_EXPR)
	{
	  memset (&tem2, 0, sizeof (tem2));
	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
	  tem2.type = TREE_TYPE (tem2.op0);
	  tem2.opcode = TREE_CODE (tem2.op0);
	  vro2 = &tem2;
	  deref2 = false;
	}
      if (deref1 != deref2)
	return false;
      if (!vn_reference_op_eq (vro1, vro2))
	return false;
      ++j;
      ++i;
    }
  while (vr1->operands.length () != i
	 || vr2->operands.length () != j);

  return true;
}
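
/* As an illustration (layout assumed), MEM[(int *)&s + 4] and s.g,
   where field g sits at byte offset 4 of s, can compare equal here:
   both operand walks accumulate a constant offset of 4 into
   off1/off2, the trailing ADDR_EXPR/decl operands are unwrapped via
   tem1/tem2, and only the accumulated offsets plus the remaining
   operands need to match.  */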

/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      result->reserve (3);

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      temp.clique = MR_DEPENDENCE_CLIQUE (ref);
      temp.base = MR_DEPENDENCE_BASE (ref);
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      result->quick_push (temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */
  tree orig = ref;
  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
	{
	case MODIFY_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  break;
	case WITH_SIZE_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.off = 0;
	  break;
	case MEM_REF:
	  /* The base address gets its own vn_reference_op_s structure.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  if (!mem_ref_offset (ref).to_shwi (&temp.off))
	    temp.off = -1;
	  temp.clique = MR_DEPENDENCE_CLIQUE (ref);
	  temp.base = MR_DEPENDENCE_BASE (ref);
	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
	  break;
	case BIT_FIELD_REF:
	  /* Record bits, position and storage order.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
	    temp.off = -1;
	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
	  break;
	case COMPONENT_REF:
	  /* The field decl is enough to unambiguously specify the field,
	     a matching type is not necessary and a mismatching type
	     is always a spurious difference.  */
	  temp.type = NULL_TREE;
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  {
	    tree this_offset = component_ref_field_offset (ref);
	    if (this_offset
		&& poly_int_tree_p (this_offset))
	      {
		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
		  {
		    poly_offset_int off
		      = (wi::to_poly_offset (this_offset)
			 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
		    /* Prohibit value-numbering zero offset components
		       of addresses the same before the pass folding
		       __builtin_object_size had a chance to run
		       (checking cfun->after_inlining does the
		       trick here).  */
		    if (TREE_CODE (orig) != ADDR_EXPR
			|| maybe_ne (off, 0)
			|| cfun->after_inlining)
		      off.to_shwi (&temp.off);
		  }
	      }
	  }
	  break;
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  {
	    tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
	    /* Record index as operand.  */
	    temp.op0 = TREE_OPERAND (ref, 1);
	    /* Always record lower bounds and element size.  */
	    temp.op1 = array_ref_low_bound (ref);
	    /* But record element size in units of the type alignment.  */
	    temp.op2 = TREE_OPERAND (ref, 3);
	    temp.align = eltype->type_common.align;
	    if (!temp.op2)
	      temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
				     size_int (TYPE_ALIGN_UNIT (eltype)));
	    if (poly_int_tree_p (temp.op0)
		&& poly_int_tree_p (temp.op1)
		&& TREE_CODE (temp.op2) == INTEGER_CST)
	      {
		poly_offset_int off = ((wi::to_poly_offset (temp.op0)
					- wi::to_poly_offset (temp.op1))
				       * wi::to_offset (temp.op2)
				       * vn_ref_op_align_unit (&temp));
		off.to_shwi (&temp.off);
	      }
	  }
	  break;
	case VAR_DECL:
	  if (DECL_HARD_REGISTER (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthru.  */
	case PARM_DECL:
	case CONST_DECL:
	case RESULT_DECL:
	  /* Canonicalize decls to MEM[&decl] which is what we end up with
	     when valueizing MEM[ptr] with ptr = &decl.  */
	  temp.opcode = MEM_REF;
	  temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
	  temp.off = 0;
	  result->safe_push (temp);
	  temp.opcode = ADDR_EXPR;
	  temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
	  temp.type = TREE_TYPE (temp.op0);
	  temp.off = -1;
	  break;
	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case FIXED_CST:
	case CONSTRUCTOR:
	case SSA_NAME:
	  temp.op0 = ref;
	  break;
	case ADDR_EXPR:
	  if (is_gimple_min_invariant (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  break;
	  /* These are only interesting for their operands, their
	     existence, and their type.  They will never be the last
	     ref in the chain of references (i.e. they require an
	     operand), so we don't have to put anything
	     for op* as it will be handled by the iteration.  */
	case REALPART_EXPR:
	  temp.off = 0;
	  break;
	case VIEW_CONVERT_EXPR:
	  temp.off = 0;
	  temp.reverse = storage_order_barrier_p (ref);
	  break;
	case IMAGPART_EXPR:
	  /* This is only interesting for its constant offset.  */
	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
	  break;
	default:
	  gcc_unreachable ();
	}
      result->safe_push (temp);

      if (REFERENCE_CLASS_P (ref)
	  || TREE_CODE (ref) == MODIFY_EXPR
	  || TREE_CODE (ref) == WITH_SIZE_EXPR
	  || (TREE_CODE (ref) == ADDR_EXPR
	      && !is_gimple_min_invariant (ref)))
	ref = TREE_OPERAND (ref, 0);
      else
	ref = NULL_TREE;
    }
}
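
/* As an illustration, a load like a.b[i_1] is decomposed outermost to
   innermost into roughly

     { ARRAY_REF <op0 i_1, op1 low-bound, op2 scaled-element-size>,
       COMPONENT_REF <op0 FIELD_DECL b>,
       MEM_REF <op0 0>, ADDR_EXPR <op0 &a> }

   where the decl a has been canonicalized to MEM[&a] as per the
   VAR_DECL case above.  (The names are made up for illustration.)  */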

/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
			       alias_set_type set, tree type,
			       vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  poly_offset_int offset = 0;
  poly_offset_int max_size;
  poly_offset_int size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = &ops[0];
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (type);
      else
	size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE
      && poly_int_tree_p (size_tree))
    size = wi::to_poly_offset (size_tree);

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (ops, i, op)
    {
      switch (op->opcode)
	{
	/* These may be in the reference ops, but we cannot do anything
	   sensible with them here.  */
	case ADDR_EXPR:
	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
	  if (base != NULL_TREE
	      && TREE_CODE (base) == MEM_REF
	      && op->op0
	      && DECL_P (TREE_OPERAND (op->op0, 0)))
	    {
	      vn_reference_op_t pop = &ops[i-1];
	      base = TREE_OPERAND (op->op0, 0);
	      if (known_eq (pop->off, -1))
		{
		  max_size = -1;
		  offset = 0;
		}
	      else
		offset += pop->off * BITS_PER_UNIT;
	      op0_p = NULL;
	      break;
	    }
	  /* Fallthru.  */
	case CALL_EXPR:
	  return false;

	/* Record the base objects.  */
	case MEM_REF:
	  base_alias_set = get_deref_alias_set (op->op0);
	  *op0_p = build2 (MEM_REF, op->type,
			   NULL_TREE, op->op0);
	  MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
	  MR_DEPENDENCE_BASE (*op0_p) = op->base;
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case VAR_DECL:
	case PARM_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  *op0_p = op->op0;
	  op0_p = NULL;
	  break;

	/* And now the usual component-reference style ops.  */
	case BIT_FIELD_REF:
	  offset += wi::to_poly_offset (op->op1);
	  break;

	case COMPONENT_REF:
	  {
	    tree field = op->op0;
	    /* We do not have a complete COMPONENT_REF tree here so we
	       cannot use component_ref_field_offset.  Do the interesting
	       parts manually.  */
	    tree this_offset = DECL_FIELD_OFFSET (field);

	    if (op->op1 || !poly_int_tree_p (this_offset))
	      max_size = -1;
	    else
	      {
		poly_offset_int woffset = (wi::to_poly_offset (this_offset)
					   << LOG2_BITS_PER_UNIT);
		woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
		offset += woffset;
	      }
	    break;
	  }

	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* We recorded the lower bound and the element size.  */
	  if (!poly_int_tree_p (op->op0)
	      || !poly_int_tree_p (op->op1)
	      || TREE_CODE (op->op2) != INTEGER_CST)
	    max_size = -1;
	  else
	    {
	      poly_offset_int woffset
		= wi::sext (wi::to_poly_offset (op->op0)
			    - wi::to_poly_offset (op->op1),
			    TYPE_PRECISION (TREE_TYPE (op->op0)));
	      woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
	      woffset <<= LOG2_BITS_PER_UNIT;
	      offset += woffset;
	    }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  offset += size;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case CONSTRUCTOR:
	case CONST_DECL:
	  return false;

	default:
	  return false;
	}
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
    {
      ref->offset = 0;
      ref->size = -1;
      ref->max_size = -1;
      return true;
    }

  if (!offset.to_shwi (&ref->offset))
    {
      ref->offset = 0;
      ref->max_size = -1;
      return true;
    }

  if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
    ref->max_size = -1;

  return true;
}

/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_call (gcall *call,
			      vec<vn_reference_op_s> *result)
{
  vn_reference_op_s temp;
  unsigned i;
  tree lhs = gimple_call_lhs (call);
  int lr;

  /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
     different.  By adding the lhs here in the vector, we ensure that the
     hashcode is different, guaranteeing a different value number.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    {
      memset (&temp, 0, sizeof (temp));
      temp.opcode = MODIFY_EXPR;
      temp.type = TREE_TYPE (lhs);
      temp.op0 = lhs;
      temp.off = -1;
      result->safe_push (temp);
    }

  /* Copy the type, opcode, function, static chain and EH region, if any.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
    temp.op2 = size_int (lr);
  temp.off = -1;
  result->safe_push (temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static bool
vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
			    unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  tree addr_base;
  poly_int64 addr_offset = 0;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
					     &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != TREE_OPERAND (op->op0, 0))
    {
      poly_offset_int off
	= (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
				  SIGNED)
	   + addr_offset);
      mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (tree_fits_shwi_p (mem_op->op0))
	mem_op->off = tree_to_shwi (mem_op->op0);
      else
	mem_op->off = -1;
      return true;
    }
  return false;
}
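
/* Example (offsets assumed): with ops { ..., MEM_REF <off 4>,
   ADDR_EXPR <&s.f> } and field f at byte offset 8 within s, the
   rewrite above yields { ..., MEM_REF <off 12>, ADDR_EXPR <&s> }:
   the component offset is folded into the preceding MEM_REF and the
   address is replaced by &OBJ, exactly as the comment describes.  */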

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static bool
vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
				     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  gimple *def_stmt;
  enum tree_code code;
  poly_offset_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return false;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return false;

  off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      poly_int64 addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						 &addr_offset);
      /* If that didn't work because the address isn't invariant propagate
	 the reference tree from the address operation in case the current
	 dereference isn't offsetted.  */
      if (!addr_base
	  && *i_p == ops->length () - 1
	  && known_eq (off, 0)
	  /* This makes us disable this transform for PRE where the
	     reference ops might be also used for code insertion which
	     is invalid.  */
	  && default_vn_walk_kind == VN_WALKREWRITE)
	{
	  auto_vec<vn_reference_op_s, 32> tem;
	  copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
	  /* Make sure to preserve TBAA info.  The only objects not
	     wrapped in MEM_REFs that can have their address taken are
	     STRING_CSTs.  */
	  if (tem.length () >= 2
	      && tem[tem.length () - 2].opcode == MEM_REF)
	    {
	      vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
	      new_mem_op->op0
		= wide_int_to_tree (TREE_TYPE (mem_op->op0),
				    wi::to_poly_wide (new_mem_op->op0));
	    }
	  else
	    gcc_assert (tem.last ().opcode == STRING_CST);
	  ops->pop ();
	  ops->pop ();
	  ops->safe_splice (tem);
	  --*i_p;
	  return true;
	}
      if (!addr_base
	  || TREE_CODE (addr_base) != MEM_REF
	  || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base, 0))))
	return false;

      off += addr_offset;
      off += mem_ref_offset (addr_base);
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
	  || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
	  /* Make sure to not endlessly recurse.
	     See gcc.dg/tree-ssa/20040408-1.c for an example.  Can easily
	     happen when we value-number a PHI to its backedge value.  */
	  || SSA_VAL (ptr) == op->op0
	  || !poly_int_tree_p (ptroff))
	return false;

      off += wi::to_poly_offset (ptroff);
      op->op0 = ptr;
    }

  mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (tree_fits_shwi_p (mem_op->op0))
    mem_op->off = tree_to_shwi (mem_op->op0);
  else
    mem_op->off = -1;
  /* ???  Can end up with endless recursion here!?
     gcc.c-torture/execute/strcmp-1.c  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
  return true;
}

/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  vec<vn_reference_op_s> operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = &operands[0];
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && fndecl_built_in_p (TREE_OPERAND (op->op0, 0))
      && operands.length () >= 2
      && operands.length () <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = &operands[1];
      if (operands.length () > 2)
	arg1 = &operands[2];
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
	  || (arg0->opcode == ADDR_EXPR
	      && is_gimple_min_invariant (arg0->op0)))
	anyconst = true;
      if (arg1
	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
	      || (arg1->opcode == ADDR_EXPR
		  && is_gimple_min_invariant (arg1->op0))))
	anyconst = true;
      if (anyconst)
	{
	  tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
					 arg1 ? 2 : 1,
					 arg0->op0,
					 arg1 ? arg1->op0 : NULL);
	  if (folded
	      && TREE_CODE (folded) == NOP_EXPR)
	    folded = TREE_OPERAND (folded, 0);
	  if (folded
	      && is_gimple_min_invariant (folded))
	    return folded;
	}
    }

  /* Simplify reads from constants or constant initializers.  */
  else if (BITS_PER_UNIT == 8
	   && COMPLETE_TYPE_P (ref->type)
	   && is_gimple_reg_type (ref->type))
    {
      poly_int64 off = 0;
      HOST_WIDE_INT size;
      if (INTEGRAL_TYPE_P (ref->type))
	size = TYPE_PRECISION (ref->type);
      else if (tree_fits_shwi_p (TYPE_SIZE (ref->type)))
	size = tree_to_shwi (TYPE_SIZE (ref->type));
      else
	return NULL_TREE;
      if (size % BITS_PER_UNIT != 0
	  || size > MAX_BITSIZE_MODE_ANY_MODE)
	return NULL_TREE;
      size /= BITS_PER_UNIT;
      unsigned i;
      for (i = 0; i < operands.length (); ++i)
	{
	  if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
	    {
	      ++i;
	      break;
	    }
	  if (known_eq (operands[i].off, -1))
	    return NULL_TREE;
	  off += operands[i].off;
	  if (operands[i].opcode == MEM_REF)
	    {
	      ++i;
	      break;
	    }
	}
      vn_reference_op_t base = &operands[--i];
      tree ctor = error_mark_node;
      tree decl = NULL_TREE;
      if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
	ctor = base->op0;
      else if (base->opcode == MEM_REF
	       && base[1].opcode == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
	{
	  decl = TREE_OPERAND (base[1].op0, 0);
	  if (TREE_CODE (decl) == STRING_CST)
	    ctor = decl;
	  else
	    ctor = ctor_for_folding (decl);
	}
      if (ctor == NULL_TREE)
	return build_zero_cst (ref->type);
      else if (ctor != error_mark_node)
	{
	  HOST_WIDE_INT const_off;
	  if (decl)
	    {
	      tree res = fold_ctor_reference (ref->type, ctor,
					      off * BITS_PER_UNIT,
					      size * BITS_PER_UNIT, decl);
	      if (res)
		{
		  STRIP_USELESS_TYPE_CONVERSION (res);
		  if (is_gimple_min_invariant (res))
		    return res;
		}
	    }
	  else if (off.is_constant (&const_off))
	    {
	      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
	      int len = native_encode_expr (ctor, buf, size, const_off);
	      if (len > 0)
		return native_interpret_expr (ref->type, buf, len);
	    }
	}
    }

  return NULL_TREE;
}
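
/* E.g. given

     static const int tbl[4] = { 1, 2, 3, 5 };
     ... = tbl[2];

   the operand walk accumulates the constant byte offset 8, finds the
   initializer of tbl via ctor_for_folding, and fold_ctor_reference
   (or the native encode/interpret pair) yields the constant 3.
   (The array and its contents are made up for illustration.)  */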

/* Return true if OPS contain a storage order barrier.  */

static bool
contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;

  FOR_EACH_VEC_ELT (ops, i, op)
    if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
      return true;

  return false;
}

/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static vec<vn_reference_op_s>
valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything,
		 bool with_avail = false)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
	{
	  tree tem = with_avail ? vn_valueize (vro->op0) : SSA_VAL (vro->op0);
	  if (tem != vro->op0)
	    {
	      *valueized_anything = true;
	      vro->op0 = tem;
	    }
	  /* If it transforms from an SSA_NAME to a constant, update
	     the opcode.  */
	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
	    vro->opcode = TREE_CODE (vro->op0);
	}
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
	{
	  tree tem = with_avail ? vn_valueize (vro->op1) : SSA_VAL (vro->op1);
	  if (tem != vro->op1)
	    {
	      *valueized_anything = true;
	      vro->op1 = tem;
	    }
	}
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
	{
	  tree tem = with_avail ? vn_valueize (vro->op2) : SSA_VAL (vro->op2);
	  if (tem != vro->op2)
	    {
	      *valueized_anything = true;
	      vro->op2 = tem;
	    }
	}
      /* If it transforms from an SSA_NAME to an address, fold with
	 a preceding indirect reference.  */
      if (i > 0
	  && vro->op0
	  && TREE_CODE (vro->op0) == ADDR_EXPR
	  && orig[i - 1].opcode == MEM_REF)
	{
	  if (vn_reference_fold_indirect (&orig, &i))
	    *valueized_anything = true;
	}
      else if (i > 0
	       && vro->opcode == SSA_NAME
	       && orig[i - 1].opcode == MEM_REF)
	{
	  if (vn_reference_maybe_forwprop_address (&orig, &i))
	    *valueized_anything = true;
	}
      /* If it transforms a non-constant ARRAY_REF into a constant
	 one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
	       && known_eq (vro->off, -1)
	       && poly_int_tree_p (vro->op0)
	       && poly_int_tree_p (vro->op1)
	       && TREE_CODE (vro->op2) == INTEGER_CST)
	{
	  poly_offset_int off = ((wi::to_poly_offset (vro->op0)
				  - wi::to_poly_offset (vro->op1))
				 * wi::to_offset (vro->op2)
				 * vn_ref_op_align_unit (vro));
	  off.to_shwi (&vro->off);
	}
    }

  return orig;
}

static vec<vn_reference_op_s>
valueize_refs (vec<vn_reference_op_s> orig)
{
  bool tem;
  return valueize_refs_1 (orig, &tem);
}

static vec<vn_reference_op_s> shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
					      valueized_anything);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_call (gcall *call)
{
  if (!call)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  vn_reference_s **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
	*vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
		       unsigned int cnt, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  vn_reference_s **slot;
  hashval_t hash;

  /* This bounds the stmt walks we perform on reference lookups
     to O(1) instead of O(N) where N is the number of dominating
     stores.  */
  if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
    return (void *)-1;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = vuse_ssa_val (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}
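
/* The cnt cutoff above corresponds to the user-visible
   --param sccvn-max-alias-queries-per-access=<n> knob; returning
   (void *)-1 aborts the walk, so each reference lookup inspects at
   most a constant number of dominating VDEFs instead of all of
   them.  */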

/* Lookup an existing or insert a new vn_reference entry into the
   value table for the VUSE, SET, TYPE, OPERANDS reference which
   has the value VALUE which is either a constant or an SSA name.  */

static vn_reference_t
vn_reference_lookup_or_insert_for_pieces (tree vuse,
					  alias_set_type set,
					  tree type,
					  vec<vn_reference_op_s,
					      va_heap> operands,
					  tree value)
{
  vn_reference_s vr1;
  vn_reference_t result;
  unsigned value_id;
  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = operands;
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if (vn_reference_lookup_1 (&vr1, &result))
    return result;
  if (TREE_CODE (value) == SSA_NAME)
    value_id = VN_INFO (value)->value_id;
  else
    value_id = get_or_alloc_constant_value_id (value);
  return vn_reference_insert_pieces (vuse, set, type,
				     operands.copy (), value, value_id);
}

/* Return a value-number for RCODE OPS... either by looking up an existing
   value-number for the simplified result or by inserting the operation if
   INSERT is true.  */

static tree
vn_nary_build_or_lookup_1 (gimple_match_op *res_op, bool insert)
{
  tree result = NULL_TREE;
  /* We will be creating a value number for
       RCODE (OPS...).
     So first simplify and lookup this expression to see if it
     is already available.  */
  mprts_hook = vn_lookup_simplify_result;
  bool res = false;
  switch (TREE_CODE_LENGTH ((tree_code) res_op->code))
    {
    case 1:
      res = gimple_resimplify1 (NULL, res_op, vn_valueize);
      break;
    case 2:
      res = gimple_resimplify2 (NULL, res_op, vn_valueize);
      break;
    case 3:
      res = gimple_resimplify3 (NULL, res_op, vn_valueize);
      break;
    }
  mprts_hook = NULL;
  gimple *new_stmt = NULL;
  if (res
      && gimple_simplified_result_is_gimple_val (res_op))
    /* The expression is already available.  */
    result = res_op->ops[0];
  else
    {
      tree val = vn_lookup_simplify_result (res_op);
      if (!val && insert)
	{
	  gimple_seq stmts = NULL;
	  result = maybe_push_res_to_seq (res_op, &stmts);
	  if (result)
	    {
	      gcc_assert (gimple_seq_singleton_p (stmts));
	      new_stmt = gimple_seq_first_stmt (stmts);
	    }
	}
      else
	/* The expression is already available.  */
	result = val;
    }
  if (new_stmt)
    {
      /* The expression is not yet available, value-number lhs to
	 the new SSA_NAME we created.  */
      /* Initialize value-number information properly.  */
      VN_INFO (result)->valnum = result;
      VN_INFO (result)->value_id = get_next_value_id ();
      gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
					  new_stmt);
      VN_INFO (result)->needs_insertion = true;
      /* ???  PRE phi-translation inserts NARYs without corresponding
	 SSA name result.  Re-use those but set their result according
	 to the stmt we just built.  */
      vn_nary_op_t nary = NULL;
      vn_nary_op_lookup_stmt (new_stmt, &nary);
      if (nary)
	{
	  gcc_assert (! nary->predicated_values && nary->u.result == NULL_TREE);
	  nary->u.result = gimple_assign_lhs (new_stmt);
	}
      /* As all "inserted" statements are singleton SCCs, insert
	 to the valid table.  This is strictly needed to
	 avoid re-generating new value SSA_NAMEs for the same
	 expression during SCC iteration over and over (the
	 optimistic table gets cleared after each iteration).
	 We do not need to insert into the optimistic table, as
	 lookups there will fall back to the valid table.  */
      else
	{
	  unsigned int length = vn_nary_length_from_stmt (new_stmt);
	  vn_nary_op_t vno1
	    = alloc_vn_nary_op_noinit (length, &vn_tables_insert_obstack);
	  vno1->value_id = VN_INFO (result)->value_id;
	  vno1->length = length;
	  vno1->predicated_values = 0;
	  vno1->u.result = result;
	  init_vn_nary_op_from_stmt (vno1, new_stmt);
	  vn_nary_op_insert_into (vno1, valid_info->nary, true);
	  /* Also do not link it into the undo chain.  */
	  last_inserted_nary = vno1->next;
	  vno1->next = (vn_nary_op_t)(void *)-1;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Inserting name ");
	  print_generic_expr (dump_file, result);
	  fprintf (dump_file, " for expression ");
	  print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}
    }
  return result;
}

/* Return a value-number for RCODE OPS... either by looking up an existing
   value-number for the simplified result or by inserting the operation.  */

static tree
vn_nary_build_or_lookup (gimple_match_op *res_op)
{
  return vn_nary_build_or_lookup_1 (res_op, true);
}

/* Try to simplify the expression RCODE OPS... of type TYPE and return
   its value if present.  */

tree
vn_nary_simplify (vn_nary_op_t nary)
{
  if (nary->length > gimple_match_op::MAX_NUM_OPS)
    return NULL_TREE;
  gimple_match_op op (gimple_match_cond::UNCOND, nary->opcode,
		      nary->type, nary->length);
  memcpy (op.ops, nary->op, sizeof (tree) * nary->length);
  return vn_nary_build_or_lookup_1 (&op, false);
}
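
/* Usage sketch (hypothetical operation): for a recorded unary nary
   like (NEGATE_EXPR, int, { x_1 }) this builds the corresponding
   gimple_match_op and, since INSERT is false, only simplifies and
   looks it up, returning e.g. the constant -5 when x_1 has been
   value-numbered to 5, or NULL_TREE when nothing is known.  */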

basic_block vn_context_bb;
1849 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1850 from the statement defining VUSE and if not successful tries to
1851 translate *REFP and VR_ through an aggregate copy at the definition
1852 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1853 of *REF and *VR. If only disambiguation was performed then
1854 *DISAMBIGUATE_ONLY is set to true. */
1857 vn_reference_lookup_3 (ao_ref
*ref
, tree vuse
, void *vr_
,
1858 bool *disambiguate_only
)
1860 vn_reference_t vr
= (vn_reference_t
)vr_
;
1861 gimple
*def_stmt
= SSA_NAME_DEF_STMT (vuse
);
1862 tree base
= ao_ref_base (ref
);
1863 HOST_WIDE_INT offseti
, maxsizei
;
1864 static vec
<vn_reference_op_s
> lhs_ops
;
1866 bool lhs_ref_ok
= false;
1867 poly_int64 copy_size
;
1869 /* First try to disambiguate after value-replacing in the definitions LHS. */
1870 if (is_gimple_assign (def_stmt
))
1872 tree lhs
= gimple_assign_lhs (def_stmt
);
1873 bool valueized_anything
= false;
1874 /* Avoid re-allocation overhead. */
1875 lhs_ops
.truncate (0);
1876 basic_block saved_rpo_bb
= vn_context_bb
;
1877 vn_context_bb
= gimple_bb (def_stmt
);
1878 copy_reference_ops_from_ref (lhs
, &lhs_ops
);
1879 lhs_ops
= valueize_refs_1 (lhs_ops
, &valueized_anything
, true);
1880 vn_context_bb
= saved_rpo_bb
;
1881 if (valueized_anything
)
1883 lhs_ref_ok
= ao_ref_init_from_vn_reference (&lhs_ref
,
1884 get_alias_set (lhs
),
1885 TREE_TYPE (lhs
), lhs_ops
);
1887 && !refs_may_alias_p_1 (ref
, &lhs_ref
, true))
1889 *disambiguate_only
= true;
1895 ao_ref_init (&lhs_ref
, lhs
);
1899 /* If we reach a clobbering statement try to skip it and see if
1900 we find a VN result with exactly the same value as the
1901 possible clobber. In this case we can ignore the clobber
1902 and return the found value.
1903 Note that we don't need to worry about partial overlapping
1904 accesses as we then can use TBAA to disambiguate against the
1905 clobbering statement when looking up a load (thus the
1906 VN_WALKREWRITE guard). */
1907 if (vn_walk_kind
== VN_WALKREWRITE
1908 && is_gimple_reg_type (TREE_TYPE (lhs
))
1909 && types_compatible_p (TREE_TYPE (lhs
), vr
->type
))
1911 tree
*saved_last_vuse_ptr
= last_vuse_ptr
;
1912 /* Do not update last_vuse_ptr in vn_reference_lookup_2. */
1913 last_vuse_ptr
= NULL
;
1914 tree saved_vuse
= vr
->vuse
;
1915 hashval_t saved_hashcode
= vr
->hashcode
;
1916 void *res
= vn_reference_lookup_2 (ref
,
1917 gimple_vuse (def_stmt
), 0, vr
);
1918 /* Need to restore vr->vuse and vr->hashcode. */
1919 vr
->vuse
= saved_vuse
;
1920 vr
->hashcode
= saved_hashcode
;
1921 last_vuse_ptr
= saved_last_vuse_ptr
;
1922 if (res
&& res
!= (void *)-1)
1924 vn_reference_t vnresult
= (vn_reference_t
) res
;
1925 if (vnresult
->result
1926 && operand_equal_p (vnresult
->result
,
1927 gimple_assign_rhs1 (def_stmt
), 0))
1932 else if (gimple_call_builtin_p (def_stmt
, BUILT_IN_NORMAL
)
1933 && gimple_call_num_args (def_stmt
) <= 4)
1935 /* For builtin calls valueize its arguments and call the
1936 alias oracle again. Valueization may improve points-to
1937 info of pointers and constify size and position arguments.
1938 Originally this was motivated by PR61034 which has
1939 conditional calls to free falsely clobbering ref because
1940 of imprecise points-to info of the argument. */
1942 bool valueized_anything
= false;
1943 for (unsigned i
= 0; i
< gimple_call_num_args (def_stmt
); ++i
)
1945 oldargs
[i
] = gimple_call_arg (def_stmt
, i
);
1946 tree val
= vn_valueize (oldargs
[i
]);
1947 if (val
!= oldargs
[i
])
1949 gimple_call_set_arg (def_stmt
, i
, val
);
1950 valueized_anything
= true;
1953 if (valueized_anything
)
1955 bool res
= call_may_clobber_ref_p_1 (as_a
<gcall
*> (def_stmt
),
1957 for (unsigned i
= 0; i
< gimple_call_num_args (def_stmt
); ++i
)
1958 gimple_call_set_arg (def_stmt
, i
, oldargs
[i
]);
1961 *disambiguate_only
= true;
1967 if (*disambiguate_only
)
1970 /* If we cannot constrain the size of the reference we cannot
1971 test if anything kills it. */
1972 if (!ref
->max_size_known_p ())
1975 poly_int64 offset
= ref
->offset
;
1976 poly_int64 maxsize
= ref
->max_size
;
1978 /* We can't deduce anything useful from clobbers. */
1979 if (gimple_clobber_p (def_stmt
))
1982 /* def_stmt may-defs *ref. See if we can derive a value for *ref
1983 from that definition.
1985 if (is_gimple_reg_type (vr
->type
)
1986 && gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMSET
)
1987 && (integer_zerop (gimple_call_arg (def_stmt
, 1))
1988 || ((TREE_CODE (gimple_call_arg (def_stmt
, 1)) == INTEGER_CST
1989 || (INTEGRAL_TYPE_P (vr
->type
) && known_eq (ref
->size
, 8)))
1990 && CHAR_BIT
== 8 && BITS_PER_UNIT
== 8
1991 && offset
.is_constant (&offseti
)
1992 && offseti
% BITS_PER_UNIT
== 0))
1993 && poly_int_tree_p (gimple_call_arg (def_stmt
, 2))
1994 && (TREE_CODE (gimple_call_arg (def_stmt
, 0)) == ADDR_EXPR
1995 || TREE_CODE (gimple_call_arg (def_stmt
, 0)) == SSA_NAME
))
1998 poly_int64 offset2
, size2
, maxsize2
;
2000 tree ref2
= gimple_call_arg (def_stmt
, 0);
2001 if (TREE_CODE (ref2
) == SSA_NAME
)
2003 ref2
= SSA_VAL (ref2
);
2004 if (TREE_CODE (ref2
) == SSA_NAME
2005 && (TREE_CODE (base
) != MEM_REF
2006 || TREE_OPERAND (base
, 0) != ref2
))
2008 gimple
*def_stmt
= SSA_NAME_DEF_STMT (ref2
);
2009 if (gimple_assign_single_p (def_stmt
)
2010 && gimple_assign_rhs_code (def_stmt
) == ADDR_EXPR
)
2011 ref2
= gimple_assign_rhs1 (def_stmt
);
2014 if (TREE_CODE (ref2
) == ADDR_EXPR
)
2016 ref2
= TREE_OPERAND (ref2
, 0);
2017 base2
= get_ref_base_and_extent (ref2
, &offset2
, &size2
, &maxsize2
,
2019 if (!known_size_p (maxsize2
)
2020 || !known_eq (maxsize2
, size2
)
2021 || !operand_equal_p (base
, base2
, OEP_ADDRESS_OF
))
2024 else if (TREE_CODE (ref2
) == SSA_NAME
)
2027 if (TREE_CODE (base
) != MEM_REF
2028 || !(mem_ref_offset (base
) << LOG2_BITS_PER_UNIT
).to_shwi (&soff
))
2032 if (TREE_OPERAND (base
, 0) != ref2
)
2034 gimple
*def
= SSA_NAME_DEF_STMT (ref2
);
2035 if (is_gimple_assign (def
)
2036 && gimple_assign_rhs_code (def
) == POINTER_PLUS_EXPR
2037 && gimple_assign_rhs1 (def
) == TREE_OPERAND (base
, 0)
2038 && poly_int_tree_p (gimple_assign_rhs2 (def
))
2039 && (wi::to_poly_offset (gimple_assign_rhs2 (def
))
2040 << LOG2_BITS_PER_UNIT
).to_shwi (&offset2
))
2042 ref2
= gimple_assign_rhs1 (def
);
2043 if (TREE_CODE (ref2
) == SSA_NAME
)
2044 ref2
= SSA_VAL (ref2
);
2052 tree len
= gimple_call_arg (def_stmt
, 2);
2053 if (known_subrange_p (offset
, maxsize
, offset2
,
2054 wi::to_poly_offset (len
) << LOG2_BITS_PER_UNIT
))
2057 if (integer_zerop (gimple_call_arg (def_stmt
, 1)))
2058 val
= build_zero_cst (vr
->type
);
2059 else if (INTEGRAL_TYPE_P (vr
->type
)
2060 && known_eq (ref
->size
, 8))
2062 gimple_match_op
res_op (gimple_match_cond::UNCOND
, NOP_EXPR
,
2063 vr
->type
, gimple_call_arg (def_stmt
, 1));
2064 val
= vn_nary_build_or_lookup (&res_op
);
2066 || (TREE_CODE (val
) == SSA_NAME
2067 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val
)))
2072 unsigned len
= TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr
->type
));
2073 unsigned char *buf
= XALLOCAVEC (unsigned char, len
);
2074 memset (buf
, TREE_INT_CST_LOW (gimple_call_arg (def_stmt
, 1)),
2076 val
= native_interpret_expr (vr
->type
, buf
, len
);
2080 return vn_reference_lookup_or_insert_for_pieces
2081 (vuse
, vr
->set
, vr
->type
, vr
->operands
, val
);
2085 /* 2) Assignment from an empty CONSTRUCTOR. */
2086 else if (is_gimple_reg_type (vr
->type
)
2087 && gimple_assign_single_p (def_stmt
)
2088 && gimple_assign_rhs_code (def_stmt
) == CONSTRUCTOR
2089 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt
)) == 0)
2092 poly_int64 offset2
, size2
, maxsize2
;
2094 base2
= get_ref_base_and_extent (gimple_assign_lhs (def_stmt
),
2095 &offset2
, &size2
, &maxsize2
, &reverse
);
2096 if (known_size_p (maxsize2
)
2097 && operand_equal_p (base
, base2
, 0)
2098 && known_subrange_p (offset
, maxsize
, offset2
, size2
))
2100 tree val
= build_zero_cst (vr
->type
);
2101 return vn_reference_lookup_or_insert_for_pieces
2102 (vuse
, vr
->set
, vr
->type
, vr
->operands
, val
);
2106 /* 3) Assignment from a constant. We can use folds native encode/interpret
2107 routines to extract the assigned bits. */
  else if (known_eq (ref->size, maxsize)
	   && is_gimple_reg_type (vr->type)
	   && !contains_storage_order_barrier_p (vr->operands)
	   && gimple_assign_single_p (def_stmt)
	   && CHAR_BIT == 8 && BITS_PER_UNIT == 8
	   /* native_encode and native_decode operate on arrays of bytes
	      and so fundamentally need a compile-time size and offset.  */
	   && maxsize.is_constant (&maxsizei)
	   && maxsizei % BITS_PER_UNIT == 0
	   && offset.is_constant (&offseti)
	   && offseti % BITS_PER_UNIT == 0
	   && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
	       || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
		   && is_gimple_min_invariant
			(SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2;
      bool reverse;
      base2 = get_ref_base_and_extent_hwi (gimple_assign_lhs (def_stmt),
					   &offset2, &size2, &reverse);
      if (base2
	  && !reverse
	  && size2 % BITS_PER_UNIT == 0
	  && offset2 % BITS_PER_UNIT == 0
	  && operand_equal_p (base, base2, 0)
	  && known_subrange_p (offseti, maxsizei, offset2, size2))
	{
	  /* We support up to 512-bit values (for V8DFmode).  */
	  unsigned char buffer[64];
	  int len;

	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  if (TREE_CODE (rhs) == SSA_NAME)
	    rhs = SSA_VAL (rhs);
	  len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
				    buffer, sizeof (buffer),
				    (offseti - offset2) / BITS_PER_UNIT);
	  if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
	    {
	      tree type = vr->type;
	      /* Make sure to interpret in a type that has a range
		 covering the whole access size.  */
	      if (INTEGRAL_TYPE_P (vr->type)
		  && maxsizei != TYPE_PRECISION (vr->type))
		type = build_nonstandard_integer_type (maxsizei,
						       TYPE_UNSIGNED (type));
	      tree val = native_interpret_expr (type, buffer,
						maxsizei / BITS_PER_UNIT);
	      /* If we chop off bits because the type's precision doesn't
		 match the memory access size this is ok when optimizing
		 reads but not when called from the DSE code during
		 elimination.  */
	      if (val
		  && type != vr->type)
		{
		  if (! int_fits_type_p (val, vr->type))
		    val = NULL_TREE;
		  else
		    val = fold_convert (vr->type, val);
		}

	      if (val)
		return vn_reference_lookup_or_insert_for_pieces
			 (vuse, vr->set, vr->type, vr->operands, val);
	    }
	}
    }
  /* 4) Assignment from an SSA name whose definition we may be able
     to access pieces from.  */
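  /* For example (illustrative), after an aggregate or vector register
     store such as
	 a = x_1;
     a read of a sub-piece of 'a' can be expressed as
	 BIT_FIELD_REF <x_1, size, position>
     and looked up (or built) as an nary operation below.  */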
  else if (known_eq (ref->size, maxsize)
	   && is_gimple_reg_type (vr->type)
	   && !contains_storage_order_barrier_p (vr->operands)
	   && gimple_assign_single_p (def_stmt)
	   && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
    {
      tree base2;
      poly_int64 offset2, size2, maxsize2;
      bool reverse;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
				       &offset2, &size2, &maxsize2,
				       &reverse);
      if (!reverse
	  && known_size_p (maxsize2)
	  && known_eq (maxsize2, size2)
	  && operand_equal_p (base, base2, 0)
	  && known_subrange_p (offset, maxsize, offset2, size2)
	  /* ???  We can't handle bitfield precision extracts without
	     either using an alternate type for the BIT_FIELD_REF and
	     then doing a conversion or possibly adjusting the offset
	     according to endianness.  */
	  && (! INTEGRAL_TYPE_P (vr->type)
	      || known_eq (ref->size, TYPE_PRECISION (vr->type)))
	  && multiple_p (ref->size, BITS_PER_UNIT))
	{
	  gimple_match_op op (gimple_match_cond::UNCOND,
			      BIT_FIELD_REF, vr->type,
			      vn_valueize (gimple_assign_rhs1 (def_stmt)),
			      bitsize_int (ref->size),
			      bitsize_int (offset - offset2));
	  tree val = vn_nary_build_or_lookup (&op);
	  if (val
	      && (TREE_CODE (val) != SSA_NAME
		  || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
	    {
	      vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
		  (vuse, vr->set, vr->type, vr->operands, val);
	      return res;
	    }
	}
    }
  /* 5) For aggregate copies translate the reference through them if
     the copy kills ref.  */
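  /* For example (illustrative), given
	 b = a;
	 ... = b.f;
     when the copy kills the looked-up reference, the lookup of b.f is
     rewritten into a lookup of a.f and the walk continues with the
     translated reference.  */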
  else if (vn_walk_kind == VN_WALKREWRITE
	   && gimple_assign_single_p (def_stmt)
	   && (DECL_P (gimple_assign_rhs1 (def_stmt))
	       || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
	       || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      int i, j, k;
      auto_vec<vn_reference_op_s> rhs;
      vn_reference_op_t vro;
      ao_ref r;

      /* See if the assignment kills REF.  */
      base2 = ao_ref_base (&lhs_ref);
      if (!lhs_ref.max_size_known_p ()
	  || (base != base2
	      && (TREE_CODE (base) != MEM_REF
		  || TREE_CODE (base2) != MEM_REF
		  || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
		  || !tree_int_cst_equal (TREE_OPERAND (base, 1),
					  TREE_OPERAND (base2, 1))))
	  || !stmt_kills_ref_p (def_stmt, ref))
	return (void *)-1;

      /* Find the common base of ref and the lhs.  lhs_ops already
	 contains valueized operands for the lhs.  */
      i = vr->operands.length () - 1;
      j = lhs_ops.length () - 1;
      while (j >= 0 && i >= 0
	     && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
	{
	  i--;
	  j--;
	}

      /* ???  The innermost op should always be a MEM_REF and we already
	 checked that the assignment to the lhs kills vr.  Thus for
	 aggregate copies using char[] types the vn_reference_op_eq
	 may fail when comparing types for compatibility.  But we really
	 don't care here - further lookups with the rewritten operands
	 will simply fail if we messed up types too badly.  */
      poly_int64 extra_off = 0;
      if (j == 0 && i >= 0
	  && lhs_ops[0].opcode == MEM_REF
	  && maybe_ne (lhs_ops[0].off, -1))
	{
	  if (known_eq (lhs_ops[0].off, vr->operands[i].off))
	    i--, j--;
	  else if (vr->operands[i].opcode == MEM_REF
		   && maybe_ne (vr->operands[i].off, -1))
	    {
	      extra_off = vr->operands[i].off - lhs_ops[0].off;
	      i--, j--;
	    }
	}

      /* i now points to the first additional op.
	 ???  LHS may not be completely contained in VR, one or more
	 VIEW_CONVERT_EXPRs could be in its way.  We could at least
	 try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
	return (void *)-1;

      /* Punt if the additional ops contain a storage order barrier.  */
      for (k = i; k >= 0; k--)
	{
	  vro = &vr->operands[k];
	  if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
	    return (void *)-1;
	}

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);

      /* Apply an extra offset to the inner MEM_REF of the RHS.  */
      if (maybe_ne (extra_off, 0))
	{
	  if (rhs.length () < 2)
	    return (void *)-1;
	  int ix = rhs.length () - 2;
	  if (rhs[ix].opcode != MEM_REF
	      || known_eq (rhs[ix].off, -1))
	    return (void *)-1;
	  rhs[ix].off += extra_off;
	  rhs[ix].op0 = int_const_binop (PLUS_EXPR, rhs[ix].op0,
					 build_int_cst (TREE_TYPE (rhs[ix].op0),
							extra_off));
	}

      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      vec<vn_reference_op_s> old = vr->operands;
      if (i + 1 + rhs.length () > vr->operands.length ())
	vr->operands.safe_grow (i + 1 + rhs.length ());
      else
	vr->operands.truncate (i + 1 + rhs.length ());
      FOR_EACH_VEC_ELT (rhs, j, vro)
	vr->operands[i + 1 + j] = *vro;
      vr->operands = valueize_refs (vr->operands);
      if (old == shared_lookup_references)
	shared_lookup_references = vr->operands;
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Try folding the new reference to a constant.  */
      tree val = fully_constant_vn_reference_p (vr);
      if (val)
	return vn_reference_lookup_or_insert_for_pieces
		 (vuse, vr->set, vr->type, vr->operands, val);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
	return (void *)-1;
      /* This can happen with bitfields.  */
      if (maybe_ne (ref->size, r.size))
	return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }
  /* 6) For memcpy copies translate the reference through them if
     the copy kills ref.  */
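  /* For example (illustrative), given
	 memcpy (&b, &a, sizeof (b));
	 ... = b.f;
     the read is rewritten into an equivalent MEM_REF based on the
     source address &a, and the walk continues with that reference.  */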
  else if (vn_walk_kind == VN_WALKREWRITE
	   && is_gimple_reg_type (vr->type)
	   /* ???  Handle BCOPY as well.  */
	   && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
	   && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
	       || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
	   && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
	       || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
	   && poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size))
    {
      tree lhs, rhs;
      ao_ref r;
      poly_int64 rhs_offset, lhs_offset;
      vn_reference_op_s op;
      poly_uint64 mem_offset;
      poly_int64 at, byte_maxsize;

      /* Only handle non-variable, addressable refs.  */
      if (maybe_ne (ref->size, maxsize)
	  || !multiple_p (offset, BITS_PER_UNIT, &at)
	  || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
	return (void *)-1;

      /* Extract a pointer base and an offset for the destination.  */
      lhs = gimple_call_arg (def_stmt, 0);
      lhs_offset = 0;
      if (TREE_CODE (lhs) == SSA_NAME)
	{
	  lhs = vn_valueize (lhs);
	  if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
	      if (gimple_assign_single_p (def_stmt)
		  && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
		lhs = gimple_assign_rhs1 (def_stmt);
	    }
	}
      if (TREE_CODE (lhs) == ADDR_EXPR)
	{
	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
						    &lhs_offset);
	  if (!tem)
	    return (void *)-1;
	  if (TREE_CODE (tem) == MEM_REF
	      && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
	    {
	      lhs = TREE_OPERAND (tem, 0);
	      if (TREE_CODE (lhs) == SSA_NAME)
		lhs = vn_valueize (lhs);
	      lhs_offset += mem_offset;
	    }
	  else if (DECL_P (tem))
	    lhs = build_fold_addr_expr (tem);
	  else
	    return (void *)-1;
	}
      if (TREE_CODE (lhs) != SSA_NAME
	  && TREE_CODE (lhs) != ADDR_EXPR)
	return (void *)-1;

      /* Extract a pointer base and an offset for the source.  */
      rhs = gimple_call_arg (def_stmt, 1);
      rhs_offset = 0;
      if (TREE_CODE (rhs) == SSA_NAME)
	rhs = vn_valueize (rhs);
      if (TREE_CODE (rhs) == ADDR_EXPR)
	{
	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
						    &rhs_offset);
	  if (!tem)
	    return (void *)-1;
	  if (TREE_CODE (tem) == MEM_REF
	      && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
	    {
	      rhs = TREE_OPERAND (tem, 0);
	      rhs_offset += mem_offset;
	    }
	  else if (DECL_P (tem)
		   || TREE_CODE (tem) == STRING_CST)
	    rhs = build_fold_addr_expr (tem);
	  else
	    return (void *)-1;
	}
      if (TREE_CODE (rhs) != SSA_NAME
	  && TREE_CODE (rhs) != ADDR_EXPR)
	return (void *)-1;

      /* The bases of the destination and the references have to agree.  */
      if (TREE_CODE (base) == MEM_REF)
	{
	  if (TREE_OPERAND (base, 0) != lhs
	      || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
	    return (void *)-1;
	  at += mem_offset;
	}
      else if (!DECL_P (base)
	       || TREE_CODE (lhs) != ADDR_EXPR
	       || TREE_OPERAND (lhs, 0) != base)
	return (void *)-1;

      /* If the access is completely outside of the memcpy destination
	 area there is no aliasing.  */
      if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
	return NULL;
      /* And the access has to be contained within the memcpy destination.  */
      if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
	return (void *)-1;

      /* Make room for 2 operands in the new reference.  */
      if (vr->operands.length () < 2)
	{
	  vec<vn_reference_op_s> old = vr->operands;
	  vr->operands.safe_grow_cleared (2);
	  if (old == shared_lookup_references)
	    shared_lookup_references = vr->operands;
	}
      else
	vr->operands.truncate (2);

      /* The looked-through reference is a simple MEM_REF.  */
      memset (&op, 0, sizeof (op));
      op.type = vr->type;
      op.opcode = MEM_REF;
      op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
      op.off = at - lhs_offset + rhs_offset;
      vr->operands[0] = op;
      op.type = TREE_TYPE (rhs);
      op.opcode = TREE_CODE (rhs);
      op.op0 = rhs;
      op.off = -1;
      vr->operands[1] = op;
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Try folding the new reference to a constant.  */
      tree val = fully_constant_vn_reference_p (vr);
      if (val)
	return vn_reference_lookup_or_insert_for_pieces
		 (vuse, vr->set, vr->type, vr->operands, val);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
	return (void *)-1;
      /* This can happen with bitfields.  */
      if (maybe_ne (ref->size, r.size))
	return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}
/* Return a reference op vector from OP that can be used for
   vn_reference_lookup_pieces.  The caller is responsible for releasing
   the vector.  */

vec<vn_reference_op_s>
vn_reference_operands_for_lookup (tree op)
{
  bool valueized;
  return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
}
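/* Illustrative (hypothetical) caller pairing this with the pieces
   interface below; since the vector above is a copy, the caller has
   to release it:

     vec<vn_reference_op_s> ops = vn_reference_operands_for_lookup (op);
     vn_reference_t res;
     tree val = vn_reference_lookup_pieces (vuse, set, type, ops,
					    &res, VN_WALK);
     ops.release ();
*/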
/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
			    vec<vn_reference_op_s> operands,
			    vn_reference_t *vnresult, vn_lookup_kind kind)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;
  tree cst;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse_ssa_val (vuse);
  shared_lookup_references.truncate (0);
  shared_lookup_references.safe_grow (operands.length ());
  memcpy (shared_lookup_references.address (),
	  operands.address (),
	  sizeof (vn_reference_op_s)
	  * operands.length ());
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  vn_reference_lookup_1 (&vr1, vnresult);
  if (!*vnresult
      && kind != VN_NOWALK
      && vr1.vuse)
    {
      ao_ref r;
      vn_walk_kind = kind;
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
	*vnresult =
	  (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
						  vn_reference_lookup_2,
						  vn_reference_lookup_3,
						  vuse_ssa_val, &vr1);
      gcc_checking_assert (vr1.operands == shared_lookup_references);
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  When TBAA_P is false assume
   we are looking up a store and treat it as having alias-set zero.  */

tree
vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
		     vn_reference_t *vnresult, bool tbaa_p)
{
  vec<vn_reference_op_s> operands;
  struct vn_reference_s vr1;
  tree cst;
  bool valuezied_anything;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse_ssa_val (vuse);
  vr1.operands = operands
    = valueize_shared_reference_ops_from_ref (op, &valuezied_anything);
  vr1.type = TREE_TYPE (op);
  vr1.set = tbaa_p ? get_alias_set (op) : 0;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  if (kind != VN_NOWALK
      && vr1.vuse)
    {
      vn_reference_t wvnresult;
      ao_ref r;
      /* Make sure to use a valueized reference if we valueized anything.
	 Otherwise preserve the full reference for advanced TBAA.  */
      if (!valuezied_anything
	  || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
					     vr1.operands))
	ao_ref_init (&r, op);
      if (! tbaa_p)
	r.ref_alias_set = r.base_alias_set = 0;
      vn_walk_kind = kind;
      wvnresult =
	(vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
						vn_reference_lookup_2,
						vn_reference_lookup_3,
						vuse_ssa_val, &vr1);
      gcc_checking_assert (vr1.operands == shared_lookup_references);
      if (wvnresult)
	{
	  if (vnresult)
	    *vnresult = wvnresult;
	  return wvnresult->result;
	}

      return NULL_TREE;
    }

  return vn_reference_lookup_1 (&vr1, vnresult);
}
/* Lookup CALL in the current hash table and return the entry in
   *VNRESULT if found.  Populates *VR for the hashtable lookup.  */

void
vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
			  vn_reference_t vr)
{
  if (vnresult)
    *vnresult = NULL;

  tree vuse = gimple_vuse (call);

  vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr->operands = valueize_shared_reference_ops_from_call (call);
  vr->type = gimple_expr_type (call);
  vr->set = 0;
  vr->hashcode = vn_reference_compute_hash (vr);
  vn_reference_lookup_1 (vr, vnresult);
}
/* Insert OP into the current hash table with a value number of RESULT.  */

static void
vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
{
  vn_reference_s **slot;
  vn_reference_t vr1;
  bool tem;

  vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse_ssa_val (vuse);
  vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
  vr1->type = TREE_TYPE (op);
  vr1->set = get_alias_set (op);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
  vr1->result_vdef = vdef;

  slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
						      INSERT);

  /* Because IL walking on reference lookup can end up visiting
     a def that is only to be visited later in iteration order
     when we are about to make an irreducible region reducible
     the def can be effectively processed and its ref being inserted
     by vn_reference_lookup_3 already.  So we cannot assert (!*slot)
     but save a lookup if we deal with already inserted refs here.  */
  if (*slot)
    {
      /* We cannot assert that we have the same value either because
	 when disentangling an irreducible region we may end up visiting
	 a use before the corresponding def.  That's a missed optimization
	 only though.  See gcc.dg/tree-ssa/pr87126.c for example.  */
      if (dump_file && (dump_flags & TDF_DETAILS)
	  && !operand_equal_p ((*slot)->result, vr1->result, 0))
	{
	  fprintf (dump_file, "Keeping old value ");
	  print_generic_expr (dump_file, (*slot)->result);
	  fprintf (dump_file, " because of collision\n");
	}
      free_reference (vr1);
      obstack_free (&vn_tables_obstack, vr1);
      return;
    }

  *slot = vr1;
  vr1->next = last_inserted_ref;
  last_inserted_ref = vr1;
}
/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
			    vec<vn_reference_op_s> operands,
			    tree result, unsigned int value_id)
{
  vn_reference_s **slot;
  vn_reference_t vr1;

  vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
  vr1->value_id = value_id;
  vr1->vuse = vuse_ssa_val (vuse);
  vr1->operands = valueize_refs (operands);
  vr1->type = type;
  vr1->set = set;
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
						      INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  *slot = vr1;

  vr1->next = last_inserted_ref;
  last_inserted_ref = vr1;
  return vr1;
}
/* Compute and return the hash value for nary operation VNO1.  */

static hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  inchash::hash hstate;
  unsigned i;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);
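  /* Canonicalize operand order so that, e.g., b_1 + a_2 and a_2 + b_1
     compute the same hash (and later compare equal); for comparisons
     the operand swap is compensated by swapping the comparison code.  */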
  if (((vno1->length == 2
	&& commutative_tree_code (vno1->opcode))
       || (vno1->length == 3
	   && commutative_ternary_tree_code (vno1->opcode)))
      && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
    std::swap (vno1->op[0], vno1->op[1]);
  else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
	   && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
    {
      std::swap (vno1->op[0], vno1->op[1]);
      vno1->opcode = swap_tree_comparison (vno1->opcode);
    }

  hstate.add_int (vno1->opcode);
  for (i = 0; i < vno1->length; ++i)
    inchash::add_expr (vno1->op[i], hstate);

  return hstate.end ();
}
/* Compare nary operations VNO1 and VNO2 and return true if they are
   equivalent.  */

bool
vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
{
  unsigned i;

  if (vno1->hashcode != vno2->hashcode)
    return false;

  if (vno1->length != vno2->length)
    return false;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  /* BIT_INSERT_EXPR has an implicit operand as the type precision
     of op1.  Need to check to make sure they are the same.  */
  if (vno1->opcode == BIT_INSERT_EXPR
      && TREE_CODE (vno1->op[1]) == INTEGER_CST
      && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
	 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
    return false;

  return true;
}
/* Initialize VNO from the pieces provided.  */

static void
init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
			     enum tree_code code, tree type, tree *ops)
{
  vno->opcode = code;
  vno->length = length;
  vno->type = type;
  memcpy (&vno->op[0], ops, sizeof (tree) * length);
}
/* Initialize VNO from OP.  */

static void
init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
{
  unsigned i;

  vno->opcode = TREE_CODE (op);
  vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno->type = TREE_TYPE (op);
  for (i = 0; i < vno->length; ++i)
    vno->op[i] = TREE_OPERAND (op, i);
}
/* Return the number of operands for a vn_nary ops structure from STMT.  */

static unsigned int
vn_nary_length_from_stmt (gimple *stmt)
{
  switch (gimple_assign_rhs_code (stmt))
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      return 1;

    case BIT_FIELD_REF:
      return 3;

    case CONSTRUCTOR:
      return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));

    default:
      return gimple_num_ops (stmt) - 1;
    }
}
/* Initialize VNO from STMT.  */

static void
init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
{
  unsigned i;

  vno->opcode = gimple_assign_rhs_code (stmt);
  vno->type = gimple_expr_type (stmt);
  switch (vno->opcode)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      vno->length = 1;
      vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      break;

    case BIT_FIELD_REF:
      vno->length = 3;
      vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
      vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
      break;

    case CONSTRUCTOR:
      vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
      for (i = 0; i < vno->length; ++i)
	vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
      break;

    default:
      gcc_checking_assert (!gimple_assign_single_p (stmt));
      vno->length = gimple_num_ops (stmt) - 1;
      for (i = 0; i < vno->length; ++i)
	vno->op[i] = gimple_op (stmt, i + 1);
    }
}
/* Compute the hashcode for VNO and look for it in the hash table;
   return the resulting value number if it exists in the hash table.
   Return NULL_TREE if it does not exist in the hash table or if the
   result field of the operation is NULL.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

static tree
vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
{
  vn_nary_op_s **slot;

  if (vnresult)
    *vnresult = NULL;

  vno->hashcode = vn_nary_op_compute_hash (vno);
  slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = *slot;
  return (*slot)->predicated_values ? NULL_TREE : (*slot)->u.result;
}
/* Lookup a n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
			  tree type, tree *ops, vn_nary_op_t *vnresult)
{
  vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
				  sizeof_vn_nary_op (length));
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  return vn_nary_op_lookup_1 (vno1, vnresult);
}
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
{
  vn_nary_op_t vno1
    = XALLOCAVAR (struct vn_nary_op_s,
		  sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
  init_vn_nary_op_from_op (vno1, op);
  return vn_nary_op_lookup_1 (vno1, vnresult);
}
/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

tree
vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
{
  vn_nary_op_t vno1
    = XALLOCAVAR (struct vn_nary_op_s,
		  sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
  init_vn_nary_op_from_stmt (vno1, stmt);
  return vn_nary_op_lookup_1 (vno1, vnresult);
}
/* Allocate a vn_nary_op_t with LENGTH operands on STACK.  */

static vn_nary_op_t
alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
{
  return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
}
/* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
   obstack.  */

static vn_nary_op_t
alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
{
  vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length, &vn_tables_obstack);

  vno1->value_id = value_id;
  vno1->length = length;
  vno1->predicated_values = 0;
  vno1->u.result = result;

  return vno1;
}
/* Insert VNO into TABLE.  If COMPUTE_HASH is true, then compute
   VNO->HASHCODE first.  */

static vn_nary_op_t
vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
			bool compute_hash)
{
  vn_nary_op_s **slot;

  if (compute_hash)
    {
      vno->hashcode = vn_nary_op_compute_hash (vno);
      gcc_assert (! vno->predicated_values
		  || (! vno->u.values->next
		      && vno->u.values->valid_dominated_by_p[0] != EXIT_BLOCK
		      && vno->u.values->valid_dominated_by_p[1] == EXIT_BLOCK));
    }

  slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
  vno->unwind_to = *slot;
  if (*slot)
    {
      /* Prefer non-predicated values.
	 ??? Only if those are constant, otherwise, with constant predicated
	 value, turn them into predicated values with entry-block validity
	 (??? but we always find the first valid result currently).  */
      if ((*slot)->predicated_values
	  && ! vno->predicated_values)
	{
	  /* ???  We cannot remove *slot from the unwind stack list.
	     For the moment we deal with this by skipping not found
	     entries but this isn't ideal ...  */
	  *slot = vno;
	  /* ???  Maintain a stack of states we can unwind in
	     vn_nary_op_s?  But how far do we unwind?  In reality
	     we need to push change records somewhere...  Or not
	     unwind vn_nary_op_s and linking them but instead
	     unwind the results "list", linking that, which also
	     doesn't move on hashtable resize.  */
	  /* We can also have a ->unwind_to recording *slot there.
	     That way we can make u.values a fixed size array with
	     recording the number of entries but of course we then
	     have always N copies for each unwind_to-state.  Or we
	     make sure to only ever append and each unwinding will
	     pop off one entry (but how to deal with predicated
	     replaced with non-predicated here?)  */
	  vno->next = last_inserted_nary;
	  last_inserted_nary = vno;
	  return vno;
	}
      else if (vno->predicated_values
	       && ! (*slot)->predicated_values)
	return *slot;
      else if (vno->predicated_values
	       && (*slot)->predicated_values)
	{
	  /* ???  Factor this all into an insert_single_predicated_value
	     routine.  */
	  gcc_assert (!vno->u.values->next && vno->u.values->n == 1);
	  basic_block vno_bb
	    = BASIC_BLOCK_FOR_FN (cfun, vno->u.values->valid_dominated_by_p[0]);
	  vn_pval *nval = vno->u.values;
	  vn_pval **next = &vno->u.values;
	  bool found = false;
	  for (vn_pval *val = (*slot)->u.values; val; val = val->next)
	    {
	      if (expressions_equal_p (val->result, vno->u.values->result))
		{
		  found = true;
		  for (unsigned i = 0; i < val->n; ++i)
		    {
		      basic_block val_bb
			= BASIC_BLOCK_FOR_FN (cfun,
					      val->valid_dominated_by_p[i]);
		      if (dominated_by_p (CDI_DOMINATORS, vno_bb, val_bb))
			/* Value registered with more generic predicate.  */
			return *slot;
		      else if (dominated_by_p (CDI_DOMINATORS, val_bb, vno_bb))
			/* Shouldn't happen, we insert in RPO order.  */
			gcc_unreachable ();
		    }
		  /* Append value.  */
		  *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
						     sizeof (vn_pval)
						     + val->n * sizeof (int));
		  (*next)->next = NULL;
		  (*next)->result = val->result;
		  (*next)->n = val->n + 1;
		  memcpy ((*next)->valid_dominated_by_p,
			  val->valid_dominated_by_p,
			  val->n * sizeof (int));
		  (*next)->valid_dominated_by_p[val->n] = vno_bb->index;
		  next = &(*next)->next;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "Appending predicate to value.\n");
		  continue;
		}
	      /* Copy other predicated values.  */
	      *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
						 sizeof (vn_pval)
						 + (val->n-1) * sizeof (int));
	      memcpy (*next, val, sizeof (vn_pval) + (val->n-1) * sizeof (int));
	      (*next)->next = NULL;
	      next = &(*next)->next;
	    }
	  if (!found)
	    *next = nval;

	  *slot = vno;
	  vno->next = last_inserted_nary;
	  last_inserted_nary = vno;
	  return vno;
	}

      /* While we do not want to insert things twice it's awkward to
	 avoid it in the case where visit_nary_op pattern-matches stuff
	 and ends up simplifying the replacement to itself.  We then
	 get two inserts, one from visit_nary_op and one from
	 vn_nary_build_or_lookup.
	 So allow inserts with the same value number.  */
      if ((*slot)->u.result == vno->u.result)
	return *slot;
    }

  /* ???  There's also optimistic vs. previous committed state merging
     that is problematic for the case of unwinding.  */

  /* ???  We should return NULL if we do not use 'vno' and have the
     caller release it.  */
  gcc_assert (!*slot);

  *slot = vno;
  vno->next = last_inserted_nary;
  last_inserted_nary = vno;
  return vno;
}
/* Insert a n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
			  tree type, tree *ops,
			  tree result, unsigned int value_id)
{
  vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  return vn_nary_op_insert_into (vno1, valid_info->nary, true);
}
static vn_nary_op_t
vn_nary_op_insert_pieces_predicated (unsigned int length, enum tree_code code,
				     tree type, tree *ops,
				     tree result, unsigned int value_id,
				     edge pred_e)
{
  /* ???  Currently tracking BBs.  */
  if (! single_pred_p (pred_e->dest))
    {
      /* Never record for backedges.  */
      if (pred_e->flags & EDGE_DFS_BACK)
	return NULL;
      edge_iterator ei;
      edge e;
      int cnt = 0;
      /* Ignore backedges.  */
      FOR_EACH_EDGE (e, ei, pred_e->dest->preds)
	if (! dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
	  cnt++;
      if (cnt != 1)
	return NULL;
    }
  if (dump_file && (dump_flags & TDF_DETAILS)
      /* ???  Fix dumping, but currently we only get comparisons.  */
      && TREE_CODE_CLASS (code) == tcc_comparison)
    {
      fprintf (dump_file, "Recording on edge %d->%d ", pred_e->src->index,
	       pred_e->dest->index);
      print_generic_expr (dump_file, ops[0], TDF_SLIM);
      fprintf (dump_file, " %s ", get_tree_code_name (code));
      print_generic_expr (dump_file, ops[1], TDF_SLIM);
      fprintf (dump_file, " == %s\n",
	       integer_zerop (result) ? "false" : "true");
    }
  vn_nary_op_t vno1 = alloc_vn_nary_op (length, NULL_TREE, value_id);
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  vno1->predicated_values = 1;
  vno1->u.values = (vn_pval *) obstack_alloc (&vn_tables_obstack,
					      sizeof (vn_pval));
  vno1->u.values->next = NULL;
  vno1->u.values->result = result;
  vno1->u.values->n = 1;
  vno1->u.values->valid_dominated_by_p[0] = pred_e->dest->index;
  vno1->u.values->valid_dominated_by_p[1] = EXIT_BLOCK;
  return vn_nary_op_insert_into (vno1, valid_info->nary, true);
}
static bool
dominated_by_p_w_unex (basic_block bb1, basic_block bb2);

static tree
vn_nary_op_get_predicated_value (vn_nary_op_t vno, basic_block bb)
{
  if (! vno->predicated_values)
    return vno->u.result;
  for (vn_pval *val = vno->u.values; val; val = val->next)
    for (unsigned i = 0; i < val->n; ++i)
      if (dominated_by_p_w_unex (bb,
				 BASIC_BLOCK_FOR_FN
				   (cfun, val->valid_dominated_by_p[i])))
	return val->result;
  return NULL_TREE;
}
/* Insert OP into the current hash table with a value number of
   RESULT.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert (tree op, tree result)
{
  unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
  vn_nary_op_t vno1;

  vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
  init_vn_nary_op_from_op (vno1, op);
  return vn_nary_op_insert_into (vno1, valid_info->nary, true);
}
/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */

static vn_nary_op_t
vn_nary_op_insert_stmt (gimple *stmt, tree result)
{
  vn_nary_op_t vno1
    = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
			result, VN_INFO (result)->value_id);
  init_vn_nary_op_from_stmt (vno1, stmt);
  return vn_nary_op_insert_into (vno1, valid_info->nary, true);
}
/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  inchash::hash hstate (EDGE_COUNT (vp1->block->preds) > 2
			? vp1->block->index
			: EDGE_COUNT (vp1->block->preds));
  tree type;
  tree phi1op;
  edge e;
  edge_iterator ei;

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
  type = vp1->type;
  hstate.merge_hash (vn_hash_type (type));

  FOR_EACH_EDGE (e, ei, vp1->block->preds)
    {
      /* Don't hash backedge values, they need to be handled as VN_TOP
	 for optimistic value-numbering.  */
      if (e->flags & EDGE_DFS_BACK)
	continue;

      phi1op = vp1->phiargs[e->dest_idx];
      if (phi1op == VN_TOP)
	continue;
      inchash::add_expr (phi1op, hstate);
    }

  return hstate.end ();
}
/* Return true if COND1 and COND2 represent the same condition, set
   *INVERTED_P if one needs to be inverted to make it the same as
   the other.  */

static bool
cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
		    gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
{
  enum tree_code code1 = gimple_cond_code (cond1);
  enum tree_code code2 = gimple_cond_code (cond2);

  *inverted_p = false;
  if (code1 == code2)
    ;
  else if (code1 == swap_tree_comparison (code2))
    std::swap (lhs2, rhs2);
  else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
    *inverted_p = true;
  else if (code1 == invert_tree_comparison
	     (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
    {
      std::swap (lhs2, rhs2);
      *inverted_p = true;
    }
  else
    return false;

  return ((expressions_equal_p (lhs1, lhs2)
	   && expressions_equal_p (rhs1, rhs2))
	  || (commutative_tree_code (code1)
	      && expressions_equal_p (lhs1, rhs2)
	      && expressions_equal_p (rhs1, lhs2)));
}
/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
{
  if (vp1->hashcode != vp2->hashcode)
    return false;

  if (vp1->block != vp2->block)
    {
      if (EDGE_COUNT (vp1->block->preds) != EDGE_COUNT (vp2->block->preds))
	return false;

      switch (EDGE_COUNT (vp1->block->preds))
	{
	case 1:
	  /* Single-arg PHIs are just copies.  */
	  break;

	case 2:
	  {
	    /* Rule out backedges into the PHI.  */
	    if (vp1->block->loop_father->header == vp1->block
		|| vp2->block->loop_father->header == vp2->block)
	      return false;

	    /* If the PHI nodes do not have compatible types
	       they are not the same.  */
	    if (!types_compatible_p (vp1->type, vp2->type))
	      return false;

	    basic_block idom1
	      = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
	    basic_block idom2
	      = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
	    /* If the immediate dominators end in switch stmts multiple
	       values may end up in the same PHI arg via intermediate
	       CFG merges.  */
	    if (EDGE_COUNT (idom1->succs) != 2
		|| EDGE_COUNT (idom2->succs) != 2)
	      return false;

	    /* Verify the controlling stmt is the same.  */
	    gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
	    gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
	    if (! last1 || ! last2)
	      return false;
	    bool inverted_p;
	    if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
				      last2, vp2->cclhs, vp2->ccrhs,
				      &inverted_p))
	      return false;

	    /* Get at true/false controlled edges into the PHI.  */
	    edge te1, te2, fe1, fe2;
	    if (! extract_true_false_controlled_edges (idom1, vp1->block,
						       &te1, &fe1)
		|| ! extract_true_false_controlled_edges (idom2, vp2->block,
							  &te2, &fe2))
	      return false;

	    /* Swap edges if the second condition is the inverse of the
	       first.  */
	    if (inverted_p)
	      std::swap (te2, fe2);

	    /* ???  Handle VN_TOP specially.  */
	    if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
				       vp2->phiargs[te2->dest_idx])
		|| ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
					  vp2->phiargs[fe2->dest_idx]))
	      return false;

	    return true;
	  }

	default:
	  return false;
	}
    }

  /* If the PHI nodes do not have compatible types
     they are not the same.  */
  if (!types_compatible_p (vp1->type, vp2->type))
    return false;

  /* Any phi in the same block will have its arguments in the
     same edge order, because of how we store phi nodes.  */
  for (unsigned i = 0; i < EDGE_COUNT (vp1->block->preds); ++i)
    {
      tree phi1op = vp1->phiargs[i];
      tree phi2op = vp2->phiargs[i];
      if (phi1op == VN_TOP || phi2op == VN_TOP)
	continue;
      if (!expressions_equal_p (phi1op, phi2op))
	return false;
    }

  return true;
}
/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

static tree
vn_phi_lookup (gimple *phi, bool backedges_varying_p)
{
  vn_phi_s **slot;
  struct vn_phi_s *vp1;
  edge e;
  edge_iterator ei;

  vp1 = XALLOCAVAR (struct vn_phi_s,
		    sizeof (struct vn_phi_s)
		    + (gimple_phi_num_args (phi) - 1) * sizeof (tree));

  /* Canonicalize the SSA_NAME's to their value number.  */
  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
    {
      tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      if (TREE_CODE (def) == SSA_NAME
	  && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
	def = SSA_VAL (def);
      vp1->phiargs[e->dest_idx] = def;
    }
  vp1->type = TREE_TYPE (gimple_phi_result (phi));
  vp1->block = gimple_bb (phi);
  /* Extract values of the controlling condition.  */
  vp1->cclhs = NULL_TREE;
  vp1->ccrhs = NULL_TREE;
  basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
  if (EDGE_COUNT (idom1->succs) == 2)
    if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
      {
	/* ???  We want to use SSA_VAL here.  But possibly not
	   allow VN_TOP.  */
	vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
	vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
      }
  vp1->hashcode = vn_phi_compute_hash (vp1);
  slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, NO_INSERT);
  if (!slot)
    return NULL_TREE;
  return (*slot)->result;
}
/* Insert PHI into the current hash table with a value number of
   RESULT.  */

static vn_phi_t
vn_phi_insert (gimple *phi, tree result, bool backedges_varying_p)
{
  vn_phi_s **slot;
  vn_phi_t vp1 = (vn_phi_t) obstack_alloc (&vn_tables_obstack,
					   sizeof (vn_phi_s)
					   + ((gimple_phi_num_args (phi) - 1)
					      * sizeof (tree)));
  edge e;
  edge_iterator ei;

  /* Canonicalize the SSA_NAME's to their value number.  */
  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
    {
      tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      if (TREE_CODE (def) == SSA_NAME
	  && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
	def = SSA_VAL (def);
      vp1->phiargs[e->dest_idx] = def;
    }
  vp1->value_id = VN_INFO (result)->value_id;
  vp1->type = TREE_TYPE (gimple_phi_result (phi));
  vp1->block = gimple_bb (phi);
  /* Extract values of the controlling condition.  */
  vp1->cclhs = NULL_TREE;
  vp1->ccrhs = NULL_TREE;
  basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
  if (EDGE_COUNT (idom1->succs) == 2)
    if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
      {
	/* ???  We want to use SSA_VAL here.  But possibly not
	   allow VN_TOP.  */
	vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
	vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
      }
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
  gcc_assert (!*slot);

  *slot = vp1;
  vp1->next = last_inserted_phi;
  last_inserted_phi = vp1;
  return vp1;
}
/* Return true if BB1 is dominated by BB2 taking into account edges
   that are not executable.  */
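/* For example (illustrative), if the only other predecessor edge of
   BB1 has been marked not executable during value numbering, BB2 may
   effectively dominate BB1 even though the static dominator tree says
   otherwise; this function follows single executable edges to detect
   such cases.  */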
static bool
dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
{
  edge_iterator ei;
  edge e;

  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
    return true;

  /* Before iterating we'd like to know if there exists a
     (executable) path from bb2 to bb1 at all, if not we can
     directly return false.  For now simply iterate once.  */

  /* Iterate to the single executable bb1 predecessor.  */
  if (EDGE_COUNT (bb1->preds) > 1)
    {
      edge prede = NULL;
      FOR_EACH_EDGE (e, ei, bb1->preds)
	if (e->flags & EDGE_EXECUTABLE)
	  {
	    if (prede)
	      {
		prede = NULL;
		break;
	      }
	    prede = e;
	  }
      if (prede)
	{
	  bb1 = prede->src;

	  /* Re-do the dominance check with changed bb1.  */
	  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
	    return true;
	}
    }

  /* Iterate to the single executable bb2 successor.  */
  edge succe = NULL;
  FOR_EACH_EDGE (e, ei, bb2->succs)
    if (e->flags & EDGE_EXECUTABLE)
      {
	if (succe)
	  {
	    succe = NULL;
	    break;
	  }
	succe = e;
      }
  if (succe)
    {
      /* Verify the reached block is only reached through succe.
	 If there is only one edge we can spare us the dominator
	 check and iterate directly.  */
      if (EDGE_COUNT (succe->dest->preds) > 1)
	{
	  FOR_EACH_EDGE (e, ei, succe->dest->preds)
	    if (e != succe
		&& (e->flags & EDGE_EXECUTABLE))
	      {
		succe = NULL;
		break;
	      }
	}
      if (succe)
	{
	  bb2 = succe->dest;

	  /* Re-do the dominance check with changed bb2.  */
	  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
	    return true;
	}
    }

  /* We could now iterate updating bb1 / bb2.  */
  return false;
}
/* Set the value number of FROM to TO, return true if it has changed
   as a result.  */
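/* Illustrative lattice progression: an SSA name typically moves from
   VN_TOP (unvisited) to a specific value and, if iteration discovers a
   conflict, to itself (VARYING); see the checks below that force
   VARYING rather than oscillating between values.  */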
static bool
set_ssa_val_to (tree from, tree to)
{
  vn_ssa_aux_t from_info = VN_INFO (from);
  tree currval = from_info->valnum; // SSA_VAL (from)
  poly_int64 toff, coff;

  /* The only thing we allow as value numbers are ssa_names
     and invariants.  So assert that here.  We don't allow VN_TOP
     as visiting a stmt should produce a value-number other than
     that.
     ???  Still VN_TOP can happen for unreachable code, so force
     it to varying in that case.  Not all code is prepared to
     get VN_TOP on valueization.  */
  if (to == VN_TOP)
    {
      /* ???  When iterating and visiting PHI <undef, backedge-value>
	 for the first time we rightfully get VN_TOP and we need to
	 preserve that to optimize for example gcc.dg/tree-ssa/ssa-sccvn-2.c.
	 With SCCVN we were simply lucky we iterated the other PHI
	 cycles first and thus visited the backedge-value DEF.  */
      if (currval == VN_TOP)
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Forcing value number to varying on "
		 "receiving VN_TOP\n");
      to = from;
    }

  gcc_checking_assert (to != NULL_TREE
		       && ((TREE_CODE (to) == SSA_NAME
			    && (to == from || SSA_VAL (to) == to))
			   || is_gimple_min_invariant (to)));

  if (from != to)
    {
      if (currval == from)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Not changing value number of ");
	      print_generic_expr (dump_file, from);
	      fprintf (dump_file, " from VARYING to ");
	      print_generic_expr (dump_file, to);
	      fprintf (dump_file, "\n");
	    }
	  return false;
	}
      else if (currval != VN_TOP
	       && ! is_gimple_min_invariant (currval)
	       && ! ssa_undefined_value_p (currval, false)
	       && is_gimple_min_invariant (to))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Forcing VARYING instead of changing "
		       "value number of ");
	      print_generic_expr (dump_file, from);
	      fprintf (dump_file, " from ");
	      print_generic_expr (dump_file, currval);
	      fprintf (dump_file, " (non-constant) to ");
	      print_generic_expr (dump_file, to);
	      fprintf (dump_file, " (constant)\n");
	    }
	  to = from;
	}
      else if (TREE_CODE (to) == SSA_NAME
	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
	to = from;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to);
    }

  if (currval != to
      && !operand_equal_p (currval, to, 0)
      /* Different undefined SSA names are not actually different.  See
	 PR82320 for a testcase where we'd otherwise not terminate iteration.  */
      && !(TREE_CODE (currval) == SSA_NAME
	   && TREE_CODE (to) == SSA_NAME
	   && ssa_undefined_value_p (currval, false)
	   && ssa_undefined_value_p (to, false))
      /* ???  For addresses involving volatile objects or types operand_equal_p
	 does not reliably detect ADDR_EXPRs as equal.  We know we are only
	 getting invariant gimple addresses here, so can use
	 get_addr_base_and_unit_offset to do this comparison.  */
      && !(TREE_CODE (currval) == ADDR_EXPR
	   && TREE_CODE (to) == ADDR_EXPR
	   && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
	       == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
	   && known_eq (coff, toff)))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " (changed)\n");
      from_info->valnum = to;
      return true;
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n");
  return false;
}
/* Set all definitions in STMT to value-number themselves.
   Return true if a value number changed.  */

static bool
defs_to_varying (gimple *stmt)
{
  bool changed = false;
  ssa_op_iter iter;
  def_operand_p defp;

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);
      changed |= set_ssa_val_to (def, def);
    }
  return changed;
}
/* Visit a copy between LHS and RHS, return true if the value number
   changed.  */

static bool
visit_copy (tree lhs, tree rhs)
{
  /* Valueize.  */
  rhs = SSA_VAL (rhs);

  return set_ssa_val_to (lhs, rhs);
}
/* Lookup a value for OP in type WIDE_TYPE where the value in type of OP
   is the same.  */
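/* For example (illustrative): if a widening conversion w_2 = (int) s_1
   has already been value-numbered, then valueized_wider_op (int, s_1)
   finds and returns w_2; for an INTEGER_CST it simply returns the
   constant extended to WIDE_TYPE.  */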
static tree
valueized_wider_op (tree wide_type, tree op)
{
  if (TREE_CODE (op) == SSA_NAME)
    op = vn_valueize (op);

  /* Either we have the op widened available.  */
  tree ops[3] = {};
  ops[0] = op;
  tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
				       wide_type, ops, NULL);
  if (tem)
    return tem;

  /* Or the op is truncated from some existing value.  */
  if (TREE_CODE (op) == SSA_NAME)
    {
      gimple *def = SSA_NAME_DEF_STMT (op);
      if (is_gimple_assign (def)
	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
	{
	  tem = gimple_assign_rhs1 (def);
	  if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
	    {
	      if (TREE_CODE (tem) == SSA_NAME)
		tem = vn_valueize (tem);
	      return tem;
	    }
	}
    }

  /* For constants simply extend it.  */
  if (TREE_CODE (op) == INTEGER_CST)
    return wide_int_to_tree (wide_type, wi::to_wide (op));

  return NULL_TREE;
}
/* Visit a nary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_nary_op (tree lhs, gassign *stmt)
{
  vn_nary_op_t vnresult;
  tree result = vn_nary_op_lookup_stmt (stmt, &vnresult);
  if (! result && vnresult)
    result = vn_nary_op_get_predicated_value (vnresult, gimple_bb (stmt));
  if (result)
    return set_ssa_val_to (lhs, result);

  /* Do some special pattern matching for redundancies of operations
     in different types.  */
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  switch (code)
    {
    CASE_CONVERT:
      /* Match arithmetic done in a different type where we can easily
	 substitute the result from some earlier sign-changed or widened
	 operation.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (rhs1) == SSA_NAME
	  /* We only handle sign-changes or zero-extension -> & mask.  */
	  && ((TYPE_UNSIGNED (TREE_TYPE (rhs1))
	       && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
	      || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
	{
	  gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
	  if (def
	      && (gimple_assign_rhs_code (def) == PLUS_EXPR
		  || gimple_assign_rhs_code (def) == MINUS_EXPR
		  || gimple_assign_rhs_code (def) == MULT_EXPR))
	    {
	      tree ops[3] = {};
	      /* Either we have the op widened available.  */
	      ops[0] = valueized_wider_op (type,
					   gimple_assign_rhs1 (def));
	      if (ops[0])
		ops[1] = valueized_wider_op (type,
					     gimple_assign_rhs2 (def));
	      if (ops[0] && ops[1])
		{
		  ops[0] = vn_nary_op_lookup_pieces
		      (2, gimple_assign_rhs_code (def), type, ops, NULL);
		  /* We have wider operation available.  */
		  if (ops[0])
		    {
		      unsigned lhs_prec = TYPE_PRECISION (type);
		      unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
		      if (lhs_prec == rhs_prec)
			{
			  gimple_match_op match_op (gimple_match_cond::UNCOND,
						    NOP_EXPR, type, ops[0]);
			  result = vn_nary_build_or_lookup (&match_op);
			  if (result)
			    {
			      bool changed = set_ssa_val_to (lhs, result);
			      vn_nary_op_insert_stmt (stmt, result);
			      return changed;
			    }
			}
		      else
			{
			  tree mask = wide_int_to_tree
			    (type, wi::mask (rhs_prec, false, lhs_prec));
			  gimple_match_op match_op (gimple_match_cond::UNCOND,
						    BIT_AND_EXPR,
						    TREE_TYPE (lhs),
						    ops[0], mask);
			  result = vn_nary_build_or_lookup (&match_op);
			  if (result)
			    {
			      bool changed = set_ssa_val_to (lhs, result);
			      vn_nary_op_insert_stmt (stmt, result);
			      return changed;
			    }
			}
		    }
		}
	    }
	}
    default:;
    }

  bool changed = set_ssa_val_to (lhs, lhs);
  vn_nary_op_insert_stmt (stmt, lhs);
  return changed;
}
/* Visit a call STMT storing into LHS.  Return true if the value number
   of the LHS has changed as a result.  */

static bool
visit_reference_op_call (tree lhs, gcall *stmt)
{
  bool changed = false;
  struct vn_reference_s vr1;
  vn_reference_t vnresult = NULL;
  tree vdef = gimple_vdef (stmt);

  /* Non-ssa lhs is handled in copy_reference_ops_from_call.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    lhs = NULL_TREE;

  vn_reference_lookup_call (stmt, &vnresult, &vr1);
  if (vnresult)
    {
      if (vnresult->result_vdef && vdef)
	changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
      else if (vdef)
	/* If the call was discovered to be pure or const reflect
	   that as far as possible.  */
	changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));

      if (!vnresult->result && lhs)
	vnresult->result = lhs;

      if (vnresult->result && lhs)
	changed |= set_ssa_val_to (lhs, vnresult->result);
    }
  else
    {
      vn_reference_t vr2;
      vn_reference_s **slot;
      tree vdef_val = vdef;
      if (vdef)
	{
	  /* If we value-numbered an indirect call's function to
	     one not clobbering memory, value-number its VDEF to its
	     VUSE.  */
	  tree fn = gimple_call_fn (stmt);
	  if (fn && TREE_CODE (fn) == SSA_NAME)
	    {
	      fn = SSA_VAL (fn);
	      if (TREE_CODE (fn) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
		  && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
		      & (ECF_CONST | ECF_PURE)))
		vdef_val = vuse_ssa_val (gimple_vuse (stmt));
	    }
	  changed |= set_ssa_val_to (vdef, vdef_val);
	}
      if (lhs)
	changed |= set_ssa_val_to (lhs, lhs);
      vr2 = XOBNEW (&vn_tables_obstack, vn_reference_s);
      vr2->vuse = vr1.vuse;
      /* As we are not walking the virtual operand chain we know the
	 shared_lookup_references are still original so we can re-use
	 them here.  */
      vr2->operands = vr1.operands.copy ();
      vr2->type = vr1.type;
      vr2->set = vr1.set;
      vr2->hashcode = vr1.hashcode;
      vr2->result = lhs;
      vr2->result_vdef = vdef_val;
      slot = valid_info->references->find_slot_with_hash (vr2, vr2->hashcode,
							  INSERT);
      gcc_assert (!*slot);
      *slot = vr2;
      vr2->next = last_inserted_ref;
      last_inserted_ref = vr2;
    }

  return changed;
}
/* Visit a load from a reference operator RHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_load (tree lhs, tree op, gimple *stmt)
{
  bool changed = false;
  tree last_vuse;
  tree result;

  last_vuse = gimple_vuse (stmt);
  last_vuse_ptr = &last_vuse;
  result = vn_reference_lookup (op, gimple_vuse (stmt),
				default_vn_walk_kind, NULL, true);
  last_vuse_ptr = NULL;

  /* We handle type-punning through unions by value-numbering based
     on offset and size of the access.  Be prepared to handle a
     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
  if (result
      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
    {
      /* We will be setting the value number of lhs to the value number
	 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
	 So first simplify and lookup this expression to see if it
	 is already available.  */
      gimple_match_op res_op (gimple_match_cond::UNCOND,
			      VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
      result = vn_nary_build_or_lookup (&res_op);
      /* When building the conversion fails avoid inserting the reference
	 again.  */
      if (!result)
	return set_ssa_val_to (lhs, lhs);
    }

  if (result)
    changed = set_ssa_val_to (lhs, result);
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
    }

  return changed;
}
/* Visit a store to a reference operator LHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_store (tree lhs, tree op, gimple *stmt)
{
  bool changed = false;
  vn_reference_t vnresult = NULL;
  tree assign;
  bool resultsame = false;
  tree vuse = gimple_vuse (stmt);
  tree vdef = gimple_vdef (stmt);

  if (TREE_CODE (op) == SSA_NAME)
    op = SSA_VAL (op);

  /* First we want to lookup using the *vuses* from the store and see
     if there the last store to this location with the same address
     had the same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store is the same as the
     last store to this location, then this store will produce the
     same memory state as that store.

     In this case the vdef versions for this store are value numbered to those
     vuse versions, since they represent the same memory state after
     this store.

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */
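  /* For example (illustrative GIMPLE):
	 *p_1 = x_2;   // VUSE .MEM_3, VDEF .MEM_4
	 ...
	 *p_1 = x_2;   // VUSE .MEM_4, VDEF .MEM_5
     the second store is redundant, so .MEM_5 is value-numbered to
     .MEM_4 and later loads can look through it.  */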
  vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
  if (vnresult
      && vnresult->result)
    {
      tree result = vnresult->result;
      gcc_checking_assert (TREE_CODE (result) != SSA_NAME
			   || result == SSA_VAL (result));
      resultsame = expressions_equal_p (result, op);
      if (resultsame)
	{
	  /* If the TBAA state isn't compatible for downstream reads
	     we cannot value-number the VDEFs the same.  */
	  alias_set_type set = get_alias_set (lhs);
	  if (vnresult->set != set
	      && ! alias_set_subset_of (set, vnresult->set))
	    resultsame = false;
	}
    }

  if (!resultsame)
    {
      /* Only perform the following when being called from PRE
	 which embeds tail merging.  */
      if (default_vn_walk_kind == VN_WALK)
	{
	  assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
	  vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
	  if (vnresult)
	    {
	      VN_INFO (vdef)->visited = true;
	      return set_ssa_val_to (vdef, vnresult->result_vdef);
	    }
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "No store match\n");
	  fprintf (dump_file, "Value numbering store ");
	  print_generic_expr (dump_file, lhs);
	  fprintf (dump_file, " to ");
	  print_generic_expr (dump_file, op);
	  fprintf (dump_file, "\n");
	}
      /* Have to set value numbers before insert, since insert is
	 going to valueize the references in-place.  */
      if (vdef)
	changed |= set_ssa_val_to (vdef, vdef);

      /* Do not insert structure copies into the tables.  */
      if (is_gimple_min_invariant (op)
	  || is_gimple_reg (op))
	vn_reference_insert (lhs, op, vdef, NULL);

      /* Only perform the following when being called from PRE
	 which embeds tail merging.  */
      if (default_vn_walk_kind == VN_WALK)
	{
	  assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
	  vn_reference_insert (assign, lhs, vuse, vdef);
	}
    }
  else
    {
      /* We had a match, so value number the vdef to have the value
	 number of the vuse it came from.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Store matched earlier value, "
		 "value numbering store vdefs to matching vuses.\n");

      changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
    }

  return changed;
}
/* Visit and value number PHI, return true if the value number
   changed.  When BACKEDGES_VARYING_P is true then assume all
   backedge values are varying.  When INSERTED is not NULL then
   this is just an ahead query for a possible iteration, set INSERTED
   to true if we'd insert into the hashtable.  */

static bool
visit_phi (gimple *phi, bool *inserted, bool backedges_varying_p)
{
  tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
  tree sameval_base = NULL_TREE;
  poly_int64 soff, doff;
  unsigned n_executable = 0;
  bool allsame = true;
  edge_iterator ei;
  edge e;

  /* TODO: We could check for this in initialization, and replace this
     with a gcc_assert.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
    return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));

  /* We track whether a PHI was CSEd to avoid excessive iterations
     that would be necessary only because the PHI changed arguments
     but not value.  */
  if (inserted)
    gimple_set_plf (phi, GF_PLF_1, false);

  /* See if all non-TOP arguments have the same value.  TOP is
     equivalent to everything, so we can ignore it.  */
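  /* For example (illustrative), for
	 x_3 = PHI <x_1(2), x_1(3), x_2(4)>
     where the edge from block 4 is not executable, all executable
     arguments agree on x_1 and the PHI can be value-numbered to x_1.  */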
4123 FOR_EACH_EDGE (e
, ei
, gimple_bb (phi
)->preds
)
4124 if (e
->flags
& EDGE_EXECUTABLE
)
4126 tree def
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
4129 if (TREE_CODE (def
) == SSA_NAME
4130 && (!backedges_varying_p
|| !(e
->flags
& EDGE_DFS_BACK
)))
4131 def
= SSA_VAL (def
);
4134 /* Ignore undefined defs for sameval but record one. */
4135 else if (TREE_CODE (def
) == SSA_NAME
4136 && ! virtual_operand_p (def
)
4137 && ssa_undefined_value_p (def
, false))
4139 else if (sameval
== VN_TOP
)
4141 else if (!expressions_equal_p (def
, sameval
))
4143 /* We know we're arriving only with invariant addresses here,
4144 try harder comparing them. We can do some caching here
4145 which we cannot do in expressions_equal_p. */
4146 if (TREE_CODE (def
) == ADDR_EXPR
4147 && TREE_CODE (sameval
) == ADDR_EXPR
4148 && sameval_base
!= (void *)-1)
4151 sameval_base
= get_addr_base_and_unit_offset
4152 (TREE_OPERAND (sameval
, 0), &soff
);
4154 sameval_base
= (tree
)(void *)-1;
4155 else if ((get_addr_base_and_unit_offset
4156 (TREE_OPERAND (def
, 0), &doff
) == sameval_base
)
4157 && known_eq (soff
, doff
))
4166 /* If none of the edges was executable keep the value-number at VN_TOP,
4167 if only a single edge is exectuable use its value. */
4168 if (n_executable
<= 1)
4169 result
= seen_undef
? seen_undef
: sameval
;
4170 /* If we saw only undefined values and VN_TOP use one of the
4171 undefined values. */
4172 else if (sameval
== VN_TOP
)
4173 result
= seen_undef
? seen_undef
: sameval
;
4174 /* First see if it is equivalent to a phi node in this block. We prefer
4175 this as it allows IV elimination - see PRs 66502 and 67167. */
4176 else if ((result
= vn_phi_lookup (phi
, backedges_varying_p
)))
4179 && TREE_CODE (result
) == SSA_NAME
4180 && gimple_code (SSA_NAME_DEF_STMT (result
)) == GIMPLE_PHI
)
4182 gimple_set_plf (SSA_NAME_DEF_STMT (result
), GF_PLF_1
, true);
4183 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
4185 fprintf (dump_file
, "Marking CSEd to PHI node ");
4186 print_gimple_expr (dump_file
, SSA_NAME_DEF_STMT (result
),
4188 fprintf (dump_file
, "\n");
4192 /* If all values are the same use that, unless we've seen undefined
4193 values as well and the value isn't constant.
4194 CCP/copyprop have the same restriction to not remove uninit warnings. */
4196 && (! seen_undef
|| is_gimple_min_invariant (sameval
)))
4200 result
= PHI_RESULT (phi
);
4201 /* Only insert PHIs that are varying, for constant value numbers
4202 we mess up equivalences otherwise as we are only comparing
4203 the immediate controlling predicates. */
4204 vn_phi_insert (phi
, result
, backedges_varying_p
);
4209 return set_ssa_val_to (PHI_RESULT (phi
), result
);
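/* An illustrative sketch of PHI value numbering; the SSA names here are
   made up for exposition:

     # x_4 = PHI <a_2(3), a_2(4)>

   All executable arguments value-number to a_2, so x_4 is value-numbered
   to a_2.  An argument on a not (yet) executable edge values to VN_TOP
   and is ignored, which is what makes the optimistic iteration over
   cycles converge to the best result.  */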
/* Try to simplify RHS using equivalences and constant folding.  */

static tree
try_to_simplify (gassign *stmt)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree tem;

  /* For stores we can end up simplifying a SSA_NAME rhs.  Just return
     in this case, there is no point in doing extra work.  */
  if (code == SSA_NAME)
    return NULL_TREE;

  /* First try constant folding based on our current lattice.  */
  mprts_hook = vn_lookup_simplify_result;
  tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
  mprts_hook = NULL;
  if (tem
      && (TREE_CODE (tem) == SSA_NAME
          || is_gimple_min_invariant (tem)))
    return tem;

  return NULL_TREE;
}
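/* An illustrative sketch of lattice-based folding; the SSA names here
   are made up for exposition: with the lattice recording b_2 = 3, the
   statement a_1 = b_2 + 1 is folded through vn_valueize to 3 + 1 and
   try_to_simplify returns the constant 4, which visit_stmt then uses
   as the value number of a_1.  */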
/* Visit and value number STMT, return true if the value number of the
   LHS has changed as a result.  */

static bool
visit_stmt (gimple *stmt, bool backedges_varying_p = false)
{
  bool changed = false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbering stmt = ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  if (gimple_code (stmt) == GIMPLE_PHI)
    changed = visit_phi (stmt, NULL, backedges_varying_p);
  else if (gimple_has_volatile_ops (stmt))
    changed = defs_to_varying (stmt);
  else if (gassign *ass = dyn_cast <gassign *> (stmt))
    {
      enum tree_code code = gimple_assign_rhs_code (ass);
      tree lhs = gimple_assign_lhs (ass);
      tree rhs1 = gimple_assign_rhs1 (ass);
      tree simplified;

      /* Shortcut for copies.  Simplifying copies is pointless,
         since we copy the expression and value they represent.  */
      if (code == SSA_NAME
          && TREE_CODE (lhs) == SSA_NAME)
        {
          changed = visit_copy (lhs, rhs1);
          goto done;
        }
      simplified = try_to_simplify (ass);
      if (simplified
          && dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "RHS ");
          print_gimple_expr (dump_file, ass, 0);
          fprintf (dump_file, " simplified to ");
          print_generic_expr (dump_file, simplified);
          fprintf (dump_file, "\n");
        }
      /* Setting value numbers to constants will occasionally
         screw up phi congruence because constants are not
         uniquely associated with a single ssa name that can be
         looked up.  */
      if (simplified
          && is_gimple_min_invariant (simplified)
          && TREE_CODE (lhs) == SSA_NAME)
        {
          changed = set_ssa_val_to (lhs, simplified);
          goto done;
        }
      else if (simplified
               && TREE_CODE (simplified) == SSA_NAME
               && TREE_CODE (lhs) == SSA_NAME)
        {
          changed = visit_copy (lhs, simplified);
          goto done;
        }

      if ((TREE_CODE (lhs) == SSA_NAME
           /* We can substitute SSA_NAMEs that are live over
              abnormal edges with their constant value.  */
           && !(gimple_assign_copy_p (ass)
                && is_gimple_min_invariant (rhs1))
           && !(simplified
                && is_gimple_min_invariant (simplified))
           && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
          /* Stores or copies from SSA_NAMEs that are live over
             abnormal edges are a problem.  */
          || (code == SSA_NAME
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
        changed = defs_to_varying (ass);
      else if (REFERENCE_CLASS_P (lhs)
               || DECL_P (lhs))
        changed = visit_reference_op_store (lhs, rhs1, ass);
      else if (TREE_CODE (lhs) == SSA_NAME)
        {
          if ((gimple_assign_copy_p (ass)
               && is_gimple_min_invariant (rhs1))
              || (simplified
                  && is_gimple_min_invariant (simplified)))
            {
              if (simplified)
                changed = set_ssa_val_to (lhs, simplified);
              else
                changed = set_ssa_val_to (lhs, rhs1);
            }
          else
            {
              /* Visit the original statement.  */
              switch (vn_get_stmt_kind (ass))
                {
                case VN_NARY:
                  changed = visit_nary_op (lhs, ass);
                  break;
                case VN_REFERENCE:
                  changed = visit_reference_op_load (lhs, rhs1, ass);
                  break;
                default:
                  changed = defs_to_varying (ass);
                  break;
                }
            }
        }
      else
        changed = defs_to_varying (ass);
    }
  else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      tree lhs = gimple_call_lhs (call_stmt);
      if (lhs && TREE_CODE (lhs) == SSA_NAME)
        {
          /* Try constant folding based on our current lattice.  */
          tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
                                                            vn_valueize);
          if (simplified
              && dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "call ");
              print_gimple_expr (dump_file, call_stmt, 0);
              fprintf (dump_file, " simplified to ");
              print_generic_expr (dump_file, simplified);
              fprintf (dump_file, "\n");
            }
          /* Setting value numbers to constants will occasionally
             screw up phi congruence because constants are not
             uniquely associated with a single ssa name that can be
             looked up.  */
          if (simplified
              && is_gimple_min_invariant (simplified))
            {
              changed = set_ssa_val_to (lhs, simplified);
              if (gimple_vdef (call_stmt))
                changed |= set_ssa_val_to (gimple_vdef (call_stmt),
                                           SSA_VAL (gimple_vuse (call_stmt)));
              goto done;
            }
          else if (simplified
                   && TREE_CODE (simplified) == SSA_NAME)
            {
              changed = visit_copy (lhs, simplified);
              if (gimple_vdef (call_stmt))
                changed |= set_ssa_val_to (gimple_vdef (call_stmt),
                                           SSA_VAL (gimple_vuse (call_stmt)));
              goto done;
            }
          else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
            {
              changed = defs_to_varying (call_stmt);
              goto done;
            }
        }

      /* Pick up flags from a devirtualization target.  */
      tree fn = gimple_call_fn (stmt);
      int extra_fnflags = 0;
      if (fn && TREE_CODE (fn) == SSA_NAME)
        {
          fn = SSA_VAL (fn);
          if (TREE_CODE (fn) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
            extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
        }
      if (!gimple_call_internal_p (call_stmt)
          && (/* Calls to the same function with the same vuse
                 and the same operands do not necessarily return the same
                 value, unless they're pure or const.  */
              ((gimple_call_flags (call_stmt) | extra_fnflags)
               & (ECF_PURE | ECF_CONST))
              /* If calls have a vdef, subsequent calls won't have
                 the same incoming vuse.  So, if 2 calls with vdef have the
                 same vuse, we know they're not subsequent.
                 We can value number 2 calls to the same function with the
                 same vuse and the same operands which are not subsequent
                 the same, because there is no code in the program that can
                 compare the 2 values...  */
              || (gimple_vdef (call_stmt)
                  /* ... unless the call returns a pointer which does
                     not alias with anything else.  In which case the
                     information that the values are distinct is encoded
                     in the IL.  */
                  && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
                  /* Only perform the following when being called from PRE
                     which embeds tail merging.  */
                  && default_vn_walk_kind == VN_WALK)))
        changed = visit_reference_op_call (lhs, call_stmt);
      else
        changed = defs_to_varying (call_stmt);
    }
  else
    changed = defs_to_varying (stmt);

done:
  return changed;
}
/* Allocate a value number table.  */

static void
allocate_vn_table (vn_tables_t table, unsigned size)
{
  table->phis = new vn_phi_table_type (size);
  table->nary = new vn_nary_op_table_type (size);
  table->references = new vn_reference_table_type (size);
}
/* Free a value number table.  */

static void
free_vn_table (vn_tables_t table)
{
  /* Walk over elements and release vectors.  */
  vn_reference_iterator_type hir;
  vn_reference_t vr;
  FOR_EACH_HASH_TABLE_ELEMENT (*table->references, vr, vn_reference_t, hir)
    vr->operands.release ();
  delete table->phis;
  table->phis = NULL;
  delete table->nary;
  table->nary = NULL;
  delete table->references;
  table->references = NULL;
}
/* Set *ID according to RESULT.  */

static void
set_value_id_for_result (tree result, unsigned int *id)
{
  if (result && TREE_CODE (result) == SSA_NAME)
    *id = VN_INFO (result)->value_id;
  else if (result && is_gimple_min_invariant (result))
    *id = get_or_alloc_constant_value_id (result);
  else
    *id = get_next_value_id ();
}
/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  vn_nary_op_iterator_type hin;
  vn_phi_iterator_type hip;
  vn_reference_iterator_type hir;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     table.  */

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
    if (! vno->predicated_values)
      set_value_id_for_result (vno->u.result, &vno->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
    set_value_id_for_result (vp->result, &vp->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
                               hir)
    set_value_id_for_result (vr->result, &vr->value_id);
}
/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}
/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If either one is VN_TOP consider them equal.  */
  if (e1 == VN_TOP || e2 == VN_TOP)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}
/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
        {
          honor_nans = flag_trapping_math && !flag_finite_math_only;
          honor_snans = flag_signaling_nans != 0;
        }
      else if (INTEGRAL_TYPE_P (type)
               && TYPE_OVERFLOW_TRAPS (type))
        honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
                                       honor_trapv,
                                       honor_nans, honor_snans, rhs2,
                                       &handled);
  if (handled && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}
class eliminate_dom_walker : public dom_walker
{
public:
  eliminate_dom_walker (cdi_direction, bitmap);
  ~eliminate_dom_walker ();

  virtual edge before_dom_children (basic_block);
  virtual void after_dom_children (basic_block);

  virtual tree eliminate_avail (basic_block, tree op);
  virtual void eliminate_push_avail (basic_block, tree op);
  tree eliminate_insert (basic_block, gimple_stmt_iterator *gsi, tree val);

  void eliminate_stmt (basic_block, gimple_stmt_iterator *);

  unsigned eliminate_cleanup (bool region_p = false);

  bool do_pre;
  unsigned int el_todo;
  unsigned int eliminations;
  unsigned int insertions;

  /* SSA names that had their defs inserted by PRE if do_pre.  */
  bitmap inserted_exprs;

  /* Blocks with statements that have had their EH properties changed.  */
  bitmap need_eh_cleanup;

  /* Blocks with statements that have had their AB properties changed.  */
  bitmap need_ab_cleanup;

  /* Local state for the eliminate domwalk.  */
  auto_vec<gimple *> to_remove;
  auto_vec<gimple *> to_fixup;
  auto_vec<tree> avail;
  auto_vec<tree> avail_stack;
};

eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
                                            bitmap inserted_exprs_)
  : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
    el_todo (0), eliminations (0), insertions (0),
    inserted_exprs (inserted_exprs_)
{
  need_eh_cleanup = BITMAP_ALLOC (NULL);
  need_ab_cleanup = BITMAP_ALLOC (NULL);
}

eliminate_dom_walker::~eliminate_dom_walker ()
{
  BITMAP_FREE (need_eh_cleanup);
  BITMAP_FREE (need_ab_cleanup);
}
/* Return a leader for OP that is available at the current point of the
   eliminate domwalk.  */

tree
eliminate_dom_walker::eliminate_avail (basic_block, tree op)
{
  tree valnum = VN_INFO (op)->valnum;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (valnum))
        return valnum;
      if (avail.length () > SSA_NAME_VERSION (valnum))
        return avail[SSA_NAME_VERSION (valnum)];
    }
  else if (is_gimple_min_invariant (valnum))
    return valnum;
  return NULL_TREE;
}
/* At the current point of the eliminate domwalk make OP available.  */

void
eliminate_dom_walker::eliminate_push_avail (basic_block, tree op)
{
  tree valnum = VN_INFO (op)->valnum;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (avail.length () <= SSA_NAME_VERSION (valnum))
        avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
      tree pushop = op;
      if (avail[SSA_NAME_VERSION (valnum)])
        pushop = avail[SSA_NAME_VERSION (valnum)];
      avail_stack.safe_push (pushop);
      avail[SSA_NAME_VERSION (valnum)] = op;
    }
}
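/* An illustrative sketch of the availability scheme above: avail is
   indexed by the SSA version of a value number and holds the current
   leader, while avail_stack remembers the previous leader so
   after_dom_children can restore it when leaving a dominator sub-tree.
   The NULL_TREE pushed by before_dom_children marks where one block's
   entries begin on the stack.  */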
/* Insert the expression recorded by SCCVN for VAL at *GSI.  Returns
   the leader for the expression if insertion was successful.  */

tree
eliminate_dom_walker::eliminate_insert (basic_block bb,
                                        gimple_stmt_iterator *gsi, tree val)
{
  /* We can insert a sequence with a single assignment only.  */
  gimple_seq stmts = VN_INFO (val)->expr;
  if (!gimple_seq_singleton_p (stmts))
    return NULL_TREE;
  gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
  if (!stmt
      || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
          && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
          && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
          && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
              || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
    return NULL_TREE;

  tree op = gimple_assign_rhs1 (stmt);
  if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
      || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
    op = TREE_OPERAND (op, 0);
  tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (bb, op) : op;
  if (!leader)
    return NULL_TREE;

  tree res;
  stmts = NULL;
  if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
    res = gimple_build (&stmts, BIT_FIELD_REF,
                        TREE_TYPE (val), leader,
                        TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
                        TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
  else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
    res = gimple_build (&stmts, BIT_AND_EXPR,
                        TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
  else
    res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
                        TREE_TYPE (val), leader);
  if (TREE_CODE (res) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (res)
      || gimple_bb (SSA_NAME_DEF_STMT (res)))
    {
      gimple_seq_discard (stmts);

      /* During propagation we have to treat SSA info conservatively
         and thus we can end up simplifying the inserted expression
         at elimination time to sth not defined in stmts.  */
      /* But then this is a redundancy we failed to detect.  Which means
         res now has two values.  That doesn't play well with how
         we track availability here, so give up.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          if (TREE_CODE (res) == SSA_NAME)
            res = eliminate_avail (bb, res);
          if (res)
            {
              fprintf (dump_file, "Failed to insert expression for value ");
              print_generic_expr (dump_file, val);
              fprintf (dump_file, " which is really fully redundant to ");
              print_generic_expr (dump_file, res);
              fprintf (dump_file, "\n");
            }
        }

      return NULL_TREE;
    }

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  VN_INFO (res)->valnum = val;
  VN_INFO (res)->visited = true;

  insertions++;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inserted ");
      print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
    }

  return res;
}
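/* An illustrative sketch; the SSA names here are made up for exposition:
   if the value of _5 is recorded as the single-statement expression
   (int) x_1 and no leader for it exists yet, eliminate_insert
   materializes tem_6 = (int) x_1 before the use point and makes tem_6
   the leader for that value, so the redundant computation downstream
   can be replaced by tem_6.  */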
void
eliminate_dom_walker::eliminate_stmt (basic_block b, gimple_stmt_iterator *gsi)
{
  tree sprime = NULL_TREE;
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_get_lhs (stmt);
  if (lhs && TREE_CODE (lhs) == SSA_NAME
      && !gimple_has_volatile_ops (stmt)
      /* See PR43491.  Do not replace a global register variable when
         it is the RHS of an assignment.  Do replace local register
         variables since gcc does not guarantee a local variable will
         be allocated in register.
         ???  The fix isn't effective here.  This should instead
         be ensured by not value-numbering them the same but treating
         them like volatiles?  */
      && !(gimple_assign_single_p (stmt)
           && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
               && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
               && is_global_var (gimple_assign_rhs1 (stmt)))))
    {
      sprime = eliminate_avail (b, lhs);
      if (!sprime)
        {
          /* If there is no existing usable leader but SCCVN thinks
             it has an expression it wants to use as replacement,
             insert that.  */
          tree val = VN_INFO (lhs)->valnum;
          if (val != VN_TOP
              && TREE_CODE (val) == SSA_NAME
              && VN_INFO (val)->needs_insertion
              && VN_INFO (val)->expr != NULL
              && (sprime = eliminate_insert (b, gsi, val)) != NULL_TREE)
            eliminate_push_avail (b, sprime);
        }

      /* If this now constitutes a copy duplicate points-to
         and range info appropriately.  This is especially
         important for inserted code.  See tree-ssa-copy.c
         for similar code.  */
      if (sprime
          && TREE_CODE (sprime) == SSA_NAME)
        {
          basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
          if (POINTER_TYPE_P (TREE_TYPE (lhs))
              && SSA_NAME_PTR_INFO (lhs)
              && ! SSA_NAME_PTR_INFO (sprime))
            {
              duplicate_ssa_name_ptr_info (sprime,
                                           SSA_NAME_PTR_INFO (lhs));
              if (b != sprime_b)
                mark_ptr_info_alignment_unknown
                  (SSA_NAME_PTR_INFO (sprime));
            }
          else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
                   && SSA_NAME_RANGE_INFO (lhs)
                   && ! SSA_NAME_RANGE_INFO (sprime)
                   && b == sprime_b)
            duplicate_ssa_name_range_info (sprime,
                                           SSA_NAME_RANGE_TYPE (lhs),
                                           SSA_NAME_RANGE_INFO (lhs));
        }

      /* Inhibit the use of an inserted PHI on a loop header when
         the address of the memory reference is a simple induction
         variable.  In other cases the vectorizer won't do anything
         anyway (either it's loop invariant or a complicated
         expression).  */
      if (sprime
          && TREE_CODE (sprime) == SSA_NAME
          && do_pre
          && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
          && loop_outer (b->loop_father)
          && has_zero_uses (sprime)
          && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
          && gimple_assign_load_p (stmt))
        {
          gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
          basic_block def_bb = gimple_bb (def_stmt);
          if (gimple_code (def_stmt) == GIMPLE_PHI
              && def_bb->loop_father->header == def_bb)
            {
              loop_p loop = def_bb->loop_father;
              ssa_op_iter iter;
              tree op;
              FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
                {
                  affine_iv iv;
                  def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
                  if (def_bb
                      && flow_bb_inside_loop_p (loop, def_bb)
                      && simple_iv (loop, loop, op, &iv, true))
                    break;
                }
              if (op != NULL_TREE)
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      fprintf (dump_file, "Not replacing ");
                      print_gimple_expr (dump_file, stmt, 0);
                      fprintf (dump_file, " with ");
                      print_generic_expr (dump_file, sprime);
                      fprintf (dump_file, " which would add a loop"
                               " carried dependence to loop %d\n",
                               loop->num);
                    }
                  /* Don't keep sprime available.  */
                  sprime = NULL_TREE;
                }
            }
        }

      if (sprime)
        {
          /* If we can propagate the value computed for LHS into
             all uses don't bother doing anything with this stmt.  */
          if (may_propagate_copy (lhs, sprime))
            {
              /* Mark it for removal.  */
              to_remove.safe_push (stmt);

              /* ???  Don't count copy/constant propagations.  */
              if (gimple_assign_single_p (stmt)
                  && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
                      || gimple_assign_rhs1 (stmt) == sprime))
                return;

              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "Replaced ");
                  print_gimple_expr (dump_file, stmt, 0);
                  fprintf (dump_file, " with ");
                  print_generic_expr (dump_file, sprime);
                  fprintf (dump_file, " in all uses of ");
                  print_gimple_stmt (dump_file, stmt, 0);
                }

              eliminations++;
              return;
            }

          /* If this is an assignment from our leader (which
             happens in the case the value-number is a constant)
             then there is nothing to do.  */
          if (gimple_assign_single_p (stmt)
              && sprime == gimple_assign_rhs1 (stmt))
            return;

          /* Else replace its RHS.  */
          bool can_make_abnormal_goto
            = is_gimple_call (stmt)
              && stmt_can_make_abnormal_goto (stmt);

          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Replaced ");
              print_gimple_expr (dump_file, stmt, 0);
              fprintf (dump_file, " with ");
              print_generic_expr (dump_file, sprime);
              fprintf (dump_file, " in ");
              print_gimple_stmt (dump_file, stmt, 0);
            }

          eliminations++;
          gimple *orig_stmt = stmt;
          if (!useless_type_conversion_p (TREE_TYPE (lhs),
                                          TREE_TYPE (sprime)))
            sprime = fold_convert (TREE_TYPE (lhs), sprime);
          tree vdef = gimple_vdef (stmt);
          tree vuse = gimple_vuse (stmt);
          propagate_tree_value_into_stmt (gsi, sprime);
          stmt = gsi_stmt (*gsi);
          update_stmt (stmt);
          /* In case the VDEF on the original stmt was released, value-number
             it to the VUSE.  This is to make vuse_ssa_val able to skip
             released virtual operands.  */
          if (vdef != gimple_vdef (stmt))
            {
              gcc_assert (SSA_NAME_IN_FREE_LIST (vdef));
              VN_INFO (vdef)->valnum = vuse;
            }

          /* If we removed EH side-effects from the statement, clean
             its EH information.  */
          if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
            {
              bitmap_set_bit (need_eh_cleanup,
                              gimple_bb (stmt)->index);
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "  Removed EH side-effects.\n");
            }

          /* Likewise for AB side-effects.  */
          if (can_make_abnormal_goto
              && !stmt_can_make_abnormal_goto (stmt))
            {
              bitmap_set_bit (need_ab_cleanup,
                              gimple_bb (stmt)->index);
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "  Removed AB side-effects.\n");
            }

          return;
        }
    }

  /* If the statement is a scalar store, see if the expression
     has the same value number as its rhs.  If so, the store is
     dead.  */
  if (gimple_assign_single_p (stmt)
      && !gimple_has_volatile_ops (stmt)
      && !is_gimple_reg (gimple_assign_lhs (stmt))
      && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
          || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
    {
      tree val;
      tree rhs = gimple_assign_rhs1 (stmt);
      vn_reference_t vnresult;
      val = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_WALKREWRITE,
                                 &vnresult, false);
      if (TREE_CODE (rhs) == SSA_NAME)
        rhs = VN_INFO (rhs)->valnum;
      if (val
          && operand_equal_p (val, rhs, 0))
        {
          /* We can only remove the later store if the former aliases
             at least all accesses the later one does or if the store
             was to readonly memory storing the same value.  */
          alias_set_type set = get_alias_set (lhs);
          if (! vnresult
              || vnresult->set == set
              || alias_set_subset_of (set, vnresult->set))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "Deleted redundant store ");
                  print_gimple_stmt (dump_file, stmt, 0);
                }

              /* Queue stmt for removal.  */
              to_remove.safe_push (stmt);
              return;
            }
        }
    }

  /* If this is a control statement value numbering left edges
     unexecuted on force the condition in a way consistent with
     that.  */
  if (gcond *cond = dyn_cast <gcond *> (stmt))
    {
      if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
          ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Removing unexecutable edge from ");
              print_gimple_stmt (dump_file, stmt, 0);
            }
          if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
              == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
            gimple_cond_make_true (cond);
          else
            gimple_cond_make_false (cond);
          update_stmt (cond);
          el_todo |= TODO_cleanup_cfg;
          return;
        }
    }

  bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
  bool was_noreturn = (is_gimple_call (stmt)
                       && gimple_call_noreturn_p (stmt));
  tree vdef = gimple_vdef (stmt);
  tree vuse = gimple_vuse (stmt);

  /* If we didn't replace the whole stmt (or propagate the result
     into all uses), replace all uses on this stmt with their
     leaders.  */
  bool modified = false;
  use_operand_p use_p;
  ssa_op_iter iter;
  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    {
      tree use = USE_FROM_PTR (use_p);
      /* ???  The call code above leaves stmt operands un-updated.  */
      if (TREE_CODE (use) != SSA_NAME)
        continue;
      if (SSA_NAME_IS_DEFAULT_DEF (use))
        /* ???  For default defs BB shouldn't matter, but we have to
           solve the inconsistency between rpo eliminate and
           dom eliminate avail valueization first.  */
        sprime = eliminate_avail (b, use);
      else
        /* Look for sth available at the definition block of the argument.
           This avoids inconsistencies between availability there which
           decides if the stmt can be removed and availability at the
           use site.  The SSA property ensures that things available
           at the definition are also available at uses.  */
        sprime = eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (use)), use);
      if (sprime && sprime != use
          && may_propagate_copy (use, sprime)
          /* We substitute into debug stmts to avoid excessive
             debug temporaries created by removed stmts, but we need
             to avoid doing so for inserted sprimes as we never want
             to create debug temporaries for them.  */
          && (!inserted_exprs
              || TREE_CODE (sprime) != SSA_NAME
              || !is_gimple_debug (stmt)
              || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
        {
          propagate_value (use_p, sprime);
          modified = true;
        }
    }

  /* Fold the stmt if modified, this canonicalizes MEM_REFs we propagated
     into which is a requirement for the IPA devirt machinery.  */
  gimple *old_stmt = stmt;
  if (modified)
    {
      /* If a formerly non-invariant ADDR_EXPR is turned into an
         invariant one it was on a separate stmt.  */
      if (gimple_assign_single_p (stmt)
          && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
        recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
      gimple_stmt_iterator prev = *gsi;
      gsi_prev (&prev);
      if (fold_stmt (gsi))
        {
          /* fold_stmt may have created new stmts in between
             the previous stmt and the folded stmt.  Mark
             all defs created there as varying to not confuse
             the SCCVN machinery as we're using that even during
             elimination.  */
          if (gsi_end_p (prev))
            prev = gsi_start_bb (b);
          else
            gsi_next (&prev);
          if (gsi_stmt (prev) != gsi_stmt (*gsi))
            do
              {
                tree def;
                ssa_op_iter dit;
                FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
                                           dit, SSA_OP_ALL_DEFS)
                  /* As existing DEFs may move between stmts
                     only process new ones.  */
                  if (! has_VN_INFO (def))
                    {
                      VN_INFO (def)->valnum = def;
                      VN_INFO (def)->visited = true;
                    }
                if (gsi_stmt (prev) == gsi_stmt (*gsi))
                  break;
                gsi_next (&prev);
              }
            while (1);
        }
      stmt = gsi_stmt (*gsi);
      /* In case we folded the stmt away schedule the NOP for removal.  */
      if (gimple_nop_p (stmt))
        to_remove.safe_push (stmt);
    }

  /* Visit indirect calls and turn them into direct calls if
     possible using the devirtualization machinery.  Do this before
     checking for required EH/abnormal/noreturn cleanup as devirt
     may expose more of those.  */
  if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      tree fn = gimple_call_fn (call_stmt);
      if (fn
          && flag_devirtualize
          && virtual_method_call_p (fn))
        {
          tree otr_type = obj_type_ref_class (fn);
          unsigned HOST_WIDE_INT otr_tok
            = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
          tree instance;
          ipa_polymorphic_call_context context (current_function_decl,
                                                fn, stmt, &instance);
          context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
                                    otr_type, stmt);
          bool final;
          vec <cgraph_node *> targets
            = possible_polymorphic_call_targets (obj_type_ref_class (fn),
                                                 otr_tok, context, &final);
          if (dump_file)
            dump_possible_polymorphic_call_targets (dump_file,
                                                    obj_type_ref_class (fn),
                                                    otr_tok, context);
          if (final && targets.length () <= 1 && dbg_cnt (devirt))
            {
              if (targets.length () == 1)
                fn = targets[0]->decl;
              else
                fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
              if (dump_enabled_p ())
                {
                  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
                                   "converting indirect call to "
                                   "function %s\n",
                                   lang_hooks.decl_printable_name (fn, 2));
                }
              gimple_call_set_fndecl (call_stmt, fn);
              /* If changing the call to __builtin_unreachable
                 or similar noreturn function, adjust gimple_call_fntype
                 too.  */
              if (gimple_call_noreturn_p (call_stmt)
                  && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
                  && TYPE_ARG_TYPES (TREE_TYPE (fn))
                  && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
                      == void_type_node))
                gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
              maybe_remove_unused_call_args (cfun, call_stmt);
              modified = true;
            }
        }
    }

  if (modified)
    {
      /* When changing a call into a noreturn call, cfg cleanup
         is needed to fix up the noreturn call.  */
      if (!was_noreturn
          && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
        to_fixup.safe_push (stmt);
      /* When changing a condition or switch into one we know what
         edge will be executed, schedule a cfg cleanup.  */
      if ((gimple_code (stmt) == GIMPLE_COND
           && (gimple_cond_true_p (as_a <gcond *> (stmt))
               || gimple_cond_false_p (as_a <gcond *> (stmt))))
          || (gimple_code (stmt) == GIMPLE_SWITCH
              && TREE_CODE (gimple_switch_index
                              (as_a <gswitch *> (stmt))) == INTEGER_CST))
        el_todo |= TODO_cleanup_cfg;
      /* If we removed EH side-effects from the statement, clean
         its EH information.  */
      if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
        {
          bitmap_set_bit (need_eh_cleanup,
                          gimple_bb (stmt)->index);
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "  Removed EH side-effects.\n");
        }
      /* Likewise for AB side-effects.  */
      if (can_make_abnormal_goto
          && !stmt_can_make_abnormal_goto (stmt))
        {
          bitmap_set_bit (need_ab_cleanup,
                          gimple_bb (stmt)->index);
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "  Removed AB side-effects.\n");
        }
      update_stmt (stmt);
      /* In case the VDEF on the original stmt was released, value-number
         it to the VUSE.  This is to make vuse_ssa_val able to skip
         released virtual operands.  */
      if (vdef && SSA_NAME_IN_FREE_LIST (vdef))
        VN_INFO (vdef)->valnum = vuse;
    }

  /* Make new values available - for fully redundant LHS we
     continue with the next stmt above and skip this.  */
  def_operand_p defp;
  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
    eliminate_push_avail (b, DEF_FROM_PTR (defp));
}
/* Perform elimination for the basic-block B during the domwalk.  */

edge
eliminate_dom_walker::before_dom_children (basic_block b)
{
  /* Mark new bb.  */
  avail_stack.safe_push (NULL_TREE);

  /* Skip unreachable blocks marked unreachable during the SCCVN domwalk.  */
  if (!(b->flags & BB_EXECUTABLE))
    return NULL;

  vn_context_bb = b;

  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
    {
      gphi *phi = gsi.phi ();
      tree res = PHI_RESULT (phi);

      if (virtual_operand_p (res))
        {
          gsi_next (&gsi);
          continue;
        }

      tree sprime = eliminate_avail (b, res);
      if (sprime
          && sprime != res)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Replaced redundant PHI node defining ");
              print_generic_expr (dump_file, res);
              fprintf (dump_file, " with ");
              print_generic_expr (dump_file, sprime);
              fprintf (dump_file, "\n");
            }

          /* If we inserted this PHI node ourself, it's not an elimination.  */
          if (! inserted_exprs
              || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
            eliminations++;

          /* If we will propagate into all uses don't bother to do
             anything.  */
          if (may_propagate_copy (res, sprime))
            {
              /* Mark the PHI for removal.  */
              to_remove.safe_push (phi);
              gsi_next (&gsi);
              continue;
            }

          remove_phi_node (&gsi, false);

          if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
            sprime = fold_convert (TREE_TYPE (res), sprime);
          gimple *stmt = gimple_build_assign (res, sprime);
          gimple_stmt_iterator gsi2 = gsi_after_labels (b);
          gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
          continue;
        }

      eliminate_push_avail (b, res);
      gsi_next (&gsi);
    }

  for (gimple_stmt_iterator gsi = gsi_start_bb (b);
       !gsi_end_p (gsi); gsi_next (&gsi))
    eliminate_stmt (b, &gsi);

  /* Replace destination PHI arguments.  */
  edge_iterator ei;
  edge e;
  FOR_EACH_EDGE (e, ei, b->succs)
    if (e->flags & EDGE_EXECUTABLE)
      for (gphi_iterator gsi = gsi_start_phis (e->dest);
           !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gphi *phi = gsi.phi ();
          use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
          tree arg = USE_FROM_PTR (use_p);
          if (TREE_CODE (arg) != SSA_NAME
              || virtual_operand_p (arg))
            continue;
          tree sprime = eliminate_avail (b, arg);
          if (sprime && may_propagate_copy (arg, sprime))
            propagate_value (use_p, sprime);
        }

  vn_context_bb = NULL;

  return NULL;
}
/* Make no longer available leaders no longer available.  */

void
eliminate_dom_walker::after_dom_children (basic_block)
{
  tree entry;
  while ((entry = avail_stack.pop ()) != NULL_TREE)
    {
      tree valnum = VN_INFO (entry)->valnum;
      tree old = avail[SSA_NAME_VERSION (valnum)];
      if (old == entry)
        avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
      else
        avail[SSA_NAME_VERSION (valnum)] = entry;
    }
}
/* Remove queued stmts and perform delayed cleanups.  */

unsigned
eliminate_dom_walker::eliminate_cleanup (bool region_p)
{
  statistics_counter_event (cfun, "Eliminated", eliminations);
  statistics_counter_event (cfun, "Insertions", insertions);

  /* We cannot remove stmts during BB walk, especially not release SSA
     names there as this confuses the VN machinery.  The stmts ending
     up in to_remove are either stores or simple copies.
     Remove stmts in reverse order to make debug stmt creation possible.  */
  while (!to_remove.is_empty ())
    {
      bool do_release_defs = true;
      gimple *stmt = to_remove.pop ();

      /* When we are value-numbering a region we do not require exit PHIs to
         be present so we have to make sure to deal with uses outside of the
         region of stmts that we thought are eliminated.
         ??? Note we may be confused by uses in dead regions we didn't run
         elimination on.  Rather than checking individual uses we accept
         dead copies to be generated here (gcc.c-torture/execute/20060905-1.c
         contains such example).  */
      if (region_p)
        {
          if (gphi *phi = dyn_cast <gphi *> (stmt))
            {
              tree lhs = gimple_phi_result (phi);
              if (!has_zero_uses (lhs))
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "Keeping eliminated stmt live "
                             "as copy because of out-of-region uses\n");
                  tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
                  gimple *copy = gimple_build_assign (lhs, sprime);
                  gimple_stmt_iterator gsi
                    = gsi_after_labels (gimple_bb (stmt));
                  gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
                  do_release_defs = false;
                }
            }
          else if (tree lhs = gimple_get_lhs (stmt))
            if (TREE_CODE (lhs) == SSA_NAME
                && !has_zero_uses (lhs))
              {
                if (dump_file && (dump_flags & TDF_DETAILS))
                  fprintf (dump_file, "Keeping eliminated stmt live "
                           "as copy because of out-of-region uses\n");
                tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
                gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
                if (is_gimple_assign (stmt))
                  {
                    gimple_assign_set_rhs_from_tree (&gsi, sprime);
                    update_stmt (gsi_stmt (gsi));
                    continue;
                  }
                else
                  {
                    gimple *copy = gimple_build_assign (lhs, sprime);
                    gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
                    do_release_defs = false;
                  }
              }
        }

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Removing dead stmt ");
          print_gimple_stmt (dump_file, stmt, 0, TDF_NONE);
        }

      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      if (gimple_code (stmt) == GIMPLE_PHI)
        remove_phi_node (&gsi, do_release_defs);
      else
        {
          basic_block bb = gimple_bb (stmt);
          unlink_stmt_vdef (stmt);
          if (gsi_remove (&gsi, true))
            bitmap_set_bit (need_eh_cleanup, bb->index);
          if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
            bitmap_set_bit (need_ab_cleanup, bb->index);
          if (do_release_defs)
            release_defs (stmt);
        }

      /* Removing a stmt may expose a forwarder block.  */
      el_todo |= TODO_cleanup_cfg;
    }

  /* Fixup stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the dominator walk.  Do this
     in reverse order so we don't inadvertently remove a stmt we want to
     fixup by visiting a dominating now noreturn call first.  */
  while (!to_fixup.is_empty ())
    {
      gimple *stmt = to_fixup.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Fixing up noreturn call ");
          print_gimple_stmt (dump_file, stmt, 0);
        }

      if (fixup_noreturn_call (stmt))
        el_todo |= TODO_cleanup_cfg;
    }

  bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
  bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);

  if (do_eh_cleanup)
    gimple_purge_all_dead_eh_edges (need_eh_cleanup);

  if (do_ab_cleanup)
    gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);

  if (do_eh_cleanup || do_ab_cleanup)
    el_todo |= TODO_cleanup_cfg;

  return el_todo;
}
/* Eliminate fully redundant computations.  */

unsigned
eliminate_with_rpo_vn (bitmap inserted_exprs)
{
  eliminate_dom_walker walker (CDI_DOMINATORS, inserted_exprs);

  walker.walk (cfun->cfg->x_entry_block_ptr);
  return walker.eliminate_cleanup ();
}
static unsigned
do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
           bool iterate, bool eliminate);
void
run_rpo_vn (vn_lookup_kind kind)
{
  default_vn_walk_kind = kind;
  do_rpo_vn (cfun, NULL, NULL, true, false);

  /* ???  Prune requirement of these.  */
  constant_to_value_id = new hash_table<vn_constant_hasher> (23);
  constant_value_ids = BITMAP_ALLOC (NULL);

  /* Initialize the value ids and prune out remaining VN_TOPs
     from dead code.  */
  tree name;
  unsigned i;
  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      vn_ssa_aux_t info = VN_INFO (name);
      if (!info->visited
          || info->valnum == VN_TOP)
        info->valnum = name;
      if (info->valnum == name)
        info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
        info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      vn_ssa_aux_t info = VN_INFO (name);
      if (TREE_CODE (info->valnum) == SSA_NAME
          && info->valnum != name
          && info->value_id != VN_INFO (info->valnum)->value_id)
        info->value_id = VN_INFO (info->valnum)->value_id;
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      FOR_EACH_SSA_NAME (i, name, cfun)
        {
          if (VN_INFO (name)->visited
              && SSA_VAL (name) != name)
            {
              print_generic_expr (dump_file, name);
              fprintf (dump_file, " = ");
              print_generic_expr (dump_file, SSA_VAL (name));
              fprintf (dump_file, " (%04d)\n", VN_INFO (name)->value_id);
            }
        }
    }
}
/* Free VN associated data structures.  */

void
free_rpo_vn (void)
{
  free_vn_table (valid_info);
  XDELETE (valid_info);
  obstack_free (&vn_tables_obstack, NULL);
  obstack_free (&vn_tables_insert_obstack, NULL);

  vn_ssa_aux_iterator_type it;
  vn_ssa_aux_t info;
  FOR_EACH_HASH_TABLE_ELEMENT (*vn_ssa_aux_hash, info, vn_ssa_aux_t, it)
    if (info->needs_insertion)
      release_ssa_name (info->name);
  obstack_free (&vn_ssa_aux_obstack, NULL);
  delete vn_ssa_aux_hash;

  delete constant_to_value_id;
  constant_to_value_id = NULL;
  BITMAP_FREE (constant_value_ids);
}
/* Adaptor to the elimination engine using RPO availability.  */

class rpo_elim : public eliminate_dom_walker
{
public:
  rpo_elim (basic_block entry_)
    : eliminate_dom_walker (CDI_DOMINATORS, NULL), entry (entry_) {}
  ~rpo_elim ();

  virtual tree eliminate_avail (basic_block, tree op);

  virtual void eliminate_push_avail (basic_block, tree);

  basic_block entry;
  /* Instead of having a local availability lattice for each
     basic-block and availability at X defined as union of
     the local availabilities at X and its dominators we're
     turning this upside down and track availability per
     value given values are usually made available at very
     few points (at least one).
     So we have a value -> vec<location, leader> map where
     LOCATION is specifying the basic-block LEADER is made
     available for VALUE.  We push to this vector in RPO
     order thus for iteration we can simply pop the last
     entries.
     LOCATION is the basic-block index and LEADER is its
     SSA name version.  */
  /* ???  We'd like to use auto_vec here with embedded storage
     but that doesn't play well until we can provide move
     constructors and use std::move on hash-table expansion.
     So for now this is a bit more expensive than necessary.
     We eventually want to switch to a chaining scheme like
     for hashtable entries for unwinding which would make
     making the vector part of the vn_ssa_aux structure possible.  */
  typedef hash_map<tree, vec<std::pair<int, int> > > rpo_avail_t;
  rpo_avail_t m_rpo_avail;
};

/* Global RPO state for access from hooks.  */
static rpo_elim *rpo_avail;
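/* An illustrative sketch of the map above; the SSA versions here are
   made up for exposition: if value _3 is made available by leader _7
   in BB2 and by leader _9 in BB5, m_rpo_avail maps _3 to the vector
   [(2, 7), (5, 9)], pushed in RPO order.  A lookup from a block
   dominated by BB5 inspects the vector from the back and returns _9.  */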
/* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables.  */

static tree
vn_lookup_simplify_result (gimple_match_op *res_op)
{
  if (!res_op->code.is_tree_code ())
    return NULL_TREE;
  tree *ops = res_op->ops;
  unsigned int length = res_op->num_ops;
  if (res_op->code == CONSTRUCTOR
      /* ???  We're arriving here with SCCVNs view, decomposed CONSTRUCTOR
         and GIMPLEs / match-and-simplifies, CONSTRUCTOR as GENERIC tree.  */
      && TREE_CODE (res_op->ops[0]) == CONSTRUCTOR)
    {
      length = CONSTRUCTOR_NELTS (res_op->ops[0]);
      ops = XALLOCAVEC (tree, length);
      for (unsigned i = 0; i < length; ++i)
        ops[i] = CONSTRUCTOR_ELT (res_op->ops[0], i)->value;
    }
  vn_nary_op_t vnresult = NULL;
  tree res = vn_nary_op_lookup_pieces (length, (tree_code) res_op->code,
                                       res_op->type, ops, &vnresult);
  /* If this is used from expression simplification make sure to
     return an available expression.  */
  if (res && TREE_CODE (res) == SSA_NAME && mprts_hook && rpo_avail)
    res = rpo_avail->eliminate_avail (vn_context_bb, res);
  return res;
}
rpo_elim::~rpo_elim ()
{
  /* Release the avail vectors.  */
  for (rpo_avail_t::iterator i = m_rpo_avail.begin ();
       i != m_rpo_avail.end (); ++i)
    (*i).second.release ();
}
/* Return a leader for OPs value that is valid at BB.  */

tree
rpo_elim::eliminate_avail (basic_block bb, tree op)
{
  tree valnum = SSA_VAL (op);
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (valnum))
        return valnum;
      vec<std::pair<int, int> > *av = m_rpo_avail.get (valnum);
      if (!av || av->is_empty ())
        return NULL_TREE;
      int i = av->length () - 1;
      if ((*av)[i].first == bb->index)
        /* On tramp3d 90% of the cases are here.  */
        return ssa_name ((*av)[i].second);
      do
        {
          basic_block abb = BASIC_BLOCK_FOR_FN (cfun, (*av)[i].first);
          /* ???  During elimination we have to use availability at the
             definition site of a use we try to replace.  This
             is required to not run into inconsistencies because
             of dominated_by_p_w_unex behavior and removing a definition
             while not replacing all uses.
             ???  We could try to consistently walk dominators
             ignoring non-executable regions.  The nearest common
             dominator of bb and abb is where we can stop walking.  We
             may also be able to "pre-compute" (bits of) the next immediate
             (non-)dominator during the RPO walk when marking edges as
             executable.  */
          if (dominated_by_p_w_unex (bb, abb))
            {
              tree leader = ssa_name ((*av)[i].second);
              /* Prevent eliminations that break loop-closed SSA.  */
              if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
                  && ! SSA_NAME_IS_DEFAULT_DEF (leader)
                  && ! flow_bb_inside_loop_p (gimple_bb (SSA_NAME_DEF_STMT
                                                           (leader))->loop_father,
                                              bb->loop_father))
                return NULL_TREE;
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  print_generic_expr (dump_file, leader);
                  fprintf (dump_file, " is available for ");
                  print_generic_expr (dump_file, valnum);
                  fprintf (dump_file, "\n");
                }
              /* On tramp3d 99% of the _remaining_ cases succeed at
                 the first entry.  */
              return leader;
            }
          /* ???  Can we somehow skip to the immediate dominator
             RPO index (bb_to_rpo)?  Again, maybe not worth, on
             tramp3d the worst number of elements in the vector is 9.  */
        }
      while (--i >= 0);
    }
  else if (valnum != VN_TOP)
    /* valnum is is_gimple_min_invariant.  */
    return valnum;
  return NULL_TREE;
}
/* Make LEADER a leader for its value at BB.  */

void
rpo_elim::eliminate_push_avail (basic_block bb, tree leader)
{
  tree valnum = VN_INFO (leader)->valnum;
  if (valnum == VN_TOP)
    return;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Making available beyond BB%d ", bb->index);
      print_generic_expr (dump_file, leader);
      fprintf (dump_file, " for value ");
      print_generic_expr (dump_file, valnum);
      fprintf (dump_file, "\n");
    }
  bool existed;
  vec<std::pair<int, int> > &av = m_rpo_avail.get_or_insert (valnum, &existed);
  if (!existed)
    {
      new (&av) vec<std::pair<int, int> >;
      av.reserve_exact (2);
    }
  av.safe_push (std::make_pair (bb->index, SSA_NAME_VERSION (leader)));
}
/* Valueization hook for RPO VN plus required state.  */

tree
rpo_vn_valueize (tree name)
{
  if (TREE_CODE (name) == SSA_NAME)
    {
      vn_ssa_aux_t val = VN_INFO (name);
      if (val)
        {
          tree tem = val->valnum;
          if (tem != VN_TOP && tem != name)
            {
              if (TREE_CODE (tem) != SSA_NAME)
                return tem;
              /* For all values we only valueize to an available leader
                 which means we can use SSA name info without restriction.  */
              tem = rpo_avail->eliminate_avail (vn_context_bb, tem);
              if (tem)
                return tem;
            }
        }
    }
  return name;
}
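/* An illustrative sketch; the SSA names here are made up for exposition:
   valueizing a use of x_5 whose value number is y_3 only returns y_3 if
   a leader for that value is available at vn_context_bb; otherwise the
   use is left alone, which keeps simplification results substitutable
   at the point they are needed.  */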
/* Insert on PRED_E predicates derived from CODE OPS being true besides the
   inverted condition.  */

static void
insert_related_predicates_on_edge (enum tree_code code, tree *ops, edge pred_e)
{
  switch (code)
    {
    case LT_EXPR:
      /* a < b -> a {!,<}= b */
      vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
                                           ops, boolean_true_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, LE_EXPR, boolean_type_node,
                                           ops, boolean_true_node, 0, pred_e);
      /* a < b -> ! a {>,=} b */
      vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
                                           ops, boolean_false_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
                                           ops, boolean_false_node, 0, pred_e);
      break;
    case GT_EXPR:
      /* a > b -> a {!,>}= b */
      vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
                                           ops, boolean_true_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, GE_EXPR, boolean_type_node,
                                           ops, boolean_true_node, 0, pred_e);
      /* a > b -> ! a {<,=} b */
      vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
                                           ops, boolean_false_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
                                           ops, boolean_false_node, 0, pred_e);
      break;
    case EQ_EXPR:
      /* a == b -> ! a {<,>} b */
      vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
                                           ops, boolean_false_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
                                           ops, boolean_false_node, 0, pred_e);
      break;
    case LE_EXPR:
    case GE_EXPR:
    case NE_EXPR:
      /* Nothing besides inverted condition.  */
      break;
    default:;
    }
}
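/* An illustrative sketch of the predicate insertion above; the SSA
   names are made up for exposition: for if (a_1 < b_2) the true edge
   gets a_1 != b_2 and a_1 <= b_2 recorded as true, and a_1 > b_2 and
   a_1 == b_2 recorded as false.  A later redundant test such as
   if (a_1 == b_2) on that path then simplifies from the recorded
   predicated values without building any new expressions.  */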
/* Main stmt worker for RPO VN, process BB.  */

static unsigned
process_bb (rpo_elim &avail, basic_block bb,
            bool bb_visited, bool iterate_phis, bool iterate, bool eliminate,
            bool do_region, bitmap exit_bbs)
{
  unsigned todo = 0;
  edge_iterator ei;
  edge e;

  vn_context_bb = bb;

  /* If we are in loop-closed SSA preserve this state.  This is
     relevant when called on regions from outside of FRE/PRE.  */
  bool lc_phi_nodes = false;
  if (loops_state_satisfies_p (LOOP_CLOSED_SSA))
    FOR_EACH_EDGE (e, ei, bb->preds)
      if (e->src->loop_father != e->dest->loop_father
          && flow_loop_nested_p (e->dest->loop_father,
                                 e->src->loop_father))
        {
          lc_phi_nodes = true;
          break;
        }

  /* Value-number all defs in the basic-block.  */
  for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      tree res = PHI_RESULT (phi);
      vn_ssa_aux_t res_info = VN_INFO (res);
      if (!bb_visited)
        {
          gcc_assert (!res_info->visited);
          res_info->valnum = VN_TOP;
          res_info->visited = true;
        }

      /* When not iterating force backedge values to varying.  */
      visit_stmt (phi, !iterate_phis);
      if (virtual_operand_p (res))
        continue;

      /* The interesting case is gcc.dg/tree-ssa/pr22230.c for correctness
         how we handle backedges and availability.
         And gcc.dg/tree-ssa/ssa-sccvn-2.c for optimization.  */
      tree val = res_info->valnum;
      if (res != val && !iterate && eliminate)
        {
          if (tree leader = avail.eliminate_avail (bb, res))
            if (leader != res
                /* Preserve loop-closed SSA form.  */
                && (! lc_phi_nodes
                    || is_gimple_min_invariant (leader)))
              {
                if (dump_file && (dump_flags & TDF_DETAILS))
                  {
                    fprintf (dump_file, "Replaced redundant PHI node "
                             "defining ");
                    print_generic_expr (dump_file, res);
                    fprintf (dump_file, " with ");
                    print_generic_expr (dump_file, leader);
                    fprintf (dump_file, "\n");
                  }
                avail.eliminations++;

                if (may_propagate_copy (res, leader))
                  {
                    /* Schedule for removal.  */
                    avail.to_remove.safe_push (phi);
                    continue;
                  }
                /* ???  Else generate a copy stmt.  */
              }
        }
      /* Only make defs available that not already are.  But make
         sure loop-closed SSA PHI node defs are picked up for
         downstream uses.  */
      if (lc_phi_nodes
          || res == val
          || ! avail.eliminate_avail (bb, res))
        avail.eliminate_push_avail (bb, res);
    }

  /* For empty BBs mark outgoing edges executable.  For non-empty BBs
     we do this when processing the last stmt as we have to do this
     before elimination which otherwise forces GIMPLE_CONDs to
     if (1 != 0) style when seeing non-executable edges.  */
  if (gsi_end_p (gsi_start_bb (bb)))
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
        if (!(e->flags & EDGE_EXECUTABLE))
          {
            if (dump_file && (dump_flags & TDF_DETAILS))
              fprintf (dump_file,
                       "marking outgoing edge %d -> %d executable\n",
                       e->src->index, e->dest->index);
            gcc_checking_assert (iterate || !(e->flags & EDGE_DFS_BACK));
            e->flags |= EDGE_EXECUTABLE;
            e->dest->flags |= BB_EXECUTABLE;
          }
    }
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      ssa_op_iter i;
      tree op;
      if (!bb_visited)
        {
          FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
            {
              vn_ssa_aux_t op_info = VN_INFO (op);
              gcc_assert (!op_info->visited);
              op_info->valnum = VN_TOP;
              op_info->visited = true;
            }

          /* We somehow have to deal with uses that are not defined
             in the processed region.  Forcing unvisited uses to
             varying here doesn't play well with def-use following during
             expression simplification, so we deal with this by checking
             the visited flag in SSA_VAL.  */
        }

      visit_stmt (gsi_stmt (gsi));

      gimple *last = gsi_stmt (gsi);
      e = NULL;
      switch (gimple_code (last))
        {
        case GIMPLE_SWITCH:
          e = find_taken_edge (bb, vn_valueize (gimple_switch_index
                                                  (as_a <gswitch *> (last))));
          break;
        case GIMPLE_COND:
          {
            tree lhs = vn_valueize (gimple_cond_lhs (last));
            tree rhs = vn_valueize (gimple_cond_rhs (last));
            tree val = gimple_simplify (gimple_cond_code (last),
                                        boolean_type_node, lhs, rhs,
                                        NULL, vn_valueize);
            /* If the condition didn't simplify see if we have recorded
               an expression from so far taken edges.  */
            if (! val || TREE_CODE (val) != INTEGER_CST)
              {
                vn_nary_op_t vnresult;
                tree ops[2];
                ops[0] = lhs;
                ops[1] = rhs;
                val = vn_nary_op_lookup_pieces (2, gimple_cond_code (last),
                                                boolean_type_node, ops,
                                                &vnresult);
                /* Did we get a predicated value?  */
                if (! val && vnresult && vnresult->predicated_values)
                  {
                    val = vn_nary_op_get_predicated_value (vnresult, bb);
                    if (val && dump_file && (dump_flags & TDF_DETAILS))
                      {
                        fprintf (dump_file, "Got predicated value ");
                        print_generic_expr (dump_file, val, TDF_NONE);
                        fprintf (dump_file, " for ");
                        print_gimple_stmt (dump_file, last, TDF_SLIM);
                      }
                  }
              }
            if (val)
              e = find_taken_edge (bb, val);
            if (! e)
              {
                /* If we didn't manage to compute the taken edge then
                   push predicated expressions for the condition itself
                   and related conditions to the hashtables.  This allows
                   simplification of redundant conditions which is
                   important as early cleanup.  */
                edge true_e, false_e;
                extract_true_false_edges_from_block (bb, &true_e, &false_e);
                enum tree_code code = gimple_cond_code (last);
                enum tree_code icode
                  = invert_tree_comparison (code, HONOR_NANS (lhs));
                tree ops[2];
                ops[0] = lhs;
                ops[1] = rhs;
                if (do_region
                    && bitmap_bit_p (exit_bbs, true_e->dest->index))
                  true_e = NULL;
                if (do_region
                    && bitmap_bit_p (exit_bbs, false_e->dest->index))
                  false_e = NULL;
                if (true_e)
                  vn_nary_op_insert_pieces_predicated
                    (2, code, boolean_type_node, ops,
                     boolean_true_node, 0, true_e);
                if (false_e)
                  vn_nary_op_insert_pieces_predicated
                    (2, code, boolean_type_node, ops,
                     boolean_false_node, 0, false_e);
                if (icode != ERROR_MARK)
                  {
                    if (true_e)
                      vn_nary_op_insert_pieces_predicated
                        (2, icode, boolean_type_node, ops,
                         boolean_false_node, 0, true_e);
                    if (false_e)
                      vn_nary_op_insert_pieces_predicated
                        (2, icode, boolean_type_node, ops,
                         boolean_true_node, 0, false_e);
                  }
                /* Relax for non-integers, inverted condition handled
                   above.  */
                if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
                  {
                    if (true_e)
                      insert_related_predicates_on_edge (code, ops, true_e);
                    if (false_e)
                      insert_related_predicates_on_edge (icode, ops, false_e);
                  }
              }
            break;
          }
        case GIMPLE_GOTO:
          e = find_taken_edge (bb, vn_valueize (gimple_goto_dest (last)));
          break;
        default:
          e = NULL;
        }
      if (e)
        {
          todo = TODO_cleanup_cfg;
          if (!(e->flags & EDGE_EXECUTABLE))
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file,
                         "marking known outgoing %sedge %d -> %d executable\n",
                         e->flags & EDGE_DFS_BACK ? "back-" : "",
                         e->src->index, e->dest->index);
              gcc_checking_assert (iterate || !(e->flags & EDGE_DFS_BACK));
              e->flags |= EDGE_EXECUTABLE;
              e->dest->flags |= BB_EXECUTABLE;
            }
        }
      else if (gsi_one_before_end_p (gsi))
        {
          FOR_EACH_EDGE (e, ei, bb->succs)
            if (!(e->flags & EDGE_EXECUTABLE))
              {
                if (dump_file && (dump_flags & TDF_DETAILS))
                  fprintf (dump_file,
                           "marking outgoing edge %d -> %d executable\n",
                           e->src->index, e->dest->index);
                gcc_checking_assert (iterate || !(e->flags & EDGE_DFS_BACK));
                e->flags |= EDGE_EXECUTABLE;
                e->dest->flags |= BB_EXECUTABLE;
              }
        }

      /* Eliminate.  That also pushes to avail.  */
      if (eliminate && ! iterate)
        avail.eliminate_stmt (bb, &gsi);
      else
        /* If not eliminating, make all not already available defs
           available.  */
        FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_DEF)
          if (! avail.eliminate_avail (bb, op))
            avail.eliminate_push_avail (bb, op);
    }

  /* Eliminate in destination PHI arguments.  Always substitute in dest
     PHIs, even for non-executable edges.  This handles region
     exit PHIs.  */
  if (!iterate && eliminate)
    FOR_EACH_EDGE (e, ei, bb->succs)
      for (gphi_iterator gsi = gsi_start_phis (e->dest);
           !gsi_end_p (gsi); gsi_next (&gsi))
        {
          gphi *phi = gsi.phi ();
          use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
          tree arg = USE_FROM_PTR (use_p);
          if (TREE_CODE (arg) != SSA_NAME
              || virtual_operand_p (arg))
            continue;
          tree sprime;
          if (SSA_NAME_IS_DEFAULT_DEF (arg))
            {
              sprime = SSA_VAL (arg);
              gcc_assert (TREE_CODE (sprime) != SSA_NAME
                          || SSA_NAME_IS_DEFAULT_DEF (sprime));
            }
          else
            /* Look for sth available at the definition block of the argument.
               This avoids inconsistencies between availability there which
               decides if the stmt can be removed and availability at the
               use site.  The SSA property ensures that things available
               at the definition are also available at uses.  */
            sprime = avail.eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (arg)),
                                            arg);
          if (sprime
              && may_propagate_copy (arg, sprime))
            propagate_value (use_p, sprime);
        }

  vn_context_bb = NULL;
  return todo;
}
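/* An illustrative sketch of edge executability above; the SSA names are
   made up for exposition: given

     if (i_1 < n_2) goto bb3; else goto bb4;

   where the valueized condition simplifies to 1, only the edge to bb3
   is marked executable.  bb4 stays unreachable unless some other
   executable edge reaches it, and PHIs in bb4 never see the value from
   this edge.  */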
/* Unwind state per basic-block.  */

struct unwind_state
{
  /* Times this block has been visited.  */
  unsigned visited;
  /* Whether to handle this as iteration point or whether to treat
     incoming backedge PHI values as varying.  */
  bool iterate;
  void *ob_top;
  vn_reference_t ref_top;
  vn_phi_t phi_top;
  vn_nary_op_t nary_top;
};
/* Unwind the RPO VN state for iteration.  */

static void
do_unwind (unwind_state *to, int rpo_idx, rpo_elim &avail, int *bb_to_rpo)
{
  gcc_assert (to->iterate);
  for (; last_inserted_nary != to->nary_top;
       last_inserted_nary = last_inserted_nary->next)
    {
      vn_nary_op_t *slot;
      slot = valid_info->nary->find_slot_with_hash
	(last_inserted_nary, last_inserted_nary->hashcode, NO_INSERT);
      /* Predication causes the need to restore previous state.  */
      if ((*slot)->unwind_to)
	*slot = (*slot)->unwind_to;
      else
	valid_info->nary->clear_slot (slot);
    }
  for (; last_inserted_phi != to->phi_top;
       last_inserted_phi = last_inserted_phi->next)
    {
      vn_phi_t *slot;
      slot = valid_info->phis->find_slot_with_hash
	(last_inserted_phi, last_inserted_phi->hashcode, NO_INSERT);
      valid_info->phis->clear_slot (slot);
    }
  for (; last_inserted_ref != to->ref_top;
       last_inserted_ref = last_inserted_ref->next)
    {
      vn_reference_t *slot;
      slot = valid_info->references->find_slot_with_hash
	(last_inserted_ref, last_inserted_ref->hashcode, NO_INSERT);
      (*slot)->operands.release ();
      valid_info->references->clear_slot (slot);
    }
  obstack_free (&vn_tables_obstack, to->ob_top);

  /* Prune [rpo_idx, ] from avail.  */
  /* ???  This is O(number-of-values-in-region) which is
     O(region-size) rather than O(iteration-piece).  */
  for (rpo_elim::rpo_avail_t::iterator i = avail.m_rpo_avail.begin ();
       i != avail.m_rpo_avail.end (); ++i)
    {
      while (! (*i).second.is_empty ())
	{
	  if (bb_to_rpo[(*i).second.last ().first] < rpo_idx)
	    break;
	  (*i).second.pop ();
	}
    }
}
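/* A hypothetical example of the pruning above: with rpo_idx == 4 and an
   availability stack recording pushes from blocks at RPO positions
   1, 3 and 6, only the position-6 entry is popped; the loop stops at
   position 3 because 3 < 4.  */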
/* Do VN on a SEME region specified by ENTRY and EXIT_BBS in FN.
   If ITERATE is true then treat backedges optimistically as not
   executed and iterate.  If ELIMINATE is true then perform
   elimination, otherwise leave that to the caller.  */

static unsigned
do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
	   bool iterate, bool eliminate)
{
  unsigned todo = 0;

  /* We currently do not support region-based iteration when
     elimination is requested.  */
  gcc_assert (!entry || !iterate || !eliminate);
  /* When iterating we need loop info up-to-date.  */
  gcc_assert (!iterate || !loops_state_satisfies_p (LOOPS_NEED_FIXUP));

  bool do_region = entry != NULL;
  if (!do_region)
    {
      entry = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fn));
      exit_bbs = BITMAP_ALLOC (NULL);
      bitmap_set_bit (exit_bbs, EXIT_BLOCK);
    }

  int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS);
  int n = rev_post_order_and_mark_dfs_back_seme (fn, entry, exit_bbs,
						 iterate, rpo);
  /* rev_post_order_and_mark_dfs_back_seme fills RPO in reverse order.  */
  for (int i = 0; i < n / 2; ++i)
    std::swap (rpo[i], rpo[n-i-1]);
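  /* For instance, for a diamond bb2 -> {bb3, bb4} -> bb5 the swapped
     array is a reverse post-order such as { 2, 3, 4, 5 }: every block
     appears after all of its non-backedge predecessors.  */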
  if (!do_region)
    BITMAP_FREE (exit_bbs);

  int *bb_to_rpo = XNEWVEC (int, last_basic_block_for_fn (fn));
  for (int i = 0; i < n; ++i)
    bb_to_rpo[rpo[i]] = i;

  unwind_state *rpo_state = XNEWVEC (unwind_state, n);

  rpo_elim avail (entry->dest);
  /* Verify we have no extra entries into the region.  */
  if (flag_checking && do_region)
    {
      auto_bb_flag bb_in_region (fn);
      for (int i = 0; i < n; ++i)
	{
	  basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
	  bb->flags |= bb_in_region;
	}
      /* We can't merge the first two loops because we cannot rely
	 on EDGE_DFS_BACK for edges not within the region.  But if
	 we decide to always have the bb_in_region flag we can
	 do the checking during the RPO walk itself (but then it's
	 also easy to handle MEME conservatively).  */
      for (int i = 0; i < n; ++i)
	{
	  basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
	  edge e;
	  edge_iterator ei;
	  FOR_EACH_EDGE (e, ei, bb->preds)
	    gcc_assert (e == entry || (e->src->flags & bb_in_region));
	}
      for (int i = 0; i < n; ++i)
	{
	  basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
	  bb->flags &= ~bb_in_region;
	}
    }
  /* Create the VN state.  For the initial size of the various hashtables
     use a heuristic based on region size and number of SSA names.  */
  unsigned region_size = (((unsigned HOST_WIDE_INT)n * num_ssa_names)
			  / (n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS));
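  /* A worked example with made-up numbers: a region of n == 50 blocks in
     a function with 400 non-fixed blocks and 8000 SSA names gives
     region_size == 50 * 8000 / 400 == 1000, i.e. tables sized for the
     region's expected share of the names rather than for the whole
     function.  */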
  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");

  vn_ssa_aux_hash = new hash_table <vn_ssa_aux_hasher> (region_size * 2);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  gcc_obstack_init (&vn_tables_obstack);
  gcc_obstack_init (&vn_tables_insert_obstack);
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info, region_size);
  last_inserted_ref = NULL;
  last_inserted_phi = NULL;
  last_inserted_nary = NULL;

  vn_valueize = rpo_vn_valueize;
  /* Initialize the unwind state and edge/BB executable state.  */
  for (int i = 0; i < n; ++i)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
      rpo_state[i].visited = 0;
      bool has_backedges = false;
      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->flags & EDGE_DFS_BACK)
	    has_backedges = true;
	  if (! iterate && (e->flags & EDGE_DFS_BACK))
	    e->flags |= EDGE_EXECUTABLE;
	  else
	    e->flags &= ~EDGE_EXECUTABLE;
	}
      rpo_state[i].iterate = iterate && has_backedges;
      bb->flags &= ~BB_EXECUTABLE;
    }
  entry->flags |= EDGE_EXECUTABLE;
  entry->dest->flags |= BB_EXECUTABLE;
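  /* At this point, when iterating, only the entry edge and its
     destination are executable and all other edges start out not
     executable, so backedge values are treated optimistically until
     proven otherwise.  When not iterating, backedges are marked
     executable up front and their PHI inputs are handled pessimistically
     as varying.  */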
  /* As heuristic to improve compile-time we handle only the N innermost
     loops and the outermost one optimistically.  */
  if (iterate)
    {
      loop_p loop;
      unsigned max_depth = PARAM_VALUE (PARAM_RPO_VN_MAX_LOOP_DEPTH);
      FOR_EACH_LOOP (loop, LI_ONLY_INNERMOST)
	if (loop_depth (loop) > max_depth)
	  for (unsigned i = 2;
	       i < loop_depth (loop) - max_depth; ++i)
	    {
	      basic_block header = superloop_at_depth (loop, i)->header;
	      rpo_state[bb_to_rpo[header->index]].iterate = false;
	      edge e;
	      edge_iterator ei;
	      FOR_EACH_EDGE (e, ei, header->preds)
		if (e->flags & EDGE_DFS_BACK)
		  e->flags |= EDGE_EXECUTABLE;
	    }
    }
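  /* A hypothetical example: with PARAM_RPO_VN_MAX_LOOP_DEPTH == 7 and an
     innermost loop at depth 12, the headers of its superloops at depths
     2, 3 and 4 are marked to not iterate and their backedges forced
     executable; the outermost loop and the deeper levels keep
     iterating.  */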
  /* Go and process all blocks, iterating as necessary.  */
  int idx = 0;
  uint64_t nblk = 0;
  do
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);

      /* If the block has incoming backedges remember unwind state.  This
	 is required even for non-executable blocks since in irreducible
	 regions we might reach them via the backedge and re-start iterating
	 from there.
	 Note we can individually mark blocks with incoming backedges to
	 not iterate where we then handle PHIs conservatively.  We do that
	 heuristically to reduce compile-time for degenerate cases.  */
      if (rpo_state[idx].iterate)
	{
	  rpo_state[idx].ob_top = obstack_alloc (&vn_tables_obstack, 0);
	  rpo_state[idx].ref_top = last_inserted_ref;
	  rpo_state[idx].phi_top = last_inserted_phi;
	  rpo_state[idx].nary_top = last_inserted_nary;
	}

      if (!(bb->flags & BB_EXECUTABLE))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Block %d: BB%d found not executable\n",
		     idx, bb->index);
	  idx++;
	  continue;
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
      nblk++;
      todo |= process_bb (avail, bb,
			  rpo_state[idx].visited != 0,
			  rpo_state[idx].iterate,
			  iterate, eliminate, do_region, exit_bbs);
      rpo_state[idx].visited++;
      /* Verify if changed values flow over executable outgoing backedges
	 and those change destination PHI values (that's the thing we
	 can easily verify).  Reduce over all such edges to the farthest
	 away PHI.  */
      int iterate_to = -1;
      edge_iterator ei;
      edge e;
      FOR_EACH_EDGE (e, ei, bb->succs)
	if ((e->flags & (EDGE_DFS_BACK|EDGE_EXECUTABLE))
	    == (EDGE_DFS_BACK|EDGE_EXECUTABLE)
	    && rpo_state[bb_to_rpo[e->dest->index]].iterate)
	  {
	    if (dump_file && (dump_flags & TDF_DETAILS))
	      fprintf (dump_file, "Looking for changed values of backedge "
		       "%d->%d destination PHIs\n",
		       e->src->index, e->dest->index);
	    vn_context_bb = e->dest;
	    gphi_iterator gsi;
	    for (gsi = gsi_start_phis (e->dest);
		 !gsi_end_p (gsi); gsi_next (&gsi))
	      {
		bool inserted = false;
		/* While we'd ideally just iterate on value changes
		   we CSE PHIs and do that even across basic-block
		   boundaries.  So even hashtable state changes can
		   be important (which is roughly equivalent to
		   PHI argument value changes).  To not excessively
		   iterate because of that we track whether a PHI
		   was CSEd to with GF_PLF_1.  */
		bool phival_changed;
		if ((phival_changed = visit_phi (gsi.phi (),
						 &inserted, false))
		    || (inserted && gimple_plf (gsi.phi (), GF_PLF_1)))
		  {
		    if (!phival_changed
			&& dump_file && (dump_flags & TDF_DETAILS))
		      fprintf (dump_file, "PHI was CSEd and hashtable "
			       "state (changed)\n");
		    int destidx = bb_to_rpo[e->dest->index];
		    if (iterate_to == -1
			|| destidx < iterate_to)
		      iterate_to = destidx;
		    break;
		  }
	      }
	    vn_context_bb = NULL;
	  }
      if (iterate_to != -1)
	{
	  do_unwind (&rpo_state[iterate_to], iterate_to,
		     avail, bb_to_rpo);
	  idx = iterate_to;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Iterating to %d BB%d\n",
		     iterate_to, rpo[iterate_to]);
	  continue;
	}

      idx++;
    }
  while (idx < n);
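  /* To sketch the effect with a made-up CFG: if BB7 ends a loop body
     whose header BB3 has a PHI that now merges a changed value over the
     executable backedge 7->3, iterate_to becomes BB3's RPO index,
     everything inserted since BB3 was last entered is unwound and the
     walk restarts at BB3 instead of running to completion first.  */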
  /* If statistics or dump file active.  */
  int nex = 0;
  unsigned max_visited = 1;
  for (int i = 0; i < n; ++i)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
      if (bb->flags & BB_EXECUTABLE)
	nex++;
      statistics_histogram_event (cfun, "RPO block visited times",
				  rpo_state[i].visited);
      if (rpo_state[i].visited > max_visited)
	max_visited = rpo_state[i].visited;
    }
  unsigned nvalues = 0, navail = 0;
  for (rpo_elim::rpo_avail_t::iterator i = avail.m_rpo_avail.begin ();
       i != avail.m_rpo_avail.end (); ++i)
    {
      nvalues++;
      navail += (*i).second.length ();
    }
  statistics_counter_event (cfun, "RPO blocks", n);
  statistics_counter_event (cfun, "RPO blocks visited", nblk);
  statistics_counter_event (cfun, "RPO blocks executable", nex);
  statistics_histogram_event (cfun, "RPO iterations", 10*nblk / nex);
  statistics_histogram_event (cfun, "RPO num values", nvalues);
  statistics_histogram_event (cfun, "RPO num avail", navail);
  statistics_histogram_event (cfun, "RPO num lattice",
			      vn_ssa_aux_hash->elements ());
  if (dump_file && (dump_flags & (TDF_DETAILS|TDF_STATS)))
    {
      fprintf (dump_file, "RPO iteration over %d blocks visited %" PRIu64
	       " blocks in total discovering %d executable blocks iterating "
	       "%d.%d times, a block was visited max. %u times\n",
	       n, nblk, nex,
	       (int)((10*nblk / nex)/10), (int)((10*nblk / nex)%10),
	       max_visited);
      fprintf (dump_file, "RPO tracked %d values available at %d locations "
	       "and %" PRIu64 " lattice elements\n",
	       nvalues, navail, (uint64_t) vn_ssa_aux_hash->elements ());
    }
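  /* The average iteration count is dumped in fixed point with one
     decimal digit: e.g. nblk == 23 visits of nex == 10 executable
     blocks gives 10*nblk/nex == 23, printed as "iterating 2.3 times"
     via 23/10 == 2 and 23%10 == 3.  */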
  if (eliminate)
    {
      /* When !iterate we already performed elimination during the RPO
	 walk.  */
      if (iterate)
	{
	  /* Elimination for region-based VN needs to be done within the
	     RPO walk.  */
	  gcc_assert (! do_region);
	  /* Note we can't use avail.walk here because that gets confused
	     by the existing availability and it will be less efficient
	     as well.  */
	  todo |= eliminate_with_rpo_vn (NULL);
	}
      else
	todo |= avail.eliminate_cleanup (do_region);
    }

  XDELETEVEC (bb_to_rpo);
  XDELETEVEC (rpo);
  XDELETEVEC (rpo_state);

  return todo;
}
/* Region-based entry for RPO VN.  Performs value-numbering and elimination
   on the SEME region specified by ENTRY and EXIT_BBS.  */

unsigned
do_rpo_vn (function *fn, edge entry, bitmap exit_bbs)
{
  default_vn_walk_kind = VN_WALKREWRITE;
  unsigned todo = do_rpo_vn (fn, entry, exit_bbs, false, true);
  free_rpo_vn ();
  return todo;
}

namespace {
const pass_data pass_data_fre =
{
  GIMPLE_PASS, /* type */
  "fre", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_FRE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fre : public gimple_opt_pass
{
public:
  pass_fre (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fre, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fre (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_fre != 0; }
  virtual unsigned int execute (function *);

}; // class pass_fre

unsigned int
pass_fre::execute (function *fun)
{
  unsigned todo = 0;

  /* At -O[1g] use the cheap non-iterating mode.  */
  calculate_dominance_info (CDI_DOMINATORS);
  if (optimize > 1)
    loop_optimizer_init (AVOID_CFG_MODIFICATIONS);

  default_vn_walk_kind = VN_WALKREWRITE;
  todo = do_rpo_vn (fun, NULL, NULL, optimize > 1, true);
  free_rpo_vn ();

  if (optimize > 1)
    loop_optimizer_finalize ();

  return todo;
}

} // anon namespace

gimple_opt_pass *
make_pass_fre (gcc::context *ctxt)
{
  return new pass_fre (ctxt);
}

#undef BB_EXECUTABLE