/* SCC value numbering for trees
   Copyright (C) 2006-2018 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "insn-config.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "tree-inline.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-ssa-propagate.h"
#include "gimple-iterator.h"
#include "gimple-match.h"
#include "stringpool.h"
#include "tree-pass.h"
#include "statistics.h"
#include "langhooks.h"
#include "ipa-utils.h"
#include "tree-cfgcleanup.h"
#include "tree-ssa-loop.h"
#include "tree-scalar-evolution.h"
#include "tree-ssa-sccvn.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In straight line code, it
   is equivalent to a regular hash based value numbering that is
   performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
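
/* A minimal illustration, not part of the implementation: in straight
   line code such as

     a_1 = x_2 + y_3;
     b_4 = x_2 + y_3;

   hash-based value numbering records (PLUS_EXPR, VN(x_2), VN(y_3)) in
   the expression hashtable when processing a_1, so the lookup for b_4
   hits and b_4 receives the same value number as a_1.  The machinery
   below generalizes this to cyclic SSA graphs and to memory via
   value-numbered vuses.  */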
/* There's no BB_EXECUTABLE but we can use BB_VISITED.  */
#define BB_EXECUTABLE BB_VISITED

static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;
/* vn_nary_op hashtable helpers.  */

struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
{
  typedef vn_nary_op_s *compare_type;
  static inline hashval_t hash (const vn_nary_op_s *);
  static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
};

/* Return the computed hashcode for nary operation P1.  */

inline hashval_t
vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
{
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

inline bool
vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
{
  return vno1 == vno2 || vn_nary_op_eq (vno1, vno2);
}

typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
/* vn_phi hashtable helpers.  */

static bool
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);

struct vn_phi_hasher : nofree_ptr_hash <vn_phi_s>
{
  static inline hashval_t hash (const vn_phi_s *);
  static inline bool equal (const vn_phi_s *, const vn_phi_s *);
};

/* Return the computed hashcode for phi operation P1.  */

inline hashval_t
vn_phi_hasher::hash (const vn_phi_s *vp1)
{
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

inline bool
vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
{
  return vp1 == vp2 || vn_phi_eq (vp1, vp2);
}

typedef hash_table<vn_phi_hasher> vn_phi_table_type;
typedef vn_phi_table_type::iterator vn_phi_iterator_type;
/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static bool
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
	  /* We do not care for differences in type qualification.  */
	  && (vro1->type == vro2->type
	      || (vro1->type && vro2->type
		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
					 TYPE_MAIN_VARIANT (vro2->type))))
	  && expressions_equal_p (vro1->op0, vro2->op0)
	  && expressions_equal_p (vro1->op1, vro2->op1)
	  && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Free a reference operation structure VP.  */

static inline void
free_reference (vn_reference_s *vr)
{
  vr->operands.release ();
}
/* vn_reference hashtable helpers.  */

struct vn_reference_hasher : nofree_ptr_hash <vn_reference_s>
{
  static inline hashval_t hash (const vn_reference_s *);
  static inline bool equal (const vn_reference_s *, const vn_reference_s *);
};

/* Return the hashcode for a given reference operation P1.  */

inline hashval_t
vn_reference_hasher::hash (const vn_reference_s *vr1)
{
  return vr1->hashcode;
}

inline bool
vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
{
  return v == c || vn_reference_eq (v, c);
}

typedef hash_table<vn_reference_hasher> vn_reference_table_type;
typedef vn_reference_table_type::iterator vn_reference_iterator_type;
/* The set of VN hashtables.  */

typedef struct vn_tables_s
{
  vn_nary_op_table_type *nary;
  vn_phi_table_type *phis;
  vn_reference_table_type *references;
} *vn_tables_t;
/* vn_constant hashtable helpers.  */

struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
{
  static inline hashval_t hash (const vn_constant_s *);
  static inline bool equal (const vn_constant_s *, const vn_constant_s *);
};

/* Hash table hash function for vn_constant_t.  */

inline hashval_t
vn_constant_hasher::hash (const vn_constant_s *vc1)
{
  return vc1->hashcode;
}

/* Hash table equality function for vn_constant_t.  */

inline bool
vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
{
  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

static hash_table<vn_constant_hasher> *constant_to_value_id;
static bitmap constant_value_ids;
/* Obstack we allocate the vn-tables elements from.  */
static obstack vn_tables_obstack;
/* Special obstack we never unwind.  */
static obstack vn_tables_insert_obstack;

static vn_reference_t last_inserted_ref;
static vn_phi_t last_inserted_phi;
static vn_nary_op_t last_inserted_nary;

/* Valid hashtables storing information we have proven to be
   correct.  */
static vn_tables_t valid_info;
/* Valueization hook.  Valueize NAME if it is an SSA name, otherwise
   just return it.  */
tree (*vn_valueize) (tree);

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;
/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

struct vn_ssa_aux_hasher : typed_noop_remove <vn_ssa_aux_t>
{
  typedef vn_ssa_aux_t value_type;
  typedef tree compare_type;
  static inline hashval_t hash (const value_type &);
  static inline bool equal (const value_type &, const compare_type &);
  static inline void mark_deleted (value_type &) {}
  static inline void mark_empty (value_type &e) { e = NULL; }
  static inline bool is_deleted (value_type &) { return false; }
  static inline bool is_empty (value_type &e) { return e == NULL; }
};

hashval_t
vn_ssa_aux_hasher::hash (const value_type &entry)
{
  return SSA_NAME_VERSION (entry->name);
}

bool
vn_ssa_aux_hasher::equal (const value_type &entry, const compare_type &name)
{
  return name == entry->name;
}

static hash_table<vn_ssa_aux_hasher> *vn_ssa_aux_hash;
typedef hash_table<vn_ssa_aux_hasher>::iterator vn_ssa_aux_iterator_type;
static struct obstack vn_ssa_aux_obstack;
static vn_nary_op_t vn_nary_op_insert_stmt (gimple *, tree);
static unsigned int vn_nary_length_from_stmt (gimple *);
static vn_nary_op_t alloc_vn_nary_op_noinit (unsigned int, obstack *);
static vn_nary_op_t vn_nary_op_insert_into (vn_nary_op_t,
					    vn_nary_op_table_type *, bool);
static void init_vn_nary_op_from_stmt (vn_nary_op_t, gimple *);
static void init_vn_nary_op_from_pieces (vn_nary_op_t, unsigned int,
					 enum tree_code, tree, tree *);
static tree vn_lookup_simplify_result (gimple_match_op *);
/* Return whether there is value numbering information for a given SSA name.  */

bool
has_VN_INFO (tree name)
{
  return vn_ssa_aux_hash->find_with_hash (name, SSA_NAME_VERSION (name));
}
/* Return the value numbering information for a given SSA name,
   creating it if it does not exist.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t *res
    = vn_ssa_aux_hash->find_slot_with_hash (name, SSA_NAME_VERSION (name),
					    INSERT);
  if (*res != NULL)
    return *res;

  vn_ssa_aux_t newinfo = *res = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  newinfo->name = name;
  newinfo->valnum = VN_TOP;
  /* We are using the visited flag to handle uses with defs not within the
     region being value-numbered.  */
  newinfo->visited = false;

  /* Given we create the VN_INFOs on-demand now we have to do initialization
     different than VN_TOP here.  */
  if (SSA_NAME_IS_DEFAULT_DEF (name))
    switch (TREE_CODE (SSA_NAME_VAR (name)))
      {
      case VAR_DECL:
	/* All undefined vars are VARYING.  */
	newinfo->valnum = name;
	newinfo->visited = true;
	break;

      case PARM_DECL:
	/* Parameters are VARYING but we can record a condition
	   if we know it is a non-NULL pointer.  */
	newinfo->visited = true;
	newinfo->valnum = name;
	if (POINTER_TYPE_P (TREE_TYPE (name))
	    && nonnull_arg_p (SSA_NAME_VAR (name)))
	  {
	    tree ops[2];
	    ops[0] = name;
	    ops[1] = build_int_cst (TREE_TYPE (name), 0);
	    vn_nary_op_t nary;
	    /* Allocate from non-unwinding stack.  */
	    nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
	    init_vn_nary_op_from_pieces (nary, 2, NE_EXPR,
					 boolean_type_node, ops);
	    nary->predicated_values = 0;
	    nary->u.result = boolean_true_node;
	    vn_nary_op_insert_into (nary, valid_info->nary, true);
	    gcc_assert (nary->unwind_to == NULL);
	    /* Also do not link it into the undo chain.  */
	    last_inserted_nary = nary->next;
	    nary->next = (vn_nary_op_t)(void *)-1;
	    nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
	    init_vn_nary_op_from_pieces (nary, 2, EQ_EXPR,
					 boolean_type_node, ops);
	    nary->predicated_values = 0;
	    nary->u.result = boolean_false_node;
	    vn_nary_op_insert_into (nary, valid_info->nary, true);
	    gcc_assert (nary->unwind_to == NULL);
	    last_inserted_nary = nary->next;
	    nary->next = (vn_nary_op_t)(void *)-1;
	    if (dump_file && (dump_flags & TDF_DETAILS))
	      {
		fprintf (dump_file, "Recording ");
		print_generic_expr (dump_file, name, TDF_SLIM);
		fprintf (dump_file, " != 0\n");
	      }
	  }
	break;

      case RESULT_DECL:
	/* If the result is passed by invisible reference the default
	   def is initialized, otherwise it's uninitialized.  Still
	   undefined is varying.  */
	newinfo->visited = true;
	newinfo->valnum = name;
	break;

      default:
	gcc_unreachable ();
      }
  return newinfo;
}
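
/* Illustration, not from the original source: for a parameter declared
   non-null, e.g.

     void f (int *p) __attribute__((nonnull));

   the default definition p_1(D) gets the predicate entries
   p_1(D) != 0 -> true and p_1(D) == 0 -> false recorded above, so
   later comparisons of p_1(D) against NULL fold during value
   numbering.  */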
/* Return the SSA value of X.  */

static inline tree
SSA_VAL (tree x, bool *visited = NULL)
{
  vn_ssa_aux_t tem = vn_ssa_aux_hash->find_with_hash (x, SSA_NAME_VERSION (x));
  if (visited)
    *visited = tem && tem->visited;
  return tem && tem->visited ? tem->valnum : x;
}
/* Return whether X was visited.  */

static inline bool
SSA_VISITED (tree x)
{
  vn_ssa_aux_t tem = vn_ssa_aux_hash->find_with_hash (x, SSA_NAME_VERSION (x));
  return tem && tem->visited;
}
/* Return the SSA value of the VUSE x, supporting released VDEFs
   during elimination which will value-number the VDEF to the
   associated VUSE (but not substitute in the whole lattice).  */

static inline tree
vuse_ssa_val (tree x)
{
  if (!x)
    return NULL_TREE;

  do
    {
      x = SSA_VAL (x);
      gcc_assert (x != VN_TOP);
    }
  while (SSA_NAME_IN_FREE_LIST (x));

  return x;
}
/* Similar to the above but used as callback for walk_non_aliased_vuses
   and thus should stop at unvisited VUSE to not walk across region
   boundaries.  */

static tree
vuse_valueize (tree vuse)
{
  do
    {
      bool visited;
      vuse = SSA_VAL (vuse, &visited);
      if (!visited)
	return NULL_TREE;
      gcc_assert (vuse != VN_TOP);
    }
  while (SSA_NAME_IN_FREE_LIST (vuse));
  return vuse;
}
/* Return the vn_kind the expression computed by the stmt should be
   associated with.  */

enum vn_kind
vn_get_stmt_kind (gimple *stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      return VN_REFERENCE;
    case GIMPLE_PHI:
      return VN_PHI;
    case GIMPLE_ASSIGN:
      {
	enum tree_code code = gimple_assign_rhs_code (stmt);
	tree rhs1 = gimple_assign_rhs1 (stmt);
	switch (get_gimple_rhs_class (code))
	  {
	  case GIMPLE_UNARY_RHS:
	  case GIMPLE_BINARY_RHS:
	  case GIMPLE_TERNARY_RHS:
	    return VN_NARY;
	  case GIMPLE_SINGLE_RHS:
	    switch (TREE_CODE_CLASS (code))
	      {
	      case tcc_reference:
		/* VOP-less references can go through unary case.  */
		if ((code == REALPART_EXPR
		     || code == IMAGPART_EXPR
		     || code == VIEW_CONVERT_EXPR
		     || code == BIT_FIELD_REF)
		    && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
		  return VN_NARY;

		/* Fallthrough.  */
	      case tcc_declaration:
		return VN_REFERENCE;

	      case tcc_constant:
		return VN_CONSTANT;

	      default:
		if (code == ADDR_EXPR)
		  return (is_gimple_min_invariant (rhs1)
			  ? VN_CONSTANT : VN_REFERENCE);
		else if (code == CONSTRUCTOR)
		  return VN_NARY;
		return VN_NONE;
	      }
	  default:
	    return VN_NONE;
	  }
      }
    default:
      return VN_NONE;
    }
}
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
  if (slot)
    return (*slot)->value_id;
  return 0;
}
/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  /* If the hashtable isn't initialized we're not running from PRE and thus
     do not need value-ids.  */
  if (!constant_to_value_id)
    return 0;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, INSERT);
  if (*slot)
    return (*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}
/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}
/* Compute the hash for a reference operand VRO1.  */

static void
vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
{
  hstate.add_int (vro1->opcode);
  if (vro1->op0)
    inchash::add_expr (vro1->op0, hstate);
  if (vro1->op1)
    inchash::add_expr (vro1->op1, hstate);
  if (vro1->op2)
    inchash::add_expr (vro1->op2, hstate);
}
/* Compute a hash for the reference operation VR1 and return it.  */

static hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  inchash::hash hstate;
  hashval_t result;
  int i;
  vn_reference_op_t vro;
  poly_int64 off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
	deref = true;
      else if (vro->opcode != ADDR_EXPR)
	deref = false;
      if (maybe_ne (vro->off, -1))
	{
	  if (known_eq (off, -1))
	    off = 0;
	  off += vro->off;
	}
      else
	{
	  if (maybe_ne (off, -1)
	      && maybe_ne (off, 0))
	    hstate.add_poly_int (off);
	  off = -1;
	  if (deref
	      && vro->opcode == ADDR_EXPR)
	    {
	      if (vro->op0)
		{
		  tree op = TREE_OPERAND (vro->op0, 0);
		  hstate.add_int (TREE_CODE (op));
		  inchash::add_expr (op, hstate);
		}
	    }
	  else
	    vn_reference_op_compute_hash (vro, hstate);
	}
    }
  result = hstate.end ();
  /* ??? We would ICE later if we hash instead of adding that in.  */
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
/* Return true if reference operations VR1 and VR2 are equivalent.  This
   means they have the same set of operands and vuses.  */

bool
vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
{
  unsigned i, j;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
	return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
	   && (TYPE_PRECISION (vr1->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
	   && (TYPE_PRECISION (vr2->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      poly_int64 off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; vr1->operands.iterate (i, &vro1); i++)
	{
	  if (vro1->opcode == MEM_REF)
	    deref1 = true;
	  /* Do not look through a storage order barrier.  */
	  else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
	    return false;
	  if (known_eq (vro1->off, -1))
	    break;
	  off1 += vro1->off;
	}
      for (; vr2->operands.iterate (j, &vro2); j++)
	{
	  if (vro2->opcode == MEM_REF)
	    deref2 = true;
	  /* Do not look through a storage order barrier.  */
	  else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
	    return false;
	  if (known_eq (vro2->off, -1))
	    break;
	  off2 += vro2->off;
	}
      if (maybe_ne (off1, off2))
	return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
	{
	  memset (&tem1, 0, sizeof (tem1));
	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
	  tem1.type = TREE_TYPE (tem1.op0);
	  tem1.opcode = TREE_CODE (tem1.op0);
	  vro1 = &tem1;
	  deref1 = false;
	}
      if (deref2 && vro2->opcode == ADDR_EXPR)
	{
	  memset (&tem2, 0, sizeof (tem2));
	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
	  tem2.type = TREE_TYPE (tem2.op0);
	  tem2.opcode = TREE_CODE (tem2.op0);
	  vro2 = &tem2;
	  deref2 = false;
	}
      if (deref1 != deref2)
	return false;
      if (!vn_reference_op_eq (vro1, vro2))
	return false;
      ++j;
      ++i;
    }
  while (vr1->operands.length () != i
	 || vr2->operands.length () != j);

  return true;
}
/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      result->reserve (3);

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      temp.clique = MR_DEPENDENCE_CLIQUE (ref);
      temp.base = MR_DEPENDENCE_BASE (ref);
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      result->quick_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      result->quick_push (temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */
  tree orig = ref;
  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
	{
	case MODIFY_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  break;
	case WITH_SIZE_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.off = 0;
	  break;
	case MEM_REF:
	  /* The base address gets its own vn_reference_op_s structure.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  if (!mem_ref_offset (ref).to_shwi (&temp.off))
	    temp.off = -1;
	  temp.clique = MR_DEPENDENCE_CLIQUE (ref);
	  temp.base = MR_DEPENDENCE_BASE (ref);
	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
	  break;
	case BIT_FIELD_REF:
	  /* Record bits, position and storage order.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
	    temp.off = -1;
	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
	  break;
	case COMPONENT_REF:
	  /* The field decl is enough to unambiguously specify the field,
	     a matching type is not necessary and a mismatching type
	     is always a spurious difference.  */
	  temp.type = NULL_TREE;
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  {
	    tree this_offset = component_ref_field_offset (ref);
	    if (this_offset
		&& poly_int_tree_p (this_offset))
	      {
		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
		  {
		    poly_offset_int off
		      = (wi::to_poly_offset (this_offset)
			 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
		    /* Prohibit value-numbering zero offset components
		       of addresses the same before the pass folding
		       __builtin_object_size had a chance to run
		       (checking cfun->after_inlining does the
		       trick here).  */
		    if (TREE_CODE (orig) != ADDR_EXPR
			|| maybe_ne (off, 0)
			|| cfun->after_inlining)
		      off.to_shwi (&temp.off);
		  }
	      }
	  }
	  break;
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  {
	    tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
	    /* Record index as operand.  */
	    temp.op0 = TREE_OPERAND (ref, 1);
	    /* Always record lower bounds and element size.  */
	    temp.op1 = array_ref_low_bound (ref);
	    /* But record element size in units of the type alignment.  */
	    temp.op2 = TREE_OPERAND (ref, 3);
	    temp.align = eltype->type_common.align;
	    if (!temp.op2)
	      temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
				     size_int (TYPE_ALIGN_UNIT (eltype)));
	    if (poly_int_tree_p (temp.op0)
		&& poly_int_tree_p (temp.op1)
		&& TREE_CODE (temp.op2) == INTEGER_CST)
	      {
		poly_offset_int off = ((wi::to_poly_offset (temp.op0)
					- wi::to_poly_offset (temp.op1))
				       * wi::to_offset (temp.op2)
				       * vn_ref_op_align_unit (&temp));
		off.to_shwi (&temp.off);
	      }
	  }
	  break;
	case VAR_DECL:
	  if (DECL_HARD_REGISTER (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthru.  */
	case PARM_DECL:
	case CONST_DECL:
	case RESULT_DECL:
	  /* Canonicalize decls to MEM[&decl] which is what we end up with
	     when valueizing MEM[ptr] with ptr = &decl.  */
	  temp.opcode = MEM_REF;
	  temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
	  temp.off = 0;
	  result->safe_push (temp);
	  temp.opcode = ADDR_EXPR;
	  temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
	  temp.type = TREE_TYPE (temp.op0);
	  temp.off = -1;
	  break;
	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case FIXED_CST:
	case CONSTRUCTOR:
	case SSA_NAME:
	  temp.op0 = ref;
	  break;
	case ADDR_EXPR:
	  if (is_gimple_min_invariant (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  break;
	  /* These are only interesting for their operands, their
	     existence, and their type.  They will never be the last
	     ref in the chain of references (IE they require an
	     operand), so we don't have to put anything
	     for op* as it will be handled by the iteration  */
	case REALPART_EXPR:
	  temp.off = 0;
	  break;
	case VIEW_CONVERT_EXPR:
	  temp.off = 0;
	  temp.reverse = storage_order_barrier_p (ref);
	  break;
	case IMAGPART_EXPR:
	  /* This is only interesting for its constant offset.  */
	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
	  break;
	default:
	  gcc_unreachable ();
	}
      result->safe_push (temp);

      if (REFERENCE_CLASS_P (ref)
	  || TREE_CODE (ref) == MODIFY_EXPR
	  || TREE_CODE (ref) == WITH_SIZE_EXPR
	  || (TREE_CODE (ref) == ADDR_EXPR
	      && !is_gimple_min_invariant (ref)))
	ref = TREE_OPERAND (ref, 0);
      else
	ref = NULL_TREE;
    }
}
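
/* Illustration, not part of the implementation: for a reference like
   a.b[i] the function above produces, outermost first, roughly the
   operand vector

     { ARRAY_REF (op0 = i, op1 = lower bound, op2 = scaled elt size),
       COMPONENT_REF (op0 = FIELD_DECL b),
       MEM_REF (op0 = 0),
       ADDR_EXPR (&a) }

   where each operand records a constant byte offset in .off when it is
   known.  The exact shape depends on the canonicalizations above.  */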
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
			       alias_set_type set, tree type,
			       vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  poly_offset_int offset = 0;
  poly_offset_int max_size;
  poly_offset_int size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = &ops[0];
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (type);
      else
	size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE
      && poly_int_tree_p (size_tree))
    size = wi::to_poly_offset (size_tree);

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (ops, i, op)
    {
      switch (op->opcode)
	{
	/* These may be in the reference ops, but we cannot do anything
	   sensible with them here.  */
	case ADDR_EXPR:
	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
	  if (base != NULL_TREE
	      && TREE_CODE (base) == MEM_REF
	      && op->op0
	      && DECL_P (TREE_OPERAND (op->op0, 0)))
	    {
	      vn_reference_op_t pop = &ops[i-1];
	      base = TREE_OPERAND (op->op0, 0);
	      if (known_eq (pop->off, -1))
		{
		  max_size = -1;
		  offset = 0;
		}
	      else
		offset += pop->off * BITS_PER_UNIT;
	      op0_p = NULL;
	      break;
	    }
	  /* Fallthru.  */
	case CALL_EXPR:
	  return false;

	/* Record the base objects.  */
	case MEM_REF:
	  base_alias_set = get_deref_alias_set (op->op0);
	  *op0_p = build2 (MEM_REF, op->type,
			   NULL_TREE, op->op0);
	  MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
	  MR_DEPENDENCE_BASE (*op0_p) = op->base;
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case VAR_DECL:
	case PARM_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  *op0_p = op->op0;
	  op0_p = NULL;
	  break;

	/* And now the usual component-reference style ops.  */
	case BIT_FIELD_REF:
	  offset += wi::to_poly_offset (op->op1);
	  break;

	case COMPONENT_REF:
	  {
	    tree field = op->op0;
	    /* We do not have a complete COMPONENT_REF tree here so we
	       cannot use component_ref_field_offset.  Do the interesting
	       parts manually.  */
	    tree this_offset = DECL_FIELD_OFFSET (field);

	    if (op->op1 || !poly_int_tree_p (this_offset))
	      max_size = -1;
	    else
	      {
		poly_offset_int woffset = (wi::to_poly_offset (this_offset)
					   << LOG2_BITS_PER_UNIT);
		woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
		offset += woffset;
	      }
	    break;
	  }

	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* We recorded the lower bound and the element size.  */
	  if (!poly_int_tree_p (op->op0)
	      || !poly_int_tree_p (op->op1)
	      || TREE_CODE (op->op2) != INTEGER_CST)
	    max_size = -1;
	  else
	    {
	      poly_offset_int woffset
		= wi::sext (wi::to_poly_offset (op->op0)
			    - wi::to_poly_offset (op->op1),
			    TYPE_PRECISION (TREE_TYPE (op->op0)));
	      woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
	      woffset <<= LOG2_BITS_PER_UNIT;
	      offset += woffset;
	    }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  offset += size;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case CONSTRUCTOR:
	case CONST_DECL:
	  return false;

	default:
	  return false;
	}
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
    {
      ref->offset = 0;
      ref->size = -1;
      ref->max_size = -1;
      return true;
    }

  if (!offset.to_shwi (&ref->offset))
    {
      ref->offset = 0;
      ref->max_size = -1;
      return true;
    }

  if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
    ref->max_size = -1;

  return true;
}
/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_call (gcall *call,
			      vec<vn_reference_op_s> *result)
{
  vn_reference_op_s temp;
  unsigned i;
  tree lhs = gimple_call_lhs (call);
  int lr;

  /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
     different.  By adding the lhs here in the vector, we ensure that the
     hashcode is different, guaranteeing a different value number.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    {
      memset (&temp, 0, sizeof (temp));
      temp.opcode = MODIFY_EXPR;
      temp.type = TREE_TYPE (lhs);
      temp.op0 = lhs;
      temp.off = -1;
      result->safe_push (temp);
    }

  /* Copy the type, opcode, function, static chain and EH region, if any.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  if (stmt_could_throw_p (call) && (lr = lookup_stmt_eh_lp (call)) > 0)
    temp.op2 = size_int (lr);
  temp.off = -1;
  result->safe_push (temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */

static bool
vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
			    unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  tree addr_base;
  poly_int64 addr_offset = 0;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
					     &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != TREE_OPERAND (op->op0, 0))
    {
      poly_offset_int off
	= (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
				  SIGNED)
	   + addr_offset);
      mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (tree_fits_shwi_p (mem_op->op0))
	mem_op->off = tree_to_shwi (mem_op->op0);
      else
	mem_op->off = -1;
      return true;
    }
  return false;
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */

static bool
vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
				     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  gimple *def_stmt;
  enum tree_code code;
  poly_offset_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return false;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return false;

  off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      poly_int64 addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
						 &addr_offset);
      /* If that didn't work because the address isn't invariant propagate
	 the reference tree from the address operation in case the current
	 dereference isn't offsetted.  */
      if (!addr_base
	  && *i_p == ops->length () - 1
	  && known_eq (off, 0)
	  /* This makes us disable this transform for PRE where the
	     reference ops might be also used for code insertion which
	     is invalid.  */
	  && default_vn_walk_kind == VN_WALKREWRITE)
	{
	  auto_vec<vn_reference_op_s, 32> tem;
	  copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
	  /* Make sure to preserve TBAA info.  The only objects not
	     wrapped in MEM_REFs that can have their address taken are
	     STRING_CSTs.  */
	  if (tem.length () >= 2
	      && tem[tem.length () - 2].opcode == MEM_REF)
	    {
	      vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
	      new_mem_op->op0
		= wide_int_to_tree (TREE_TYPE (mem_op->op0),
				    wi::to_poly_wide (new_mem_op->op0));
	    }
	  else
	    gcc_assert (tem.last ().opcode == STRING_CST);
	  ops->pop ();
	  ops->pop ();
	  ops->safe_splice (tem);
	  --*i_p;
	  return true;
	}
      if (!addr_base
	  || TREE_CODE (addr_base) != MEM_REF
	  || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base, 0))))
	return false;

      off += addr_offset;
      off += mem_ref_offset (addr_base);
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
	  || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
	  /* Make sure to not endlessly recurse.
	     See gcc.dg/tree-ssa/20040408-1.c for an example.  Can easily
	     happen when we value-number a PHI to its backedge value.  */
	  || SSA_VAL (ptr) == op->op0
	  || !poly_int_tree_p (ptroff))
	return false;

      off += wi::to_poly_offset (ptroff);
      op->op0 = ptr;
    }

  mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (tree_fits_shwi_p (mem_op->op0))
    mem_op->off = tree_to_shwi (mem_op->op0);
  else
    mem_op->off = -1;
  /* ???  Can end up with endless recursion here!?
     gcc.c-torture/execute/strcmp-1.c  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
  return true;
}
/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  vec<vn_reference_op_s> operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = &operands[0];
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && fndecl_built_in_p (TREE_OPERAND (op->op0, 0))
      && operands.length () >= 2
      && operands.length () <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = &operands[1];
      if (operands.length () > 2)
	arg1 = &operands[2];
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
	  || (arg0->opcode == ADDR_EXPR
	      && is_gimple_min_invariant (arg0->op0)))
	anyconst = true;
      if (arg1
	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
	      || (arg1->opcode == ADDR_EXPR
		  && is_gimple_min_invariant (arg1->op0))))
	anyconst = true;
      if (anyconst)
	{
	  tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
					 arg1 ? 2 : 1,
					 arg0->op0,
					 arg1 ? arg1->op0 : NULL);
	  if (folded
	      && TREE_CODE (folded) == NOP_EXPR)
	    folded = TREE_OPERAND (folded, 0);
	  if (folded
	      && is_gimple_min_invariant (folded))
	    return folded;
	}
    }

  /* Simplify reads from constants or constant initializers.  */
  else if (BITS_PER_UNIT == 8
	   && COMPLETE_TYPE_P (ref->type)
	   && is_gimple_reg_type (ref->type))
    {
      poly_int64 off = 0;
      HOST_WIDE_INT size;
      if (INTEGRAL_TYPE_P (ref->type))
	size = TYPE_PRECISION (ref->type);
      else if (tree_fits_shwi_p (TYPE_SIZE (ref->type)))
	size = tree_to_shwi (TYPE_SIZE (ref->type));
      else
	return NULL_TREE;
      if (size % BITS_PER_UNIT != 0
	  || size > MAX_BITSIZE_MODE_ANY_MODE)
	return NULL_TREE;
      size /= BITS_PER_UNIT;
      unsigned i;
      for (i = 0; i < operands.length (); ++i)
	{
	  if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
	    {
	      ++i;
	      break;
	    }
	  if (known_eq (operands[i].off, -1))
	    return NULL_TREE;
	  off += operands[i].off;
	  if (operands[i].opcode == MEM_REF)
	    {
	      ++i;
	      break;
	    }
	}
      vn_reference_op_t base = &operands[--i];
      tree ctor = error_mark_node;
      tree decl = NULL_TREE;
      if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
	ctor = base->op0;
      else if (base->opcode == MEM_REF
	       && base[1].opcode == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
	{
	  decl = TREE_OPERAND (base[1].op0, 0);
	  if (TREE_CODE (decl) == STRING_CST)
	    ctor = decl;
	  else
	    ctor = ctor_for_folding (decl);
	}
      if (ctor == NULL_TREE)
	return build_zero_cst (ref->type);
      else if (ctor != error_mark_node)
	{
	  HOST_WIDE_INT const_off;
	  if (decl)
	    {
	      tree res = fold_ctor_reference (ref->type, ctor,
					      off * BITS_PER_UNIT,
					      size * BITS_PER_UNIT, decl);
	      if (res)
		{
		  STRIP_USELESS_TYPE_CONVERSION (res);
		  if (is_gimple_min_invariant (res))
		    return res;
		}
	    }
	  else if (off.is_constant (&const_off))
	    {
	      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
	      int len = native_encode_expr (ctor, buf, size, const_off);
	      if (len > 0)
		return native_interpret_expr (ref->type, buf, len);
	    }
	}
    }

  return NULL_TREE;
}
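
/* Illustration, not from the original source: given

     static const int tab[2] = { 1, 2 };

   a load of tab[1] reaches the constant-initializer path above and is
   simplified to the constant 2 via fold_ctor_reference (or via
   native_encode_expr / native_interpret_expr for byte-wise reads).  */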
/* Return true if OPS contain a storage order barrier.  */

static bool
contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;

  FOR_EACH_VEC_ELT (ops, i, op)
    if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
      return true;

  return false;
}
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static vec<vn_reference_op_s>
valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything,
		 bool with_avail = false)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
	{
	  tree tem = with_avail ? vn_valueize (vro->op0) : SSA_VAL (vro->op0);
	  if (tem != vro->op0)
	    {
	      *valueized_anything = true;
	      vro->op0 = tem;
	    }
	  /* If it transforms from an SSA_NAME to a constant, update
	     the opcode.  */
	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
	    vro->opcode = TREE_CODE (vro->op0);
	}
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
	{
	  tree tem = with_avail ? vn_valueize (vro->op1) : SSA_VAL (vro->op1);
	  if (tem != vro->op1)
	    {
	      *valueized_anything = true;
	      vro->op1 = tem;
	    }
	}
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
	{
	  tree tem = with_avail ? vn_valueize (vro->op2) : SSA_VAL (vro->op2);
	  if (tem != vro->op2)
	    {
	      *valueized_anything = true;
	      vro->op2 = tem;
	    }
	}
      /* If it transforms from an SSA_NAME to an address, fold with
	 a preceding indirect reference.  */
      if (i > 0
	  && vro->op0
	  && TREE_CODE (vro->op0) == ADDR_EXPR
	  && orig[i - 1].opcode == MEM_REF)
	{
	  if (vn_reference_fold_indirect (&orig, &i))
	    *valueized_anything = true;
	}
      else if (i > 0
	       && vro->opcode == SSA_NAME
	       && orig[i - 1].opcode == MEM_REF)
	{
	  if (vn_reference_maybe_forwprop_address (&orig, &i))
	    *valueized_anything = true;
	}
      /* If it transforms a non-constant ARRAY_REF into a constant
	 one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
	       && known_eq (vro->off, -1)
	       && poly_int_tree_p (vro->op0)
	       && poly_int_tree_p (vro->op1)
	       && TREE_CODE (vro->op2) == INTEGER_CST)
	{
	  poly_offset_int off = ((wi::to_poly_offset (vro->op0)
				  - wi::to_poly_offset (vro->op1))
				 * wi::to_offset (vro->op2)
				 * vn_ref_op_align_unit (vro));
	  off.to_shwi (&vro->off);
	}
    }

  return orig;
}

static vec<vn_reference_op_s>
valueize_refs (vec<vn_reference_op_s> orig)
{
  bool tem;
  return valueize_refs_1 (orig, &tem);
}
static vec<vn_reference_op_s> shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
					      valueized_anything);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_call (gcall *call)
{
  if (!call)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}
/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  vn_reference_s **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
	*vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}
/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
		       unsigned int cnt, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  vn_reference_s **slot;
  hashval_t hash;

  /* This bounds the stmt walks we perform on reference lookups
     to O(1) instead of O(N) where N is the number of dominating
     stores.  */
  if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
    return (void *)-1;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = vuse_ssa_val (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}
/* Lookup an existing or insert a new vn_reference entry into the
   value table for the VUSE, SET, TYPE, OPERANDS reference which
   has the value VALUE which is either a constant or an SSA name.  */

static vn_reference_t
vn_reference_lookup_or_insert_for_pieces (tree vuse,
					  alias_set_type set,
					  tree type,
					  vec<vn_reference_op_s,
					      va_heap> operands,
					  tree value)
{
  vn_reference_s vr1;
  vn_reference_t result;
  unsigned value_id;
  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = operands;
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if (vn_reference_lookup_1 (&vr1, &result))
    return result;
  if (TREE_CODE (value) == SSA_NAME)
    value_id = VN_INFO (value)->value_id;
  else
    value_id = get_or_alloc_constant_value_id (value);
  return vn_reference_insert_pieces (vuse, set, type,
				     operands.copy (), value, value_id);
}
/* Return a value-number for RCODE OPS... either by looking up an existing
   value-number for the simplified result or by inserting the operation if
   INSERT is true.  */

static tree
vn_nary_build_or_lookup_1 (gimple_match_op *res_op, bool insert)
{
  tree result = NULL_TREE;
  /* We will be creating a value number for
       RCODE (OPS...).
     So first simplify and lookup this expression to see if it
     is already available.  */
  mprts_hook = vn_lookup_simplify_result;
  bool res = false;
  switch (TREE_CODE_LENGTH ((tree_code) res_op->code))
    {
    case 1:
      res = gimple_resimplify1 (NULL, res_op, vn_valueize);
      break;
    case 2:
      res = gimple_resimplify2 (NULL, res_op, vn_valueize);
      break;
    case 3:
      res = gimple_resimplify3 (NULL, res_op, vn_valueize);
      break;
    }
  mprts_hook = NULL;
  gimple *new_stmt = NULL;
  if (res
      && gimple_simplified_result_is_gimple_val (res_op))
    {
      /* The expression is already available.  */
      result = res_op->ops[0];
      /* Valueize it, simplification returns sth in AVAIL only.  */
      if (TREE_CODE (result) == SSA_NAME)
	result = SSA_VAL (result);
    }
  else
    {
      tree val = vn_lookup_simplify_result (res_op);
      if (!val && insert)
	{
	  gimple_seq stmts = NULL;
	  result = maybe_push_res_to_seq (res_op, &stmts);
	  if (result)
	    {
	      gcc_assert (gimple_seq_singleton_p (stmts));
	      new_stmt = gimple_seq_first_stmt (stmts);
	    }
	}
      else
	/* The expression is already available.  */
	result = val;
    }
  if (new_stmt)
    {
      /* The expression is not yet available, value-number lhs to
	 the new SSA_NAME we created.  */
      /* Initialize value-number information properly.  */
      vn_ssa_aux_t result_info = VN_INFO (result);
      result_info->valnum = result;
      result_info->value_id = get_next_value_id ();
      result_info->visited = 1;
      gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
					  new_stmt);
      result_info->needs_insertion = true;
      /* ???  PRE phi-translation inserts NARYs without corresponding
	 SSA name result.  Re-use those but set their result according
	 to the stmt we just built.  */
      vn_nary_op_t nary = NULL;
      vn_nary_op_lookup_stmt (new_stmt, &nary);
      if (nary)
	{
	  gcc_assert (! nary->predicated_values && nary->u.result == NULL_TREE);
	  nary->u.result = gimple_assign_lhs (new_stmt);
	}
      /* As all "inserted" statements are singleton SCCs, insert
	 to the valid table.  This is strictly needed to
	 avoid re-generating new value SSA_NAMEs for the same
	 expression during SCC iteration over and over (the
	 optimistic table gets cleared after each iteration).
	 We do not need to insert into the optimistic table, as
	 lookups there will fall back to the valid table.  */
      else
	{
	  unsigned int length = vn_nary_length_from_stmt (new_stmt);
	  vn_nary_op_t vno1
	    = alloc_vn_nary_op_noinit (length, &vn_tables_insert_obstack);
	  vno1->value_id = result_info->value_id;
	  vno1->length = length;
	  vno1->predicated_values = 0;
	  vno1->u.result = result;
	  init_vn_nary_op_from_stmt (vno1, new_stmt);
	  vn_nary_op_insert_into (vno1, valid_info->nary, true);
	  /* Also do not link it into the undo chain.  */
	  last_inserted_nary = vno1->next;
	  vno1->next = (vn_nary_op_t)(void *)-1;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Inserting name ");
	  print_generic_expr (dump_file, result);
	  fprintf (dump_file, " for expression ");
	  print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}
    }
  return result;
}
/* Return a value-number for RCODE OPS... either by looking up an existing
   value-number for the simplified result or by inserting the operation.  */

static tree
vn_nary_build_or_lookup (gimple_match_op *res_op)
{
  return vn_nary_build_or_lookup_1 (res_op, true);
}
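
/* Illustration, not from the original source: when value numbering
   encounters  tmp_1 = x_2 + 0,  resimplification above reduces the
   operation to plain  x_2,  so tmp_1 is value numbered to x_2 without
   inserting any new expression into the tables.  */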
/* Try to simplify the expression RCODE OPS... of type TYPE and return
   its value if present.  */

tree
vn_nary_simplify (vn_nary_op_t nary)
{
  if (nary->length > gimple_match_op::MAX_NUM_OPS)
    return NULL_TREE;
  gimple_match_op op (gimple_match_cond::UNCOND, nary->opcode,
		      nary->type, nary->length);
  memcpy (op.ops, nary->op, sizeof (tree) * nary->length);
  return vn_nary_build_or_lookup_1 (&op, false);
}
basic_block vn_context_bb;
1878 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
1879 from the statement defining VUSE and if not successful tries to
1880 translate *REFP and VR_ through an aggregate copy at the definition
1881 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
1882 of *REF and *VR. If only disambiguation was performed then
1883 *DISAMBIGUATE_ONLY is set to true. */
1886 vn_reference_lookup_3 (ao_ref
*ref
, tree vuse
, void *vr_
,
1887 bool *disambiguate_only
)
1889 vn_reference_t vr
= (vn_reference_t
)vr_
;
1890 gimple
*def_stmt
= SSA_NAME_DEF_STMT (vuse
);
1891 tree base
= ao_ref_base (ref
);
1892 HOST_WIDE_INT offseti
, maxsizei
;
1893 static vec
<vn_reference_op_s
> lhs_ops
;
1895 bool lhs_ref_ok
= false;
1896 poly_int64 copy_size
;
1898 /* First try to disambiguate after value-replacing in the definitions LHS. */
1899 if (is_gimple_assign (def_stmt
))
1901 tree lhs
= gimple_assign_lhs (def_stmt
);
1902 bool valueized_anything
= false;
1903 /* Avoid re-allocation overhead. */
1904 lhs_ops
.truncate (0);
1905 basic_block saved_rpo_bb
= vn_context_bb
;
1906 vn_context_bb
= gimple_bb (def_stmt
);
1907 copy_reference_ops_from_ref (lhs
, &lhs_ops
);
1908 lhs_ops
= valueize_refs_1 (lhs_ops
, &valueized_anything
, true);
1909 vn_context_bb
= saved_rpo_bb
;
1910 if (valueized_anything
)
1912 lhs_ref_ok
= ao_ref_init_from_vn_reference (&lhs_ref
,
1913 get_alias_set (lhs
),
1914 TREE_TYPE (lhs
), lhs_ops
);
1916 && !refs_may_alias_p_1 (ref
, &lhs_ref
, true))
1918 *disambiguate_only
= true;
1924 ao_ref_init (&lhs_ref
, lhs
);
1928 /* If we reach a clobbering statement try to skip it and see if
1929 we find a VN result with exactly the same value as the
1930 possible clobber. In this case we can ignore the clobber
1931 and return the found value.
1932 Note that we don't need to worry about partial overlapping
1933 accesses as we then can use TBAA to disambiguate against the
1934 clobbering statement when looking up a load (thus the
1935 VN_WALKREWRITE guard). */
1936 if (vn_walk_kind
== VN_WALKREWRITE
1937 && is_gimple_reg_type (TREE_TYPE (lhs
))
1938 && types_compatible_p (TREE_TYPE (lhs
), vr
->type
))
1940 tree
*saved_last_vuse_ptr
= last_vuse_ptr
;
1941 /* Do not update last_vuse_ptr in vn_reference_lookup_2. */
1942 last_vuse_ptr
= NULL
;
1943 tree saved_vuse
= vr
->vuse
;
1944 hashval_t saved_hashcode
= vr
->hashcode
;
1945 void *res
= vn_reference_lookup_2 (ref
,
1946 gimple_vuse (def_stmt
), 0, vr
);
1947 /* Need to restore vr->vuse and vr->hashcode. */
1948 vr
->vuse
= saved_vuse
;
1949 vr
->hashcode
= saved_hashcode
;
1950 last_vuse_ptr
= saved_last_vuse_ptr
;
1951 if (res
&& res
!= (void *)-1)
1953 vn_reference_t vnresult
= (vn_reference_t
) res
;
1954 if (vnresult
->result
1955 && operand_equal_p (vnresult
->result
,
1956 gimple_assign_rhs1 (def_stmt
), 0))
1961 else if (gimple_call_builtin_p (def_stmt
, BUILT_IN_NORMAL
)
1962 && gimple_call_num_args (def_stmt
) <= 4)
1964 /* For builtin calls valueize its arguments and call the
1965 alias oracle again. Valueization may improve points-to
1966 info of pointers and constify size and position arguments.
1967 Originally this was motivated by PR61034 which has
1968 conditional calls to free falsely clobbering ref because
1969 of imprecise points-to info of the argument. */
1971 bool valueized_anything
= false;
1972 for (unsigned i
= 0; i
< gimple_call_num_args (def_stmt
); ++i
)
1974 oldargs
[i
] = gimple_call_arg (def_stmt
, i
);
1975 tree val
= vn_valueize (oldargs
[i
]);
1976 if (val
!= oldargs
[i
])
1978 gimple_call_set_arg (def_stmt
, i
, val
);
1979 valueized_anything
= true;
1982 if (valueized_anything
)
1984 bool res
= call_may_clobber_ref_p_1 (as_a
<gcall
*> (def_stmt
),
1986 for (unsigned i
= 0; i
< gimple_call_num_args (def_stmt
); ++i
)
1987 gimple_call_set_arg (def_stmt
, i
, oldargs
[i
]);
1990 *disambiguate_only
= true;
1996 if (*disambiguate_only
)
1999 /* If we cannot constrain the size of the reference we cannot
2000 test if anything kills it. */
2001 if (!ref
->max_size_known_p ())
2004 poly_int64 offset
= ref
->offset
;
2005 poly_int64 maxsize
= ref
->max_size
;
2007 /* We can't deduce anything useful from clobbers. */
2008 if (gimple_clobber_p (def_stmt
))
2011 /* def_stmt may-defs *ref. See if we can derive a value for *ref
2012 from that definition.
2014 if (is_gimple_reg_type (vr
->type
)
2015 && gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMSET
)
2016 && (integer_zerop (gimple_call_arg (def_stmt
, 1))
2017 || ((TREE_CODE (gimple_call_arg (def_stmt
, 1)) == INTEGER_CST
2018 || (INTEGRAL_TYPE_P (vr
->type
) && known_eq (ref
->size
, 8)))
2019 && CHAR_BIT
== 8 && BITS_PER_UNIT
== 8
2020 && offset
.is_constant (&offseti
)
2021 && offseti
% BITS_PER_UNIT
== 0))
2022 && poly_int_tree_p (gimple_call_arg (def_stmt
, 2))
2023 && (TREE_CODE (gimple_call_arg (def_stmt
, 0)) == ADDR_EXPR
2024 || TREE_CODE (gimple_call_arg (def_stmt
, 0)) == SSA_NAME
))
2027 poly_int64 offset2
, size2
, maxsize2
;
2029 tree ref2
= gimple_call_arg (def_stmt
, 0);
2030 if (TREE_CODE (ref2
) == SSA_NAME
)
2032 ref2
= SSA_VAL (ref2
);
2033 if (TREE_CODE (ref2
) == SSA_NAME
2034 && (TREE_CODE (base
) != MEM_REF
2035 || TREE_OPERAND (base
, 0) != ref2
))
2037 gimple
*def_stmt
= SSA_NAME_DEF_STMT (ref2
);
2038 if (gimple_assign_single_p (def_stmt
)
2039 && gimple_assign_rhs_code (def_stmt
) == ADDR_EXPR
)
2040 ref2
= gimple_assign_rhs1 (def_stmt
);
2043 if (TREE_CODE (ref2
) == ADDR_EXPR
)
2045 ref2
= TREE_OPERAND (ref2
, 0);
2046 base2
= get_ref_base_and_extent (ref2
, &offset2
, &size2
, &maxsize2
,
2048 if (!known_size_p (maxsize2
)
2049 || !known_eq (maxsize2
, size2
)
2050 || !operand_equal_p (base
, base2
, OEP_ADDRESS_OF
))
2053 else if (TREE_CODE (ref2
) == SSA_NAME
)
2056 if (TREE_CODE (base
) != MEM_REF
2057 || !(mem_ref_offset (base
) << LOG2_BITS_PER_UNIT
).to_shwi (&soff
))
2061 if (TREE_OPERAND (base
, 0) != ref2
)
2063 gimple
*def
= SSA_NAME_DEF_STMT (ref2
);
2064 if (is_gimple_assign (def
)
2065 && gimple_assign_rhs_code (def
) == POINTER_PLUS_EXPR
2066 && gimple_assign_rhs1 (def
) == TREE_OPERAND (base
, 0)
2067 && poly_int_tree_p (gimple_assign_rhs2 (def
))
2068 && (wi::to_poly_offset (gimple_assign_rhs2 (def
))
2069 << LOG2_BITS_PER_UNIT
).to_shwi (&offset2
))
2071 ref2
= gimple_assign_rhs1 (def
);
2072 if (TREE_CODE (ref2
) == SSA_NAME
)
2073 ref2
= SSA_VAL (ref2
);
2081 tree len
= gimple_call_arg (def_stmt
, 2);
2082 if (known_subrange_p (offset
, maxsize
, offset2
,
2083 wi::to_poly_offset (len
) << LOG2_BITS_PER_UNIT
))
2086 if (integer_zerop (gimple_call_arg (def_stmt
, 1)))
2087 val
= build_zero_cst (vr
->type
);
2088 else if (INTEGRAL_TYPE_P (vr
->type
)
2089 && known_eq (ref
->size
, 8))
2091 gimple_match_op
res_op (gimple_match_cond::UNCOND
, NOP_EXPR
,
2092 vr
->type
, gimple_call_arg (def_stmt
, 1));
2093 val
= vn_nary_build_or_lookup (&res_op
);
2095 || (TREE_CODE (val
) == SSA_NAME
2096 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val
)))
2101 unsigned len
= TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr
->type
));
2102 unsigned char *buf
= XALLOCAVEC (unsigned char, len
);
2103 memset (buf
, TREE_INT_CST_LOW (gimple_call_arg (def_stmt
, 1)),
2105 val
= native_interpret_expr (vr
->type
, buf
, len
);
2109 return vn_reference_lookup_or_insert_for_pieces
2110 (vuse
, vr
->set
, vr
->type
, vr
->operands
, val
);
2114 /* 2) Assignment from an empty CONSTRUCTOR. */
2115 else if (is_gimple_reg_type (vr
->type
)
2116 && gimple_assign_single_p (def_stmt
)
2117 && gimple_assign_rhs_code (def_stmt
) == CONSTRUCTOR
2118 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt
)) == 0)
2121 poly_int64 offset2
, size2
, maxsize2
;
2123 base2
= get_ref_base_and_extent (gimple_assign_lhs (def_stmt
),
2124 &offset2
, &size2
, &maxsize2
, &reverse
);
2125 if (known_size_p (maxsize2
)
2126 && operand_equal_p (base
, base2
, 0)
2127 && known_subrange_p (offset
, maxsize
, offset2
, size2
))
2129 tree val
= build_zero_cst (vr
->type
);
2130 return vn_reference_lookup_or_insert_for_pieces
2131 (vuse
, vr
->set
, vr
->type
, vr
->operands
, val
);
  /* 3) Assignment from a constant.  We can use folds native encode/interpret
     routines to extract the assigned bits.  */
  else if (known_eq (ref->size, maxsize)
	   && is_gimple_reg_type (vr->type)
	   && !contains_storage_order_barrier_p (vr->operands)
	   && gimple_assign_single_p (def_stmt)
	   && CHAR_BIT == 8 && BITS_PER_UNIT == 8
	   /* native_encode and native_decode operate on arrays of bytes
	      and so fundamentally need a compile-time size and offset.  */
	   && maxsize.is_constant (&maxsizei)
	   && maxsizei % BITS_PER_UNIT == 0
	   && offset.is_constant (&offseti)
	   && offseti % BITS_PER_UNIT == 0
	   && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
	       || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
		   && is_gimple_min_invariant
			(SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2;
      bool reverse;
      base2 = get_ref_base_and_extent_hwi (gimple_assign_lhs (def_stmt),
					   &offset2, &size2, &reverse);
      if (base2
	  && !reverse
	  && size2 % BITS_PER_UNIT == 0
	  && offset2 % BITS_PER_UNIT == 0
	  && operand_equal_p (base, base2, 0)
	  && known_subrange_p (offseti, maxsizei, offset2, size2))
	{
	  /* We support up to 512-bit values (for V8DFmode).  */
	  unsigned char buffer[64];
	  int len;

	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  if (TREE_CODE (rhs) == SSA_NAME)
	    rhs = SSA_VAL (rhs);
	  len = native_encode_expr (rhs,
				    buffer, sizeof (buffer),
				    (offseti - offset2) / BITS_PER_UNIT);
	  if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
	    {
	      tree type = vr->type;
	      /* Make sure to interpret in a type that has a range
		 covering the whole access size.  */
	      if (INTEGRAL_TYPE_P (vr->type)
		  && maxsizei != TYPE_PRECISION (vr->type))
		type = build_nonstandard_integer_type (maxsizei,
						       TYPE_UNSIGNED (type));
	      tree val = native_interpret_expr (type, buffer,
						maxsizei / BITS_PER_UNIT);
	      /* If we chop off bits because the type's precision doesn't
		 match the memory access size this is ok when optimizing
		 reads but not when called from the DSE code during
		 elimination.  */
	      if (val
		  && type != vr->type)
		{
		  if (! int_fits_type_p (val, vr->type))
		    val = NULL_TREE;
		  else
		    val = fold_convert (vr->type, val);
		}

	      if (val)
		return vn_reference_lookup_or_insert_for_pieces
			 (vuse, vr->set, vr->type, vr->operands, val);
	    }
	}
    }
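  /* Case 3) covers partial reads of stored constants, e.g. on a
     little-endian target (hypothetical sketch):

	 u.l = 0x0102030405060708;
	 x_1 = u.i[0];	// value-numbered to 0x05060708

     native_encode_expr serializes the stored constant to a byte buffer
     and native_interpret_expr re-reads the accessed byte range.  */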
  /* 4) Assignment from an SSA name which definition we may be able
     to access pieces from.  */
  else if (known_eq (ref->size, maxsize)
	   && is_gimple_reg_type (vr->type)
	   && !contains_storage_order_barrier_p (vr->operands)
	   && gimple_assign_single_p (def_stmt)
	   && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
    {
      tree base2;
      poly_int64 offset2, size2, maxsize2;
      bool reverse;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
				       &offset2, &size2, &maxsize2,
				       &reverse);
      if (!reverse
	  && known_size_p (maxsize2)
	  && known_eq (maxsize2, size2)
	  && operand_equal_p (base, base2, 0)
	  && known_subrange_p (offset, maxsize, offset2, size2)
	  /* ??? We can't handle bitfield precision extracts without
	     either using an alternate type for the BIT_FIELD_REF and
	     then doing a conversion or possibly adjusting the offset
	     according to endianness.  */
	  && (! INTEGRAL_TYPE_P (vr->type)
	      || known_eq (ref->size, TYPE_PRECISION (vr->type)))
	  && multiple_p (ref->size, BITS_PER_UNIT))
	{
	  gimple_match_op op (gimple_match_cond::UNCOND,
			      BIT_FIELD_REF, vr->type,
			      vn_valueize (gimple_assign_rhs1 (def_stmt)),
			      bitsize_int (ref->size),
			      bitsize_int (offset - offset2));
	  tree val = vn_nary_build_or_lookup (&op);
	  if (val
	      && (TREE_CODE (val) != SSA_NAME
		  || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
	    {
	      vn_reference_t res = vn_reference_lookup_or_insert_for_pieces
				     (vuse, vr->set, vr->type, vr->operands,
				      val);
	      return res;
	    }
	}
    }
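  /* Case 4) turns a covered read of a register-typed store into a
     BIT_FIELD_REF on the stored SSA name, e.g. reading 32 bits out of
     a 64-bit store of x_1 becomes BIT_FIELD_REF <x_1, 32, 0>, which is
     then simplified and value-numbered like any other n-ary operation
     (sketch; bitfield-precision accesses are punted on above).  */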
  /* 5) For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
	   && gimple_assign_single_p (def_stmt)
	   && (DECL_P (gimple_assign_rhs1 (def_stmt))
	       || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
	       || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      int i, j, k;
      auto_vec<vn_reference_op_s> rhs;
      vn_reference_op_t vro;
      ao_ref r;

      if (!lhs_ref_ok)
	return (void *)-1;

      /* See if the assignment kills REF.  */
      base2 = ao_ref_base (&lhs_ref);
      if (!lhs_ref.max_size_known_p ()
	  || (base != base2
	      && (TREE_CODE (base) != MEM_REF
		  || TREE_CODE (base2) != MEM_REF
		  || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
		  || !tree_int_cst_equal (TREE_OPERAND (base, 1),
					  TREE_OPERAND (base2, 1))))
	  || !stmt_kills_ref_p (def_stmt, ref))
	return (void *)-1;

      /* Find the common base of ref and the lhs.  lhs_ops already
	 contains valueized operands for the lhs.  */
      i = vr->operands.length () - 1;
      j = lhs_ops.length () - 1;
      while (j >= 0 && i >= 0
	     && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
	{
	  i--;
	  j--;
	}

      /* ??? The innermost op should always be a MEM_REF and we already
	 checked that the assignment to the lhs kills vr.  Thus for
	 aggregate copies using char[] types the vn_reference_op_eq
	 may fail when comparing types for compatibility.  But we really
	 don't care here - further lookups with the rewritten operands
	 will simply fail if we messed up types too badly.  */
      poly_int64 extra_off = 0;
      if (j == 0 && i >= 0
	  && lhs_ops[0].opcode == MEM_REF
	  && maybe_ne (lhs_ops[0].off, -1))
	{
	  if (known_eq (lhs_ops[0].off, vr->operands[i].off))
	    i--, j--;
	  else if (vr->operands[i].opcode == MEM_REF
		   && maybe_ne (vr->operands[i].off, -1))
	    {
	      extra_off = vr->operands[i].off - lhs_ops[0].off;
	      i--, j--;
	    }
	}

      /* i now points to the first additional op.
	 ??? LHS may not be completely contained in VR, one or more
	 VIEW_CONVERT_EXPRs could be in its way.  We could at least
	 try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
	return (void *)-1;

      /* Punt if the additional ops contain a storage order barrier.  */
      for (k = i; k >= 0; k--)
	{
	  vro = &vr->operands[k];
	  if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
	    return (void *)-1;
	}

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);

      /* Apply an extra offset to the inner MEM_REF of the RHS.  */
      if (maybe_ne (extra_off, 0))
	{
	  if (rhs.length () < 2)
	    return (void *)-1;
	  int ix = rhs.length () - 2;
	  if (rhs[ix].opcode != MEM_REF
	      || known_eq (rhs[ix].off, -1))
	    return (void *)-1;
	  rhs[ix].off += extra_off;
	  rhs[ix].op0 = int_const_binop (PLUS_EXPR, rhs[ix].op0,
					 build_int_cst (TREE_TYPE (rhs[ix].op0),
							extra_off));
	}

      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      vec<vn_reference_op_s> old = vr->operands;
      if (i + 1 + rhs.length () > vr->operands.length ())
	vr->operands.safe_grow (i + 1 + rhs.length ());
      else
	vr->operands.truncate (i + 1 + rhs.length ());
      FOR_EACH_VEC_ELT (rhs, j, vro)
	vr->operands[i + 1 + j] = *vro;
      vr->operands = valueize_refs (vr->operands);
      if (old == shared_lookup_references)
	shared_lookup_references = vr->operands;
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Try folding the new reference to a constant.  */
      tree val = fully_constant_vn_reference_p (vr);
      if (val)
	return vn_reference_lookup_or_insert_for_pieces
		 (vuse, vr->set, vr->type, vr->operands, val);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
	return (void *)-1;
      /* This can happen with bitfields.  */
      if (maybe_ne (ref->size, r.size))
	return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }
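  /* Case 5) rewrites the looked-up reference through the copy, e.g.
     a lookup of 'b.f' walking over 'b = a' continues as a lookup of
     'a.f' with vr->operands rewritten in place, so an earlier
     'a.f = x_1' makes the load redundant.  */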
  /* 6) For memcpy copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
	   && is_gimple_reg_type (vr->type)
	   /* ??? Handle BCOPY as well.  */
	   && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
	   && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
	       || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
	   && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
	       || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
	   && poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size))
    {
      tree lhs, rhs;
      ao_ref r;
      poly_int64 rhs_offset, lhs_offset;
      vn_reference_op_s op;
      poly_uint64 mem_offset;
      poly_int64 at, byte_maxsize;

      /* Only handle non-variable, addressable refs.  */
      if (maybe_ne (ref->size, maxsize)
	  || !multiple_p (offset, BITS_PER_UNIT, &at)
	  || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
	return (void *)-1;

      /* Extract a pointer base and an offset for the destination.  */
      lhs = gimple_call_arg (def_stmt, 0);
      lhs_offset = 0;
      if (TREE_CODE (lhs) == SSA_NAME)
	{
	  lhs = vn_valueize (lhs);
	  if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
	      if (gimple_assign_single_p (def_stmt)
		  && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
		lhs = gimple_assign_rhs1 (def_stmt);
	    }
	}
      if (TREE_CODE (lhs) == ADDR_EXPR)
	{
	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
						    &lhs_offset);
	  if (!tem)
	    return (void *)-1;
	  if (TREE_CODE (tem) == MEM_REF
	      && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
	    {
	      lhs = TREE_OPERAND (tem, 0);
	      if (TREE_CODE (lhs) == SSA_NAME)
		lhs = vn_valueize (lhs);
	      lhs_offset += mem_offset;
	    }
	  else if (DECL_P (tem))
	    lhs = build_fold_addr_expr (tem);
	  else
	    return (void *)-1;
	}
      if (TREE_CODE (lhs) != SSA_NAME
	  && TREE_CODE (lhs) != ADDR_EXPR)
	return (void *)-1;

      /* Extract a pointer base and an offset for the source.  */
      rhs = gimple_call_arg (def_stmt, 1);
      rhs_offset = 0;
      if (TREE_CODE (rhs) == SSA_NAME)
	rhs = vn_valueize (rhs);
      if (TREE_CODE (rhs) == ADDR_EXPR)
	{
	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
						    &rhs_offset);
	  if (!tem)
	    return (void *)-1;
	  if (TREE_CODE (tem) == MEM_REF
	      && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
	    {
	      rhs = TREE_OPERAND (tem, 0);
	      rhs_offset += mem_offset;
	    }
	  else if (DECL_P (tem)
		   || TREE_CODE (tem) == STRING_CST)
	    rhs = build_fold_addr_expr (tem);
	  else
	    return (void *)-1;
	}
      if (TREE_CODE (rhs) != SSA_NAME
	  && TREE_CODE (rhs) != ADDR_EXPR)
	return (void *)-1;

      /* The bases of the destination and the references have to agree.  */
      if (TREE_CODE (base) == MEM_REF)
	{
	  if (TREE_OPERAND (base, 0) != lhs
	      || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
	    return (void *)-1;
	  at += mem_offset;
	}
      else if (!DECL_P (base)
	       || TREE_CODE (lhs) != ADDR_EXPR
	       || TREE_OPERAND (lhs, 0) != base)
	return (void *)-1;

      /* If the access is completely outside of the memcpy destination
	 area there is no aliasing.  */
      if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
	return NULL;
      /* And the access has to be contained within the memcpy destination.  */
      if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
	return (void *)-1;

      /* Make room for 2 operands in the new reference.  */
      if (vr->operands.length () < 2)
	{
	  vec<vn_reference_op_s> old = vr->operands;
	  vr->operands.safe_grow_cleared (2);
	  if (old == shared_lookup_references)
	    shared_lookup_references = vr->operands;
	}
      else
	vr->operands.truncate (2);

      /* The looked-through reference is a simple MEM_REF.  */
      memset (&op, 0, sizeof (op));
      op.type = vr->type;
      op.opcode = MEM_REF;
      op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
      op.off = at - lhs_offset + rhs_offset;
      vr->operands[0] = op;
      op.type = TREE_TYPE (rhs);
      op.opcode = TREE_CODE (rhs);
      op.op0 = rhs;
      op.off = -1;
      vr->operands[1] = op;
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Try folding the new reference to a constant.  */
      tree val = fully_constant_vn_reference_p (vr);
      if (val)
	return vn_reference_lookup_or_insert_for_pieces
		 (vuse, vr->set, vr->type, vr->operands, val);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
	return (void *)-1;
      /* This can happen with bitfields.  */
      if (maybe_ne (ref->size, r.size))
	return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }
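  /* Case 6) works analogously to 5) for memcpy/mempcpy/memmove: e.g.
     a lookup of '*d' walking over 'memcpy (d, s, n)' that is fully
     covered by the copy continues as a lookup of '*s', expressed as
     the two-operand MEM_REF built above.  */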
  /* Bail out and stop walking.  */
  return (void *)-1;
}
/* Return a reference op vector from OP that can be used for
   vn_reference_lookup_pieces.  The caller is responsible for releasing
   the vector.  */

vec<vn_reference_op_s>
vn_reference_operands_for_lookup (tree op)
{
  bool valueized;
  return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
}
/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
			    vec<vn_reference_op_s> operands,
			    vn_reference_t *vnresult, vn_lookup_kind kind)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;
  tree cst;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse_ssa_val (vuse);
  shared_lookup_references.truncate (0);
  shared_lookup_references.safe_grow (operands.length ());
  memcpy (shared_lookup_references.address (),
	  operands.address (),
	  sizeof (vn_reference_op_s)
	  * operands.length ());
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  vn_reference_lookup_1 (&vr1, vnresult);
  if (!*vnresult
      && kind != VN_NOWALK
      && vr1.vuse)
    {
      ao_ref r;
      vn_walk_kind = kind;
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
	*vnresult
	  = (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
						    vn_reference_lookup_2,
						    vn_reference_lookup_3,
						    vuse_valueize, &vr1);
      gcc_checking_assert (vr1.operands == shared_lookup_references);
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  When TBAA_P is false assume
   we are looking up a store and treat it as having alias-set zero.  */

tree
vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
		     vn_reference_t *vnresult, bool tbaa_p)
{
  vec<vn_reference_op_s> operands;
  struct vn_reference_s vr1;
  tree cst;
  bool valueized_anything;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse_ssa_val (vuse);
  vr1.operands = operands
    = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
  vr1.type = TREE_TYPE (op);
  vr1.set = tbaa_p ? get_alias_set (op) : 0;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  if (kind != VN_NOWALK
      && vr1.vuse)
    {
      vn_reference_t wvnresult;
      ao_ref r;
      /* Make sure to use a valueized reference if we valueized anything.
         Otherwise preserve the full reference for advanced TBAA.  */
      if (!valueized_anything
	  || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
					     vr1.operands))
	ao_ref_init (&r, op);
      if (! tbaa_p)
	r.ref_alias_set = r.base_alias_set = 0;
      vn_walk_kind = kind;
      wvnresult
	= (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
						  vn_reference_lookup_2,
						  vn_reference_lookup_3,
						  vuse_valueize, &vr1);
      gcc_checking_assert (vr1.operands == shared_lookup_references);
      if (wvnresult)
	{
	  if (vnresult)
	    *vnresult = wvnresult;
	  return wvnresult->result;
	}

      return NULL_TREE;
    }

  return vn_reference_lookup_1 (&vr1, vnresult);
}
/* Lookup CALL in the current hash table and return the entry in
   *VNRESULT if found.  Populates *VR for the hashtable lookup.  */

void
vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
			  vn_reference_t vr)
{
  if (vnresult)
    *vnresult = NULL;

  tree vuse = gimple_vuse (call);

  vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr->operands = valueize_shared_reference_ops_from_call (call);
  vr->type = gimple_expr_type (call);
  vr->set = 0;
  vr->hashcode = vn_reference_compute_hash (vr);
  vn_reference_lookup_1 (vr, vnresult);
}
/* Insert OP into the current hash table with a value number of RESULT.  */

static void
vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
{
  vn_reference_s **slot;
  vn_reference_t vr1;
  bool tem;

  vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse_ssa_val (vuse);
  vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
  vr1->type = TREE_TYPE (op);
  vr1->set = get_alias_set (op);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
  vr1->result_vdef = vdef;

  slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
						      INSERT);

  /* Because IL walking on reference lookup can end up visiting
     a def that is only to be visited later in iteration order
     when we are about to make an irreducible region reducible
     the def can be effectively processed and its ref being inserted
     by vn_reference_lookup_3 already.  So we cannot assert (!*slot)
     but save a lookup if we deal with already inserted refs here.  */
  if (*slot)
    {
      /* We cannot assert that we have the same value either because
         when disentangling an irreducible region we may end up visiting
	 a use before the corresponding def.  That's a missed optimization
	 only though.  See gcc.dg/tree-ssa/pr87126.c for example.  */
      if (dump_file && (dump_flags & TDF_DETAILS)
	  && !operand_equal_p ((*slot)->result, vr1->result, 0))
	{
	  fprintf (dump_file, "Keeping old value ");
	  print_generic_expr (dump_file, (*slot)->result);
	  fprintf (dump_file, " because of collision\n");
	}
      free_reference (vr1);
      obstack_free (&vn_tables_obstack, vr1);
      return;
    }

  *slot = vr1;
  vr1->next = last_inserted_ref;
  last_inserted_ref = vr1;
}
/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
			    vec<vn_reference_op_s> operands,
			    tree result, unsigned int value_id)
{
  vn_reference_s **slot;
  vn_reference_t vr1;

  vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
  vr1->value_id = value_id;
  vr1->vuse = vuse_ssa_val (vuse);
  vr1->operands = valueize_refs (operands);
  vr1->type = type;
  vr1->set = set;
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
						      INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  *slot = vr1;

  vr1->next = last_inserted_ref;
  last_inserted_ref = vr1;
  return vr1;
}
/* Compute and return the hash value for nary operation VNO1.  */

static hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  inchash::hash hstate;
  unsigned i;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (((vno1->length == 2
	&& commutative_tree_code (vno1->opcode))
       || (vno1->length == 3
	   && commutative_ternary_tree_code (vno1->opcode)))
      && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
    std::swap (vno1->op[0], vno1->op[1]);
  else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
	   && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
    {
      std::swap (vno1->op[0], vno1->op[1]);
      vno1->opcode = swap_tree_comparison (vno1->opcode);
    }

  hstate.add_int (vno1->opcode);
  for (i = 0; i < vno1->length; ++i)
    inchash::add_expr (vno1->op[i], hstate);

  return hstate.end ();
}
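/* The canonicalization above makes the hash (and the subsequent
   equality check) insensitive to commutative operand order: 'a_1 + b_2'
   and 'b_2 + a_1' receive the same hash, and a comparison like
   'a_1 < b_2' may be recorded as the swapped 'b_2 > a_1' when
   tree_swap_operands_p prefers that order.  */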
/* Compare nary operations VNO1 and VNO2 and return true if they are
   equivalent.  */

bool
vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
{
  unsigned i;

  if (vno1->hashcode != vno2->hashcode)
    return false;

  if (vno1->length != vno2->length)
    return false;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  /* BIT_INSERT_EXPR has an implicit operand as the type precision
     of op1.  Need to check to make sure they are the same.  */
  if (vno1->opcode == BIT_INSERT_EXPR
      && TREE_CODE (vno1->op[1]) == INTEGER_CST
      && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
	 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
    return false;

  return true;
}
/* Initialize VNO from the pieces provided.  */

static void
init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
			     enum tree_code code, tree type, tree *ops)
{
  vno->opcode = code;
  vno->length = length;
  vno->type = type;
  memcpy (&vno->op[0], ops, sizeof (tree) * length);
}
/* Initialize VNO from OP.  */

static void
init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
{
  unsigned i;

  vno->opcode = TREE_CODE (op);
  vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
  vno->type = TREE_TYPE (op);
  for (i = 0; i < vno->length; ++i)
    vno->op[i] = TREE_OPERAND (op, i);
}
/* Return the number of operands for a vn_nary ops structure from STMT.  */

static unsigned int
vn_nary_length_from_stmt (gimple *stmt)
{
  switch (gimple_assign_rhs_code (stmt))
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      return 1;

    case BIT_FIELD_REF:
      return 3;

    case CONSTRUCTOR:
      return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));

    default:
      return gimple_num_ops (stmt) - 1;
    }
}
/* Initialize VNO from STMT.  */

static void
init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple *stmt)
{
  unsigned i;

  vno->opcode = gimple_assign_rhs_code (stmt);
  vno->type = gimple_expr_type (stmt);
  switch (vno->opcode)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      vno->length = 1;
      vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      break;

    case BIT_FIELD_REF:
      vno->length = 3;
      vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
      vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
      break;

    case CONSTRUCTOR:
      vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
      for (i = 0; i < vno->length; ++i)
	vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
      break;

    default:
      gcc_checking_assert (!gimple_assign_single_p (stmt));
      vno->length = gimple_num_ops (stmt) - 1;
      for (i = 0; i < vno->length; ++i)
	vno->op[i] = gimple_op (stmt, i + 1);
    }
}
/* Compute the hashcode for VNO and look for it in the hash table;
   return the resulting value number if it exists in the hash table.
   Return NULL_TREE if it does not exist in the hash table or if the
   result field of the operation is NULL.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

static tree
vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
{
  vn_nary_op_s **slot;

  if (vnresult)
    *vnresult = NULL;

  vno->hashcode = vn_nary_op_compute_hash (vno);
  slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = *slot;
  return (*slot)->predicated_values ? NULL_TREE : (*slot)->u.result;
}
/* Lookup a n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
			  tree type, tree *ops, vn_nary_op_t *vnresult)
{
  vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
				  sizeof_vn_nary_op (length));
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  return vn_nary_op_lookup_1 (vno1, vnresult);
}
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
{
  vn_nary_op_t vno1
    = XALLOCAVAR (struct vn_nary_op_s,
		  sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
  init_vn_nary_op_from_op (vno1, op);
  return vn_nary_op_lookup_1 (vno1, vnresult);
}
/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

tree
vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
{
  vn_nary_op_t vno1
    = XALLOCAVAR (struct vn_nary_op_s,
		  sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
  init_vn_nary_op_from_stmt (vno1, stmt);
  return vn_nary_op_lookup_1 (vno1, vnresult);
}
/* Allocate a vn_nary_op_t with LENGTH operands on STACK.  */

static vn_nary_op_t
alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
{
  return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
}

/* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
   obstack.  */

static vn_nary_op_t
alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
{
  vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length, &vn_tables_obstack);

  vno1->value_id = value_id;
  vno1->length = length;
  vno1->predicated_values = 0;
  vno1->u.result = result;

  return vno1;
}
/* Insert VNO into TABLE.  If COMPUTE_HASH is true, then compute
   VNO->HASHCODE first.  */

static vn_nary_op_t
vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
			bool compute_hash)
{
  vn_nary_op_s **slot;

  if (compute_hash)
    {
      vno->hashcode = vn_nary_op_compute_hash (vno);
      gcc_assert (! vno->predicated_values
		  || (! vno->u.values->next
		      && vno->u.values->n == 1));
    }

  slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
  vno->unwind_to = *slot;
  if (*slot)
    {
      /* Prefer non-predicated values.
         ??? Only if those are constant, otherwise, with constant predicated
	 value, turn them into predicated values with entry-block validity
	 (??? but we always find the first valid result currently).  */
      if ((*slot)->predicated_values
	  && ! vno->predicated_values)
	{
	  /* ??? We cannot remove *slot from the unwind stack list.
	     For the moment we deal with this by skipping not found
	     entries but this isn't ideal ...  */
	  *slot = vno;
	  /* ??? Maintain a stack of states we can unwind in
	     vn_nary_op_s?  But how far do we unwind?  In reality
	     we need to push change records somewhere...  Or not
	     unwind vn_nary_op_s and linking them but instead
	     unwind the results "list", linking that, which also
	     doesn't move on hashtable resize.  */
	  /* We can also have a ->unwind_to recording *slot there.
	     That way we can make u.values a fixed size array with
	     recording the number of entries but of course we then
	     have always N copies for each unwind_to-state.  Or we
	     make sure to only ever append and each unwinding will
	     pop off one entry (but how to deal with predicated
	     replaced with non-predicated here?)  */
	  vno->next = last_inserted_nary;
	  last_inserted_nary = vno;
	  return vno;
	}
      else if (vno->predicated_values
	       && ! (*slot)->predicated_values)
	return *slot;
      else if (vno->predicated_values
	       && (*slot)->predicated_values)
	{
	  /* ??? Factor this all into an insert_single_predicated_value
	     routine.  */
	  gcc_assert (!vno->u.values->next && vno->u.values->n == 1);
	  basic_block vno_bb
	    = BASIC_BLOCK_FOR_FN (cfun, vno->u.values->valid_dominated_by_p[0]);
	  vn_pval *nval = vno->u.values;
	  vn_pval **next = &vno->u.values;
	  bool found = false;
	  for (vn_pval *val = (*slot)->u.values; val; val = val->next)
	    {
	      if (expressions_equal_p (val->result, vno->u.values->result))
		{
		  found = true;
		  for (unsigned i = 0; i < val->n; ++i)
		    {
		      basic_block val_bb
			= BASIC_BLOCK_FOR_FN (cfun,
					      val->valid_dominated_by_p[i]);
		      if (dominated_by_p (CDI_DOMINATORS, vno_bb, val_bb))
			/* Value registered with more generic predicate.  */
			return *slot;
		      else if (dominated_by_p (CDI_DOMINATORS, val_bb, vno_bb))
			/* Shouldn't happen, we insert in RPO order.  */
			gcc_unreachable ();
		    }
		  /* Append value.  */
		  *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
						     sizeof (vn_pval)
						     + val->n * sizeof (int));
		  (*next)->next = NULL;
		  (*next)->result = val->result;
		  (*next)->n = val->n + 1;
		  memcpy ((*next)->valid_dominated_by_p,
			  val->valid_dominated_by_p,
			  val->n * sizeof (int));
		  (*next)->valid_dominated_by_p[val->n] = vno_bb->index;
		  next = &(*next)->next;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "Appending predicate to value.\n");
		  continue;
		}
	      /* Copy other predicated values.  */
	      *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
						 sizeof (vn_pval)
						 + (val->n-1) * sizeof (int));
	      memcpy (*next, val, sizeof (vn_pval) + (val->n-1) * sizeof (int));
	      (*next)->next = NULL;
	      next = &(*next)->next;
	    }
	  if (!found)
	    *next = nval;

	  *slot = vno;
	  vno->next = last_inserted_nary;
	  last_inserted_nary = vno;
	  return vno;
	}

      /* While we do not want to insert things twice it's awkward to
	 avoid it in the case where visit_nary_op pattern-matches stuff
	 and ends up simplifying the replacement to itself.  We then
	 get two inserts, one from visit_nary_op and one from
	 vn_nary_build_or_lookup.
	 So allow inserts with the same value number.  */
      if ((*slot)->u.result == vno->u.result)
	return *slot;
    }

  /* ??? There's also optimistic vs. previous committed state merging
     that is problematic for the case of unwinding.  */

  /* ??? We should return NULL if we do not use 'vno' and have the
     caller release it.  */
  gcc_assert (!*slot);

  *slot = vno;
  vno->next = last_inserted_nary;
  last_inserted_nary = vno;
  return vno;
}
/* Insert a n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
			  tree type, tree *ops,
			  tree result, unsigned int value_id)
{
  vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  return vn_nary_op_insert_into (vno1, valid_info->nary, true);
}
static vn_nary_op_t
vn_nary_op_insert_pieces_predicated (unsigned int length, enum tree_code code,
				     tree type, tree *ops,
				     tree result, unsigned int value_id,
				     edge pred_e)
{
  /* ??? Currently tracking BBs.  */
  if (! single_pred_p (pred_e->dest))
    {
      /* Never record for backedges.  */
      if (pred_e->flags & EDGE_DFS_BACK)
	return NULL;
      edge_iterator ei;
      edge e;
      int cnt = 0;
      /* Ignore backedges.  */
      FOR_EACH_EDGE (e, ei, pred_e->dest->preds)
	if (! dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
	  cnt++;
      if (cnt != 1)
	return NULL;
    }
  if (dump_file && (dump_flags & TDF_DETAILS)
      /* ??? Fix dumping, but currently we only get comparisons.  */
      && TREE_CODE_CLASS (code) == tcc_comparison)
    {
      fprintf (dump_file, "Recording on edge %d->%d ", pred_e->src->index,
	       pred_e->dest->index);
      print_generic_expr (dump_file, ops[0], TDF_SLIM);
      fprintf (dump_file, " %s ", get_tree_code_name (code));
      print_generic_expr (dump_file, ops[1], TDF_SLIM);
      fprintf (dump_file, " == %s\n",
	       integer_zerop (result) ? "false" : "true");
    }
  vn_nary_op_t vno1 = alloc_vn_nary_op (length, NULL_TREE, value_id);
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  vno1->predicated_values = 1;
  vno1->u.values = (vn_pval *) obstack_alloc (&vn_tables_obstack,
					      sizeof (vn_pval));
  vno1->u.values->next = NULL;
  vno1->u.values->result = result;
  vno1->u.values->n = 1;
  vno1->u.values->valid_dominated_by_p[0] = pred_e->dest->index;
  return vn_nary_op_insert_into (vno1, valid_info->nary, true);
}
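/* A predicated value records, per n-ary operation, (result, block)
   pairs that are only valid at lookup points dominated by the recorded
   block.  E.g. for 'if (a_1 == 0)' the comparison is recorded as true
   in the true-edge destination and as false in the false-edge
   destination, so dominated lookups can fold it to a constant.  */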
static bool
dominated_by_p_w_unex (basic_block bb1, basic_block bb2);

static tree
vn_nary_op_get_predicated_value (vn_nary_op_t vno, basic_block bb)
{
  if (! vno->predicated_values)
    return vno->u.result;
  for (vn_pval *val = vno->u.values; val; val = val->next)
    for (unsigned i = 0; i < val->n; ++i)
      if (dominated_by_p_w_unex (bb,
				 BASIC_BLOCK_FOR_FN
				   (cfun, val->valid_dominated_by_p[i])))
	return val->result;
  return NULL_TREE;
}
/* Insert OP into the current hash table with a value number of
   RESULT.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert (tree op, tree result)
{
  unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
  vn_nary_op_t vno1;

  vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
  init_vn_nary_op_from_op (vno1, op);
  return vn_nary_op_insert_into (vno1, valid_info->nary, true);
}
/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */

static vn_nary_op_t
vn_nary_op_insert_stmt (gimple *stmt, tree result)
{
  vn_nary_op_t vno1
    = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
			result, VN_INFO (result)->value_id);
  init_vn_nary_op_from_stmt (vno1, stmt);
  return vn_nary_op_insert_into (vno1, valid_info->nary, true);
}
/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  inchash::hash hstate (EDGE_COUNT (vp1->block->preds) > 2
			? vp1->block->index
			: EDGE_COUNT (vp1->block->preds));
  tree phi1op;
  tree type;
  edge e;
  edge_iterator ei;

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
  type = vp1->type;
  hstate.merge_hash (vn_hash_type (type));

  FOR_EACH_EDGE (e, ei, vp1->block->preds)
    {
      /* Don't hash backedge values they need to be handled as VN_TOP
         for optimistic value-numbering.  */
      if (e->flags & EDGE_DFS_BACK)
	continue;

      phi1op = vp1->phiargs[e->dest_idx];
      if (phi1op == VN_TOP)
	continue;
      inchash::add_expr (phi1op, hstate);
    }

  return hstate.end ();
}
/* Return true if COND1 and COND2 represent the same condition, set
   *INVERTED_P if one needs to be inverted to make it the same as
   the other.  */

static bool
cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
		    gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
{
  enum tree_code code1 = gimple_cond_code (cond1);
  enum tree_code code2 = gimple_cond_code (cond2);

  *inverted_p = false;
  if (code1 == code2)
    ;
  else if (code1 == swap_tree_comparison (code2))
    std::swap (lhs2, rhs2);
  else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
    *inverted_p = true;
  else if (code1 == invert_tree_comparison
	     (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
    {
      std::swap (lhs2, rhs2);
      *inverted_p = true;
    }
  else
    return false;

  return ((expressions_equal_p (lhs1, lhs2)
	   && expressions_equal_p (rhs1, rhs2))
	  || (commutative_tree_code (code1)
	      && expressions_equal_p (lhs1, rhs2)
	      && expressions_equal_p (rhs1, lhs2)));
}
/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
{
  if (vp1->hashcode != vp2->hashcode)
    return false;

  if (vp1->block != vp2->block)
    {
      if (EDGE_COUNT (vp1->block->preds) != EDGE_COUNT (vp2->block->preds))
	return false;

      switch (EDGE_COUNT (vp1->block->preds))
	{
	case 1:
	  /* Single-arg PHIs are just copies.  */
	  break;

	case 2:
	  {
	    /* Rule out backedges into the PHI.  */
	    if (vp1->block->loop_father->header == vp1->block
		|| vp2->block->loop_father->header == vp2->block)
	      return false;

	    /* If the PHI nodes do not have compatible types
	       they are not the same.  */
	    if (!types_compatible_p (vp1->type, vp2->type))
	      return false;

	    basic_block idom1
	      = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
	    basic_block idom2
	      = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
	    /* If the immediate dominators end in switch stmts multiple
	       values may end up in the same PHI arg via intermediate
	       CFG merges.  */
	    if (EDGE_COUNT (idom1->succs) != 2
		|| EDGE_COUNT (idom2->succs) != 2)
	      return false;

	    /* Verify the controlling stmt is the same.  */
	    gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
	    gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
	    if (! last1 || ! last2)
	      return false;
	    bool inverted_p;
	    if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
				      last2, vp2->cclhs, vp2->ccrhs,
				      &inverted_p))
	      return false;

	    /* Get at true/false controlled edges into the PHI.  */
	    edge te1, te2, fe1, fe2;
	    if (! extract_true_false_controlled_edges (idom1, vp1->block,
						       &te1, &fe1)
		|| ! extract_true_false_controlled_edges (idom2, vp2->block,
							  &te2, &fe2))
	      return false;

	    /* Swap edges if the second condition is the inverted of the
	       first.  */
	    if (inverted_p)
	      std::swap (te2, fe2);

	    /* ??? Handle VN_TOP specially.  */
	    if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
				       vp2->phiargs[te2->dest_idx])
		|| ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
					  vp2->phiargs[fe2->dest_idx]))
	      return false;

	    return true;
	  }

	default:
	  return false;
	}
    }

  /* If the PHI nodes do not have compatible types
     they are not the same.  */
  if (!types_compatible_p (vp1->type, vp2->type))
    return false;

  /* Any phi in the same block will have its arguments in the
     same edge order, because of how we store phi nodes.  */
  for (unsigned i = 0; i < EDGE_COUNT (vp1->block->preds); ++i)
    {
      tree phi1op = vp1->phiargs[i];
      tree phi2op = vp2->phiargs[i];
      if (phi1op == VN_TOP || phi2op == VN_TOP)
	continue;
      if (!expressions_equal_p (phi1op, phi2op))
	return false;
    }

  return true;
}
/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

static tree
vn_phi_lookup (gimple *phi, bool backedges_varying_p)
{
  vn_phi_s **slot;
  struct vn_phi_s *vp1;
  edge e;
  edge_iterator ei;

  vp1 = XALLOCAVAR (struct vn_phi_s,
		    sizeof (struct vn_phi_s)
		    + (gimple_phi_num_args (phi) - 1) * sizeof (tree));

  /* Canonicalize the SSA_NAME's to their value number.  */
  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
    {
      tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      if (TREE_CODE (def) == SSA_NAME
	  && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
	def = SSA_VAL (def);
      vp1->phiargs[e->dest_idx] = def;
    }
  vp1->type = TREE_TYPE (gimple_phi_result (phi));
  vp1->block = gimple_bb (phi);
  /* Extract values of the controlling condition.  */
  vp1->cclhs = NULL_TREE;
  vp1->ccrhs = NULL_TREE;
  basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
  if (EDGE_COUNT (idom1->succs) == 2)
    if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
      {
	/* ??? We want to use SSA_VAL here.  But possibly not
	   allow VN_TOP.  */
	vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
	vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
      }
  vp1->hashcode = vn_phi_compute_hash (vp1);
  slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, NO_INSERT);
  if (!slot)
    return NULL_TREE;
  return (*slot)->result;
}
/* Insert PHI into the current hash table with a value number of
   RESULT.  */

static vn_phi_t
vn_phi_insert (gimple *phi, tree result, bool backedges_varying_p)
{
  vn_phi_s **slot;
  vn_phi_t vp1 = (vn_phi_t) obstack_alloc (&vn_tables_obstack,
					   sizeof (vn_phi_s)
					   + ((gimple_phi_num_args (phi) - 1)
					      * sizeof (tree)));
  edge e;
  edge_iterator ei;

  /* Canonicalize the SSA_NAME's to their value number.  */
  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
    {
      tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      if (TREE_CODE (def) == SSA_NAME
	  && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
	def = SSA_VAL (def);
      vp1->phiargs[e->dest_idx] = def;
    }
  vp1->value_id = VN_INFO (result)->value_id;
  vp1->type = TREE_TYPE (gimple_phi_result (phi));
  vp1->block = gimple_bb (phi);
  /* Extract values of the controlling condition.  */
  vp1->cclhs = NULL_TREE;
  vp1->ccrhs = NULL_TREE;
  basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
  if (EDGE_COUNT (idom1->succs) == 2)
    if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
      {
	/* ??? We want to use SSA_VAL here.  But possibly not
	   allow VN_TOP.  */
	vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
	vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
      }
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
  gcc_assert (!*slot);

  *slot = vp1;
  vp1->next = last_inserted_phi;
  last_inserted_phi = vp1;
  return vp1;
}
/* Return true if BB1 is dominated by BB2 taking into account edges
   that are not executable.  */

static bool
dominated_by_p_w_unex (basic_block bb1, basic_block bb2)
{
  edge_iterator ei;
  edge e;

  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
    return true;

  /* Before iterating we'd like to know if there exists a
     (executable) path from bb2 to bb1 at all, if not we can
     directly return false.  For now simply iterate once.  */

  /* Iterate to the single executable bb1 predecessor.  */
  if (EDGE_COUNT (bb1->preds) > 1)
    {
      edge prede = NULL;
      FOR_EACH_EDGE (e, ei, bb1->preds)
	if (e->flags & EDGE_EXECUTABLE)
	  {
	    if (prede)
	      {
		prede = NULL;
		break;
	      }
	    prede = e;
	  }
      if (! prede)
	return false;
      bb1 = prede->src;

      /* Re-do the dominance check with changed bb1.  */
      if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
	return true;
    }

  /* Iterate to the single executable bb2 successor.  */
  edge succe = NULL;
  FOR_EACH_EDGE (e, ei, bb2->succs)
    if (e->flags & EDGE_EXECUTABLE)
      {
	if (succe)
	  {
	    succe = NULL;
	    break;
	  }
	succe = e;
      }
  if (succe)
    {
      /* Verify the reached block is only reached through succe.
	 If there is only one edge we can spare us the dominator
	 check and iterate directly.  */
      if (EDGE_COUNT (succe->dest->preds) > 1)
	{
	  FOR_EACH_EDGE (e, ei, succe->dest->preds)
	    if (e != succe
		&& (e->flags & EDGE_EXECUTABLE))
	      {
		succe = NULL;
		break;
	      }
	}
      if (succe)
	{
	  bb2 = succe->dest;

	  /* Re-do the dominance check with changed bb2.  */
	  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
	    return true;
	}
    }

  /* We could now iterate updating bb1 / bb2.  */
  return false;
}
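/* For example, when the only executable predecessor of BB1 is itself
   dominated by BB2, BB1 is effectively dominated by BB2 even though the
   plain CFG dominance query says otherwise; the code above performs one
   such step through not-executable edges in each direction.  */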
/* Set the value number of FROM to TO, return true if it has changed
   as a result.  */

static inline bool
set_ssa_val_to (tree from, tree to)
{
  vn_ssa_aux_t from_info = VN_INFO (from);
  tree currval = from_info->valnum; // SSA_VAL (from)
  poly_int64 toff, coff;

  /* The only thing we allow as value numbers are ssa_names
     and invariants.  So assert that here.  We don't allow VN_TOP
     as visiting a stmt should produce a value-number other than
     that.
     ??? Still VN_TOP can happen for unreachable code, so force
     it to varying in that case.  Not all code is prepared to
     get VN_TOP on valueization.  */
  if (to == VN_TOP)
    {
      /* ??? When iterating and visiting PHI <undef, backedge-value>
         for the first time we rightfully get VN_TOP and we need to
	 preserve that to optimize for example gcc.dg/tree-ssa/ssa-sccvn-2.c.
	 With SCCVN we were simply lucky we iterated the other PHI
	 cycles first and thus visited the backedge-value DEF.  */
      if (currval == VN_TOP)
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Forcing value number to varying on "
		 "receiving VN_TOP\n");
      to = from;
    }

  gcc_checking_assert (to != NULL_TREE
		       && ((TREE_CODE (to) == SSA_NAME
			    && (to == from || SSA_VAL (to) == to))
			   || is_gimple_min_invariant (to)));

  if (from != to)
    {
      if (currval == from)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Not changing value number of ");
	      print_generic_expr (dump_file, from);
	      fprintf (dump_file, " from VARYING to ");
	      print_generic_expr (dump_file, to);
	      fprintf (dump_file, "\n");
	    }
	  return false;
	}
      else if (currval != VN_TOP
	       && ! is_gimple_min_invariant (currval)
	       && ! ssa_undefined_value_p (currval, false)
	       && is_gimple_min_invariant (to))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Forcing VARYING instead of changing "
		       "value number of ");
	      print_generic_expr (dump_file, from);
	      fprintf (dump_file, " from ");
	      print_generic_expr (dump_file, currval);
	      fprintf (dump_file, " (non-constant) to ");
	      print_generic_expr (dump_file, to);
	      fprintf (dump_file, " (constant)\n");
	    }
	  to = from;
	}
      else if (TREE_CODE (to) == SSA_NAME
	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
	to = from;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to);
    }

  if (currval != to
      && !operand_equal_p (currval, to, 0)
      /* Different undefined SSA names are not actually different.  See
         PR82320 for a testcase where we'd otherwise not terminate
	 iteration.  */
      && !(TREE_CODE (currval) == SSA_NAME
	   && TREE_CODE (to) == SSA_NAME
	   && ssa_undefined_value_p (currval, false)
	   && ssa_undefined_value_p (to, false))
      /* ??? For addresses involving volatile objects or types operand_equal_p
         does not reliably detect ADDR_EXPRs as equal.  We know we are only
	 getting invariant gimple addresses here, so can use
	 get_addr_base_and_unit_offset to do this comparison.  */
      && !(TREE_CODE (currval) == ADDR_EXPR
	   && TREE_CODE (to) == ADDR_EXPR
	   && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
	       == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
	   && known_eq (coff, toff)))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " (changed)\n");
      from_info->valnum = to;
      return true;
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n");
  return false;
}
/* Set all definitions in STMT to value number to themselves.
   Return true if a value number changed.  */

static bool
defs_to_varying (gimple *stmt)
{
  bool changed = false;
  ssa_op_iter iter;
  def_operand_p defp;

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);
      changed |= set_ssa_val_to (def, def);
    }
  return changed;
}

/* Visit a copy between LHS and RHS, return true if the value number
   changed.  */

static bool
visit_copy (tree lhs, tree rhs)
{
  /* Valueize.  */
  rhs = SSA_VAL (rhs);

  return set_ssa_val_to (lhs, rhs);
}
/* Lookup a value for OP in type WIDE_TYPE where the value in type of OP
   is the same.  */

static tree
valueized_wider_op (tree wide_type, tree op)
{
  if (TREE_CODE (op) == SSA_NAME)
    op = vn_valueize (op);

  /* Either we have the op widened available.  */
  tree ops[3] = {};
  ops[0] = op;
  tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
				       wide_type, ops, NULL);
  if (tem)
    return tem;

  /* Or the op is truncated from some existing value.  */
  if (TREE_CODE (op) == SSA_NAME)
    {
      gimple *def = SSA_NAME_DEF_STMT (op);
      if (is_gimple_assign (def)
	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
	{
	  tem = gimple_assign_rhs1 (def);
	  if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
	    {
	      if (TREE_CODE (tem) == SSA_NAME)
		tem = vn_valueize (tem);
	      return tem;
	    }
	}
    }

  /* For constants simply extend it.  */
  if (TREE_CODE (op) == INTEGER_CST)
    return wide_int_to_tree (wide_type, wi::to_wide (op));

  return NULL_TREE;
}
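/* For example, if 'i_2 = (int) s_1' was already value-numbered then
   valueized_wider_op (int, s_1) returns i_2, letting arithmetic done
   in the narrow type reuse an existing computation in the wide type.  */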
/* Visit a nary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_nary_op (tree lhs, gassign *stmt)
{
  vn_nary_op_t vnresult;
  tree result = vn_nary_op_lookup_stmt (stmt, &vnresult);
  if (! result && vnresult)
    result = vn_nary_op_get_predicated_value (vnresult, gimple_bb (stmt));
  if (result)
    return set_ssa_val_to (lhs, result);

  /* Do some special pattern matching for redundancies of operations
     in different types.  */
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  switch (code)
    {
    CASE_CONVERT:
      /* Match arithmetic done in a different type where we can easily
         substitute the result from some earlier sign-changed or widened
	 operation.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (rhs1) == SSA_NAME
	  /* We only handle sign-changes or zero-extension -> & mask.  */
	  && ((TYPE_UNSIGNED (TREE_TYPE (rhs1))
	       && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
	      || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
	{
	  gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
	  if (def
	      && (gimple_assign_rhs_code (def) == PLUS_EXPR
		  || gimple_assign_rhs_code (def) == MINUS_EXPR
		  || gimple_assign_rhs_code (def) == MULT_EXPR))
	    {
	      tree ops[3] = {};
	      /* Either we have the op widened available.  */
	      ops[0] = valueized_wider_op (type,
					   gimple_assign_rhs1 (def));
	      if (ops[0])
		ops[1] = valueized_wider_op (type,
					     gimple_assign_rhs2 (def));
	      if (ops[0] && ops[1])
		{
		  ops[0] = vn_nary_op_lookup_pieces
		      (2, gimple_assign_rhs_code (def), type, ops, NULL);
		  /* We have wider operation available.  */
		  if (ops[0])
		    {
		      unsigned lhs_prec = TYPE_PRECISION (type);
		      unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
		      if (lhs_prec == rhs_prec)
			{
			  gimple_match_op match_op (gimple_match_cond::UNCOND,
						    NOP_EXPR, type, ops[0]);
			  result = vn_nary_build_or_lookup (&match_op);
			  if (result)
			    {
			      bool changed = set_ssa_val_to (lhs, result);
			      vn_nary_op_insert_stmt (stmt, result);
			      return changed;
			    }
			}
		      else
			{
			  tree mask = wide_int_to_tree
			    (type, wi::mask (rhs_prec, false, lhs_prec));
			  gimple_match_op match_op (gimple_match_cond::UNCOND,
						    BIT_AND_EXPR,
						    TREE_TYPE (lhs),
						    ops[0], mask);
			  result = vn_nary_build_or_lookup (&match_op);
			  if (result)
			    {
			      bool changed = set_ssa_val_to (lhs, result);
			      vn_nary_op_insert_stmt (stmt, result);
			      return changed;
			    }
			}
		    }
		}
	    }
	}
      break;
    default:;
    }

  bool changed = set_ssa_val_to (lhs, lhs);
  vn_nary_op_insert_stmt (stmt, lhs);
  return changed;
}
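/* The CASE_CONVERT pattern above handles e.g. an unsigned short
   addition that is zero-extended to int: if the corresponding int
   addition of the widened operands is already available as t_3, the
   extension is value-numbered to 't_3 & 0xffff' (or to a plain NOP of
   t_3 when only the sign changes and the precisions match).  */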
/* Visit a call STMT storing into LHS.  Return true if the value number
   of the LHS has changed as a result.  */

static bool
visit_reference_op_call (tree lhs, gcall *stmt)
{
  bool changed = false;
  struct vn_reference_s vr1;
  vn_reference_t vnresult = NULL;
  tree vdef = gimple_vdef (stmt);

  /* Non-ssa lhs is handled in copy_reference_ops_from_call.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    lhs = NULL_TREE;

  vn_reference_lookup_call (stmt, &vnresult, &vr1);
  if (vnresult)
    {
      if (vnresult->result_vdef && vdef)
	changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
      else if (vdef)
	/* If the call was discovered to be pure or const reflect
	   that as far as possible.  */
	changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));

      if (!vnresult->result && lhs)
	vnresult->result = lhs;

      if (vnresult->result && lhs)
	changed |= set_ssa_val_to (lhs, vnresult->result);
    }
  else
    {
      vn_reference_t vr2;
      vn_reference_s **slot;
      tree vdef_val = vdef;
      if (vdef)
	{
	  /* If we value numbered an indirect call's function to one
	     not clobbering memory, value number its VDEF to its
	     VUSE.  */
	  tree fn = gimple_call_fn (stmt);
	  if (fn && TREE_CODE (fn) == SSA_NAME)
	    {
	      fn = SSA_VAL (fn);
	      if (TREE_CODE (fn) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
		  && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
		      & (ECF_CONST | ECF_PURE)))
		vdef_val = vuse_ssa_val (gimple_vuse (stmt));
	    }
	  changed |= set_ssa_val_to (vdef, vdef_val);
	}
      if (lhs)
	changed |= set_ssa_val_to (lhs, lhs);
      vr2 = XOBNEW (&vn_tables_obstack, vn_reference_s);
      vr2->vuse = vr1.vuse;
      /* As we are not walking the virtual operand chain we know the
	 shared_lookup_references are still original so we can re-use
	 them here.  */
      vr2->operands = vr1.operands.copy ();
      vr2->type = vr1.type;
      vr2->set = vr1.set;
      vr2->hashcode = vr1.hashcode;
      vr2->result = lhs;
      vr2->result_vdef = vdef_val;
      slot = valid_info->references->find_slot_with_hash (vr2, vr2->hashcode,
							  INSERT);
      gcc_assert (!*slot);
      *slot = vr2;
      vr2->next = last_inserted_ref;
      last_inserted_ref = vr2;
    }

  return changed;
}
/* Visit a load from a reference operator RHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_load (tree lhs, tree op, gimple *stmt)
{
  bool changed = false;
  tree last_vuse;
  tree result;

  last_vuse = gimple_vuse (stmt);
  last_vuse_ptr = &last_vuse;
  result = vn_reference_lookup (op, gimple_vuse (stmt),
				default_vn_walk_kind, NULL, true);
  last_vuse_ptr = NULL;

  /* We handle type-punning through unions by value-numbering based
     on offset and size of the access.  Be prepared to handle a
     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
  if (result
      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
    {
      /* We will be setting the value number of lhs to the value number
	 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
	 So first simplify and lookup this expression to see if it
	 is already available.  */
      gimple_match_op res_op (gimple_match_cond::UNCOND,
			      VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
      result = vn_nary_build_or_lookup (&res_op);
      /* When building the conversion fails avoid inserting the reference
         again.  */
      if (!result)
	return set_ssa_val_to (lhs, lhs);
    }

  if (result)
    changed = set_ssa_val_to (lhs, result);
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
    }

  return changed;
}
/* Visit a store to a reference operator LHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_store (tree lhs, tree op, gimple *stmt)
{
  bool changed = false;
  vn_reference_t vnresult = NULL;
  tree assign;
  bool resultsame = false;
  tree vuse = gimple_vuse (stmt);
  tree vdef = gimple_vdef (stmt);

  if (TREE_CODE (op) == SSA_NAME)
    op = SSA_VAL (op);

  /* First we want to lookup using the *vuses* from the store and see
     if there the last store to this location with the same address
     had the same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store is the same as the
     last store to this location, then this store will produce the
     same memory state as that store.

     In this case the vdef versions for this store are value numbered to those
     vuse versions, since they represent the same memory state after
     this store.

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */

  vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
  if (vnresult
      && vnresult->result)
    {
      tree result = vnresult->result;
      gcc_checking_assert (TREE_CODE (result) != SSA_NAME
			   || result == SSA_VAL (result));
      resultsame = expressions_equal_p (result, op);
      if (resultsame)
	{
	  /* If the TBAA state isn't compatible for downstream reads
	     we cannot value-number the VDEFs the same.  */
	  alias_set_type set = get_alias_set (lhs);
	  if (vnresult->set != set
	      && ! alias_set_subset_of (set, vnresult->set))
	    resultsame = false;
	}
    }

  if (!resultsame)
    {
      /* Only perform the following when being called from PRE
	 which embeds tail merging.  */
      if (default_vn_walk_kind == VN_WALK)
	{
	  assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
	  vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
	  if (vnresult)
	    {
	      VN_INFO (vdef)->visited = true;
	      return set_ssa_val_to (vdef, vnresult->result_vdef);
	    }
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "No store match\n");
	  fprintf (dump_file, "Value numbering store ");
	  print_generic_expr (dump_file, lhs);
	  fprintf (dump_file, " to ");
	  print_generic_expr (dump_file, op);
	  fprintf (dump_file, "\n");
	}
      /* Have to set value numbers before insert, since insert is
	 going to valueize the references in-place.  */
      if (vdef)
	changed |= set_ssa_val_to (vdef, vdef);

      /* Do not insert structure copies into the tables.  */
      if (is_gimple_min_invariant (op)
	  || is_gimple_reg (op))
	vn_reference_insert (lhs, op, vdef, NULL);

      /* Only perform the following when being called from PRE
	 which embeds tail merging.  */
      if (default_vn_walk_kind == VN_WALK)
	{
	  assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
	  vn_reference_insert (assign, lhs, vuse, vdef);
	}
    }
  else
    {
      /* We had a match, so value number the vdef to have the value
	 number of the vuse it came from.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Store matched earlier value, "
		 "value numbering store vdefs to matching vuses.\n");

      changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
    }

  return changed;
}
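/* For example, storing the same value through the same address twice
   (sketch):

     *p_1 = x_2;	// .MEM_3 = VDEF <.MEM_0>
     *p_1 = x_2;	// .MEM_4 = VDEF <.MEM_3>

   value-numbers .MEM_4 to .MEM_3, exposing the second store as
   redundant to later elimination.  */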
/* Visit and value number PHI, return true if the value number
   changed.  When BACKEDGES_VARYING_P is true then assume all
   backedge values are varying.  When INSERTED is not NULL then
   this is just an ahead query for a possible iteration; set INSERTED
   to true if we'd insert into the hashtable.  */

static bool
visit_phi (gimple *phi, bool *inserted, bool backedges_varying_p)
{
  tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
  tree backedge_val = NULL_TREE;
  bool seen_non_backedge = false;
  tree sameval_base = NULL_TREE;
  poly_int64 soff, doff;
  unsigned n_executable = 0;
  edge_iterator ei;
  edge e;

  /* TODO: We could check for this in initialization, and replace this
     with a gcc_assert.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
    return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));

  /* We track whether a PHI was CSEd to avoid excessive iterations
     that would be necessary only because the PHI changed arguments
     but not value.  */
  if (!inserted)
    gimple_set_plf (phi, GF_PLF_1, false);

  /* See if all non-TOP arguments have the same value.  TOP is
     equivalent to everything, so we can ignore it.  */
  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
    if (e->flags & EDGE_EXECUTABLE)
      {
	tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);

	++n_executable;
	if (TREE_CODE (def) == SSA_NAME)
	  {
	    if (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK))
	      def = SSA_VAL (def);
	    if (e->flags & EDGE_DFS_BACK)
	      backedge_val = def;
	  }
	if (!(e->flags & EDGE_DFS_BACK))
	  seen_non_backedge = true;
	if (def == VN_TOP)
	  ;
	/* Ignore undefined defs for sameval but record one.  */
	else if (TREE_CODE (def) == SSA_NAME
		 && ! virtual_operand_p (def)
		 && ssa_undefined_value_p (def, false))
	  seen_undef = def;
	else if (sameval == VN_TOP)
	  sameval = def;
	else if (!expressions_equal_p (def, sameval))
	  {
	    /* We know we're arriving only with invariant addresses here,
	       try harder comparing them.  We can do some caching here
	       which we cannot do in expressions_equal_p.  */
	    if (TREE_CODE (def) == ADDR_EXPR
		&& TREE_CODE (sameval) == ADDR_EXPR
		&& sameval_base != (void *)-1)
	      {
		if (!sameval_base)
		  sameval_base = get_addr_base_and_unit_offset
				   (TREE_OPERAND (sameval, 0), &soff);
		if (!sameval_base)
		  sameval_base = (tree)(void *)-1;
		else if ((get_addr_base_and_unit_offset
			    (TREE_OPERAND (def, 0), &doff) == sameval_base)
			 && known_eq (soff, doff))
		  continue;
	      }
	    sameval = NULL_TREE;
	    break;
	  }
      }

  /* If the value we want to use is the backedge and that wasn't visited
     yet or if we should take it as VARYING but it has a non-VARYING
     value drop to VARYING.  This only happens when not iterating.
     If we value-number a virtual operand never value-number to the
     value from the backedge as that confuses the alias-walking code.
     See gcc.dg/torture/pr87176.c.  If the value is the same on a
     non-backedge everything is OK though.  */
  if (backedge_val
      && !seen_non_backedge
      && TREE_CODE (backedge_val) == SSA_NAME
      && sameval == backedge_val
      && (SSA_NAME_IS_VIRTUAL_OPERAND (backedge_val)
	  || !SSA_VISITED (backedge_val)
	  || SSA_VAL (backedge_val) != backedge_val))
    /* Note this just drops to VARYING without inserting the PHI into
       the hashes.  */
    result = PHI_RESULT (phi);
  /* If none of the edges was executable keep the value-number at VN_TOP,
     if only a single edge is executable use its value.  */
  else if (n_executable <= 1)
    result = seen_undef ? seen_undef : sameval;
  /* If we saw only undefined values and VN_TOP use one of the
     undefined values.  */
  else if (sameval == VN_TOP)
    result = seen_undef ? seen_undef : sameval;
  /* First see if it is equivalent to a phi node in this block.  We prefer
     this as it allows IV elimination - see PRs 66502 and 67167.  */
  else if ((result = vn_phi_lookup (phi, backedges_varying_p)))
    {
      if (!inserted
	  && TREE_CODE (result) == SSA_NAME
	  && gimple_code (SSA_NAME_DEF_STMT (result)) == GIMPLE_PHI)
	{
	  gimple_set_plf (SSA_NAME_DEF_STMT (result), GF_PLF_1, true);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Marking CSEd to PHI node ");
	      print_gimple_expr (dump_file, SSA_NAME_DEF_STMT (result), 0);
	      fprintf (dump_file, "\n");
	    }
	}
    }
  /* If all values are the same use that, unless we've seen undefined
     values as well and the value isn't constant.
     CCP/copyprop have the same restriction to not remove uninit warnings.  */
  else if (sameval
	   && (! seen_undef || is_gimple_min_invariant (sameval)))
    result = sameval;
  else
    {
      result = PHI_RESULT (phi);
      /* Only insert PHIs that are varying, for constant value numbers
	 we mess up equivalences otherwise as we are only comparing
	 the immediate controlling predicates.  */
      vn_phi_insert (phi, result, backedges_varying_p);
      if (inserted)
	*inserted = true;
    }

  return set_ssa_val_to (PHI_RESULT (phi), result);
}
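/* An illustrative sketch (not from the original sources): for

     x_3 = PHI <y_1(2), y_1(3)>

   with both incoming edges executable all arguments value-number to
   y_1, so x_3 is value-numbered to y_1 and the PHI is redundant.  For
   x_3 = PHI <y_1(2), z_2(3)> the arguments differ, the PHI is entered
   into the PHI hashtable and x_3 keeps its own value.  */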
/* Try to simplify RHS using equivalences and constant folding.  */

static tree
try_to_simplify (gassign *stmt)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree tem;

  /* For stores we can end up simplifying a SSA_NAME rhs.  Just return
     in this case, there is no point in doing extra work.  */
  if (code == SSA_NAME)
    return NULL_TREE;

  /* First try constant folding based on our current lattice.  */
  mprts_hook = vn_lookup_simplify_result;
  tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
  mprts_hook = NULL;
  if (tem
      && (TREE_CODE (tem) == SSA_NAME
	  || is_gimple_min_invariant (tem)))
    return tem;

  return NULL_TREE;
}
/* Visit and value number STMT, return true if the value number
   changed.  */

static bool
visit_stmt (gimple *stmt, bool backedges_varying_p = false)
{
  bool changed = false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbering stmt = ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  if (gimple_code (stmt) == GIMPLE_PHI)
    changed = visit_phi (stmt, NULL, backedges_varying_p);
  else if (gimple_has_volatile_ops (stmt))
    changed = defs_to_varying (stmt);
  else if (gassign *ass = dyn_cast <gassign *> (stmt))
    {
      enum tree_code code = gimple_assign_rhs_code (ass);
      tree lhs = gimple_assign_lhs (ass);
      tree rhs1 = gimple_assign_rhs1 (ass);

      /* Shortcut for copies.  Simplifying copies is pointless,
	 since we copy the expression and value they represent.  */
      if (code == SSA_NAME
	  && TREE_CODE (lhs) == SSA_NAME)
	{
	  changed = visit_copy (lhs, rhs1);
	  goto done;
	}
      tree simplified = try_to_simplify (ass);
      if (simplified)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "RHS ");
	      print_gimple_expr (dump_file, ass, 0);
	      fprintf (dump_file, " simplified to ");
	      print_generic_expr (dump_file, simplified);
	      fprintf (dump_file, "\n");
	    }
	}
      /* Setting value numbers to constants will occasionally
	 screw up phi congruence because constants are not
	 uniquely associated with a single ssa name that can be
	 looked up.  */
      if (simplified
	  && is_gimple_min_invariant (simplified)
	  && TREE_CODE (lhs) == SSA_NAME)
	{
	  changed = set_ssa_val_to (lhs, simplified);
	  goto done;
	}
      else if (simplified
	       && TREE_CODE (simplified) == SSA_NAME
	       && TREE_CODE (lhs) == SSA_NAME)
	{
	  changed = visit_copy (lhs, simplified);
	  goto done;
	}

      if ((TREE_CODE (lhs) == SSA_NAME
	   /* We can substitute SSA_NAMEs that are live over
	      abnormal edges with their constant value.  */
	   && !(gimple_assign_copy_p (ass)
		&& is_gimple_min_invariant (rhs1))
	   && !(simplified
		&& is_gimple_min_invariant (simplified))
	   && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	  /* Stores or copies from SSA_NAMEs that are live over
	     abnormal edges are a problem.  */
	  || (code == SSA_NAME
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
	changed = defs_to_varying (ass);
      else if (REFERENCE_CLASS_P (lhs)
	       || DECL_P (lhs))
	changed = visit_reference_op_store (lhs, rhs1, ass);
      else if (TREE_CODE (lhs) == SSA_NAME)
	{
	  if ((gimple_assign_copy_p (ass)
	       && is_gimple_min_invariant (rhs1))
	      || (simplified
		  && is_gimple_min_invariant (simplified)))
	    {
	      if (simplified)
		changed = set_ssa_val_to (lhs, simplified);
	      else
		changed = set_ssa_val_to (lhs, rhs1);
	    }
	  else
	    {
	      /* Visit the original statement.  */
	      switch (vn_get_stmt_kind (ass))
		{
		case VN_NARY:
		  changed = visit_nary_op (lhs, ass);
		  break;
		case VN_REFERENCE:
		  changed = visit_reference_op_load (lhs, rhs1, ass);
		  break;
		default:
		  changed = defs_to_varying (ass);
		  break;
		}
	    }
	}
      else
	changed = defs_to_varying (ass);
    }
  else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      tree lhs = gimple_call_lhs (call_stmt);
      if (lhs && TREE_CODE (lhs) == SSA_NAME)
	{
	  /* Try constant folding based on our current lattice.  */
	  tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
							    vn_valueize);
	  if (simplified)
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "call ");
		  print_gimple_expr (dump_file, call_stmt, 0);
		  fprintf (dump_file, " simplified to ");
		  print_generic_expr (dump_file, simplified);
		  fprintf (dump_file, "\n");
		}
	    }
	  /* Setting value numbers to constants will occasionally
	     screw up phi congruence because constants are not
	     uniquely associated with a single ssa name that can be
	     looked up.  */
	  if (simplified
	      && is_gimple_min_invariant (simplified))
	    {
	      changed = set_ssa_val_to (lhs, simplified);
	      if (gimple_vdef (call_stmt))
		changed |= set_ssa_val_to (gimple_vdef (call_stmt),
					   SSA_VAL (gimple_vuse (call_stmt)));
	      goto done;
	    }
	  else if (simplified
		   && TREE_CODE (simplified) == SSA_NAME)
	    {
	      changed = visit_copy (lhs, simplified);
	      if (gimple_vdef (call_stmt))
		changed |= set_ssa_val_to (gimple_vdef (call_stmt),
					   SSA_VAL (gimple_vuse (call_stmt)));
	      goto done;
	    }
	  else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	    {
	      changed = defs_to_varying (call_stmt);
	      goto done;
	    }
	}

      /* Pick up flags from a devirtualization target.  */
      tree fn = gimple_call_fn (stmt);
      int extra_fnflags = 0;
      if (fn && TREE_CODE (fn) == SSA_NAME)
	{
	  fn = SSA_VAL (fn);
	  if (TREE_CODE (fn) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
	    extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
	}
      if (!gimple_call_internal_p (call_stmt)
	  && (/* Calls to the same function with the same vuse
		 and the same operands do not necessarily return the same
		 value, unless they're pure or const.  */
	      ((gimple_call_flags (call_stmt) | extra_fnflags)
	       & (ECF_PURE | ECF_CONST))
	      /* If calls have a vdef, subsequent calls won't have
		 the same incoming vuse.  So, if 2 calls with vdef have the
		 same vuse, we know they're not subsequent.
		 We can value number 2 calls to the same function with the
		 same vuse and the same operands which are not subsequent
		 the same, because there is no code in the program that can
		 compare the 2 values...  */
	      || (gimple_vdef (call_stmt)
		  /* ... unless the call returns a pointer which does
		     not alias with anything else.  In which case the
		     information that the values are distinct are encoded
		     in the IL.  */
		  && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
		  /* Only perform the following when being called from PRE
		     which embeds tail merging.  */
		  && default_vn_walk_kind == VN_WALK)))
	changed = visit_reference_op_call (lhs, call_stmt);
      else
	changed = defs_to_varying (call_stmt);
    }
  else
    changed = defs_to_varying (stmt);
done:
  return changed;
}
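/* An illustrative sketch (not from the original sources): two calls

     x_1 = f (a_2);
     ...
     x_3 = f (a_2);

   to a pure function f with the same argument and the same incoming
   memory state (vuse) receive the same value number, making the second
   call redundant.  Without ECF_PURE/ECF_CONST (or the vdef-based
   reasoning above) no such equivalence is recorded.  */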
/* Allocate a value number table.  */

static void
allocate_vn_table (vn_tables_t table, unsigned size)
{
  table->phis = new vn_phi_table_type (size);
  table->nary = new vn_nary_op_table_type (size);
  table->references = new vn_reference_table_type (size);
}
/* Free a value number table.  */

static void
free_vn_table (vn_tables_t table)
{
  /* Walk over elements and release vectors.  */
  vn_reference_iterator_type hir;
  vn_reference_t vr;
  FOR_EACH_HASH_TABLE_ELEMENT (*table->references, vr, vn_reference_t, hir)
    vr->operands.release ();
  delete table->phis;
  table->phis = NULL;
  delete table->nary;
  table->nary = NULL;
  delete table->references;
  table->references = NULL;
}
/* Set *ID according to RESULT.  */

static void
set_value_id_for_result (tree result, unsigned int *id)
{
  if (result && TREE_CODE (result) == SSA_NAME)
    *id = VN_INFO (result)->value_id;
  else if (result && is_gimple_min_invariant (result))
    *id = get_or_alloc_constant_value_id (result);
  else
    *id = get_next_value_id ();
}
/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  vn_nary_op_iterator_type hin;
  vn_phi_iterator_type hip;
  vn_reference_iterator_type hir;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     table.  */

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
    if (! vno->predicated_values)
      set_value_id_for_result (vno->u.result, &vno->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
    set_value_id_for_result (vp->result, &vp->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
			       hir)
    set_value_id_for_result (vr->result, &vr->value_id);
}
/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}
/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If either one is VN_TOP consider them equal.  */
  if (e1 == VN_TOP || e2 == VN_TOP)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}
/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (type)
	       && TYPE_OVERFLOW_TRAPS (type))
	honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
				       honor_trapv, honor_nans, honor_snans,
				       rhs2, &handled);
  if (handled && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}
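/* For illustration (not from the original sources): with -ftrapv the
   signed addition in

     x_1 = a_2 + b_3;

   has TYPE_OVERFLOW_TRAPS set on its type, so the corresponding nary
   operation is considered to possibly trap and must not be hoisted to
   a place where it would execute unconditionally.  */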
class eliminate_dom_walker : public dom_walker
{
public:
  eliminate_dom_walker (cdi_direction, bitmap);
  ~eliminate_dom_walker ();

  virtual edge before_dom_children (basic_block);
  virtual void after_dom_children (basic_block);

  virtual tree eliminate_avail (basic_block, tree op);
  virtual void eliminate_push_avail (basic_block, tree op);
  tree eliminate_insert (basic_block, gimple_stmt_iterator *gsi, tree val);

  void eliminate_stmt (basic_block, gimple_stmt_iterator *);

  unsigned eliminate_cleanup (bool region_p = false);

  bool do_pre;
  unsigned int el_todo;
  unsigned int eliminations;
  unsigned int insertions;

  /* SSA names that had their defs inserted by PRE if do_pre.  */
  bitmap inserted_exprs;

  /* Blocks with statements that have had their EH properties changed.  */
  bitmap need_eh_cleanup;

  /* Blocks with statements that have had their AB properties changed.  */
  bitmap need_ab_cleanup;

  /* Local state for the eliminate domwalk.  */
  auto_vec<gimple *> to_remove;
  auto_vec<gimple *> to_fixup;
  auto_vec<tree> avail;
  auto_vec<tree> avail_stack;
};
eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
					    bitmap inserted_exprs_)
  : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
    el_todo (0), eliminations (0), insertions (0),
    inserted_exprs (inserted_exprs_)
{
  need_eh_cleanup = BITMAP_ALLOC (NULL);
  need_ab_cleanup = BITMAP_ALLOC (NULL);
}

eliminate_dom_walker::~eliminate_dom_walker ()
{
  BITMAP_FREE (need_eh_cleanup);
  BITMAP_FREE (need_ab_cleanup);
}
/* Return a leader for OP that is available at the current point of the
   eliminate domwalk.  */

tree
eliminate_dom_walker::eliminate_avail (basic_block, tree op)
{
  tree valnum = VN_INFO (op)->valnum;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (valnum))
	return valnum;
      if (avail.length () > SSA_NAME_VERSION (valnum))
	return avail[SSA_NAME_VERSION (valnum)];
    }
  else if (is_gimple_min_invariant (valnum))
    return valnum;
  return NULL_TREE;
}
/* At the current point of the eliminate domwalk make OP available.  */

void
eliminate_dom_walker::eliminate_push_avail (basic_block, tree op)
{
  tree valnum = VN_INFO (op)->valnum;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (avail.length () <= SSA_NAME_VERSION (valnum))
	avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1);
      tree pushop = op;
      if (avail[SSA_NAME_VERSION (valnum)])
	pushop = avail[SSA_NAME_VERSION (valnum)];
      avail_stack.safe_push (pushop);
      avail[SSA_NAME_VERSION (valnum)] = op;
    }
}
/* Insert the expression recorded by SCCVN for VAL at *GSI.  Returns
   the leader for the expression if insertion was successful.  */

tree
eliminate_dom_walker::eliminate_insert (basic_block bb,
					gimple_stmt_iterator *gsi, tree val)
{
  /* We can insert a sequence with a single assignment only.  */
  gimple_seq stmts = VN_INFO (val)->expr;
  if (!gimple_seq_singleton_p (stmts))
    return NULL_TREE;
  gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
  if (!stmt
      || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
	  && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
	  && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
	  && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
	      || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
    return NULL_TREE;

  tree op = gimple_assign_rhs1 (stmt);
  if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
      || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
    op = TREE_OPERAND (op, 0);
  tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (bb, op) : op;
  if (!leader)
    return NULL_TREE;

  tree res;
  stmts = NULL;
  if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
    res = gimple_build (&stmts, BIT_FIELD_REF,
			TREE_TYPE (val), leader,
			TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
			TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
  else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
    res = gimple_build (&stmts, BIT_AND_EXPR,
			TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
  else
    res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
			TREE_TYPE (val), leader);
  if (TREE_CODE (res) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (res)
      || gimple_bb (SSA_NAME_DEF_STMT (res)))
    {
      gimple_seq_discard (stmts);

      /* During propagation we have to treat SSA info conservatively
	 and thus we can end up simplifying the inserted expression
	 at elimination time to sth not defined in stmts.  */
      /* But then this is a redundancy we failed to detect.  Which means
	 res now has two values.  That doesn't play well with how
	 we track availability here, so give up.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  if (TREE_CODE (res) == SSA_NAME)
	    res = eliminate_avail (bb, res);
	  if (res)
	    {
	      fprintf (dump_file, "Failed to insert expression for value ");
	      print_generic_expr (dump_file, val);
	      fprintf (dump_file, " which is really fully redundant to ");
	      print_generic_expr (dump_file, res);
	      fprintf (dump_file, "\n");
	    }
	}

      return NULL_TREE;
    }

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  VN_INFO (res)->valnum = val;
  VN_INFO (res)->visited = true;

  insertions++;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inserted ");
      print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
    }

  return res;
}
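/* An illustrative sketch (not from the original sources): if the value
   VAL is recorded as the single-statement expression

     _5 = (int) x_2;

   and x_2 has an available leader, the conversion is materialized
   right before the use via gimple_build and _5 becomes the leader for
   that value from here on.  */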
void
eliminate_dom_walker::eliminate_stmt (basic_block b, gimple_stmt_iterator *gsi)
{
  tree sprime = NULL_TREE;
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_get_lhs (stmt);
  if (lhs && TREE_CODE (lhs) == SSA_NAME
      && !gimple_has_volatile_ops (stmt)
      /* See PR43491.  Do not replace a global register variable when
	 it is the RHS of an assignment.  Do replace local register
	 variables since gcc does not guarantee a local variable will
	 be allocated in register.
	 ???  The fix isn't effective here.  This should instead
	 be ensured by not value-numbering them the same but treating
	 them like volatiles?  */
      && !(gimple_assign_single_p (stmt)
	   && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
	       && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
	       && is_global_var (gimple_assign_rhs1 (stmt)))))
    {
      sprime = eliminate_avail (b, lhs);
      if (!sprime)
	{
	  /* If there is no existing usable leader but SCCVN thinks
	     it has an expression it wants to use as replacement,
	     insert that.  */
	  tree val = VN_INFO (lhs)->valnum;
	  if (val != VN_TOP
	      && TREE_CODE (val) == SSA_NAME
	      && VN_INFO (val)->needs_insertion
	      && VN_INFO (val)->expr != NULL
	      && (sprime = eliminate_insert (b, gsi, val)) != NULL_TREE)
	    eliminate_push_avail (b, sprime);
	}

      /* If this now constitutes a copy duplicate points-to
	 and range info appropriately.  This is especially
	 important for inserted code.  See tree-ssa-copy.c
	 for similar code.  */
      if (sprime
	  && TREE_CODE (sprime) == SSA_NAME)
	{
	  basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
	  if (POINTER_TYPE_P (TREE_TYPE (lhs))
	      && SSA_NAME_PTR_INFO (lhs)
	      && ! SSA_NAME_PTR_INFO (sprime))
	    {
	      duplicate_ssa_name_ptr_info (sprime,
					   SSA_NAME_PTR_INFO (lhs));
	      if (b != sprime_b)
		mark_ptr_info_alignment_unknown
		    (SSA_NAME_PTR_INFO (sprime));
	    }
	  else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
		   && SSA_NAME_RANGE_INFO (lhs)
		   && ! SSA_NAME_RANGE_INFO (sprime)
		   && b == sprime_b)
	    duplicate_ssa_name_range_info (sprime,
					   SSA_NAME_RANGE_TYPE (lhs),
					   SSA_NAME_RANGE_INFO (lhs));
	}

      /* Inhibit the use of an inserted PHI on a loop header when
	 the address of the memory reference is a simple induction
	 variable.  In other cases the vectorizer won't do anything
	 anyway (either it's loop invariant or a complicated
	 expression).  */
      if (sprime
	  && TREE_CODE (sprime) == SSA_NAME
	  && do_pre
	  && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
	  && loop_outer (b->loop_father)
	  && has_zero_uses (sprime)
	  && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
	  && gimple_assign_load_p (stmt))
	{
	  gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
	  basic_block def_bb = gimple_bb (def_stmt);
	  if (gimple_code (def_stmt) == GIMPLE_PHI
	      && def_bb->loop_father->header == def_bb)
	    {
	      loop_p loop = def_bb->loop_father;
	      ssa_op_iter iter;
	      tree op;
	      FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
		{
		  affine_iv iv;
		  def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
		  if (def_bb
		      && flow_bb_inside_loop_p (loop, def_bb)
		      && simple_iv (loop, loop, op, &iv, true))
		    break;
		}
	      if (op != NULL_TREE)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Not replacing ");
		      print_gimple_expr (dump_file, stmt, 0);
		      fprintf (dump_file, " with ");
		      print_generic_expr (dump_file, sprime);
		      fprintf (dump_file, " which would add a loop"
			       " carried dependence to loop %d\n",
			       loop->num);
		    }
		  /* Don't keep sprime available.  */
		  sprime = NULL_TREE;
		}
	    }
	}

      if (sprime)
	{
	  /* If we can propagate the value computed for LHS into
	     all uses don't bother doing anything with this stmt.  */
	  if (may_propagate_copy (lhs, sprime))
	    {
	      /* Mark it for removal.  */
	      to_remove.safe_push (stmt);

	      /* ???  Don't count copy/constant propagations.  */
	      if (gimple_assign_single_p (stmt)
		  && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
		      || gimple_assign_rhs1 (stmt) == sprime))
		return;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "Replaced ");
		  print_gimple_expr (dump_file, stmt, 0);
		  fprintf (dump_file, " with ");
		  print_generic_expr (dump_file, sprime);
		  fprintf (dump_file, " in all uses of ");
		  print_gimple_stmt (dump_file, stmt, 0);
		}

	      eliminations++;
	      return;
	    }

	  /* If this is an assignment from our leader (which
	     happens in the case the value-number is a constant)
	     then there is nothing to do.  */
	  if (gimple_assign_single_p (stmt)
	      && sprime == gimple_assign_rhs1 (stmt))
	    return;

	  /* Else replace its RHS.  */
	  bool can_make_abnormal_goto
	    = is_gimple_call (stmt)
	      && stmt_can_make_abnormal_goto (stmt);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Replaced ");
	      print_gimple_expr (dump_file, stmt, 0);
	      fprintf (dump_file, " with ");
	      print_generic_expr (dump_file, sprime);
	      fprintf (dump_file, " in ");
	      print_gimple_stmt (dump_file, stmt, 0);
	    }
	  eliminations++;

	  gimple *orig_stmt = stmt;
	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
					  TREE_TYPE (sprime)))
	    sprime = fold_convert (TREE_TYPE (lhs), sprime);
	  tree vdef = gimple_vdef (stmt);
	  tree vuse = gimple_vuse (stmt);
	  propagate_tree_value_into_stmt (gsi, sprime);
	  stmt = gsi_stmt (*gsi);
	  update_stmt (stmt);
	  /* In case the VDEF on the original stmt was released, value-number
	     it to the VUSE.  This is to make vuse_ssa_val able to skip
	     released virtual operands.  */
	  if (vdef != gimple_vdef (stmt))
	    {
	      gcc_assert (SSA_NAME_IN_FREE_LIST (vdef));
	      VN_INFO (vdef)->valnum = vuse;
	    }

	  /* If we removed EH side-effects from the statement, clean
	     its EH information.  */
	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
	    {
	      bitmap_set_bit (need_eh_cleanup,
			      gimple_bb (stmt)->index);
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "  Removed EH side-effects.\n");
	    }

	  /* Likewise for AB side-effects.  */
	  if (can_make_abnormal_goto
	      && !stmt_can_make_abnormal_goto (stmt))
	    {
	      bitmap_set_bit (need_ab_cleanup,
			      gimple_bb (stmt)->index);
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "  Removed AB side-effects.\n");
	    }

	  return;
	}
    }

  /* If the statement is a scalar store, see if the expression
     has the same value number as its rhs.  If so, the store is
     dead.  */
  if (gimple_assign_single_p (stmt)
      && !gimple_has_volatile_ops (stmt)
      && !is_gimple_reg (gimple_assign_lhs (stmt))
      && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
	  || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
    {
      tree val;
      tree rhs = gimple_assign_rhs1 (stmt);
      vn_reference_t vnresult;
      val = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_WALKREWRITE,
				 &vnresult, false);
      if (TREE_CODE (rhs) == SSA_NAME)
	rhs = VN_INFO (rhs)->valnum;
      if (val
	  && operand_equal_p (val, rhs, 0))
	{
	  /* We can only remove the later store if the former aliases
	     at least all accesses the later one does or if the store
	     was to readonly memory storing the same value.  */
	  alias_set_type set = get_alias_set (lhs);
	  if (! vnresult
	      || vnresult->set == set
	      || alias_set_subset_of (set, vnresult->set))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "Deleted redundant store ");
		  print_gimple_stmt (dump_file, stmt, 0);
		}

	      /* Queue stmt for removal.  */
	      to_remove.safe_push (stmt);
	      return;
	    }
	}
    }
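  /* For illustration (not from the original sources): in

       *p_1 = x_2;
       ...                (no intervening clobber of *p_1)
       *p_1 = x_2;

     the second store looks up *p_1 in its incoming memory state, finds
     it already has the value number of x_2 and is queued for removal
     as a redundant store.  */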
  /* If this is a control statement value numbering left edges
     unexecuted on force the condition in a way consistent with
     that.  */
  if (gcond *cond = dyn_cast <gcond *> (stmt))
    {
      if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
	  ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Removing unexecutable edge from ");
	      print_gimple_stmt (dump_file, stmt, 0);
	    }
	  if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
	      == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
	    gimple_cond_make_true (cond);
	  else
	    gimple_cond_make_false (cond);
	  update_stmt (cond);
	  el_todo |= TODO_cleanup_cfg;
	  return;
	}
    }
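  /* For illustration (not from the original sources): if value
     numbering proved only the true edge of

       if (a_1 != 0) goto <bb 3>; else goto <bb 4>;

     executable, the condition is rewritten to if (1 != 0) so that CFG
     cleanup can remove the dead edge and block.  */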
  bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
  bool was_noreturn = (is_gimple_call (stmt)
		       && gimple_call_noreturn_p (stmt));
  tree vdef = gimple_vdef (stmt);
  tree vuse = gimple_vuse (stmt);

  /* If we didn't replace the whole stmt (or propagate the result
     into all uses), replace all uses on this stmt with their
     leaders.  */
  bool modified = false;
  use_operand_p use_p;
  ssa_op_iter iter;
  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    {
      tree use = USE_FROM_PTR (use_p);
      /* ???  The call code above leaves stmt operands un-updated.  */
      if (TREE_CODE (use) != SSA_NAME)
	continue;
      tree sprime;
      if (SSA_NAME_IS_DEFAULT_DEF (use))
	/* ???  For default defs BB shouldn't matter, but we have to
	   solve the inconsistency between rpo eliminate and
	   dom eliminate avail valueization first.  */
	sprime = eliminate_avail (b, use);
      else
	/* Look for sth available at the definition block of the argument.
	   This avoids inconsistencies between availability there which
	   decides if the stmt can be removed and availability at the
	   use site.  The SSA property ensures that things available
	   at the definition are also available at uses.  */
	sprime = eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (use)), use);
      if (sprime && sprime != use
	  && may_propagate_copy (use, sprime)
	  /* We substitute into debug stmts to avoid excessive
	     debug temporaries created by removed stmts, but we need
	     to avoid doing so for inserted sprimes as we never want
	     to create debug temporaries for them.  */
	  && (!inserted_exprs
	      || TREE_CODE (sprime) != SSA_NAME
	      || !is_gimple_debug (stmt)
	      || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
	{
	  propagate_value (use_p, sprime);
	  modified = true;
	}
    }

  /* Fold the stmt if modified, this canonicalizes MEM_REFs we propagated
     into which is a requirement for the IPA devirt machinery.  */
  gimple *old_stmt = stmt;
  if (modified)
    {
      /* If a formerly non-invariant ADDR_EXPR is turned into an
	 invariant one it was on a separate stmt.  */
      if (gimple_assign_single_p (stmt)
	  && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
	recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
      gimple_stmt_iterator prev = *gsi;
      gsi_prev (&prev);
      if (fold_stmt (gsi))
	{
	  /* fold_stmt may have created new stmts in between
	     the previous stmt and the folded stmt.  Mark
	     all defs created there as varying to not confuse
	     the SCCVN machinery as we're using that even during
	     elimination.  */
	  if (gsi_end_p (prev))
	    prev = gsi_start_bb (b);
	  else
	    gsi_next (&prev);
	  if (gsi_stmt (prev) != gsi_stmt (*gsi))
	    do
	      {
		tree def;
		ssa_op_iter dit;
		FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
					   dit, SSA_OP_ALL_DEFS)
		  /* As existing DEFs may move between stmts
		     only process new ones.  */
		  if (! has_VN_INFO (def))
		    {
		      VN_INFO (def)->valnum = def;
		      VN_INFO (def)->visited = true;
		    }
		if (gsi_stmt (prev) == gsi_stmt (*gsi))
		  break;
		gsi_next (&prev);
	      }
	    while (1);
	}
      stmt = gsi_stmt (*gsi);
      /* In case we folded the stmt away schedule the NOP for removal.  */
      if (gimple_nop_p (stmt))
	to_remove.safe_push (stmt);
    }

  /* Visit indirect calls and turn them into direct calls if
     possible using the devirtualization machinery.  Do this before
     checking for required EH/abnormal/noreturn cleanup as devirt
     may expose more of those.  */
  if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      tree fn = gimple_call_fn (call_stmt);
      if (fn
	  && flag_devirtualize
	  && virtual_method_call_p (fn))
	{
	  tree otr_type = obj_type_ref_class (fn);
	  unsigned HOST_WIDE_INT otr_tok
	    = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
	  tree instance;
	  ipa_polymorphic_call_context context (current_function_decl,
						fn, stmt, &instance);
	  context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
				    otr_type, stmt);
	  bool final;
	  vec <cgraph_node *> targets
	    = possible_polymorphic_call_targets (obj_type_ref_class (fn),
						 otr_tok, context, &final);
	  if (dump_file)
	    dump_possible_polymorphic_call_targets (dump_file,
						    obj_type_ref_class (fn),
						    otr_tok, context);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree fn;
	      if (targets.length () == 1)
		fn = targets[0]->decl;
	      else
		fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
	      if (dump_enabled_p ())
		{
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
				   "converting indirect call to "
				   "function %s\n",
				   lang_hooks.decl_printable_name (fn, 2));
		}
	      gimple_call_set_fndecl (call_stmt, fn);
	      /* If changing the call to __builtin_unreachable
		 or similar noreturn function, adjust gimple_call_fntype
		 too.  */
	      if (gimple_call_noreturn_p (call_stmt)
		  && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
		  && TYPE_ARG_TYPES (TREE_TYPE (fn))
		  && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
		      == void_type_node))
		gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
	      maybe_remove_unused_call_args (cfun, call_stmt);
	      modified = true;
	    }
	}
    }

  if (modified)
    {
      /* When changing a call into a noreturn call, cfg cleanup
	 is needed to fix up the noreturn call.  */
      if (!was_noreturn
	  && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
	to_fixup.safe_push (stmt);
      /* When changing a condition or switch into one we know what
	 edge will be executed, schedule a cfg cleanup.  */
      if ((gimple_code (stmt) == GIMPLE_COND
	   && (gimple_cond_true_p (as_a <gcond *> (stmt))
	       || gimple_cond_false_p (as_a <gcond *> (stmt))))
	  || (gimple_code (stmt) == GIMPLE_SWITCH
	      && TREE_CODE (gimple_switch_index
			      (as_a <gswitch *> (stmt))) == INTEGER_CST))
	el_todo |= TODO_cleanup_cfg;
      /* If we removed EH side-effects from the statement, clean
	 its EH information.  */
      if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
	{
	  bitmap_set_bit (need_eh_cleanup,
			  gimple_bb (stmt)->index);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "  Removed EH side-effects.\n");
	}
      /* Likewise for AB side-effects.  */
      if (can_make_abnormal_goto
	  && !stmt_can_make_abnormal_goto (stmt))
	{
	  bitmap_set_bit (need_ab_cleanup,
			  gimple_bb (stmt)->index);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "  Removed AB side-effects.\n");
	}
      update_stmt (stmt);
      /* In case the VDEF on the original stmt was released, value-number
	 it to the VUSE.  This is to make vuse_ssa_val able to skip
	 released virtual operands.  */
      if (vdef && SSA_NAME_IN_FREE_LIST (vdef))
	VN_INFO (vdef)->valnum = vuse;
    }

  /* Make new values available - for fully redundant LHS we
     continue with the next stmt above and skip this.  */
  def_operand_p defp;
  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
    eliminate_push_avail (b, DEF_FROM_PTR (defp));
}
/* Perform elimination for the basic-block B during the domwalk.  */

edge
eliminate_dom_walker::before_dom_children (basic_block b)
{
  /* Mark new bb.  */
  avail_stack.safe_push (NULL_TREE);

  /* Skip unreachable blocks marked unreachable during the SCCVN domwalk.  */
  if (!(b->flags & BB_EXECUTABLE))
    return NULL;

  vn_context_bb = b;

  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
    {
      gphi *phi = gsi.phi ();
      tree res = PHI_RESULT (phi);

      if (virtual_operand_p (res))
	{
	  gsi_next (&gsi);
	  continue;
	}

      tree sprime = eliminate_avail (b, res);
      if (sprime
	  && sprime != res)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Replaced redundant PHI node defining ");
	      print_generic_expr (dump_file, res);
	      fprintf (dump_file, " with ");
	      print_generic_expr (dump_file, sprime);
	      fprintf (dump_file, "\n");
	    }

	  /* If we inserted this PHI node ourself, it's not an elimination.  */
	  if (! inserted_exprs
	      || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
	    eliminations++;

	  /* If we will propagate into all uses don't bother to do
	     anything.  */
	  if (may_propagate_copy (res, sprime))
	    {
	      /* Mark the PHI for removal.  */
	      to_remove.safe_push (phi);
	      gsi_next (&gsi);
	      continue;
	    }

	  remove_phi_node (&gsi, false);

	  if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
	    sprime = fold_convert (TREE_TYPE (res), sprime);
	  gimple *stmt = gimple_build_assign (res, sprime);
	  gimple_stmt_iterator gsi2 = gsi_after_labels (b);
	  gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
	  continue;
	}

      eliminate_push_avail (b, res);
      gsi_next (&gsi);
    }

  for (gimple_stmt_iterator gsi = gsi_start_bb (b);
       !gsi_end_p (gsi);
       gsi_next (&gsi))
    eliminate_stmt (b, &gsi);

  /* Replace destination PHI arguments.  */
  edge_iterator ei;
  edge e;
  FOR_EACH_EDGE (e, ei, b->succs)
    if (e->flags & EDGE_EXECUTABLE)
      for (gphi_iterator gsi = gsi_start_phis (e->dest);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
	  tree arg = USE_FROM_PTR (use_p);
	  if (TREE_CODE (arg) != SSA_NAME
	      || virtual_operand_p (arg))
	    continue;
	  tree sprime = eliminate_avail (b, arg);
	  if (sprime && may_propagate_copy (arg, sprime))
	    propagate_value (use_p, sprime);
	}

  vn_context_bb = NULL;

  return NULL;
}
/* Make no longer available leaders no longer available.  */

void
eliminate_dom_walker::after_dom_children (basic_block)
{
  tree entry;
  while ((entry = avail_stack.pop ()) != NULL_TREE)
    {
      tree valnum = VN_INFO (entry)->valnum;
      tree old = avail[SSA_NAME_VERSION (valnum)];
      if (old == entry)
	avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
      else
	avail[SSA_NAME_VERSION (valnum)] = entry;
    }
}
/* Remove queued stmts and perform delayed cleanups.  */

unsigned
eliminate_dom_walker::eliminate_cleanup (bool region_p)
{
  statistics_counter_event (cfun, "Eliminated", eliminations);
  statistics_counter_event (cfun, "Insertions", insertions);

  /* We cannot remove stmts during BB walk, especially not release SSA
     names there as this confuses the VN machinery.  The stmts ending
     up in to_remove are either stores or simple copies.
     Remove stmts in reverse order to make debug stmt creation possible.  */
  while (!to_remove.is_empty ())
    {
      bool do_release_defs = true;
      gimple *stmt = to_remove.pop ();

      /* When we are value-numbering a region we do not require exit PHIs to
	 be present so we have to make sure to deal with uses outside of the
	 region of stmts that we thought are eliminated.
	 ??? Note we may be confused by uses in dead regions we didn't run
	 elimination on.  Rather than checking individual uses we accept
	 dead copies to be generated here (gcc.c-torture/execute/20060905-1.c
	 contains such example).  */
      if (region_p)
	{
	  if (gphi *phi = dyn_cast <gphi *> (stmt))
	    {
	      tree lhs = gimple_phi_result (phi);
	      if (!has_zero_uses (lhs))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "Keeping eliminated stmt live "
			     "as copy because of out-of-region uses\n");
		  tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
		  gimple *copy = gimple_build_assign (lhs, sprime);
		  gimple_stmt_iterator gsi
		    = gsi_after_labels (gimple_bb (stmt));
		  gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
		  do_release_defs = false;
		}
	    }
	  else if (tree lhs = gimple_get_lhs (stmt))
	    if (TREE_CODE (lhs) == SSA_NAME
		&& !has_zero_uses (lhs))
	      {
		if (dump_file && (dump_flags & TDF_DETAILS))
		  fprintf (dump_file, "Keeping eliminated stmt live "
			   "as copy because of out-of-region uses\n");
		tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
		gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
		if (is_gimple_assign (stmt))
		  {
		    gimple_assign_set_rhs_from_tree (&gsi, sprime);
		    update_stmt (gsi_stmt (gsi));
		    continue;
		  }
		else
		  {
		    gimple *copy = gimple_build_assign (lhs, sprime);
		    gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
		    do_release_defs = false;
		  }
	      }
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Removing dead stmt ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_NONE);
	}

      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      if (gimple_code (stmt) == GIMPLE_PHI)
	remove_phi_node (&gsi, do_release_defs);
      else
	{
	  basic_block bb = gimple_bb (stmt);
	  unlink_stmt_vdef (stmt);
	  if (gsi_remove (&gsi, true))
	    bitmap_set_bit (need_eh_cleanup, bb->index);
	  if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
	    bitmap_set_bit (need_ab_cleanup, bb->index);
	  if (do_release_defs)
	    release_defs (stmt);
	}

      /* Removing a stmt may expose a forwarder block.  */
      el_todo |= TODO_cleanup_cfg;
    }

  /* Fixup stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the dominator walk.  Do this
     in reverse order so we don't inadvertently remove a stmt we want to
     fixup by visiting a dominating now noreturn call first.  */
  while (!to_fixup.is_empty ())
    {
      gimple *stmt = to_fixup.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Fixing up noreturn call ");
	  print_gimple_stmt (dump_file, stmt, 0);
	}

      if (fixup_noreturn_call (stmt))
	el_todo |= TODO_cleanup_cfg;
    }

  bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
  bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);

  if (do_eh_cleanup)
    gimple_purge_all_dead_eh_edges (need_eh_cleanup);

  if (do_ab_cleanup)
    gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);

  if (do_eh_cleanup || do_ab_cleanup)
    el_todo |= TODO_cleanup_cfg;

  return el_todo;
}
/* Eliminate fully redundant computations.  */

unsigned
eliminate_with_rpo_vn (bitmap inserted_exprs)
{
  eliminate_dom_walker walker (CDI_DOMINATORS, inserted_exprs);

  walker.walk (cfun->cfg->x_entry_block_ptr);
  return walker.eliminate_cleanup ();
}
static unsigned
do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
	   bool iterate, bool eliminate);

void
run_rpo_vn (vn_lookup_kind kind)
{
  default_vn_walk_kind = kind;
  do_rpo_vn (cfun, NULL, NULL, true, false);

  /* ???  Prune requirement of these.  */
  constant_to_value_id = new hash_table<vn_constant_hasher> (23);
  constant_value_ids = BITMAP_ALLOC (NULL);

  /* Initialize the value ids and prune out remaining VN_TOPs
     from dead code.  */
  unsigned i;
  tree name;
  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      vn_ssa_aux_t info = VN_INFO (name);
      if (!info->visited
	  || info->valnum == VN_TOP)
	info->valnum = name;
      if (info->valnum == name)
	info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
	info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate.  */
  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      vn_ssa_aux_t info = VN_INFO (name);
      if (TREE_CODE (info->valnum) == SSA_NAME
	  && info->valnum != name
	  && info->value_id != VN_INFO (info->valnum)->value_id)
	info->value_id = VN_INFO (info->valnum)->value_id;
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      FOR_EACH_SSA_NAME (i, name, cfun)
	{
	  if (VN_INFO (name)->visited
	      && SSA_VAL (name) != name)
	    {
	      print_generic_expr (dump_file, name);
	      fprintf (dump_file, " = ");
	      print_generic_expr (dump_file, SSA_VAL (name));
	      fprintf (dump_file, " (%04d)\n", VN_INFO (name)->value_id);
	    }
	}
    }
}

/* Free VN associated data structures.  */

void
free_rpo_vn (void)
{
  free_vn_table (valid_info);
  XDELETE (valid_info);
  obstack_free (&vn_tables_obstack, NULL);
  obstack_free (&vn_tables_insert_obstack, NULL);

  vn_ssa_aux_iterator_type it;
  vn_ssa_aux_t info;
  FOR_EACH_HASH_TABLE_ELEMENT (*vn_ssa_aux_hash, info, vn_ssa_aux_t, it)
    if (info->needs_insertion)
      release_ssa_name (info->name);
  obstack_free (&vn_ssa_aux_obstack, NULL);
  delete vn_ssa_aux_hash;

  delete constant_to_value_id;
  constant_to_value_id = NULL;
  BITMAP_FREE (constant_value_ids);
}
/* Adaptor to the elimination engine using RPO availability.  */

class rpo_elim : public eliminate_dom_walker
{
public:
  rpo_elim(basic_block entry_)
    : eliminate_dom_walker (CDI_DOMINATORS, NULL), entry (entry_) {}
  ~rpo_elim ();

  virtual tree eliminate_avail (basic_block, tree op);

  virtual void eliminate_push_avail (basic_block, tree);

  basic_block entry;
  /* Instead of having a local availability lattice for each
     basic-block and availability at X defined as union of
     the local availabilities at X and its dominators we're
     turning this upside down and track availability per
     value given values are usually made available at very
     few points (at least one).
     So we have a value -> vec<location, leader> map where
     LOCATION is specifying the basic-block LEADER is made
     available for VALUE.  We push to this vector in RPO
     order thus for iteration we can simply pop the last
     entries.
     LOCATION is the basic-block index and LEADER is its
     SSA name version.  */
  /* ???  We'd like to use auto_vec here with embedded storage
     but that doesn't play well until we can provide move
     constructors and use std::move on hash-table expansion.
     So for now this is a bit more expensive than necessary.
     We eventually want to switch to a chaining scheme like
     for hashtable entries for unwinding which would make
     making the vector part of the vn_ssa_aux structure possible.  */
  typedef hash_map<tree, vec<std::pair<int, int> > > rpo_avail_t;
  rpo_avail_t m_rpo_avail;
};
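/* An illustrative sketch of the map contents (not from the original
   sources): if the value numbered _7 has leaders x_3 in basic-block 4
   and _9 in basic-block 6, m_rpo_avail maps _7 to the vector
   [(4, version of x_3), (6, version of _9)], pushed in RPO order so a
   lookup can scan from the back for the closest dominating entry.  */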
/* Global RPO state for access from hooks.  */
static rpo_elim *rpo_avail;
/* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables.  */

static tree
vn_lookup_simplify_result (gimple_match_op *res_op)
{
  if (!res_op->code.is_tree_code ())
    return NULL_TREE;
  tree *ops = res_op->ops;
  unsigned int length = res_op->num_ops;
  if (res_op->code == CONSTRUCTOR
      /* ???  We're arriving here with SCCVNs view, decomposed CONSTRUCTOR
	 and GIMPLEs / match-and-simplifies, CONSTRUCTOR as GENERIC tree.  */
      && TREE_CODE (res_op->ops[0]) == CONSTRUCTOR)
    {
      length = CONSTRUCTOR_NELTS (res_op->ops[0]);
      ops = XALLOCAVEC (tree, length);
      for (unsigned i = 0; i < length; ++i)
	ops[i] = CONSTRUCTOR_ELT (res_op->ops[0], i)->value;
    }
  vn_nary_op_t vnresult = NULL;
  tree res = vn_nary_op_lookup_pieces (length, (tree_code) res_op->code,
				       res_op->type, ops, &vnresult);
  /* If this is used from expression simplification make sure to
     return an available expression.  */
  if (res && TREE_CODE (res) == SSA_NAME && mprts_hook && rpo_avail)
    res = rpo_avail->eliminate_avail (vn_context_bb, res);
  return res;
}
rpo_elim::~rpo_elim ()
{
  /* Release the avail vectors.  */
  for (rpo_avail_t::iterator i = m_rpo_avail.begin ();
       i != m_rpo_avail.end (); ++i)
    (*i).second.release ();
}
/* Return a leader for OPs value that is valid at BB.  */

tree
rpo_elim::eliminate_avail (basic_block bb, tree op)
{
  bool visited;
  tree valnum = SSA_VAL (op, &visited);
  /* If we didn't visit OP then it must be defined outside of the
     region we process and also dominate it.  So it is available.  */
  if (!visited)
    return op;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (valnum))
	return valnum;
      vec<std::pair<int, int> > *av = m_rpo_avail.get (valnum);
      if (!av || av->is_empty ())
	return NULL_TREE;
      int i = av->length () - 1;
      if ((*av)[i].first == bb->index)
	/* On tramp3d 90% of the cases are here.  */
	return ssa_name ((*av)[i].second);
      do
	{
	  basic_block abb = BASIC_BLOCK_FOR_FN (cfun, (*av)[i].first);
	  /* ???  During elimination we have to use availability at the
	     definition site of a use we try to replace.  This
	     is required to not run into inconsistencies because
	     of dominated_by_p_w_unex behavior and removing a definition
	     while not replacing all uses.
	     ???  We could try to consistently walk dominators
	     ignoring non-executable regions.  The nearest common
	     dominator of bb and abb is where we can stop walking.  We
	     may also be able to "pre-compute" (bits of) the next immediate
	     (non-)dominator during the RPO walk when marking edges as
	     executable.  */
	  if (dominated_by_p_w_unex (bb, abb))
	    {
	      tree leader = ssa_name ((*av)[i].second);
	      /* Prevent eliminations that break loop-closed SSA.  */
	      if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
		  && ! SSA_NAME_IS_DEFAULT_DEF (leader)
		  && ! flow_bb_inside_loop_p (gimple_bb (SSA_NAME_DEF_STMT
							   (leader))->loop_father,
					      bb))
		return NULL_TREE;
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  print_generic_expr (dump_file, leader);
		  fprintf (dump_file, " is available for ");
		  print_generic_expr (dump_file, valnum);
		  fprintf (dump_file, "\n");
		}
	      /* On tramp3d 99% of the _remaining_ cases succeed at
		 the first entry.  */
	      return leader;
	    }
	  /* ???  Can we somehow skip to the immediate dominator
	     RPO index (bb_to_rpo)?  Again, maybe not worth, on
	     tramp3d the worst number of elements in the vector is 9.  */
	}
      while (--i >= 0);
    }
  else if (valnum != VN_TOP)
    /* valnum is is_gimple_min_invariant.  */
    return valnum;
  return NULL_TREE;
}
/* Make LEADER a leader for its value at BB.  */

void
rpo_elim::eliminate_push_avail (basic_block bb, tree leader)
{
  tree valnum = VN_INFO (leader)->valnum;
  if (valnum == VN_TOP)
    return;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Making available beyond BB%d ", bb->index);
      print_generic_expr (dump_file, leader);
      fprintf (dump_file, " for value ");
      print_generic_expr (dump_file, valnum);
      fprintf (dump_file, "\n");
    }
  bool existed;
  vec<std::pair<int, int> > &av = m_rpo_avail.get_or_insert (valnum, &existed);
  if (!existed)
    {
      new (&av) vec<std::pair<int, int> >;
      av = vNULL;
      av.reserve_exact (2);
    }
  av.safe_push (std::make_pair (bb->index, SSA_NAME_VERSION (leader)));
}
/* Valueization hook for RPO VN plus required state.  */

tree
rpo_vn_valueize (tree name)
{
  if (TREE_CODE (name) == SSA_NAME)
    {
      vn_ssa_aux_t val = VN_INFO (name);
      if (val)
	{
	  tree tem = val->valnum;
	  if (tem != VN_TOP && tem != name)
	    {
	      if (TREE_CODE (tem) != SSA_NAME)
		return tem;
	      /* For all values we only valueize to an available leader
		 which means we can use SSA name info without restriction.  */
	      tem = rpo_avail->eliminate_avail (vn_context_bb, tem);
	      if (tem)
		return tem;
	    }
	}
    }
  return name;
}
/* Insert on PRED_E predicates derived from CODE OPS being true besides the
   inverted condition.  */

static void
insert_related_predicates_on_edge (enum tree_code code, tree *ops, edge pred_e)
{
  switch (code)
    {
    case LT_EXPR:
      /* a < b -> a {!,<}= b */
      vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
					   ops, boolean_true_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, LE_EXPR, boolean_type_node,
					   ops, boolean_true_node, 0, pred_e);
      /* a < b -> ! a {>,=} b */
      vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
					   ops, boolean_false_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
					   ops, boolean_false_node, 0, pred_e);
      break;
    case GT_EXPR:
      /* a > b -> a {!,>}= b */
      vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
					   ops, boolean_true_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, GE_EXPR, boolean_type_node,
					   ops, boolean_true_node, 0, pred_e);
      /* a > b -> ! a {<,=} b */
      vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
					   ops, boolean_false_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
					   ops, boolean_false_node, 0, pred_e);
      break;
    case EQ_EXPR:
      /* a == b -> ! a {<,>} b */
      vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
					   ops, boolean_false_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
					   ops, boolean_false_node, 0, pred_e);
      break;
    case LE_EXPR:
    case GE_EXPR:
    case NE_EXPR:
      /* Nothing besides inverted condition.  */
      break;
    default:;
    }
}
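/* For illustration (not from the original sources): after

     if (a_1 < b_2) goto <bb 3>; else goto <bb 4>;

   the true edge to bb 3 gets a_1 != b_2 and a_1 <= b_2 recorded as
   true and a_1 > b_2 and a_1 == b_2 recorded as false, so a dominated
   test like if (a_1 == b_2) in bb 3 can simplify without re-deriving
   the relation.  */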
/* Main stmt worker for RPO VN, process BB.  */

static unsigned
process_bb (rpo_elim &avail, basic_block bb,
	    bool bb_visited, bool iterate_phis, bool iterate, bool eliminate,
	    bool do_region, bitmap exit_bbs)
{
  unsigned todo = 0;
  edge_iterator ei;
  edge e;

  vn_context_bb = bb;

  /* If we are in loop-closed SSA preserve this state.  This is
     relevant when called on regions from outside of FRE/PRE.  */
  bool lc_phi_nodes = false;
  if (loops_state_satisfies_p (LOOP_CLOSED_SSA))
    FOR_EACH_EDGE (e, ei, bb->preds)
      if (e->src->loop_father != e->dest->loop_father
	  && flow_loop_nested_p (e->dest->loop_father,
				 e->src->loop_father))
	{
	  lc_phi_nodes = true;
	  break;
	}

  /* Value-number all defs in the basic-block.  */
  for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
       gsi_next (&gsi))
    {
      gphi *phi = gsi.phi ();
      tree res = PHI_RESULT (phi);
      vn_ssa_aux_t res_info = VN_INFO (res);
      if (!bb_visited)
	{
	  gcc_assert (!res_info->visited);
	  res_info->valnum = VN_TOP;
	  res_info->visited = true;
	}

      /* When not iterating force backedge values to varying.  */
      visit_stmt (phi, !iterate_phis);
      if (virtual_operand_p (res))
	continue;

      /* Eliminate.  */
      /* The interesting case is gcc.dg/tree-ssa/pr22230.c for correctness
	 how we handle backedges and availability.
	 And gcc.dg/tree-ssa/ssa-sccvn-2.c for optimization.  */
      tree val = res_info->valnum;
      if (res != val && !iterate && eliminate)
	{
	  if (tree leader = avail.eliminate_avail (bb, res))
	    {
	      if (leader != res
		  /* Preserve loop-closed SSA form.  */
		  && (! lc_phi_nodes
		      || is_gimple_min_invariant (leader)))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Replaced redundant PHI node "
			       "defining ");
		      print_generic_expr (dump_file, res);
		      fprintf (dump_file, " with ");
		      print_generic_expr (dump_file, leader);
		      fprintf (dump_file, "\n");
		    }
		  avail.eliminations++;

		  if (may_propagate_copy (res, leader))
		    {
		      /* Schedule for removal.  */
		      avail.to_remove.safe_push (phi);
		      continue;
		    }
		  /* ???  Else generate a copy stmt.  */
		}
	    }
	}
      /* Only make defs available that not already are.  But make
	 sure loop-closed SSA PHI node defs are picked up for
	 downstream uses.  */
      if (lc_phi_nodes
	  || res == val
	  || ! avail.eliminate_avail (bb, res))
	avail.eliminate_push_avail (bb, res);
    }

  /* For empty BBs mark outgoing edges executable.  For non-empty BBs
     we do this when processing the last stmt as we have to do this
     before elimination which otherwise forces GIMPLE_CONDs to
     if (1 != 0) style when seeing non-executable edges.  */
  if (gsi_end_p (gsi_start_bb (bb)))
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if (!(e->flags & EDGE_EXECUTABLE))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file,
			 "marking outgoing edge %d -> %d executable\n",
			 e->src->index, e->dest->index);
	      gcc_checking_assert (iterate || !(e->flags & EDGE_DFS_BACK));
	      e->flags |= EDGE_EXECUTABLE;
	      e->dest->flags |= BB_EXECUTABLE;
	    }
	  else if (!(e->dest->flags & BB_EXECUTABLE))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file,
			 "marking destination block %d reachable\n",
			 e->dest->index);
	      e->dest->flags |= BB_EXECUTABLE;
	    }
	}
    }
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      ssa_op_iter i;
      tree op;
      if (!bb_visited)
	{
	  FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
	    {
	      vn_ssa_aux_t op_info = VN_INFO (op);
	      gcc_assert (!op_info->visited);
	      op_info->valnum = VN_TOP;
	      op_info->visited = true;
	    }

	  /* We somehow have to deal with uses that are not defined
	     in the processed region.  Forcing unvisited uses to
	     varying here doesn't play well with def-use following during
	     expression simplification, so we deal with this by checking
	     the visited flag in SSA_VAL.  */
	}

      visit_stmt (gsi_stmt (gsi));

      gimple *last = gsi_stmt (gsi);
      e = NULL;
      switch (gimple_code (last))
	{
	case GIMPLE_SWITCH:
	  e = find_taken_edge (bb, vn_valueize (gimple_switch_index
						  (as_a <gswitch *> (last))));
	  break;
	case GIMPLE_COND:
	  {
	    tree lhs = vn_valueize (gimple_cond_lhs (last));
	    tree rhs = vn_valueize (gimple_cond_rhs (last));
	    tree val = gimple_simplify (gimple_cond_code (last),
					boolean_type_node, lhs, rhs,
					NULL, vn_valueize);
	    /* If the condition didn't simplify see if we have recorded
	       an expression from so far taken edges.  */
	    if (! val || TREE_CODE (val) != INTEGER_CST)
	      {
		vn_nary_op_t vnresult;
		tree ops[2];
		ops[0] = lhs;
		ops[1] = rhs;
		val = vn_nary_op_lookup_pieces (2, gimple_cond_code (last),
						boolean_type_node, ops,
						&vnresult);
		/* Did we get a predicated value?  */
		if (! val && vnresult && vnresult->predicated_values)
		  {
		    val = vn_nary_op_get_predicated_value (vnresult, bb);
		    if (val && dump_file && (dump_flags & TDF_DETAILS))
		      {
			fprintf (dump_file, "Got predicated value ");
			print_generic_expr (dump_file, val, TDF_NONE);
			fprintf (dump_file, " for ");
			print_gimple_stmt (dump_file, last, TDF_SLIM);
		      }
		  }
	      }
	    if (val)
	      e = find_taken_edge (bb, val);
	    if (! e)
	      {
		/* If we didn't manage to compute the taken edge then
		   push predicated expressions for the condition itself
		   and related conditions to the hashtables.  This allows
		   simplification of redundant conditions which is
		   important as early cleanup.  */
		edge true_e, false_e;
		extract_true_false_edges_from_block (bb, &true_e, &false_e);
		enum tree_code code = gimple_cond_code (last);
		enum tree_code icode
		  = invert_tree_comparison (code, HONOR_NANS (lhs));
		tree ops[2];
		ops[0] = lhs;
		ops[1] = rhs;
		if (do_region
		    && bitmap_bit_p (exit_bbs, true_e->dest->index))
		  true_e = NULL;
		if (do_region
		    && bitmap_bit_p (exit_bbs, false_e->dest->index))
		  false_e = NULL;
		if (true_e)
		  vn_nary_op_insert_pieces_predicated
		    (2, code, boolean_type_node, ops,
		     boolean_true_node, 0, true_e);
		if (false_e)
		  vn_nary_op_insert_pieces_predicated
		    (2, code, boolean_type_node, ops,
		     boolean_false_node, 0, false_e);
		if (icode != ERROR_MARK)
		  {
		    if (true_e)
		      vn_nary_op_insert_pieces_predicated
			(2, icode, boolean_type_node, ops,
			 boolean_false_node, 0, true_e);
		    if (false_e)
		      vn_nary_op_insert_pieces_predicated
			(2, icode, boolean_type_node, ops,
			 boolean_true_node, 0, false_e);
		  }
		/* Relax for non-integers, inverted condition handled
		   above.  */
		if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
		  {
		    if (true_e)
		      insert_related_predicates_on_edge (code, ops, true_e);
		    if (false_e)
		      insert_related_predicates_on_edge (icode, ops, false_e);
		  }
	      }
	    break;
	  }
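	  /* An illustrative sketch (not from the original sources): for

	       if (a_1 < b_2) goto <bb 3>; else goto <bb 4>;
	       ...
	       <bb 3>: if (a_1 < b_2) ...

	     the first condition records a_1 < b_2 == true on the edge
	     to bb 3; the dominated second condition then finds the
	     predicated value, simplifies to a constant and its dead
	     edge is never marked executable.  */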
	case GIMPLE_GOTO:
	  e = find_taken_edge (bb, vn_valueize (gimple_goto_dest (last)));
	  break;
	default:
	  e = NULL;
	}
      if (e)
	{
	  todo = TODO_cleanup_cfg;
	  if (!(e->flags & EDGE_EXECUTABLE))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file,
			 "marking known outgoing %sedge %d -> %d executable\n",
			 e->flags & EDGE_DFS_BACK ? "back-" : "",
			 e->src->index, e->dest->index);
	      gcc_checking_assert (iterate || !(e->flags & EDGE_DFS_BACK));
	      e->flags |= EDGE_EXECUTABLE;
	      e->dest->flags |= BB_EXECUTABLE;
	    }
	  else if (!(e->dest->flags & BB_EXECUTABLE))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file,
			 "marking destination block %d reachable\n",
			 e->dest->index);
	      e->dest->flags |= BB_EXECUTABLE;
	    }
	}
      else if (gsi_one_before_end_p (gsi))
	{
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    {
	      if (!(e->flags & EDGE_EXECUTABLE))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file,
			     "marking outgoing edge %d -> %d executable\n",
			     e->src->index, e->dest->index);
		  gcc_checking_assert (iterate || !(e->flags & EDGE_DFS_BACK));
		  e->flags |= EDGE_EXECUTABLE;
		  e->dest->flags |= BB_EXECUTABLE;
		}
	      else if (!(e->dest->flags & BB_EXECUTABLE))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file,
			     "marking destination block %d reachable\n",
			     e->dest->index);
		  e->dest->flags |= BB_EXECUTABLE;
		}
	    }
	}

      /* Eliminate.  That also pushes to avail.  */
      if (eliminate && ! iterate)
	avail.eliminate_stmt (bb, &gsi);
      else
	/* If not eliminating, make all not already available defs
	   available.  */
	FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_DEF)
	  if (! avail.eliminate_avail (bb, op))
	    avail.eliminate_push_avail (bb, op);
    }

  /* Eliminate in destination PHI arguments.  Always substitute in dest
     PHIs, even for non-executable edges.  This handles region
     exit PHIs.  */
  if (!iterate && eliminate)
    FOR_EACH_EDGE (e, ei, bb->succs)
      for (gphi_iterator gsi = gsi_start_phis (e->dest);
	   !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
	  tree arg = USE_FROM_PTR (use_p);
	  if (TREE_CODE (arg) != SSA_NAME
	      || virtual_operand_p (arg))
	    continue;
	  tree sprime;
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      sprime = SSA_VAL (arg);
	      gcc_assert (TREE_CODE (sprime) != SSA_NAME
			  || SSA_NAME_IS_DEFAULT_DEF (sprime));
	    }
	  else
	    /* Look for sth available at the definition block of the argument.
	       This avoids inconsistencies between availability there which
	       decides if the stmt can be removed and availability at the
	       use site.  The SSA property ensures that things available
	       at the definition are also available at uses.  */
	    sprime = avail.eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (arg)),
					    arg);
	  if (sprime
	      && sprime != arg
	      && may_propagate_copy (arg, sprime))
	    propagate_value (use_p, sprime);
	}

  vn_context_bb = NULL;
  return todo;
}
/* Unwind state per basic-block.  */

struct unwind_state
{
  /* Times this block has been visited.  */
  unsigned visited;
  /* Whether to handle this as iteration point or whether to treat
     incoming backedge PHI values as varying.  */
  bool iterate;
  void *ob_top;
  vn_reference_t ref_top;
  vn_phi_t phi_top;
  vn_nary_op_t nary_top;
};
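
/* ob_top and the *_top members are watermarks taken when a block with
   incoming backedges is reached: the current vn_tables_obstack top and
   the heads of the last_inserted_{ref,phi,nary} chains.  do_unwind below
   pops everything inserted after these marks when iteration restarts at
   that block.  */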

/* Unwind the RPO VN state for iteration.  */

static void
do_unwind (unwind_state *to, int rpo_idx, rpo_elim &avail, int *bb_to_rpo)
{
  gcc_assert (to->iterate);
  for (; last_inserted_nary != to->nary_top;
       last_inserted_nary = last_inserted_nary->next)
    {
      vn_nary_op_t *slot;
      slot = valid_info->nary->find_slot_with_hash
        (last_inserted_nary, last_inserted_nary->hashcode, NO_INSERT);
      /* Predication causes the need to restore previous state.  */
      if ((*slot)->unwind_to)
        *slot = (*slot)->unwind_to;
      else
        valid_info->nary->clear_slot (slot);
    }
  for (; last_inserted_phi != to->phi_top;
       last_inserted_phi = last_inserted_phi->next)
    {
      vn_phi_t *slot;
      slot = valid_info->phis->find_slot_with_hash
        (last_inserted_phi, last_inserted_phi->hashcode, NO_INSERT);
      valid_info->phis->clear_slot (slot);
    }
  for (; last_inserted_ref != to->ref_top;
       last_inserted_ref = last_inserted_ref->next)
    {
      vn_reference_t *slot;
      slot = valid_info->references->find_slot_with_hash
        (last_inserted_ref, last_inserted_ref->hashcode, NO_INSERT);
      (*slot)->operands.release ();
      valid_info->references->clear_slot (slot);
    }
  obstack_free (&vn_tables_obstack, to->ob_top);

  /* Prune [rpo_idx, ] from avail.  */
  /* ???  This is O(number-of-values-in-region) which is
     O(region-size) rather than O(iteration-piece).  */
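  /* The per-value avail stacks are pushed to in RPO order, so the entries
     to drop are exactly the trailing ones whose block has an RPO index of
     at least RPO_IDX.  E.g. unwinding a stack of (block, leader) pairs
     [(b2, x_1), (b5, x_3)] to the RPO index of b4 pops only (b5, x_3).  */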
  for (rpo_elim::rpo_avail_t::iterator i
         = avail.m_rpo_avail.begin ();
       i != avail.m_rpo_avail.end (); ++i)
    {
      while (! (*i).second.is_empty ())
        {
          if (bb_to_rpo[(*i).second.last ().first] < rpo_idx)
            break;
          (*i).second.pop ();
        }
    }
}

/* Do VN on a SEME region specified by ENTRY and EXIT_BBS in FN.
   If ITERATE is true then treat backedges optimistically as not
   executed and iterate.  If ELIMINATE is true then perform
   elimination, otherwise leave that to the caller.  */

static unsigned
do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
           bool iterate, bool eliminate)
{
  unsigned todo = 0;

  /* We currently do not support region-based iteration when
     elimination is requested.  */
  gcc_assert (!entry || !iterate || !eliminate);
  /* When iterating we need loop info up-to-date.  */
  gcc_assert (!iterate || !loops_state_satisfies_p (LOOPS_NEED_FIXUP));

  bool do_region = entry != NULL;
  if (!do_region)
    {
      entry = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fn));
      exit_bbs = BITMAP_ALLOC (NULL);
      bitmap_set_bit (exit_bbs, EXIT_BLOCK);
    }

  int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS);
  int n = rev_post_order_and_mark_dfs_back_seme (fn, entry, exit_bbs,
                                                 iterate, rpo);
  /* rev_post_order_and_mark_dfs_back_seme fills RPO in reverse order.  */
  for (int i = 0; i < n / 2; ++i)
    std::swap (rpo[i], rpo[n-i-1]);

  if (!do_region)
    BITMAP_FREE (exit_bbs);

  int *bb_to_rpo = XNEWVEC (int, last_basic_block_for_fn (fn));
  for (int i = 0; i < n; ++i)
    bb_to_rpo[rpo[i]] = i;

  unwind_state *rpo_state = XNEWVEC (unwind_state, n);

  rpo_elim avail (entry->dest);

  /* Verify we have no extra entries into the region.  */
  if (flag_checking && do_region)
    {
      auto_bb_flag bb_in_region (fn);
      for (int i = 0; i < n; ++i)
        {
          basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
          bb->flags |= bb_in_region;
        }
      /* We can't merge the first two loops because we cannot rely
         on EDGE_DFS_BACK for edges not within the region.  But if
         we decide to always have the bb_in_region flag we can
         do the checking during the RPO walk itself (but then it's
         also easy to handle MEME conservatively).  */
      for (int i = 0; i < n; ++i)
        {
          basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
          edge e;
          edge_iterator ei;
          FOR_EACH_EDGE (e, ei, bb->preds)
            gcc_assert (e == entry || (e->src->flags & bb_in_region));
        }
      for (int i = 0; i < n; ++i)
        {
          basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
          bb->flags &= ~bb_in_region;
        }
    }
  /* Create the VN state.  For the initial size of the various hashtables
     use a heuristic based on region size and number of SSA names.  */
  unsigned region_size = (((unsigned HOST_WIDE_INT)n * num_ssa_names)
                          / (n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS));
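  /* That is, scale the number of SSA names by the fraction of blocks the
     region covers: e.g. 1000 SSA names in a function with 200 blocks and
     a region of n == 50 blocks sizes the tables for 250 values.  */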
  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");

  vn_ssa_aux_hash = new hash_table <vn_ssa_aux_hasher> (region_size * 2);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  gcc_obstack_init (&vn_tables_obstack);
  gcc_obstack_init (&vn_tables_insert_obstack);
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info, region_size);
  last_inserted_ref = NULL;
  last_inserted_phi = NULL;
  last_inserted_nary = NULL;

  vn_valueize = rpo_vn_valueize;

  /* Initialize the unwind state and edge/BB executable state.  */
  for (int i = 0; i < n; ++i)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
      rpo_state[i].visited = 0;
      bb->flags &= ~BB_EXECUTABLE;
      bool has_backedges = false;
      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          if (e->flags & EDGE_DFS_BACK)
            has_backedges = true;
          if (! iterate && (e->flags & EDGE_DFS_BACK))
            {
              e->flags |= EDGE_EXECUTABLE;
              /* ???  Strictly speaking we only need to unconditionally
                 process a block when it is in an irreducible region,
                 thus when it may be reachable via the backedge only.  */
              bb->flags |= BB_EXECUTABLE;
            }
          else
            e->flags &= ~EDGE_EXECUTABLE;
        }
      rpo_state[i].iterate = iterate && has_backedges;
    }
  entry->flags |= EDGE_EXECUTABLE;
  entry->dest->flags |= BB_EXECUTABLE;
  /* As heuristic to improve compile-time we handle only the N innermost
     loops and the outermost one optimistically.  */
  if (iterate)
    {
      loop_p loop;
      unsigned max_depth = PARAM_VALUE (PARAM_RPO_VN_MAX_LOOP_DEPTH);
      FOR_EACH_LOOP (loop, LI_ONLY_INNERMOST)
        if (loop_depth (loop) > max_depth)
          for (unsigned i = 2;
               i < loop_depth (loop) - max_depth; ++i)
            {
              basic_block header = superloop_at_depth (loop, i)->header;
              bool non_latch_backedge = false;
              edge e;
              edge_iterator ei;
              FOR_EACH_EDGE (e, ei, header->preds)
                if (e->flags & EDGE_DFS_BACK)
                  {
                    e->flags |= EDGE_EXECUTABLE;
                    /* There can be a non-latch backedge into the header
                       which is part of an outer irreducible region.  We
                       cannot avoid iterating this block then.  */
                    if (!dominated_by_p (CDI_DOMINATORS,
                                         e->src, e->dest))
                      {
                        if (dump_file && (dump_flags & TDF_DETAILS))
                          fprintf (dump_file, "non-latch backedge %d -> %d "
                                   "forces iteration of loop %d\n",
                                   e->src->index, e->dest->index, loop->num);
                        non_latch_backedge = true;
                      }
                  }
              rpo_state[bb_to_rpo[header->index]].iterate = non_latch_backedge;
            }
    }
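
  /* E.g. with max_depth == 2 a loop nest of depth 6 has the backedges of
     the headers at depths 2 and 3 (2 <= depth < 6 - max_depth) pre-marked
     executable and their PHIs handled conservatively, while the outermost
     level and the remaining inner levels keep iterating.  */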
  /* Go and process all blocks, iterating as necessary.  */
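  /* E.g. for blocks B1, B2, B3 in RPO order with a backedge B3 -> B2 the
     walk visits B1, then B2 (value-numbering its PHIs optimistically,
     ignoring the not yet executable backedge), then B3; if a changed
     value then flows over the now executable backedge into a PHI of B2
     the state is unwound to B2's RPO index and B2, B3 are re-visited
     until the PHI values no longer change.  */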
  int idx = 0;
  uint64_t nblk = 0;
  do
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);

      /* If the block has incoming backedges remember unwind state.  This
         is required even for non-executable blocks since in irreducible
         regions we might reach them via the backedge and re-start iterating
         from there.
         Note we can individually mark blocks with incoming backedges to
         not iterate where we then handle PHIs conservatively.  We do that
         heuristically to reduce compile-time for degenerate cases.  */
      if (rpo_state[idx].iterate)
        {
          rpo_state[idx].ob_top = obstack_alloc (&vn_tables_obstack, 0);
          rpo_state[idx].ref_top = last_inserted_ref;
          rpo_state[idx].phi_top = last_inserted_phi;
          rpo_state[idx].nary_top = last_inserted_nary;
        }

      if (!(bb->flags & BB_EXECUTABLE))
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "Block %d: BB%d found not executable\n",
                     idx, bb->index);
          idx++;
          continue;
        }

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
      nblk++;
      todo |= process_bb (avail, bb,
                          rpo_state[idx].visited != 0,
                          rpo_state[idx].iterate,
                          iterate, eliminate, do_region, exit_bbs);
      rpo_state[idx].visited++;

      /* Verify if changed values flow over executable outgoing backedges
         and those change destination PHI values (that's the thing we
         can easily verify).  Reduce over all such edges to the farthest
         away PHI.  */
      int iterate_to = -1;
      edge_iterator ei;
      edge e;
      FOR_EACH_EDGE (e, ei, bb->succs)
        if ((e->flags & (EDGE_DFS_BACK|EDGE_EXECUTABLE))
            == (EDGE_DFS_BACK|EDGE_EXECUTABLE)
            && rpo_state[bb_to_rpo[e->dest->index]].iterate)
          {
            int destidx = bb_to_rpo[e->dest->index];
            if (!rpo_state[destidx].visited)
              {
                if (dump_file && (dump_flags & TDF_DETAILS))
                  fprintf (dump_file, "Unvisited destination %d\n",
                           e->dest->index);
                if (iterate_to == -1
                    || destidx < iterate_to)
                  iterate_to = destidx;
                continue;
              }
            if (dump_file && (dump_flags & TDF_DETAILS))
              fprintf (dump_file, "Looking for changed values of backedge "
                       "%d->%d destination PHIs\n",
                       e->src->index, e->dest->index);
            vn_context_bb = e->dest;
            gphi_iterator gsi;
            for (gsi = gsi_start_phis (e->dest);
                 !gsi_end_p (gsi); gsi_next (&gsi))
              {
                bool inserted = false;
                /* While we'd ideally just iterate on value changes
                   we CSE PHIs and do that even across basic-block
                   boundaries.  So even hashtable state changes can
                   be important (which is roughly equivalent to
                   PHI argument value changes).  To not excessively
                   iterate because of that we track whether a PHI
                   was CSEd to with GF_PLF_1.  */
                bool phival_changed;
                if ((phival_changed = visit_phi (gsi.phi (),
                                                 &inserted, true))
                    || (inserted && gimple_plf (gsi.phi (), GF_PLF_1)))
                  {
                    if (!phival_changed
                        && dump_file && (dump_flags & TDF_DETAILS))
                      fprintf (dump_file, "PHI was CSEd and hashtable "
                               "state (changed)\n");
                    if (iterate_to == -1
                        || destidx < iterate_to)
                      iterate_to = destidx;
                    break;
                  }
              }
            vn_context_bb = NULL;
          }
      if (iterate_to != -1)
        {
          do_unwind (&rpo_state[iterate_to], iterate_to,
                     avail, bb_to_rpo);
          idx = iterate_to;
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "Iterating to %d BB%d\n",
                     iterate_to, rpo[iterate_to]);
          continue;
        }

      idx++;
    }
  while (idx < n);

  /* If statistics or dump file active.  */
  int nex = 0;
  unsigned max_visited = 1;
  for (int i = 0; i < n; ++i)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
      if (bb->flags & BB_EXECUTABLE)
        nex++;
      statistics_histogram_event (cfun, "RPO block visited times",
                                  rpo_state[i].visited);
      if (rpo_state[i].visited > max_visited)
        max_visited = rpo_state[i].visited;
    }
  unsigned nvalues = 0, navail = 0;
  for (rpo_elim::rpo_avail_t::iterator i = avail.m_rpo_avail.begin ();
       i != avail.m_rpo_avail.end (); ++i)
    {
      nvalues++;
      navail += (*i).second.length ();
    }
  statistics_counter_event (cfun, "RPO blocks", n);
  statistics_counter_event (cfun, "RPO blocks visited", nblk);
  statistics_counter_event (cfun, "RPO blocks executable", nex);
  statistics_histogram_event (cfun, "RPO iterations", 10*nblk / nex);
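  /* 10*nblk / nex is the average number of visits per executable block
     in fixed point with one decimal digit: e.g. nblk == 12 and nex == 10
     records 12, dumped below as iterating "1.2" times.  */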
  statistics_histogram_event (cfun, "RPO num values", nvalues);
  statistics_histogram_event (cfun, "RPO num avail", navail);
  statistics_histogram_event (cfun, "RPO num lattice",
                              vn_ssa_aux_hash->elements ());
  if (dump_file && (dump_flags & (TDF_DETAILS|TDF_STATS)))
    {
      fprintf (dump_file, "RPO iteration over %d blocks visited %" PRIu64
               " blocks in total discovering %d executable blocks iterating "
               "%d.%d times, a block was visited max. %u times\n",
               n, nblk, nex,
               (int)((10*nblk / nex)/10), (int)((10*nblk / nex)%10),
               max_visited);
      fprintf (dump_file, "RPO tracked %d values available at %d locations "
               "and %" PRIu64 " lattice elements\n",
               nvalues, navail, (uint64_t) vn_ssa_aux_hash->elements ());
    }

  if (eliminate)
    {
      /* When !iterate we already performed elimination during the RPO
         walk.  */
      if (iterate)
        {
          /* Elimination for region-based VN needs to be done within the
             RPO walk.  */
          gcc_assert (! do_region);
          /* Note we can't use avail.walk here because that gets confused
             by the existing availability and it will be less efficient
             anyway.  */
          todo |= eliminate_with_rpo_vn (NULL);
        }
      else
        todo |= avail.eliminate_cleanup (do_region);
    }

  XDELETEVEC (bb_to_rpo);
  XDELETEVEC (rpo);
  XDELETEVEC (rpo_state);

  return todo;
}

/* Region-based entry for RPO VN.  Performs value-numbering and elimination
   on the SEME region specified by ENTRY and EXIT_BBS.  */

unsigned
do_rpo_vn (function *fn, edge entry, bitmap exit_bbs)
{
  default_vn_walk_kind = VN_WALKREWRITE;
  unsigned todo = do_rpo_vn (fn, entry, exit_bbs, false, true);
  free_rpo_vn ();
  return todo;
}
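
/* A hypothetical caller cleaning up a just-transformed loop body might
   invoke this along the lines of the following sketch, where LOOP names
   an assumed struct loop * with a single exit:

     bitmap exit_bbs = BITMAP_ALLOC (NULL);
     bitmap_set_bit (exit_bbs, single_exit (LOOP)->dest->index);
     todo |= do_rpo_vn (cfun, loop_preheader_edge (LOOP), exit_bbs);
     BITMAP_FREE (exit_bbs);  */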

namespace {

const pass_data pass_data_fre =
{
  GIMPLE_PASS, /* type */
  "fre", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_FRE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fre : public gimple_opt_pass
{
public:
  pass_fre (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fre, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fre (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_fre != 0; }
  virtual unsigned int execute (function *);

}; // class pass_fre
unsigned int
pass_fre::execute (function *fun)
{
  unsigned todo = 0;

  /* At -O[1g] use the cheap non-iterating mode.  */
  calculate_dominance_info (CDI_DOMINATORS);
  if (optimize > 1)
    loop_optimizer_init (AVOID_CFG_MODIFICATIONS);

  default_vn_walk_kind = VN_WALKREWRITE;
  todo = do_rpo_vn (fun, NULL, NULL, optimize > 1, true);
  free_rpo_vn ();

  if (optimize > 1)
    loop_optimizer_finalize ();

  return todo;
}

} // anon namespace

gimple_opt_pass *
make_pass_fre (gcc::context *ctxt)
{
  return new pass_fre (ctxt);
}

#undef BB_EXECUTABLE