/* SCC value numbering for trees
   Copyright (C) 2006-2022 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "splay-tree.h"
#include "insn-config.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "tree-inline.h"
#include "internal-fn.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "tree-ssa-propagate.h"
#include "gimple-match.h"
#include "stringpool.h"
#include "tree-pass.h"
#include "statistics.h"
#include "langhooks.h"
#include "ipa-utils.h"
#include "tree-cfgcleanup.h"
#include "tree-ssa-loop.h"
#include "tree-scalar-evolution.h"
#include "tree-ssa-loop-niter.h"
#include "fold-const-call.h"
#include "ipa-modref-tree.h"
#include "ipa-modref.h"
#include "tree-ssa-sccvn.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In straight line code,
   it is equivalent to a regular hash based value numbering that is
   performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
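
/* For intuition only: a minimal sketch (not part of the original sources;
   all names below are illustrative) of the straight-line case described
   above.  Statements are visited in reverse postorder, operands are
   replaced by their current value numbers and the resulting expression
   is hashed:

     for each stmt S in reverse postorder:
       expr E = canonical form of S with operands valueized;
       if (table contains E)
	 value_number (lhs (S)) = table[E];
       else
	 table[E] = value_number (lhs (S)) = lhs (S);

   The implementation below additionally iterates cycles and keeps the
   SSA name -> value number mapping separate from the hash tables.  */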
/* There's no BB_EXECUTABLE but we can use BB_VISITED.  */
#define BB_EXECUTABLE BB_VISITED

static vn_lookup_kind default_vn_walk_kind;

/* vn_nary_op hashtable helpers.  */

struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
{
  typedef vn_nary_op_s *compare_type;
  static inline hashval_t hash (const vn_nary_op_s *);
  static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
};

/* Return the computed hashcode for nary operation P1.  */

inline hashval_t
vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
{
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

inline bool
vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
{
  return vno1 == vno2 || vn_nary_op_eq (vno1, vno2);
}

typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;

/* vn_phi hashtable helpers.  */

static bool
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);

struct vn_phi_hasher : nofree_ptr_hash <vn_phi_s>
{
  static inline hashval_t hash (const vn_phi_s *);
  static inline bool equal (const vn_phi_s *, const vn_phi_s *);
};

/* Return the computed hashcode for phi operation P1.  */

inline hashval_t
vn_phi_hasher::hash (const vn_phi_s *vp1)
{
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

inline bool
vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
{
  return vp1 == vp2 || vn_phi_eq (vp1, vp2);
}

typedef hash_table<vn_phi_hasher> vn_phi_table_type;
typedef vn_phi_table_type::iterator vn_phi_iterator_type;
/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static bool
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
	  /* We do not care for differences in type qualification.  */
	  && (vro1->type == vro2->type
	      || (vro1->type && vro2->type
		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
					 TYPE_MAIN_VARIANT (vro2->type))))
	  && expressions_equal_p (vro1->op0, vro2->op0)
	  && expressions_equal_p (vro1->op1, vro2->op1)
	  && expressions_equal_p (vro1->op2, vro2->op2)
	  && (vro1->opcode != CALL_EXPR
	      || vro1->clique == vro2->clique));
}

/* Free a reference operation structure VP.  */

static inline void
free_reference (vn_reference_s *vr)
{
  vr->operands.release ();
}


/* vn_reference hashtable helpers.  */

struct vn_reference_hasher : nofree_ptr_hash <vn_reference_s>
{
  static inline hashval_t hash (const vn_reference_s *);
  static inline bool equal (const vn_reference_s *, const vn_reference_s *);
};

/* Return the hashcode for a given reference operation P1.  */

inline hashval_t
vn_reference_hasher::hash (const vn_reference_s *vr1)
{
  return vr1->hashcode;
}

inline bool
vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
{
  return v == c || vn_reference_eq (v, c);
}

typedef hash_table<vn_reference_hasher> vn_reference_table_type;
typedef vn_reference_table_type::iterator vn_reference_iterator_type;
/* Pretty-print OPS to OUTFILE.  */

static void
print_vn_reference_ops (FILE *outfile, const vec<vn_reference_op_s> ops)
{
  vn_reference_op_t vro;
  unsigned int i;
  fprintf (outfile, "{");
  for (i = 0; ops.iterate (i, &vro); i++)
    {
      bool closebrace = false;
      if (vro->opcode != SSA_NAME
	  && TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
	{
	  fprintf (outfile, "%s", get_tree_code_name (vro->opcode));
	  if (vro->op0 || vro->opcode == CALL_EXPR)
	    {
	      fprintf (outfile, "<");
	      closebrace = true;
	    }
	}
      if (vro->op0 || vro->opcode == CALL_EXPR)
	{
	  if (!vro->op0)
	    fprintf (outfile, internal_fn_name ((internal_fn)vro->clique));
	  else
	    print_generic_expr (outfile, vro->op0);
	  if (vro->op1)
	    {
	      fprintf (outfile, ",");
	      print_generic_expr (outfile, vro->op1);
	    }
	  if (vro->op2)
	    {
	      fprintf (outfile, ",");
	      print_generic_expr (outfile, vro->op2);
	    }
	}
      if (closebrace)
	fprintf (outfile, ">");
      if (i != ops.length () - 1)
	fprintf (outfile, ",");
    }
  fprintf (outfile, "}");
}

DEBUG_FUNCTION void
debug_vn_reference_ops (const vec<vn_reference_op_s> ops)
{
  print_vn_reference_ops (stderr, ops);
  fputc ('\n', stderr);
}
/* The set of VN hashtables.  */

typedef struct vn_tables_s
{
  vn_nary_op_table_type *nary;
  vn_phi_table_type *phis;
  vn_reference_table_type *references;
} *vn_tables_t;


/* vn_constant hashtable helpers.  */

struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
{
  static inline hashval_t hash (const vn_constant_s *);
  static inline bool equal (const vn_constant_s *, const vn_constant_s *);
};

/* Hash table hash function for vn_constant_t.  */

inline hashval_t
vn_constant_hasher::hash (const vn_constant_s *vc1)
{
  return vc1->hashcode;
}

/* Hash table equality function for vn_constant_t.  */

inline bool
vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
{
  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

static hash_table<vn_constant_hasher> *constant_to_value_id;


/* Obstack we allocate the vn-tables elements from.  */
static obstack vn_tables_obstack;
/* Special obstack we never unwind.  */
static obstack vn_tables_insert_obstack;

static vn_reference_t last_inserted_ref;
static vn_phi_t last_inserted_phi;
static vn_nary_op_t last_inserted_nary;
static vn_ssa_aux_t last_pushed_avail;

/* Valid hashtables storing information we have proven to be
   correct.  */
static vn_tables_t valid_info;


/* Valueization hook for simplify_replace_tree.  Valueize NAME if it is
   an SSA name, otherwise just return it.  */
tree (*vn_valueize) (tree);
static tree
vn_valueize_for_srt (tree t, void* context ATTRIBUTE_UNUSED)
{
  basic_block saved_vn_context_bb = vn_context_bb;
  /* Look for sth available at the definition block of the argument.
     This avoids inconsistencies between availability there which
     decides if the stmt can be removed and availability at the
     use site.  The SSA property ensures that things available
     at the definition are also available at uses.  */
  if (!SSA_NAME_IS_DEFAULT_DEF (t))
    vn_context_bb = gimple_bb (SSA_NAME_DEF_STMT (t));
  tree res = vn_valueize (t);
  vn_context_bb = saved_vn_context_bb;
  return res;
}

/* This represents the top of the VN lattice, which is the universal
   value.  */
tree VN_TOP;

/* Unique counter for our value ids.  */
static unsigned int next_value_id;
static int next_constant_value_id;


/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */
struct vn_ssa_aux_hasher : typed_noop_remove <vn_ssa_aux_t>
{
  typedef vn_ssa_aux_t value_type;
  typedef tree compare_type;
  static inline hashval_t hash (const value_type &);
  static inline bool equal (const value_type &, const compare_type &);
  static inline void mark_deleted (value_type &) {}
  static const bool empty_zero_p = true;
  static inline void mark_empty (value_type &e) { e = NULL; }
  static inline bool is_deleted (value_type &) { return false; }
  static inline bool is_empty (value_type &e) { return e == NULL; }
};

hashval_t
vn_ssa_aux_hasher::hash (const value_type &entry)
{
  return SSA_NAME_VERSION (entry->name);
}

bool
vn_ssa_aux_hasher::equal (const value_type &entry, const compare_type &name)
{
  return name == entry->name;
}

static hash_table<vn_ssa_aux_hasher> *vn_ssa_aux_hash;
typedef hash_table<vn_ssa_aux_hasher>::iterator vn_ssa_aux_iterator_type;
static struct obstack vn_ssa_aux_obstack;

static vn_nary_op_t vn_nary_op_insert_stmt (gimple *, tree);
static vn_nary_op_t vn_nary_op_insert_into (vn_nary_op_t,
					    vn_nary_op_table_type *);
static void init_vn_nary_op_from_pieces (vn_nary_op_t, unsigned int,
					 enum tree_code, tree, tree *);
static tree vn_lookup_simplify_result (gimple_match_op *);
static vn_reference_t vn_reference_lookup_or_insert_for_pieces
	  (tree, alias_set_type, alias_set_type, tree,
	   vec<vn_reference_op_s, va_heap>, tree);

/* Return whether there is value numbering information for a given SSA name.  */

bool
has_VN_INFO (tree name)
{
  return vn_ssa_aux_hash->find_with_hash (name, SSA_NAME_VERSION (name));
}
vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t *res
    = vn_ssa_aux_hash->find_slot_with_hash (name, SSA_NAME_VERSION (name),
					    INSERT);
  if (*res != NULL)
    return *res;

  vn_ssa_aux_t newinfo = *res = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  newinfo->name = name;
  newinfo->valnum = VN_TOP;
  /* We are using the visited flag to handle uses with defs not within the
     region being value-numbered.  */
  newinfo->visited = false;

  /* Given we create the VN_INFOs on-demand now we have to do initialization
     different than VN_TOP here.  */
  if (SSA_NAME_IS_DEFAULT_DEF (name))
    switch (TREE_CODE (SSA_NAME_VAR (name)))
      {
      case VAR_DECL:
	/* All undefined vars are VARYING.  */
	newinfo->valnum = name;
	newinfo->visited = true;
	break;

      case PARM_DECL:
	/* Parameters are VARYING but we can record a condition
	   if we know it is a non-NULL pointer.  */
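	/* Illustrative example (not from the original sources): for a
	   parameter  int *p  known to be non-null, the two nary entries
	   recorded below make later lookups of  p != 0  yield true and
	   p == 0  yield false without any additional analysis.  */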
	newinfo->visited = true;
	newinfo->valnum = name;
	if (POINTER_TYPE_P (TREE_TYPE (name))
	    && nonnull_arg_p (SSA_NAME_VAR (name)))
	  {
	    tree ops[2];
	    ops[0] = name;
	    ops[1] = build_int_cst (TREE_TYPE (name), 0);
	    vn_nary_op_t nary;
	    /* Allocate from non-unwinding stack.  */
	    nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
	    init_vn_nary_op_from_pieces (nary, 2, NE_EXPR,
					 boolean_type_node, ops);
	    nary->predicated_values = 0;
	    nary->u.result = boolean_true_node;
	    vn_nary_op_insert_into (nary, valid_info->nary);
	    gcc_assert (nary->unwind_to == NULL);
	    /* Also do not link it into the undo chain.  */
	    last_inserted_nary = nary->next;
	    nary->next = (vn_nary_op_t)(void *)-1;
	    nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
	    init_vn_nary_op_from_pieces (nary, 2, EQ_EXPR,
					 boolean_type_node, ops);
	    nary->predicated_values = 0;
	    nary->u.result = boolean_false_node;
	    vn_nary_op_insert_into (nary, valid_info->nary);
	    gcc_assert (nary->unwind_to == NULL);
	    last_inserted_nary = nary->next;
	    nary->next = (vn_nary_op_t)(void *)-1;
	    if (dump_file && (dump_flags & TDF_DETAILS))
	      {
		fprintf (dump_file, "Recording ");
		print_generic_expr (dump_file, name, TDF_SLIM);
		fprintf (dump_file, " != 0\n");
	      }
	  }
	break;

      case RESULT_DECL:
	/* If the result is passed by invisible reference the default
	   def is initialized, otherwise it's uninitialized.  Still
	   undefined is varying.  */
	newinfo->visited = true;
	newinfo->valnum = name;
	break;

      default:
	gcc_unreachable ();
      }
  return newinfo;
}
/* Return the SSA value of X.  */

static inline tree
SSA_VAL (tree x, bool *visited = NULL)
{
  vn_ssa_aux_t tem = vn_ssa_aux_hash->find_with_hash (x, SSA_NAME_VERSION (x));
  if (visited)
    *visited = tem && tem->visited;
  return tem && tem->visited ? tem->valnum : x;
}

/* Return the SSA value of the VUSE x, supporting released VDEFs
   during elimination which will value-number the VDEF to the
   associated VUSE (but not substitute in the whole lattice).  */

static inline tree
vuse_ssa_val (tree x)
{
  if (!x)
    return NULL_TREE;

  do
    {
      x = SSA_VAL (x);
      gcc_assert (x != VN_TOP);
    }
  while (SSA_NAME_IN_FREE_LIST (x));

  return x;
}

/* Similar to the above but used as callback for walk_non_aliased_vuses
   and thus should stop at unvisited VUSE to not walk across region
   boundaries.  */

static tree
vuse_valueize (tree vuse)
{
  do
    {
      bool visited;
      vuse = SSA_VAL (vuse, &visited);
      if (!visited)
	return NULL_TREE;
      gcc_assert (vuse != VN_TOP);
    }
  while (SSA_NAME_IN_FREE_LIST (vuse));
  return vuse;
}
/* Return the vn_kind the expression computed by the stmt should be
   associated with.  */

enum vn_kind
vn_get_stmt_kind (gimple *stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
	enum tree_code code = gimple_assign_rhs_code (stmt);
	tree rhs1 = gimple_assign_rhs1 (stmt);
	switch (get_gimple_rhs_class (code))
	  {
	  case GIMPLE_UNARY_RHS:
	  case GIMPLE_BINARY_RHS:
	  case GIMPLE_TERNARY_RHS:
	    return VN_NARY;
	  case GIMPLE_SINGLE_RHS:
	    switch (TREE_CODE_CLASS (code))
	      {
	      case tcc_reference:
		/* VOP-less references can go through unary case.  */
		if ((code == REALPART_EXPR
		     || code == IMAGPART_EXPR
		     || code == VIEW_CONVERT_EXPR
		     || code == BIT_FIELD_REF)
		    && (TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME
			|| is_gimple_min_invariant (TREE_OPERAND (rhs1, 0))))
		  return VN_NARY;
		/* Fallthrough.  */
	      case tcc_declaration:
		return VN_REFERENCE;

	      default:
		if (code == ADDR_EXPR)
		  return (is_gimple_min_invariant (rhs1)
			  ? VN_CONSTANT : VN_REFERENCE);
		else if (code == CONSTRUCTOR)
		  return VN_NARY;
		return VN_NONE;
	      }
	  default:
	    return VN_NONE;
	  }
      }
    default:
      return VN_NONE;
    }
}
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
  if (slot)
    return (*slot)->value_id;
  return 0;
}


/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  /* If the hashtable isn't initialized we're not running from PRE and thus
     do not need value-ids.  */
  if (!constant_to_value_id)
    return 0;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, INSERT);
  if (*slot)
    return (*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_constant_value_id ();
  *slot = vcp;
  return vcp->value_id;
}
/* Compute the hash for a reference operand VRO1.  */

static void
vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
{
  hstate.add_int (vro1->opcode);
  if (vro1->opcode == CALL_EXPR && !vro1->op0)
    hstate.add_int (vro1->clique);
  if (vro1->op0)
    inchash::add_expr (vro1->op0, hstate);
  if (vro1->op1)
    inchash::add_expr (vro1->op1, hstate);
  if (vro1->op2)
    inchash::add_expr (vro1->op2, hstate);
}

/* Compute a hash for the reference operation VR1 and return it.  */

static hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  inchash::hash hstate;
  hashval_t result;
  int i;
  vn_reference_op_t vro;
  poly_int64 off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
	deref = true;
      else if (vro->opcode != ADDR_EXPR)
	deref = false;
      if (maybe_ne (vro->off, -1))
	{
	  if (known_eq (off, -1))
	    off = 0;
	  off += vro->off;
	}
      else
	{
	  if (maybe_ne (off, -1)
	      && maybe_ne (off, 0))
	    hstate.add_poly_int (off);
	  off = -1;
	  if (deref
	      && vro->opcode == ADDR_EXPR)
	    {
	      if (vro->op0)
		{
		  tree op = TREE_OPERAND (vro->op0, 0);
		  hstate.add_int (TREE_CODE (op));
		  inchash::add_expr (op, hstate);
		}
	    }
	  else
	    vn_reference_op_compute_hash (vro, hstate);
	}
    }
  result = hstate.end ();
  /* ??? We would ICE later if we hash instead of adding that in.  */
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
/* Return true if reference operations VR1 and VR2 are equivalent.  This
   means they have the same set of operands and vuses.  */

bool
vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
{
  unsigned i, j;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!vr1->type || !vr2->type)
    {
      if (vr1->type != vr2->type)
	return false;
    }
  else if (vr1->type == vr2->type)
    ;
  else if (COMPLETE_TYPE_P (vr1->type) != COMPLETE_TYPE_P (vr2->type)
	   || (COMPLETE_TYPE_P (vr1->type)
	       && !expressions_equal_p (TYPE_SIZE (vr1->type),
					TYPE_SIZE (vr2->type))))
    return false;
  else if (vr1->operands[0].opcode == CALL_EXPR
	   && !types_compatible_p (vr1->type, vr2->type))
    return false;
  else if (INTEGRAL_TYPE_P (vr1->type)
	   && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
	return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
	   && (TYPE_PRECISION (vr1->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
	   && (TYPE_PRECISION (vr2->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      poly_int64 off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      bool reverse1 = false, reverse2 = false;
      for (; vr1->operands.iterate (i, &vro1); i++)
	{
	  if (vro1->opcode == MEM_REF)
	    deref1 = true;
	  /* Do not look through a storage order barrier.  */
	  else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
	    return false;
	  reverse1 |= vro1->reverse;
	  if (known_eq (vro1->off, -1))
	    break;
	  off1 += vro1->off;
	}
      for (; vr2->operands.iterate (j, &vro2); j++)
	{
	  if (vro2->opcode == MEM_REF)
	    deref2 = true;
	  /* Do not look through a storage order barrier.  */
	  else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
	    return false;
	  reverse2 |= vro2->reverse;
	  if (known_eq (vro2->off, -1))
	    break;
	  off2 += vro2->off;
	}
      if (maybe_ne (off1, off2) || reverse1 != reverse2)
	return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
	{
	  memset (&tem1, 0, sizeof (tem1));
	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
	  tem1.type = TREE_TYPE (tem1.op0);
	  tem1.opcode = TREE_CODE (tem1.op0);
	  vro1 = &tem1;
	  deref1 = false;
	}
      if (deref2 && vro2->opcode == ADDR_EXPR)
	{
	  memset (&tem2, 0, sizeof (tem2));
	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
	  tem2.type = TREE_TYPE (tem2.op0);
	  tem2.opcode = TREE_CODE (tem2.op0);
	  vro2 = &tem2;
	  deref2 = false;
	}
      if (deref1 != deref2)
	return false;
      if (!vn_reference_op_eq (vro1, vro2))
	return false;
      ++j;
      ++i;
    }
  while (vr1->operands.length () != i
	 || vr2->operands.length () != j);

  return true;
}
/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */
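/* Rough illustrative example (not from the original sources): for a load
   from  a.f  where  a  is a local VAR_DECL, the function below pushes
   approximately  { COMPONENT_REF <f>, MEM_REF <0>, ADDR_EXPR <&a> },
   i.e. the outermost handled component first and the canonicalized
   MEM[&decl] base last.  */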
static void
copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
{
  /* For non-calls, store the information that makes up the address.  */
  tree orig = ref;
  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
	{
	case MODIFY_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  break;
	case WITH_SIZE_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.off = 0;
	  break;
	case MEM_REF:
	  /* The base address gets its own vn_reference_op_s structure.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  if (!mem_ref_offset (ref).to_shwi (&temp.off))
	    temp.off = -1;
	  temp.clique = MR_DEPENDENCE_CLIQUE (ref);
	  temp.base = MR_DEPENDENCE_BASE (ref);
	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
	  break;
	case TARGET_MEM_REF:
	  /* The base address gets its own vn_reference_op_s structure.  */
	  temp.op0 = TMR_INDEX (ref);
	  temp.op1 = TMR_STEP (ref);
	  temp.op2 = TMR_OFFSET (ref);
	  temp.clique = MR_DEPENDENCE_CLIQUE (ref);
	  temp.base = MR_DEPENDENCE_BASE (ref);
	  result->safe_push (temp);
	  memset (&temp, 0, sizeof (temp));
	  temp.type = NULL_TREE;
	  temp.opcode = ERROR_MARK;
	  temp.op0 = TMR_INDEX2 (ref);
	  temp.off = -1;
	  break;
	case BIT_FIELD_REF:
	  /* Record bits, position and storage order.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
	    temp.off = -1;
	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
	  break;
	case COMPONENT_REF:
	  /* The field decl is enough to unambiguously specify the field,
	     so use its type here.  */
	  temp.type = TREE_TYPE (TREE_OPERAND (ref, 1));
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  temp.reverse = (AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (ref, 0)))
			  && TYPE_REVERSE_STORAGE_ORDER
			       (TREE_TYPE (TREE_OPERAND (ref, 0))));
	  {
	    tree this_offset = component_ref_field_offset (ref);
	    if (this_offset
		&& poly_int_tree_p (this_offset))
	      {
		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
		  {
		    poly_offset_int off
		      = (wi::to_poly_offset (this_offset)
			 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
		    /* Prohibit value-numbering zero offset components
		       of addresses the same before the pass folding
		       __builtin_object_size had a chance to run.  */
		    if (TREE_CODE (orig) != ADDR_EXPR
			|| maybe_ne (off, 0)
			|| (cfun->curr_properties & PROP_objsz))
		      off.to_shwi (&temp.off);
		  }
	      }
	  }
	  break;
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  {
	    tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
	    /* Record index as operand.  */
	    temp.op0 = TREE_OPERAND (ref, 1);
	    /* Always record lower bounds and element size.  */
	    temp.op1 = array_ref_low_bound (ref);
	    /* But record element size in units of the type alignment.  */
	    temp.op2 = TREE_OPERAND (ref, 3);
	    temp.align = eltype->type_common.align;
	    if (!temp.op2)
	      temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
				     size_int (TYPE_ALIGN_UNIT (eltype)));
	    if (poly_int_tree_p (temp.op0)
		&& poly_int_tree_p (temp.op1)
		&& TREE_CODE (temp.op2) == INTEGER_CST)
	      {
		poly_offset_int off = ((wi::to_poly_offset (temp.op0)
					- wi::to_poly_offset (temp.op1))
				       * wi::to_offset (temp.op2)
				       * vn_ref_op_align_unit (&temp));
		off.to_shwi (&temp.off);
	      }
	    temp.reverse = (AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (ref, 0)))
			    && TYPE_REVERSE_STORAGE_ORDER
				 (TREE_TYPE (TREE_OPERAND (ref, 0))));
	  }
	  break;
	case VAR_DECL:
	  if (DECL_HARD_REGISTER (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthru.  */
	case PARM_DECL:
	case CONST_DECL:
	case RESULT_DECL:
	  /* Canonicalize decls to MEM[&decl] which is what we end up with
	     when valueizing MEM[ptr] with ptr = &decl.  */
	  temp.opcode = MEM_REF;
	  temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
	  temp.off = 0;
	  result->safe_push (temp);
	  temp.opcode = ADDR_EXPR;
	  temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
	  temp.type = TREE_TYPE (temp.op0);
	  temp.off = -1;
	  break;
	case ADDR_EXPR:
	  if (is_gimple_min_invariant (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  break;
	  /* These are only interesting for their operands, their
	     existence, and their type.  They will never be the last
	     ref in the chain of references (IE they require an
	     operand), so we don't have to put anything
	     for op* as it will be handled by the iteration  */
	case VIEW_CONVERT_EXPR:
	  temp.off = 0;
	  temp.reverse = storage_order_barrier_p (ref);
	  break;
	case IMAGPART_EXPR:
	  /* This is only interesting for its constant offset.  */
	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
	  break;
	default:
	  gcc_unreachable ();
	}
      result->safe_push (temp);

      if (REFERENCE_CLASS_P (ref)
	  || TREE_CODE (ref) == MODIFY_EXPR
	  || TREE_CODE (ref) == WITH_SIZE_EXPR
	  || (TREE_CODE (ref) == ADDR_EXPR
	      && !is_gimple_min_invariant (ref)))
	ref = TREE_OPERAND (ref, 0);
      else
	ref = NULL_TREE;
    }
}
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

static bool
ao_ref_init_from_vn_reference (ao_ref *ref,
			       alias_set_type set, alias_set_type base_set,
			       tree type, const vec<vn_reference_op_s> &ops)
{
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  poly_offset_int offset = 0;
  poly_offset_int max_size;
  poly_offset_int size = -1;
  tree size_tree = NULL_TREE;

  /* We don't handle calls.  */
  if (!type)
    return false;

  machine_mode mode = TYPE_MODE (type);
  if (mode == BLKmode)
    size_tree = TYPE_SIZE (type);
  else
    size = GET_MODE_BITSIZE (mode);
  if (size_tree != NULL_TREE
      && poly_int_tree_p (size_tree))
    size = wi::to_poly_offset (size_tree);

  /* Lower the final access size from the outermost expression.  */
  const_vn_reference_op_t cst_op = &ops[0];
  /* Cast away constness for the sake of the const-unsafe
     FOR_EACH_VEC_ELT().  */
  vn_reference_op_t op = const_cast<vn_reference_op_t>(cst_op);
  size_tree = NULL_TREE;
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  if (size_tree != NULL_TREE
      && poly_int_tree_p (size_tree)
      && (!known_size_p (size)
	  || known_lt (wi::to_poly_offset (size_tree), size)))
    size = wi::to_poly_offset (size_tree);

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
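  /* Illustrative example (not from the original sources): for the operands
     { COMPONENT_REF <f>, MEM_REF <16>, SSA_NAME p } the loop below builds
     the base  MEM[p, 16]  and adds the bit offset of field  f  to OFFSET,
     leaving MAX_SIZE unchanged unless a field offset is variable.  */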
  FOR_EACH_VEC_ELT (ops, i, op)
    {
      switch (op->opcode)
	{
	/* These may be in the reference ops, but we cannot do anything
	   sensible with them here.  */
	case ADDR_EXPR:
	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
	  if (base != NULL_TREE
	      && TREE_CODE (base) == MEM_REF
	      && op->op0
	      && DECL_P (TREE_OPERAND (op->op0, 0)))
	    {
	      const_vn_reference_op_t pop = &ops[i-1];
	      base = TREE_OPERAND (op->op0, 0);
	      if (known_eq (pop->off, -1))
		{
		  max_size = -1;
		  offset = 0;
		}
	      else
		offset += pop->off * BITS_PER_UNIT;
	      op0_p = NULL;
	      break;
	    }
	  /* Fallthru.  */
	case CALL_EXPR:
	  return false;

	/* Record the base objects.  */
	case MEM_REF:
	  *op0_p = build2 (MEM_REF, op->type,
			   NULL_TREE, op->op0);
	  MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
	  MR_DEPENDENCE_BASE (*op0_p) = op->base;
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case VAR_DECL:
	case PARM_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  *op0_p = op->op0;
	  op0_p = NULL;
	  break;

	/* And now the usual component-reference style ops.  */
	case BIT_FIELD_REF:
	  offset += wi::to_poly_offset (op->op1);
	  break;

	case COMPONENT_REF:
	  {
	    tree field = op->op0;
	    /* We do not have a complete COMPONENT_REF tree here so we
	       cannot use component_ref_field_offset.  Do the interesting
	       parts manually.  */
	    tree this_offset = DECL_FIELD_OFFSET (field);

	    if (op->op1 || !poly_int_tree_p (this_offset))
	      max_size = -1;
	    else
	      {
		poly_offset_int woffset = (wi::to_poly_offset (this_offset)
					   << LOG2_BITS_PER_UNIT);
		woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
		offset += woffset;
	      }
	    break;
	  }

	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* We recorded the lower bound and the element size.  */
	  if (!poly_int_tree_p (op->op0)
	      || !poly_int_tree_p (op->op1)
	      || TREE_CODE (op->op2) != INTEGER_CST)
	    max_size = -1;
	  else
	    {
	      poly_offset_int woffset
		= wi::sext (wi::to_poly_offset (op->op0)
			    - wi::to_poly_offset (op->op1),
			    TYPE_PRECISION (sizetype));
	      woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
	      woffset <<= LOG2_BITS_PER_UNIT;
	      offset += woffset;
	    }
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	default:
	  return false;
	}
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->ref_alias_set = set;
  ref->base_alias_set = base_set;
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
    {
      ref->offset = 0;
      ref->size = -1;
      ref->max_size = -1;
      return true;
    }

  if (!offset.to_shwi (&ref->offset))
    {
      ref->offset = 0;
      ref->max_size = -1;
      return true;
    }

  if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
    ref->max_size = -1;

  return true;
}
/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_call (gcall *call,
			      vec<vn_reference_op_s> *result)
{
  vn_reference_op_s temp;
  unsigned i;
  tree lhs = gimple_call_lhs (call);
  int lr;

  /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
     different.  By adding the lhs here in the vector, we ensure that the
     hashcode is different, guaranteeing a different value number.  */
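  /* Illustrative example (not from the original sources): the calls
     *p = foo ()  and  *q = foo ()  must not get the same VDEF value
     number even when their arguments and VUSE agree; pushing the lhs
     below keeps their hash codes distinct.  */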
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    {
      memset (&temp, 0, sizeof (temp));
      temp.opcode = MODIFY_EXPR;
      temp.type = TREE_TYPE (lhs);
      temp.op0 = lhs;
      temp.off = -1;
      result->safe_push (temp);
    }

  /* Copy the type, opcode, function, static chain and EH region, if any.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_fntype (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  if (gimple_call_internal_p (call))
    temp.clique = gimple_call_internal_fn (call);
  temp.op1 = gimple_call_chain (call);
  if (stmt_could_throw_p (cfun, call) && (lr = lookup_stmt_eh_lp (call)) > 0)
    temp.op2 = size_int (lr);
  temp.off = -1;
  result->safe_push (temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */

static bool
vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
			    unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  tree addr_base;
  poly_int64 addr_offset = 0;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
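  /* Illustrative example (not from the original sources): for the ops of
     MEM[&s.a.b, 4] this rewrites the address operand to &s and folds the
     unit offset of  .a.b  into the MEM_REF offset operand.  */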
  addr_base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (op->op0, 0),
					       &addr_offset, vn_valueize);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != TREE_OPERAND (op->op0, 0))
    {
      poly_offset_int off
	= (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
				  SIGNED)
	   + addr_offset);
      mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (tree_fits_shwi_p (mem_op->op0))
	mem_op->off = tree_to_shwi (mem_op->op0);
      else
	mem_op->off = -1;
      return true;
    }
  return false;
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */

static bool
vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
				     unsigned int *i_p)
{
  bool changed = false;
  vn_reference_op_t op;

  do
    {
      unsigned int i = *i_p;
      op = &(*ops)[i];
      vn_reference_op_t mem_op = &(*ops)[i - 1];
      gimple *def_stmt;
      enum tree_code code;
      poly_offset_int off;

      def_stmt = SSA_NAME_DEF_STMT (op->op0);
      if (!is_gimple_assign (def_stmt))
	return changed;

      code = gimple_assign_rhs_code (def_stmt);
      if (code != ADDR_EXPR
	  && code != POINTER_PLUS_EXPR)
	return changed;

      off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);

      /* The only thing we have to do is from &OBJ.foo.bar add the offset
	 from .foo.bar to the preceding MEM_REF offset and replace the
	 address with &OBJ.  */
      if (code == ADDR_EXPR)
	{
	  tree addr, addr_base;
	  poly_int64 addr_offset;

	  addr = gimple_assign_rhs1 (def_stmt);
	  addr_base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (addr, 0),
						       &addr_offset,
						       vn_valueize);
	  /* If that didn't work because the address isn't invariant propagate
	     the reference tree from the address operation in case the current
	     dereference isn't offsetted.  */
	  if (!addr_base
	      && *i_p == ops->length () - 1
	      && known_eq (off, 0)
	      /* This makes us disable this transform for PRE where the
		 reference ops might be also used for code insertion which
		 is invalid.  */
	      && default_vn_walk_kind == VN_WALKREWRITE)
	    {
	      auto_vec<vn_reference_op_s, 32> tem;
	      copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
	      /* Make sure to preserve TBAA info.  The only objects not
		 wrapped in MEM_REFs that can have their address taken are
		 STRING_CSTs.  */
	      if (tem.length () >= 2
		  && tem[tem.length () - 2].opcode == MEM_REF)
		{
		  vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
		  new_mem_op->op0
		    = wide_int_to_tree (TREE_TYPE (mem_op->op0),
					wi::to_poly_wide (new_mem_op->op0));
		}
	      else
		gcc_assert (tem.last ().opcode == STRING_CST);
	      ops->pop ();
	      ops->pop ();
	      ops->safe_splice (tem);
	      --*i_p;
	      return true;
	    }
	  if (!addr_base
	      || TREE_CODE (addr_base) != MEM_REF
	      || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base,
								    0))))
	    return changed;

	  off += addr_offset;
	  off += mem_ref_offset (addr_base);
	  op->op0 = TREE_OPERAND (addr_base, 0);
	}
      else
	{
	  tree ptr, ptroff;
	  ptr = gimple_assign_rhs1 (def_stmt);
	  ptroff = gimple_assign_rhs2 (def_stmt);
	  if (TREE_CODE (ptr) != SSA_NAME
	      || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
	      /* Make sure to not endlessly recurse.
		 See gcc.dg/tree-ssa/20040408-1.c for an example.  Can easily
		 happen when we value-number a PHI to its backedge value.  */
	      || SSA_VAL (ptr) == op->op0
	      || !poly_int_tree_p (ptroff))
	    return changed;

	  off += wi::to_poly_offset (ptroff);
	  op->op0 = ptr;
	}

      mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
      if (tree_fits_shwi_p (mem_op->op0))
	mem_op->off = tree_to_shwi (mem_op->op0);
      else
	mem_op->off = -1;
      /* ??? Can end up with endless recursion here!?
	 gcc.c-torture/execute/strcmp-1.c  */
      if (TREE_CODE (op->op0) == SSA_NAME)
	op->op0 = SSA_VAL (op->op0);
      if (TREE_CODE (op->op0) != SSA_NAME)
	op->opcode = TREE_CODE (op->op0);

      changed = true;
    }
  while (TREE_CODE (op->op0) == SSA_NAME);

  /* Fold a remaining *&.  */
  if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);

  return changed;
}
/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  vec<vn_reference_op_s> operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = &operands[0];
  if (op->opcode == CALL_EXPR
      && (!op->op0
	  || (TREE_CODE (op->op0) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
	      && fndecl_built_in_p (TREE_OPERAND (op->op0, 0),
				    BUILT_IN_NORMAL)))
      && operands.length () >= 2
      && operands.length () <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = &operands[1];
      if (operands.length () > 2)
	arg1 = &operands[2];
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
	  || (arg0->opcode == ADDR_EXPR
	      && is_gimple_min_invariant (arg0->op0)))
	anyconst = true;
      if (arg1
	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
	      || (arg1->opcode == ADDR_EXPR
		  && is_gimple_min_invariant (arg1->op0))))
	anyconst = true;
      if (anyconst)
	{
	  combined_fn fn;
	  if (op->op0)
	    fn = as_combined_fn (DECL_FUNCTION_CODE
				   (TREE_OPERAND (op->op0, 0)));
	  else
	    fn = as_combined_fn ((internal_fn) op->clique);
	  tree folded;
	  if (arg1)
	    folded = fold_const_call (fn, ref->type, arg0->op0, arg1->op0);
	  else
	    folded = fold_const_call (fn, ref->type, arg0->op0);
	  if (folded
	      && is_gimple_min_invariant (folded))
	    return folded;
	}
    }

  /* Simplify reads from constants or constant initializers.  */
  else if (BITS_PER_UNIT == 8
	   && ref->type
	   && COMPLETE_TYPE_P (ref->type)
	   && is_gimple_reg_type (ref->type))
    {
      poly_int64 off = 0;
      HOST_WIDE_INT size;
      if (INTEGRAL_TYPE_P (ref->type))
	size = TYPE_PRECISION (ref->type);
      else if (tree_fits_shwi_p (TYPE_SIZE (ref->type)))
	size = tree_to_shwi (TYPE_SIZE (ref->type));
      else
	return NULL_TREE;
      if (size % BITS_PER_UNIT != 0
	  || size > MAX_BITSIZE_MODE_ANY_MODE)
	return NULL_TREE;
      size /= BITS_PER_UNIT;
      unsigned i;
      for (i = 0; i < operands.length (); ++i)
	{
	  if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
	    {
	      ++i;
	      break;
	    }
	  if (known_eq (operands[i].off, -1))
	    return NULL_TREE;
	  off += operands[i].off;
	  if (operands[i].opcode == MEM_REF)
	    {
	      ++i;
	      break;
	    }
	}
      vn_reference_op_t base = &operands[--i];
      tree ctor = error_mark_node;
      tree decl = NULL_TREE;
      if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
	ctor = base->op0;
      else if (base->opcode == MEM_REF
	       && base[1].opcode == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
	{
	  decl = TREE_OPERAND (base[1].op0, 0);
	  if (TREE_CODE (decl) == STRING_CST)
	    ctor = decl;
	  else
	    ctor = ctor_for_folding (decl);
	}
      if (ctor == NULL_TREE)
	return build_zero_cst (ref->type);
      else if (ctor != error_mark_node)
	{
	  HOST_WIDE_INT const_off;
	  if (decl)
	    {
	      tree res = fold_ctor_reference (ref->type, ctor,
					      off * BITS_PER_UNIT,
					      size * BITS_PER_UNIT, decl);
	      if (res)
		{
		  STRIP_USELESS_TYPE_CONVERSION (res);
		  if (is_gimple_min_invariant (res))
		    return res;
		}
	    }
	  else if (off.is_constant (&const_off))
	    {
	      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
	      int len = native_encode_expr (ctor, buf, size, const_off);
	      if (len > 0)
		return native_interpret_expr (ref->type, buf, len);
	    }
	}
    }

  return NULL_TREE;
}
/* Return true if OPS contain a storage order barrier.  */

static bool
contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;

  FOR_EACH_VEC_ELT (ops, i, op)
    if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
      return true;

  return false;
}

/* Return true if OPS represent an access with reverse storage order.  */

static bool
reverse_storage_order_for_component_p (vec<vn_reference_op_s> ops)
{
  unsigned i = 0;
  if (ops[i].opcode == REALPART_EXPR || ops[i].opcode == IMAGPART_EXPR)
    ++i;
  switch (ops[i].opcode)
    {
    case ARRAY_REF:
    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case MEM_REF:
      return ops[i].reverse;
    default:
      return false;
    }
}
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static void
valueize_refs_1 (vec<vn_reference_op_s> *orig, bool *valueized_anything,
		 bool with_avail = false)
{
  *valueized_anything = false;

  for (unsigned i = 0; i < orig->length (); ++i)
    {
      vn_reference_op_t vro = &(*orig)[i];
      if (vro->opcode == SSA_NAME
	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
	{
	  tree tem = with_avail ? vn_valueize (vro->op0) : SSA_VAL (vro->op0);
	  if (tem != vro->op0)
	    {
	      *valueized_anything = true;
	      vro->op0 = tem;
	    }
	  /* If it transforms from an SSA_NAME to a constant, update
	     the opcode.  */
	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
	    vro->opcode = TREE_CODE (vro->op0);
	}
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
	{
	  tree tem = with_avail ? vn_valueize (vro->op1) : SSA_VAL (vro->op1);
	  if (tem != vro->op1)
	    {
	      *valueized_anything = true;
	      vro->op1 = tem;
	    }
	}
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
	{
	  tree tem = with_avail ? vn_valueize (vro->op2) : SSA_VAL (vro->op2);
	  if (tem != vro->op2)
	    {
	      *valueized_anything = true;
	      vro->op2 = tem;
	    }
	}
      /* If it transforms from an SSA_NAME to an address, fold with
	 a preceding indirect reference.  */
      if (i > 0
	  && vro->op0
	  && TREE_CODE (vro->op0) == ADDR_EXPR
	  && (*orig)[i - 1].opcode == MEM_REF)
	{
	  if (vn_reference_fold_indirect (orig, &i))
	    *valueized_anything = true;
	}
      else if (i > 0
	       && vro->opcode == SSA_NAME
	       && (*orig)[i - 1].opcode == MEM_REF)
	{
	  if (vn_reference_maybe_forwprop_address (orig, &i))
	    {
	      *valueized_anything = true;
	      /* Re-valueize the current operand.  */
	    }
	}
      /* If it transforms a non-constant ARRAY_REF into a constant
	 one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
	       && known_eq (vro->off, -1)
	       && poly_int_tree_p (vro->op0)
	       && poly_int_tree_p (vro->op1)
	       && TREE_CODE (vro->op2) == INTEGER_CST)
	{
	  poly_offset_int off = ((wi::to_poly_offset (vro->op0)
				  - wi::to_poly_offset (vro->op1))
				 * wi::to_offset (vro->op2)
				 * vn_ref_op_align_unit (vro));
	  off.to_shwi (&vro->off);
	}
    }
}
static void
valueize_refs (vec<vn_reference_op_s> *orig)
{
  bool tem;
  valueize_refs_1 (orig, &tem);
}

static vec<vn_reference_op_s> shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  valueize_refs_1 (&shared_lookup_references, valueized_anything);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_call (gcall *call)
{
  if (!call)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  valueize_refs (&shared_lookup_references);
  return shared_lookup_references;
}
/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  vn_reference_s **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
	*vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}
/* Partial definition tracking support.  */

struct pd_range
{
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
};

struct pd_data
{
  tree rhs;
  HOST_WIDE_INT rhs_off;
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
};

/* Context for alias walking.  */

struct vn_walk_cb_data
{
  vn_walk_cb_data (vn_reference_t vr_, tree orig_ref_, tree *last_vuse_ptr_,
		   vn_lookup_kind vn_walk_kind_, bool tbaa_p_, tree mask_,
		   bool redundant_store_removal_p_)
    : vr (vr_), last_vuse_ptr (last_vuse_ptr_), last_vuse (NULL_TREE),
      mask (mask_), masked_result (NULL_TREE), same_val (NULL_TREE),
      vn_walk_kind (vn_walk_kind_),
      tbaa_p (tbaa_p_), redundant_store_removal_p (redundant_store_removal_p_),
      saved_operands (vNULL), first_set (-2), first_base_set (-2),
      known_ranges (NULL)
  {
    if (!last_vuse_ptr)
      last_vuse_ptr = &last_vuse;
    ao_ref_init (&orig_ref, orig_ref_);
    if (mask)
      {
	wide_int w = wi::to_wide (mask);
	unsigned int pos = 0, prec = w.get_precision ();
	pd_data pd;
	pd.rhs = build_constructor (NULL_TREE, NULL);
	pd.rhs_off = 0;
	/* When bitwise and with a constant is done on a memory load,
	   we don't really need all the bits to be defined or defined
	   to constants, we don't really care what is in the position
	   corresponding to 0 bits in the mask.
	   So, push the ranges of those 0 bits in the mask as artificial
	   zero stores and let the partial def handling code do the
	   rest.  */
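	/* Illustrative example (not from the original sources): with a mask
	   of 0xff00 only bits 8-15 of the load matter, so the zero-bit
	   ranges (bits 0-7 and everything from bit 16 up) are pushed below
	   as artificial zero partial definitions.  */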
	while (pos < prec)
	  {
	    int tz = wi::ctz (w);
	    if (pos + tz > prec)
	      tz = prec - pos;
	    if (tz)
	      {
		if (BYTES_BIG_ENDIAN)
		  pd.offset = prec - pos - tz;
		else
		  pd.offset = pos;
		pd.size = tz;
		void *r = push_partial_def (pd, 0, 0, 0, prec);
		gcc_assert (r == NULL_TREE);
	      }
	    pos += tz;
	    if (pos == prec)
	      break;
	    w = wi::lrshift (w, tz);
	    tz = wi::ctz (wi::bit_not (w));
	    if (pos + tz > prec)
	      tz = prec - pos;
	    pos += tz;
	    w = wi::lrshift (w, tz);
	  }
      }
  }
  ~vn_walk_cb_data ();
  void *finish (alias_set_type, alias_set_type, tree);
  void *push_partial_def (pd_data pd,
			  alias_set_type, alias_set_type, HOST_WIDE_INT,
			  HOST_WIDE_INT);

  vn_reference_t vr;
  ao_ref orig_ref;
  tree *last_vuse_ptr;
  tree last_vuse;
  tree mask;
  tree masked_result;
  tree same_val;
  vn_lookup_kind vn_walk_kind;
  bool tbaa_p;
  bool redundant_store_removal_p;
  vec<vn_reference_op_s> saved_operands;

  /* The VDEFs of partial defs we come along.  */
  auto_vec<pd_data, 2> partial_defs;
  /* The first defs range to avoid splay tree setup in most cases.  */
  pd_range first_range;
  alias_set_type first_set;
  alias_set_type first_base_set;
  splay_tree known_ranges;
  obstack ranges_obstack;
};

vn_walk_cb_data::~vn_walk_cb_data ()
{
  if (known_ranges)
    {
      splay_tree_delete (known_ranges);
      obstack_free (&ranges_obstack, NULL);
    }
  saved_operands.release ();
}
void *
vn_walk_cb_data::finish (alias_set_type set, alias_set_type base_set, tree val)
{
  if (first_set != -2)
    {
      set = first_set;
      base_set = first_base_set;
    }
  if (mask)
    {
      masked_result = val;
      return (void *) -1;
    }
  if (same_val && !operand_equal_p (val, same_val))
    return (void *) -1;
  vec<vn_reference_op_s> &operands
    = saved_operands.exists () ? saved_operands : vr->operands;
  return vn_reference_lookup_or_insert_for_pieces (last_vuse, set, base_set,
						   vr->type, operands, val);
}

/* pd_range splay-tree helpers.  */

static int
pd_range_compare (splay_tree_key offset1p, splay_tree_key offset2p)
{
  HOST_WIDE_INT offset1 = *(HOST_WIDE_INT *)offset1p;
  HOST_WIDE_INT offset2 = *(HOST_WIDE_INT *)offset2p;
  if (offset1 < offset2)
    return -1;
  else if (offset1 > offset2)
    return 1;
  return 0;
}

static void *
pd_tree_alloc (int size, void *data_)
{
  vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
  return obstack_alloc (&data->ranges_obstack, size);
}

static void
pd_tree_dealloc (void *, void *)
{
}
/* Push PD to the vector of partial definitions returning a
   value when we are ready to combine things with VUSE, SET and MAXSIZEI,
   NULL when we want to continue looking for partial defs or -1
   on failure.  */

void *
vn_walk_cb_data::push_partial_def (pd_data pd,
				   alias_set_type set, alias_set_type base_set,
				   HOST_WIDE_INT offseti,
				   HOST_WIDE_INT maxsizei)
{
  const HOST_WIDE_INT bufsize = 64;
  /* We're using a fixed buffer for encoding so fail early if the object
     we want to interpret is bigger.  */
  if (maxsizei > bufsize * BITS_PER_UNIT
      || BITS_PER_UNIT != 8
      /* Not prepared to handle PDP endian.  */
      || BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
    return (void *)-1;

  /* Turn too large constant stores into non-constant stores.  */
  if (CONSTANT_CLASS_P (pd.rhs) && pd.size > bufsize * BITS_PER_UNIT)
    pd.rhs = error_mark_node;

  /* And for non-constant or CONSTRUCTOR stores shrink them to only keep at
     most a partial byte before and/or after the region.  */
  if (!CONSTANT_CLASS_P (pd.rhs))
    {
      if (pd.offset < offseti)
	{
	  HOST_WIDE_INT o = ROUND_DOWN (offseti - pd.offset, BITS_PER_UNIT);
	  gcc_assert (pd.size > o);
	  pd.size -= o;
	  pd.offset += o;
	}
      if (pd.size > maxsizei)
	pd.size = maxsizei + ((pd.size - maxsizei) % BITS_PER_UNIT);
    }

  pd.offset -= offseti;

  bool pd_constant_p = (TREE_CODE (pd.rhs) == CONSTRUCTOR
			|| CONSTANT_CLASS_P (pd.rhs));
  pd_range *r;
  if (partial_defs.is_empty ())
    {
      /* If we get a clobber upfront, fail.  */
      if (TREE_CLOBBER_P (pd.rhs))
	return (void *)-1;
      if (!pd_constant_p)
	return (void *)-1;
      partial_defs.safe_push (pd);
      first_range.offset = pd.offset;
      first_range.size = pd.size;
      first_set = set;
      first_base_set = base_set;
      last_vuse_ptr = NULL;
      r = &first_range;
      /* Go check if the first partial definition was a full one in case
	 the caller didn't optimize for this.  */
    }
  else
    {
      if (!known_ranges)
	{
	  /* ???  Optimize the case where the 2nd partial def completes
	     things.  */
	  gcc_obstack_init (&ranges_obstack);
	  known_ranges = splay_tree_new_with_allocator (pd_range_compare, 0, 0,
							pd_tree_alloc,
							pd_tree_dealloc, this);
	  splay_tree_insert (known_ranges,
			     (splay_tree_key)&first_range.offset,
			     (splay_tree_value)&first_range);
	}

      pd_range newr = { pd.offset, pd.size };
      splay_tree_node n;
      /* Lookup the predecessor of offset + 1 and see if we need to merge.  */
      HOST_WIDE_INT loffset = newr.offset + 1;
      if ((n = splay_tree_predecessor (known_ranges, (splay_tree_key)&loffset))
	  && ((r = (pd_range *)n->value), true)
	  && ranges_known_overlap_p (r->offset, r->size + 1,
				     newr.offset, newr.size))
	{
	  /* Ignore partial defs already covered.  Here we also drop shadowed
	     clobbers arriving here at the floor.  */
	  if (known_subrange_p (newr.offset, newr.size, r->offset, r->size))
	    return NULL;
	  r->size
	    = MAX (r->offset + r->size, newr.offset + newr.size) - r->offset;
	}
      else
	{
	  /* newr.offset wasn't covered yet, insert the range.  */
	  r = XOBNEW (&ranges_obstack, pd_range);
	  *r = newr;
	  splay_tree_insert (known_ranges, (splay_tree_key)&r->offset,
			     (splay_tree_value)r);
	}
      /* Merge r which now contains newr and is a member of the splay tree with
	 adjacent overlapping ranges.  */
      pd_range *rafter;
      while ((n = splay_tree_successor (known_ranges,
					(splay_tree_key)&r->offset))
	     && ((rafter = (pd_range *)n->value), true)
	     && ranges_known_overlap_p (r->offset, r->size + 1,
					rafter->offset, rafter->size))
	{
	  r->size = MAX (r->offset + r->size,
			 rafter->offset + rafter->size) - r->offset;
	  splay_tree_remove (known_ranges, (splay_tree_key)&rafter->offset);
	}
      /* If we get a clobber, fail.  */
      if (TREE_CLOBBER_P (pd.rhs))
	return (void *)-1;
      /* Non-constants are OK as long as they are shadowed by a constant.  */
      if (!pd_constant_p)
	return (void *)-1;
      partial_defs.safe_push (pd);
    }

  /* Now we have merged newr into the range tree.  When we have covered
     [offseti, sizei] then the tree will contain exactly one node which has
     the desired properties and it will be 'r'.  */
  if (!known_subrange_p (0, maxsizei, r->offset, r->size))
    /* Continue looking for partial defs.  */
    return NULL;
2071 /* Now simply native encode all partial defs in reverse order. */
2072 unsigned ndefs
= partial_defs
.length ();
2073 /* We support up to 512-bit values (for V8DFmode). */
2074 unsigned char buffer
[bufsize
+ 1];
2075 unsigned char this_buffer
[bufsize
+ 1];
2078 memset (buffer
, 0, bufsize
+ 1);
2079 unsigned needed_len
= ROUND_UP (maxsizei
, BITS_PER_UNIT
) / BITS_PER_UNIT
;
2080 while (!partial_defs
.is_empty ())
2082 pd_data pd
= partial_defs
.pop ();
2084 if (TREE_CODE (pd
.rhs
) == CONSTRUCTOR
)
2086 /* Empty CONSTRUCTOR. */
2087 if (pd
.size
>= needed_len
* BITS_PER_UNIT
)
2090 len
= ROUND_UP (pd
.size
, BITS_PER_UNIT
) / BITS_PER_UNIT
;
2091 memset (this_buffer
, 0, len
);
2095 len
= native_encode_expr (pd
.rhs
, this_buffer
, bufsize
,
2096 (MAX (0, -pd
.offset
)
2097 + pd
.rhs_off
) / BITS_PER_UNIT
);
2099 || len
< (ROUND_UP (pd
.size
, BITS_PER_UNIT
) / BITS_PER_UNIT
2100 - MAX (0, -pd
.offset
) / BITS_PER_UNIT
))
2102 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2103 fprintf (dump_file
, "Failed to encode %u "
2104 "partial definitions\n", ndefs
);
2109 unsigned char *p
= buffer
;
2110 HOST_WIDE_INT size
= pd
.size
;
2112 size
-= ROUND_DOWN (-pd
.offset
, BITS_PER_UNIT
);
2113 this_buffer
[len
] = 0;
2114 if (BYTES_BIG_ENDIAN
)
2116 /* LSB of this_buffer[len - 1] byte should be at
2117 pd.offset + pd.size - 1 bits in buffer. */
2118 amnt
= ((unsigned HOST_WIDE_INT
) pd
.offset
2119 + pd
.size
) % BITS_PER_UNIT
;
2121 shift_bytes_in_array_right (this_buffer
, len
+ 1, amnt
);
2122 unsigned char *q
= this_buffer
;
2123 unsigned int off
= 0;
2127 off
= pd
.offset
/ BITS_PER_UNIT
;
2128 gcc_assert (off
< needed_len
);
2132 msk
= ((1 << size
) - 1) << (BITS_PER_UNIT
- amnt
);
2133 *p
= (*p
& ~msk
) | (this_buffer
[len
] & msk
);
  if (TREE_CODE (pd.rhs) != CONSTRUCTOR)
    q = (this_buffer + len
	 - (ROUND_UP (size - amnt, BITS_PER_UNIT) / BITS_PER_UNIT));
  if (pd.offset % BITS_PER_UNIT)
    msk = -1U << (BITS_PER_UNIT - (pd.offset % BITS_PER_UNIT));
  *p = (*p & msk) | (*q & ~msk);
  size -= BITS_PER_UNIT - (pd.offset % BITS_PER_UNIT);
  gcc_assert (size >= 0);
  else if (TREE_CODE (pd.rhs) != CONSTRUCTOR)
    q = (this_buffer + len
	 - (ROUND_UP (size - amnt, BITS_PER_UNIT) / BITS_PER_UNIT));
  if (pd.offset % BITS_PER_UNIT)
    size -= BITS_PER_UNIT - ((unsigned HOST_WIDE_INT) pd.offset
			     % BITS_PER_UNIT);
  gcc_assert (size >= 0);
  if ((unsigned HOST_WIDE_INT) size / BITS_PER_UNIT + off > needed_len)
    size = (needed_len - off) * BITS_PER_UNIT;
  memcpy (p, q, size / BITS_PER_UNIT);
  if (size % BITS_PER_UNIT)
    msk = -1U << (BITS_PER_UNIT - (size % BITS_PER_UNIT));
  p += size / BITS_PER_UNIT;
  q += size / BITS_PER_UNIT;
  *p = (*q & msk) | (*p & ~msk);

  /* LSB of this_buffer[0] byte should be at pd.offset bits in buffer.  */
  size = MIN (size, (HOST_WIDE_INT) needed_len * BITS_PER_UNIT);
  amnt = pd.offset % BITS_PER_UNIT;
  shift_bytes_in_array_left (this_buffer, len + 1, amnt);
  unsigned int off = pd.offset / BITS_PER_UNIT;
  gcc_assert (off < needed_len);
  size = MIN (size, (HOST_WIDE_INT) (needed_len - off) * BITS_PER_UNIT);
  if (amnt + size < BITS_PER_UNIT)
    {
      /* Low amnt bits come from *p, then size bits
	 from this_buffer[0] and the remaining again from *p.  */
      msk = ((1 << size) - 1) << amnt;
      *p = (*p & ~msk) | (this_buffer[0] & msk);
    }
  *p = (*p & ~msk) | (this_buffer[0] & msk);
  size -= (BITS_PER_UNIT - amnt);
  amnt = (unsigned HOST_WIDE_INT) pd.offset % BITS_PER_UNIT;
  size -= BITS_PER_UNIT - amnt;
  size = MIN (size, (HOST_WIDE_INT) needed_len * BITS_PER_UNIT);
  shift_bytes_in_array_left (this_buffer, len + 1, amnt);
  memcpy (p, this_buffer + (amnt != 0), size / BITS_PER_UNIT);
  p += size / BITS_PER_UNIT;
  if (size % BITS_PER_UNIT)
    {
      unsigned int msk = -1U << (size % BITS_PER_UNIT);
      *p = (this_buffer[(amnt != 0) + size / BITS_PER_UNIT]
	    & ~msk) | (*p & msk);
    }

  tree type = vr->type;
  /* Make sure to interpret in a type that has a range covering the whole
     access size.  */
  if (INTEGRAL_TYPE_P (vr->type) && maxsizei != TYPE_PRECISION (vr->type))
    type = build_nonstandard_integer_type (maxsizei, TYPE_UNSIGNED (type));
  if (BYTES_BIG_ENDIAN)
    {
      unsigned sz = needed_len;
      if (maxsizei % BITS_PER_UNIT)
	shift_bytes_in_array_right (buffer, needed_len,
				    BITS_PER_UNIT
				    - (maxsizei % BITS_PER_UNIT));
      if (INTEGRAL_TYPE_P (type))
	sz = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
      if (sz > needed_len)
	{
	  memcpy (this_buffer + (sz - needed_len), buffer, needed_len);
	  val = native_interpret_expr (type, this_buffer, sz);
	}
      else
	val = native_interpret_expr (type, buffer, needed_len);
    }
  else
    val = native_interpret_expr (type, buffer, bufsize);
  /* If we chop off bits because the type's precision doesn't match the memory
     access size this is ok when optimizing reads but not when called from
     the DSE code during elimination.  */
  if (val && type != vr->type)
    {
      if (! int_fits_type_p (val, vr->type))
	val = NULL_TREE;
      else
	val = fold_convert (vr->type, val);
    }

  if (val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "Successfully combined %u partial definitions\n", ndefs);
      /* We are using the alias-set of the first store we encounter which
	 should be appropriate here.  */
      return finish (first_set, first_base_set, val);
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file,
	     "Failed to interpret %u encoded partial definitions\n", ndefs);
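/* Illustrative example of the combining above (an assumption-laden sketch,
   not part of the algorithm itself): on a little-endian target with
   BITS_PER_UNIT == 8, two recorded partial definitions

     s.lo = 0x1234;   (bytes 0-1 of s)
     s.hi = 0x5678;   (bytes 2-3 of s)

   covering a later 4-byte read of s fill BUFFER with
   { 0x34, 0x12, 0x78, 0x56 }, and native_interpret_expr then yields the
   combined constant 0x56781234.  The struct s and its fields are made up
   purely for the example.  */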
/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *data_)
{
  vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
  vn_reference_t vr = data->vr;
  vn_reference_s **slot;
  hashval_t hash;

  /* If we have partial definitions recorded we have to go through
     vn_reference_lookup_3.  */
  if (!data->partial_defs.is_empty ())
    return NULL;

  if (data->last_vuse_ptr)
    {
      *data->last_vuse_ptr = vuse;
      data->last_vuse = vuse;
    }

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = vuse_ssa_val (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    {
      if ((*slot)->result && data->saved_operands.exists ())
	return data->finish (vr->set, vr->base_set, (*slot)->result);
      return *slot;
    }

  return NULL;
}
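/* Illustrative trace (hypothetical SSA names): when the walk moves from a
   load whose VUSE is .MEM_7 past a non-clobbering store towards its
   defining .MEM_5, the code above re-keys the same vn_reference_t roughly
   as

     vr->hashcode -= SSA_NAME_VERSION (old vuse .MEM_7);
     vr->vuse = vuse_ssa_val (.MEM_5);
     vr->hashcode += SSA_NAME_VERSION (new vuse .MEM_5);

   so the hash table lookup can be repeated at every def on the walk
   without recomputing the full reference hash.  */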
/* Lookup an existing or insert a new vn_reference entry into the
   value table for the VUSE, SET, TYPE, OPERANDS reference which
   has the value VALUE which is either a constant or an SSA name.  */

static vn_reference_t
vn_reference_lookup_or_insert_for_pieces (tree vuse,
					  alias_set_type set,
					  alias_set_type base_set,
					  tree type,
					  vec<vn_reference_op_s,
					      va_heap> operands,
					  tree value)
{
  struct vn_reference_s vr1;
  vn_reference_t result;
  unsigned value_id;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = operands;
  vr1.type = type;
  vr1.set = set;
  vr1.base_set = base_set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if (vn_reference_lookup_1 (&vr1, &result))
    return result;

  if (TREE_CODE (value) == SSA_NAME)
    value_id = VN_INFO (value)->value_id;
  else
    value_id = get_or_alloc_constant_value_id (value);
  return vn_reference_insert_pieces (vuse, set, base_set, type,
				     operands.copy (), value, value_id);
}
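/* Hypothetical usage sketch (names invented for the example): recording
   that a read described by operand vector OPS with alias set SET has the
   constant value 42 at VUSE would look roughly like

     vn_reference_lookup_or_insert_for_pieces (vuse, set, base_set,
					       type, ops,
					       build_int_cst (integer_type_node,
							      42));

   which either returns the entry already present in the valid table or
   inserts a fresh one via vn_reference_insert_pieces.  */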
/* Return a value-number for RCODE OPS... either by looking up an existing
   value-number for the possibly simplified result or by inserting the
   operation if INSERT is true.  If SIMPLIFY is false, return a value
   number for the unsimplified expression.  */

static tree
vn_nary_build_or_lookup_1 (gimple_match_op *res_op, bool insert,
			   bool simplify)
{
  tree result = NULL_TREE;
  /* We will be creating a value number for
       RCODE (OPS...).
     So first simplify and lookup this expression to see if it
     is already available.  */
  /* For simplification valueize.  */
  unsigned i;
  for (i = 0; i < res_op->num_ops; ++i)
    if (TREE_CODE (res_op->ops[i]) == SSA_NAME)
      {
	tree tem = vn_valueize (res_op->ops[i]);
	if (!tem)
	  break;
	res_op->ops[i] = tem;
      }
  /* If valueization of an operand fails (it is not available), skip
     simplification.  */
  bool res = false;
  if (i == res_op->num_ops)
    {
      mprts_hook = vn_lookup_simplify_result;
      res = res_op->resimplify (NULL, vn_valueize);
      mprts_hook = NULL;
    }
  gimple *new_stmt = NULL;
  if (res
      && gimple_simplified_result_is_gimple_val (res_op))
    {
      /* The expression is already available.  */
      result = res_op->ops[0];
      /* Valueize it, simplification returns sth in AVAIL only.  */
      if (TREE_CODE (result) == SSA_NAME)
	result = SSA_VAL (result);
    }
  else
    {
      tree val = vn_lookup_simplify_result (res_op);
      if (!val && insert)
	{
	  gimple_seq stmts = NULL;
	  result = maybe_push_res_to_seq (res_op, &stmts);
	  if (result)
	    {
	      gcc_assert (gimple_seq_singleton_p (stmts));
	      new_stmt = gimple_seq_first_stmt (stmts);
	    }
	}
      else
	/* The expression is already available.  */
	result = val;
    }
  if (new_stmt)
    {
      /* The expression is not yet available, value-number lhs to
	 the new SSA_NAME we created.  */
      /* Initialize value-number information properly.  */
      vn_ssa_aux_t result_info = VN_INFO (result);
      result_info->valnum = result;
      result_info->value_id = get_next_value_id ();
      result_info->visited = 1;
      gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
					  new_stmt);
      result_info->needs_insertion = true;
      /* ??? PRE phi-translation inserts NARYs without corresponding
	 SSA name result.  Re-use those but set their result according
	 to the stmt we just built.  */
      vn_nary_op_t nary = NULL;
      vn_nary_op_lookup_stmt (new_stmt, &nary);
      if (nary)
	{
	  gcc_assert (! nary->predicated_values && nary->u.result == NULL_TREE);
	  nary->u.result = gimple_assign_lhs (new_stmt);
	}
      /* As all "inserted" statements are singleton SCCs, insert
	 to the valid table.  This is strictly needed to
	 avoid re-generating new value SSA_NAMEs for the same
	 expression during SCC iteration over and over (the
	 optimistic table gets cleared after each iteration).
	 We do not need to insert into the optimistic table, as
	 lookups there will fall back to the valid table.  */
      else
	{
	  unsigned int length = vn_nary_length_from_stmt (new_stmt);
	  vn_nary_op_t vno1
	    = alloc_vn_nary_op_noinit (length, &vn_tables_insert_obstack);
	  vno1->value_id = result_info->value_id;
	  vno1->length = length;
	  vno1->predicated_values = 0;
	  vno1->u.result = result;
	  init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (new_stmt));
	  vn_nary_op_insert_into (vno1, valid_info->nary);
	  /* Also do not link it into the undo chain.  */
	  last_inserted_nary = vno1->next;
	  vno1->next = (vn_nary_op_t)(void *)-1;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Inserting name ");
	  print_generic_expr (dump_file, result);
	  fprintf (dump_file, " for expression ");
	  print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}
    }
  return result;
}
/* Return a value-number for RCODE OPS... either by looking up an existing
   value-number for the simplified result or by inserting the operation.  */

static tree
vn_nary_build_or_lookup (gimple_match_op *res_op)
{
  return vn_nary_build_or_lookup_1 (res_op, true, true);
}
/* Try to simplify the expression RCODE OPS... of type TYPE and return
   its value if present.  */

tree
vn_nary_simplify (vn_nary_op_t nary)
{
  if (nary->length > gimple_match_op::MAX_NUM_OPS)
    return NULL_TREE;
  gimple_match_op op (gimple_match_cond::UNCOND, nary->opcode,
		      nary->type, nary->length);
  memcpy (op.ops, nary->op, sizeof (tree) * nary->length);
  return vn_nary_build_or_lookup_1 (&op, false, true);
}
/* Elimination engine.  */

class eliminate_dom_walker : public dom_walker
{
public:
  eliminate_dom_walker (cdi_direction, bitmap);
  ~eliminate_dom_walker ();

  edge before_dom_children (basic_block) final override;
  void after_dom_children (basic_block) final override;

  virtual tree eliminate_avail (basic_block, tree op);
  virtual void eliminate_push_avail (basic_block, tree op);
  tree eliminate_insert (basic_block, gimple_stmt_iterator *gsi, tree val);

  void eliminate_stmt (basic_block, gimple_stmt_iterator *);

  unsigned eliminate_cleanup (bool region_p = false);

  bool do_pre;
  unsigned int el_todo;
  unsigned int eliminations;
  unsigned int insertions;

  /* SSA names that had their defs inserted by PRE if do_pre.  */
  bitmap inserted_exprs;

  /* Blocks with statements that have had their EH properties changed.  */
  bitmap need_eh_cleanup;

  /* Blocks with statements that have had their AB properties changed.  */
  bitmap need_ab_cleanup;

  /* Local state for the eliminate domwalk.  */
  auto_vec<gimple *> to_remove;
  auto_vec<gimple *> to_fixup;
  auto_vec<tree> avail;
  auto_vec<tree> avail_stack;
};
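/* Rough sketch of how the avail/avail_stack pair above is meant to be
   used during the dominator walk (simplified; the actual logic lives in
   the eliminate_avail/eliminate_push_avail implementations later in this
   file):

     before_dom_children (bb):
       for each new leader OP made available in BB:
	 avail_stack.safe_push (previous leader or marker);
	 avail[<index of OP's value>] = OP;
     after_dom_children (bb):
       pop avail_stack and restore the previous leaders;

   i.e. availability is scoped to the dominator subtree currently being
   walked.  */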
/* Adaptor to the elimination engine using RPO availability.  */

class rpo_elim : public eliminate_dom_walker
{
public:
  rpo_elim(basic_block entry_)
    : eliminate_dom_walker (CDI_DOMINATORS, NULL), entry (entry_),
      m_avail_freelist (NULL) {}

  tree eliminate_avail (basic_block, tree op) final override;

  void eliminate_push_avail (basic_block, tree) final override;

  basic_block entry;
  /* Freelist of avail entries which are allocated from the vn_ssa_aux
     obstack.  */
  vn_avail *m_avail_freelist;
};

/* Global RPO state for access from hooks.  */
static eliminate_dom_walker *rpo_avail;
basic_block vn_context_bb;
/* Return true if BASE1 and BASE2 can be adjusted so they have the
   same address and adjust *OFFSET1 and *OFFSET2 accordingly.
   Otherwise return false.  */

static bool
adjust_offsets_for_equal_base_address (tree base1, poly_int64 *offset1,
				       tree base2, poly_int64 *offset2)
{
  if (TREE_CODE (base1) == MEM_REF
      && TREE_CODE (base2) == MEM_REF)
    {
      HOST_WIDE_INT soff;
      if (mem_ref_offset (base1).to_shwi (&soff))
	{
	  base1 = TREE_OPERAND (base1, 0);
	  *offset1 += soff * BITS_PER_UNIT;
	}
      if (mem_ref_offset (base2).to_shwi (&soff))
	{
	  base2 = TREE_OPERAND (base2, 0);
	  *offset2 += soff * BITS_PER_UNIT;
	}
      return operand_equal_p (base1, base2, 0);
    }
  return operand_equal_p (base1, base2, OEP_ADDRESS_OF);
}
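/* Worked example (hypothetical pointer SSA name p_1): for
   BASE1 = MEM[p_1 + 8] with *OFFSET1 == 0 and BASE2 = MEM[p_1 + 4] with
   *OFFSET2 == 32, both bases are peeled down to p_1 and the offsets
   become 64 bits each, so the two references are recognized as starting
   at the same address.  */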
/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  If *DISAMBIGUATE_ONLY is true then do not perform translation
   of *REF and *VR.  If only disambiguation was performed then
   *DISAMBIGUATE_ONLY is set to true.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
		       translate_flags *disambiguate_only)
{
  vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
  vn_reference_t vr = data->vr;
  gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree base = ao_ref_base (ref);
  HOST_WIDE_INT offseti = 0, maxsizei, sizei = 0;
  static vec<vn_reference_op_s> lhs_ops;
  ao_ref lhs_ref;
  bool lhs_ref_ok = false;
  poly_int64 copy_size;
2612 /* First try to disambiguate after value-replacing in the definitions LHS. */
2613 if (is_gimple_assign (def_stmt
))
2615 tree lhs
= gimple_assign_lhs (def_stmt
);
2616 bool valueized_anything
= false;
2617 /* Avoid re-allocation overhead. */
2618 lhs_ops
.truncate (0);
2619 basic_block saved_rpo_bb
= vn_context_bb
;
2620 vn_context_bb
= gimple_bb (def_stmt
);
2621 if (*disambiguate_only
<= TR_VALUEIZE_AND_DISAMBIGUATE
)
2623 copy_reference_ops_from_ref (lhs
, &lhs_ops
);
2624 valueize_refs_1 (&lhs_ops
, &valueized_anything
, true);
2626 vn_context_bb
= saved_rpo_bb
;
2627 ao_ref_init (&lhs_ref
, lhs
);
2629 if (valueized_anything
2630 && ao_ref_init_from_vn_reference
2631 (&lhs_ref
, ao_ref_alias_set (&lhs_ref
),
2632 ao_ref_base_alias_set (&lhs_ref
), TREE_TYPE (lhs
), lhs_ops
)
2633 && !refs_may_alias_p_1 (ref
, &lhs_ref
, data
->tbaa_p
))
2635 *disambiguate_only
= TR_VALUEIZE_AND_DISAMBIGUATE
;
2639 /* When the def is a CLOBBER we can optimistically disambiguate
2640 against it since any overlap it would be undefined behavior.
2641 Avoid this for obvious must aliases to save compile-time though.
2642 We also may not do this when the query is used for redundant
2644 if (!data
->redundant_store_removal_p
2645 && gimple_clobber_p (def_stmt
)
2646 && !operand_equal_p (ao_ref_base (&lhs_ref
), base
, OEP_ADDRESS_OF
))
2648 *disambiguate_only
= TR_DISAMBIGUATE
;
2652 /* Besides valueizing the LHS we can also use access-path based
2653 disambiguation on the original non-valueized ref. */
2656 && data
->orig_ref
.ref
)
2658 /* We want to use the non-valueized LHS for this, but avoid redundant
2660 ao_ref
*lref
= &lhs_ref
;
2662 if (valueized_anything
)
2664 ao_ref_init (&lref_alt
, lhs
);
2667 if (!refs_may_alias_p_1 (&data
->orig_ref
, lref
, data
->tbaa_p
))
2669 *disambiguate_only
= (valueized_anything
2670 ? TR_VALUEIZE_AND_DISAMBIGUATE
2676 /* If we reach a clobbering statement try to skip it and see if
2677 we find a VN result with exactly the same value as the
2678 possible clobber. In this case we can ignore the clobber
2679 and return the found value. */
2680 if (is_gimple_reg_type (TREE_TYPE (lhs
))
2681 && types_compatible_p (TREE_TYPE (lhs
), vr
->type
)
2682 && (ref
->ref
|| data
->orig_ref
.ref
)
2684 && data
->partial_defs
.is_empty ()
2685 && multiple_p (get_object_alignment
2686 (ref
->ref
? ref
->ref
: data
->orig_ref
.ref
),
2688 && multiple_p (get_object_alignment (lhs
), ref
->size
))
2690 tree rhs
= gimple_assign_rhs1 (def_stmt
);
2691 /* ??? We may not compare to ahead values which might be from
2692 a different loop iteration but only to loop invariants. Use
2693 CONSTANT_CLASS_P (unvalueized!) as conservative approximation.
2694 The one-hop lookup below doesn't have this issue since there's
2695 a virtual PHI before we ever reach a backedge to cross.
2696 We can skip multiple defs as long as they are from the same
2699 && !operand_equal_p (data
->same_val
, rhs
))
2701 else if (CONSTANT_CLASS_P (rhs
))
2703 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2706 "Skipping possible redundant definition ");
2707 print_gimple_stmt (dump_file
, def_stmt
, 0);
2709 /* Delay the actual compare of the values to the end of the walk
2710 but do not update last_vuse from here. */
2711 data
->last_vuse_ptr
= NULL
;
2712 data
->same_val
= rhs
;
2717 tree
*saved_last_vuse_ptr
= data
->last_vuse_ptr
;
2718 /* Do not update last_vuse_ptr in vn_reference_lookup_2. */
2719 data
->last_vuse_ptr
= NULL
;
2720 tree saved_vuse
= vr
->vuse
;
2721 hashval_t saved_hashcode
= vr
->hashcode
;
2722 void *res
= vn_reference_lookup_2 (ref
, gimple_vuse (def_stmt
),
2724 /* Need to restore vr->vuse and vr->hashcode. */
2725 vr
->vuse
= saved_vuse
;
2726 vr
->hashcode
= saved_hashcode
;
2727 data
->last_vuse_ptr
= saved_last_vuse_ptr
;
2728 if (res
&& res
!= (void *)-1)
2730 vn_reference_t vnresult
= (vn_reference_t
) res
;
2731 if (TREE_CODE (rhs
) == SSA_NAME
)
2732 rhs
= SSA_VAL (rhs
);
2733 if (vnresult
->result
2734 && operand_equal_p (vnresult
->result
, rhs
, 0))
2740 else if (*disambiguate_only
<= TR_VALUEIZE_AND_DISAMBIGUATE
2741 && gimple_call_builtin_p (def_stmt
, BUILT_IN_NORMAL
)
2742 && gimple_call_num_args (def_stmt
) <= 4)
2744 /* For builtin calls valueize its arguments and call the
2745 alias oracle again. Valueization may improve points-to
2746 info of pointers and constify size and position arguments.
2747 Originally this was motivated by PR61034 which has
2748 conditional calls to free falsely clobbering ref because
2749 of imprecise points-to info of the argument. */
2751 bool valueized_anything
= false;
2752 for (unsigned i
= 0; i
< gimple_call_num_args (def_stmt
); ++i
)
2754 oldargs
[i
] = gimple_call_arg (def_stmt
, i
);
2755 tree val
= vn_valueize (oldargs
[i
]);
2756 if (val
!= oldargs
[i
])
2758 gimple_call_set_arg (def_stmt
, i
, val
);
2759 valueized_anything
= true;
2762 if (valueized_anything
)
2764 bool res
= call_may_clobber_ref_p_1 (as_a
<gcall
*> (def_stmt
),
2766 for (unsigned i
= 0; i
< gimple_call_num_args (def_stmt
); ++i
)
2767 gimple_call_set_arg (def_stmt
, i
, oldargs
[i
]);
2770 *disambiguate_only
= TR_VALUEIZE_AND_DISAMBIGUATE
;
2776 if (*disambiguate_only
> TR_TRANSLATE
)
2779 /* If we cannot constrain the size of the reference we cannot
2780 test if anything kills it. */
2781 if (!ref
->max_size_known_p ())
2784 poly_int64 offset
= ref
->offset
;
2785 poly_int64 maxsize
= ref
->max_size
;
2787 /* def_stmt may-defs *ref. See if we can derive a value for *ref
2788 from that definition.
2790 if (is_gimple_reg_type (vr
->type
)
2791 && (gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMSET
)
2792 || gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMSET_CHK
))
2793 && (integer_zerop (gimple_call_arg (def_stmt
, 1))
2794 || ((TREE_CODE (gimple_call_arg (def_stmt
, 1)) == INTEGER_CST
2795 || (INTEGRAL_TYPE_P (vr
->type
) && known_eq (ref
->size
, 8)))
2797 && BITS_PER_UNIT
== 8
2798 && BYTES_BIG_ENDIAN
== WORDS_BIG_ENDIAN
2799 && offset
.is_constant (&offseti
)
2800 && ref
->size
.is_constant (&sizei
)
2801 && (offseti
% BITS_PER_UNIT
== 0
2802 || TREE_CODE (gimple_call_arg (def_stmt
, 1)) == INTEGER_CST
)))
2803 && (poly_int_tree_p (gimple_call_arg (def_stmt
, 2))
2804 || (TREE_CODE (gimple_call_arg (def_stmt
, 2)) == SSA_NAME
2805 && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt
, 2)))))
2806 && (TREE_CODE (gimple_call_arg (def_stmt
, 0)) == ADDR_EXPR
2807 || TREE_CODE (gimple_call_arg (def_stmt
, 0)) == SSA_NAME
))
2810 poly_int64 offset2
, size2
, maxsize2
;
2812 tree ref2
= gimple_call_arg (def_stmt
, 0);
2813 if (TREE_CODE (ref2
) == SSA_NAME
)
2815 ref2
= SSA_VAL (ref2
);
2816 if (TREE_CODE (ref2
) == SSA_NAME
2817 && (TREE_CODE (base
) != MEM_REF
2818 || TREE_OPERAND (base
, 0) != ref2
))
2820 gimple
*def_stmt
= SSA_NAME_DEF_STMT (ref2
);
2821 if (gimple_assign_single_p (def_stmt
)
2822 && gimple_assign_rhs_code (def_stmt
) == ADDR_EXPR
)
2823 ref2
= gimple_assign_rhs1 (def_stmt
);
2826 if (TREE_CODE (ref2
) == ADDR_EXPR
)
2828 ref2
= TREE_OPERAND (ref2
, 0);
2829 base2
= get_ref_base_and_extent (ref2
, &offset2
, &size2
, &maxsize2
,
2831 if (!known_size_p (maxsize2
)
2832 || !known_eq (maxsize2
, size2
)
2833 || !operand_equal_p (base
, base2
, OEP_ADDRESS_OF
))
2836 else if (TREE_CODE (ref2
) == SSA_NAME
)
2839 if (TREE_CODE (base
) != MEM_REF
2840 || !(mem_ref_offset (base
)
2841 << LOG2_BITS_PER_UNIT
).to_shwi (&soff
))
2845 if (TREE_OPERAND (base
, 0) != ref2
)
2847 gimple
*def
= SSA_NAME_DEF_STMT (ref2
);
2848 if (is_gimple_assign (def
)
2849 && gimple_assign_rhs_code (def
) == POINTER_PLUS_EXPR
2850 && gimple_assign_rhs1 (def
) == TREE_OPERAND (base
, 0)
2851 && poly_int_tree_p (gimple_assign_rhs2 (def
)))
2853 tree rhs2
= gimple_assign_rhs2 (def
);
2854 if (!(poly_offset_int::from (wi::to_poly_wide (rhs2
),
2856 << LOG2_BITS_PER_UNIT
).to_shwi (&offset2
))
2858 ref2
= gimple_assign_rhs1 (def
);
2859 if (TREE_CODE (ref2
) == SSA_NAME
)
2860 ref2
= SSA_VAL (ref2
);
2868 tree len
= gimple_call_arg (def_stmt
, 2);
2869 HOST_WIDE_INT leni
, offset2i
;
2870 if (TREE_CODE (len
) == SSA_NAME
)
2871 len
= SSA_VAL (len
);
2872 /* Sometimes the above trickery is smarter than alias analysis. Take
2873 advantage of that. */
2874 if (!ranges_maybe_overlap_p (offset
, maxsize
, offset2
,
2875 (wi::to_poly_offset (len
)
2876 << LOG2_BITS_PER_UNIT
)))
2878 if (data
->partial_defs
.is_empty ()
2879 && known_subrange_p (offset
, maxsize
, offset2
,
2880 wi::to_poly_offset (len
) << LOG2_BITS_PER_UNIT
))
2883 if (integer_zerop (gimple_call_arg (def_stmt
, 1)))
2884 val
= build_zero_cst (vr
->type
);
2885 else if (INTEGRAL_TYPE_P (vr
->type
)
2886 && known_eq (ref
->size
, 8)
2887 && offseti
% BITS_PER_UNIT
== 0)
2889 gimple_match_op
res_op (gimple_match_cond::UNCOND
, NOP_EXPR
,
2890 vr
->type
, gimple_call_arg (def_stmt
, 1));
2891 val
= vn_nary_build_or_lookup (&res_op
);
2893 || (TREE_CODE (val
) == SSA_NAME
2894 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val
)))
2899 unsigned buflen
= TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr
->type
)) + 1;
2900 if (INTEGRAL_TYPE_P (vr
->type
))
2901 buflen
= GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (vr
->type
)) + 1;
2902 unsigned char *buf
= XALLOCAVEC (unsigned char, buflen
);
2903 memset (buf
, TREE_INT_CST_LOW (gimple_call_arg (def_stmt
, 1)),
2905 if (BYTES_BIG_ENDIAN
)
2908 = (((unsigned HOST_WIDE_INT
) offseti
+ sizei
)
2912 shift_bytes_in_array_right (buf
, buflen
,
2913 BITS_PER_UNIT
- amnt
);
2918 else if (offseti
% BITS_PER_UNIT
!= 0)
2921 = BITS_PER_UNIT
- ((unsigned HOST_WIDE_INT
) offseti
2923 shift_bytes_in_array_left (buf
, buflen
, amnt
);
2927 val
= native_interpret_expr (vr
->type
, buf
, buflen
);
2931 return data
->finish (0, 0, val
);
2933 /* For now handle clearing memory with partial defs. */
2934 else if (known_eq (ref
->size
, maxsize
)
2935 && integer_zerop (gimple_call_arg (def_stmt
, 1))
2936 && tree_fits_poly_int64_p (len
)
2937 && tree_to_poly_int64 (len
).is_constant (&leni
)
2938 && leni
<= INTTYPE_MAXIMUM (HOST_WIDE_INT
) / BITS_PER_UNIT
2939 && offset
.is_constant (&offseti
)
2940 && offset2
.is_constant (&offset2i
)
2941 && maxsize
.is_constant (&maxsizei
)
2942 && ranges_known_overlap_p (offseti
, maxsizei
, offset2i
,
2943 leni
<< LOG2_BITS_PER_UNIT
))
2946 pd
.rhs
= build_constructor (NULL_TREE
, NULL
);
2948 pd
.offset
= offset2i
;
2949 pd
.size
= leni
<< LOG2_BITS_PER_UNIT
;
2950 return data
->push_partial_def (pd
, 0, 0, offseti
, maxsizei
);
2954 /* 2) Assignment from an empty CONSTRUCTOR. */
2955 else if (is_gimple_reg_type (vr
->type
)
2956 && gimple_assign_single_p (def_stmt
)
2957 && gimple_assign_rhs_code (def_stmt
) == CONSTRUCTOR
2958 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt
)) == 0)
2961 poly_int64 offset2
, size2
, maxsize2
;
2962 HOST_WIDE_INT offset2i
, size2i
;
2963 gcc_assert (lhs_ref_ok
);
2964 base2
= ao_ref_base (&lhs_ref
);
2965 offset2
= lhs_ref
.offset
;
2966 size2
= lhs_ref
.size
;
2967 maxsize2
= lhs_ref
.max_size
;
2968 if (known_size_p (maxsize2
)
2969 && known_eq (maxsize2
, size2
)
2970 && adjust_offsets_for_equal_base_address (base
, &offset
,
2973 if (data
->partial_defs
.is_empty ()
2974 && known_subrange_p (offset
, maxsize
, offset2
, size2
))
2976 /* While technically undefined behavior do not optimize
2977 a full read from a clobber. */
2978 if (gimple_clobber_p (def_stmt
))
2980 tree val
= build_zero_cst (vr
->type
);
2981 return data
->finish (ao_ref_alias_set (&lhs_ref
),
2982 ao_ref_base_alias_set (&lhs_ref
), val
);
2984 else if (known_eq (ref
->size
, maxsize
)
2985 && maxsize
.is_constant (&maxsizei
)
2986 && offset
.is_constant (&offseti
)
2987 && offset2
.is_constant (&offset2i
)
2988 && size2
.is_constant (&size2i
)
2989 && ranges_known_overlap_p (offseti
, maxsizei
,
2992 /* Let clobbers be consumed by the partial-def tracker
2993 which can choose to ignore them if they are shadowed
2996 pd
.rhs
= gimple_assign_rhs1 (def_stmt
);
2998 pd
.offset
= offset2i
;
3000 return data
->push_partial_def (pd
, ao_ref_alias_set (&lhs_ref
),
3001 ao_ref_base_alias_set (&lhs_ref
),
3007 /* 3) Assignment from a constant. We can use folds native encode/interpret
3008 routines to extract the assigned bits. */
3009 else if (known_eq (ref
->size
, maxsize
)
3010 && is_gimple_reg_type (vr
->type
)
3011 && !reverse_storage_order_for_component_p (vr
->operands
)
3012 && !contains_storage_order_barrier_p (vr
->operands
)
3013 && gimple_assign_single_p (def_stmt
)
3015 && BITS_PER_UNIT
== 8
3016 && BYTES_BIG_ENDIAN
== WORDS_BIG_ENDIAN
3017 /* native_encode and native_decode operate on arrays of bytes
3018 and so fundamentally need a compile-time size and offset. */
3019 && maxsize
.is_constant (&maxsizei
)
3020 && offset
.is_constant (&offseti
)
3021 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt
))
3022 || (TREE_CODE (gimple_assign_rhs1 (def_stmt
)) == SSA_NAME
3023 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt
))))))
3025 tree lhs
= gimple_assign_lhs (def_stmt
);
3027 poly_int64 offset2
, size2
, maxsize2
;
3028 HOST_WIDE_INT offset2i
, size2i
;
3030 gcc_assert (lhs_ref_ok
);
3031 base2
= ao_ref_base (&lhs_ref
);
3032 offset2
= lhs_ref
.offset
;
3033 size2
= lhs_ref
.size
;
3034 maxsize2
= lhs_ref
.max_size
;
3035 reverse
= reverse_storage_order_for_component_p (lhs
);
3038 && !storage_order_barrier_p (lhs
)
3039 && known_eq (maxsize2
, size2
)
3040 && adjust_offsets_for_equal_base_address (base
, &offset
,
3042 && offset
.is_constant (&offseti
)
3043 && offset2
.is_constant (&offset2i
)
3044 && size2
.is_constant (&size2i
))
3046 if (data
->partial_defs
.is_empty ()
3047 && known_subrange_p (offseti
, maxsizei
, offset2
, size2
))
3049 /* We support up to 512-bit values (for V8DFmode). */
3050 unsigned char buffer
[65];
3053 tree rhs
= gimple_assign_rhs1 (def_stmt
);
3054 if (TREE_CODE (rhs
) == SSA_NAME
)
3055 rhs
= SSA_VAL (rhs
);
3056 len
= native_encode_expr (rhs
,
3057 buffer
, sizeof (buffer
) - 1,
3058 (offseti
- offset2i
) / BITS_PER_UNIT
);
3059 if (len
> 0 && len
* BITS_PER_UNIT
>= maxsizei
)
3061 tree type
= vr
->type
;
3062 unsigned char *buf
= buffer
;
3063 unsigned int amnt
= 0;
3064 /* Make sure to interpret in a type that has a range
3065 covering the whole access size. */
3066 if (INTEGRAL_TYPE_P (vr
->type
)
3067 && maxsizei
!= TYPE_PRECISION (vr
->type
))
3068 type
= build_nonstandard_integer_type (maxsizei
,
3069 TYPE_UNSIGNED (type
));
3070 if (BYTES_BIG_ENDIAN
)
3072 /* For big-endian native_encode_expr stored the rhs
3073 such that the LSB of it is the LSB of buffer[len - 1].
3074 That bit is stored into memory at position
3075 offset2 + size2 - 1, i.e. in byte
3076 base + (offset2 + size2 - 1) / BITS_PER_UNIT.
3077 E.g. for offset2 1 and size2 14, rhs -1 and memory
3078 previously cleared that is:
3081 Now, if we want to extract offset 2 and size 12 from
3082 it using native_interpret_expr (which actually works
3083 for integral bitfield types in terms of byte size of
3084 the mode), the native_encode_expr stored the value
3087 and returned len 2 (the X bits are outside of
3089 Let sz be maxsize / BITS_PER_UNIT if not extracting
3090 a bitfield, and GET_MODE_SIZE otherwise.
3091 We need to align the LSB of the value we want to
3092 extract as the LSB of buf[sz - 1].
3093 The LSB from memory we need to read is at position
3094 offset + maxsize - 1. */
3095 HOST_WIDE_INT sz
= maxsizei
/ BITS_PER_UNIT
;
3096 if (INTEGRAL_TYPE_P (type
))
3097 sz
= GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type
));
3098 amnt
= ((unsigned HOST_WIDE_INT
) offset2i
+ size2i
3099 - offseti
- maxsizei
) % BITS_PER_UNIT
;
3101 shift_bytes_in_array_right (buffer
, len
, amnt
);
3102 amnt
= ((unsigned HOST_WIDE_INT
) offset2i
+ size2i
3103 - offseti
- maxsizei
- amnt
) / BITS_PER_UNIT
;
3104 if ((unsigned HOST_WIDE_INT
) sz
+ amnt
> (unsigned) len
)
3108 buf
= buffer
+ len
- sz
- amnt
;
3109 len
-= (buf
- buffer
);
3114 amnt
= ((unsigned HOST_WIDE_INT
) offset2i
3115 - offseti
) % BITS_PER_UNIT
;
3119 shift_bytes_in_array_left (buffer
, len
+ 1, amnt
);
3123 tree val
= native_interpret_expr (type
, buf
, len
);
3124 /* If we chop off bits because the types precision doesn't
3125 match the memory access size this is ok when optimizing
3126 reads but not when called from the DSE code during
3129 && type
!= vr
->type
)
3131 if (! int_fits_type_p (val
, vr
->type
))
3134 val
= fold_convert (vr
->type
, val
);
3138 return data
->finish (ao_ref_alias_set (&lhs_ref
),
3139 ao_ref_base_alias_set (&lhs_ref
), val
);
3142 else if (ranges_known_overlap_p (offseti
, maxsizei
, offset2i
,
3146 tree rhs
= gimple_assign_rhs1 (def_stmt
);
3147 if (TREE_CODE (rhs
) == SSA_NAME
)
3148 rhs
= SSA_VAL (rhs
);
3151 pd
.offset
= offset2i
;
3153 return data
->push_partial_def (pd
, ao_ref_alias_set (&lhs_ref
),
3154 ao_ref_base_alias_set (&lhs_ref
),
3160 /* 4) Assignment from an SSA name which definition we may be able
3161 to access pieces from or we can combine to a larger entity. */
3162 else if (known_eq (ref
->size
, maxsize
)
3163 && is_gimple_reg_type (vr
->type
)
3164 && !reverse_storage_order_for_component_p (vr
->operands
)
3165 && !contains_storage_order_barrier_p (vr
->operands
)
3166 && gimple_assign_single_p (def_stmt
)
3167 && TREE_CODE (gimple_assign_rhs1 (def_stmt
)) == SSA_NAME
)
3169 tree lhs
= gimple_assign_lhs (def_stmt
);
3171 poly_int64 offset2
, size2
, maxsize2
;
3172 HOST_WIDE_INT offset2i
, size2i
, offseti
;
3174 gcc_assert (lhs_ref_ok
);
3175 base2
= ao_ref_base (&lhs_ref
);
3176 offset2
= lhs_ref
.offset
;
3177 size2
= lhs_ref
.size
;
3178 maxsize2
= lhs_ref
.max_size
;
3179 reverse
= reverse_storage_order_for_component_p (lhs
);
3180 tree def_rhs
= gimple_assign_rhs1 (def_stmt
);
3182 && !storage_order_barrier_p (lhs
)
3183 && known_size_p (maxsize2
)
3184 && known_eq (maxsize2
, size2
)
3185 && adjust_offsets_for_equal_base_address (base
, &offset
,
3188 if (data
->partial_defs
.is_empty ()
3189 && known_subrange_p (offset
, maxsize
, offset2
, size2
)
3190 /* ??? We can't handle bitfield precision extracts without
3191 either using an alternate type for the BIT_FIELD_REF and
3192 then doing a conversion or possibly adjusting the offset
3193 according to endianness. */
3194 && (! INTEGRAL_TYPE_P (vr
->type
)
3195 || known_eq (ref
->size
, TYPE_PRECISION (vr
->type
)))
3196 && multiple_p (ref
->size
, BITS_PER_UNIT
))
3198 tree val
= NULL_TREE
;
3199 if (! INTEGRAL_TYPE_P (TREE_TYPE (def_rhs
))
3200 || type_has_mode_precision_p (TREE_TYPE (def_rhs
)))
3202 gimple_match_op
op (gimple_match_cond::UNCOND
,
3203 BIT_FIELD_REF
, vr
->type
,
3205 bitsize_int (ref
->size
),
3206 bitsize_int (offset
- offset2
));
3207 val
= vn_nary_build_or_lookup (&op
);
3209 else if (known_eq (ref
->size
, size2
))
3211 gimple_match_op
op (gimple_match_cond::UNCOND
,
3212 VIEW_CONVERT_EXPR
, vr
->type
,
3214 val
= vn_nary_build_or_lookup (&op
);
3217 && (TREE_CODE (val
) != SSA_NAME
3218 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val
)))
3219 return data
->finish (ao_ref_alias_set (&lhs_ref
),
3220 ao_ref_base_alias_set (&lhs_ref
), val
);
3222 else if (maxsize
.is_constant (&maxsizei
)
3223 && offset
.is_constant (&offseti
)
3224 && offset2
.is_constant (&offset2i
)
3225 && size2
.is_constant (&size2i
)
3226 && ranges_known_overlap_p (offset
, maxsize
, offset2
, size2
))
3229 pd
.rhs
= SSA_VAL (def_rhs
);
3231 pd
.offset
= offset2i
;
3233 return data
->push_partial_def (pd
, ao_ref_alias_set (&lhs_ref
),
3234 ao_ref_base_alias_set (&lhs_ref
),
3240 /* 4b) Assignment done via one of the vectorizer internal store
3241 functions where we may be able to access pieces from or we can
3242 combine to a larger entity. */
3243 else if (known_eq (ref
->size
, maxsize
)
3244 && is_gimple_reg_type (vr
->type
)
3245 && !reverse_storage_order_for_component_p (vr
->operands
)
3246 && !contains_storage_order_barrier_p (vr
->operands
)
3247 && is_gimple_call (def_stmt
)
3248 && gimple_call_internal_p (def_stmt
)
3249 && internal_store_fn_p (gimple_call_internal_fn (def_stmt
)))
3251 gcall
*call
= as_a
<gcall
*> (def_stmt
);
3252 internal_fn fn
= gimple_call_internal_fn (call
);
3254 tree mask
= NULL_TREE
, len
= NULL_TREE
, bias
= NULL_TREE
;
3257 case IFN_MASK_STORE
:
3258 mask
= gimple_call_arg (call
, internal_fn_mask_index (fn
));
3259 mask
= vn_valueize (mask
);
3260 if (TREE_CODE (mask
) != VECTOR_CST
)
3264 len
= gimple_call_arg (call
, 2);
3265 bias
= gimple_call_arg (call
, 4);
3266 if (!tree_fits_uhwi_p (len
) || !tree_fits_shwi_p (bias
))
3272 tree def_rhs
= gimple_call_arg (call
,
3273 internal_fn_stored_value_index (fn
));
3274 def_rhs
= vn_valueize (def_rhs
);
3275 if (TREE_CODE (def_rhs
) != VECTOR_CST
)
3278 ao_ref_init_from_ptr_and_size (&lhs_ref
,
3279 vn_valueize (gimple_call_arg (call
, 0)),
3280 TYPE_SIZE_UNIT (TREE_TYPE (def_rhs
)));
3282 poly_int64 offset2
, size2
, maxsize2
;
3283 HOST_WIDE_INT offset2i
, size2i
, offseti
;
3284 base2
= ao_ref_base (&lhs_ref
);
3285 offset2
= lhs_ref
.offset
;
3286 size2
= lhs_ref
.size
;
3287 maxsize2
= lhs_ref
.max_size
;
3288 if (known_size_p (maxsize2
)
3289 && known_eq (maxsize2
, size2
)
3290 && adjust_offsets_for_equal_base_address (base
, &offset
,
3292 && maxsize
.is_constant (&maxsizei
)
3293 && offset
.is_constant (&offseti
)
3294 && offset2
.is_constant (&offset2i
)
3295 && size2
.is_constant (&size2i
))
3297 if (!ranges_maybe_overlap_p (offset
, maxsize
, offset2
, size2
))
3298 /* Poor-mans disambiguation. */
3300 else if (ranges_known_overlap_p (offset
, maxsize
, offset2
, size2
))
3304 tree aa
= gimple_call_arg (call
, 1);
3305 alias_set_type set
= get_deref_alias_set (TREE_TYPE (aa
));
3306 tree vectype
= TREE_TYPE (def_rhs
);
3307 unsigned HOST_WIDE_INT elsz
3308 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (vectype
)));
3311 HOST_WIDE_INT start
= 0, len
= 0;
3312 unsigned mask_idx
= 0;
3315 if (integer_zerop (VECTOR_CST_ELT (mask
, mask_idx
)))
3320 pd
.offset
= offset2i
+ start
;
3322 if (ranges_known_overlap_p
3323 (offset
, maxsize
, pd
.offset
, pd
.size
))
3325 void *res
= data
->push_partial_def
3326 (pd
, set
, set
, offseti
, maxsizei
);
3331 start
= (mask_idx
+ 1) * elsz
;
3338 while (known_lt (mask_idx
, TYPE_VECTOR_SUBPARTS (vectype
)));
3342 pd
.offset
= offset2i
+ start
;
3344 if (ranges_known_overlap_p (offset
, maxsize
,
3345 pd
.offset
, pd
.size
))
3346 return data
->push_partial_def (pd
, set
, set
,
3350 else if (fn
== IFN_LEN_STORE
)
3353 pd
.offset
= offset2i
;
3354 pd
.size
= (tree_to_uhwi (len
)
3355 + -tree_to_shwi (bias
)) * BITS_PER_UNIT
;
3356 if (ranges_known_overlap_p (offset
, maxsize
,
3357 pd
.offset
, pd
.size
))
3358 return data
->push_partial_def (pd
, set
, set
,
3368 /* 5) For aggregate copies translate the reference through them if
3369 the copy kills ref. */
3370 else if (data
->vn_walk_kind
== VN_WALKREWRITE
3371 && gimple_assign_single_p (def_stmt
)
3372 && (DECL_P (gimple_assign_rhs1 (def_stmt
))
3373 || TREE_CODE (gimple_assign_rhs1 (def_stmt
)) == MEM_REF
3374 || handled_component_p (gimple_assign_rhs1 (def_stmt
))))
3378 auto_vec
<vn_reference_op_s
> rhs
;
3379 vn_reference_op_t vro
;
3382 gcc_assert (lhs_ref_ok
);
3384 /* See if the assignment kills REF. */
3385 base2
= ao_ref_base (&lhs_ref
);
3386 if (!lhs_ref
.max_size_known_p ()
3388 && (TREE_CODE (base
) != MEM_REF
3389 || TREE_CODE (base2
) != MEM_REF
3390 || TREE_OPERAND (base
, 0) != TREE_OPERAND (base2
, 0)
3391 || !tree_int_cst_equal (TREE_OPERAND (base
, 1),
3392 TREE_OPERAND (base2
, 1))))
3393 || !stmt_kills_ref_p (def_stmt
, ref
))
3396 /* Find the common base of ref and the lhs. lhs_ops already
3397 contains valueized operands for the lhs. */
3398 i
= vr
->operands
.length () - 1;
3399 j
= lhs_ops
.length () - 1;
3400 while (j
>= 0 && i
>= 0
3401 && vn_reference_op_eq (&vr
->operands
[i
], &lhs_ops
[j
]))
3407 /* ??? The innermost op should always be a MEM_REF and we already
3408 checked that the assignment to the lhs kills vr. Thus for
3409 aggregate copies using char[] types the vn_reference_op_eq
3410 may fail when comparing types for compatibility. But we really
3411 don't care here - further lookups with the rewritten operands
3412 will simply fail if we messed up types too badly. */
3413 poly_int64 extra_off
= 0;
3414 if (j
== 0 && i
>= 0
3415 && lhs_ops
[0].opcode
== MEM_REF
3416 && maybe_ne (lhs_ops
[0].off
, -1))
3418 if (known_eq (lhs_ops
[0].off
, vr
->operands
[i
].off
))
3420 else if (vr
->operands
[i
].opcode
== MEM_REF
3421 && maybe_ne (vr
->operands
[i
].off
, -1))
3423 extra_off
= vr
->operands
[i
].off
- lhs_ops
[0].off
;
3428 /* i now points to the first additional op.
3429 ??? LHS may not be completely contained in VR, one or more
3430 VIEW_CONVERT_EXPRs could be in its way. We could at least
3431 try handling outermost VIEW_CONVERT_EXPRs. */
3435 /* Punt if the additional ops contain a storage order barrier. */
3436 for (k
= i
; k
>= 0; k
--)
3438 vro
= &vr
->operands
[k
];
3439 if (vro
->opcode
== VIEW_CONVERT_EXPR
&& vro
->reverse
)
3443 /* Now re-write REF to be based on the rhs of the assignment. */
3444 tree rhs1
= gimple_assign_rhs1 (def_stmt
);
3445 copy_reference_ops_from_ref (rhs1
, &rhs
);
3447 /* Apply an extra offset to the inner MEM_REF of the RHS. */
3448 bool force_no_tbaa
= false;
3449 if (maybe_ne (extra_off
, 0))
3451 if (rhs
.length () < 2)
3453 int ix
= rhs
.length () - 2;
3454 if (rhs
[ix
].opcode
!= MEM_REF
3455 || known_eq (rhs
[ix
].off
, -1))
3457 rhs
[ix
].off
+= extra_off
;
3458 rhs
[ix
].op0
= int_const_binop (PLUS_EXPR
, rhs
[ix
].op0
,
3459 build_int_cst (TREE_TYPE (rhs
[ix
].op0
),
3461 /* When we have offsetted the RHS, reading only parts of it,
3462 we can no longer use the original TBAA type, force alias-set
3464 force_no_tbaa
= true;
3467 /* Save the operands since we need to use the original ones for
3468 the hash entry we use. */
3469 if (!data
->saved_operands
.exists ())
3470 data
->saved_operands
= vr
->operands
.copy ();
3472 /* We need to pre-pend vr->operands[0..i] to rhs. */
3473 vec
<vn_reference_op_s
> old
= vr
->operands
;
3474 if (i
+ 1 + rhs
.length () > vr
->operands
.length ())
3475 vr
->operands
.safe_grow (i
+ 1 + rhs
.length (), true);
3477 vr
->operands
.truncate (i
+ 1 + rhs
.length ());
3478 FOR_EACH_VEC_ELT (rhs
, j
, vro
)
3479 vr
->operands
[i
+ 1 + j
] = *vro
;
3480 valueize_refs (&vr
->operands
);
3481 if (old
== shared_lookup_references
)
3482 shared_lookup_references
= vr
->operands
;
3483 vr
->hashcode
= vn_reference_compute_hash (vr
);
3485 /* Try folding the new reference to a constant. */
3486 tree val
= fully_constant_vn_reference_p (vr
);
3489 if (data
->partial_defs
.is_empty ())
3490 return data
->finish (ao_ref_alias_set (&lhs_ref
),
3491 ao_ref_base_alias_set (&lhs_ref
), val
);
3492 /* This is the only interesting case for partial-def handling
3493 coming from targets that like to gimplify init-ctors as
3494 aggregate copies from constant data like aarch64 for
3496 if (maxsize
.is_constant (&maxsizei
) && known_eq (ref
->size
, maxsize
))
3503 return data
->push_partial_def (pd
, ao_ref_alias_set (&lhs_ref
),
3504 ao_ref_base_alias_set (&lhs_ref
),
3509 /* Continuing with partial defs isn't easily possible here, we
3510 have to find a full def from further lookups from here. Probably
3511 not worth the special-casing everywhere. */
3512 if (!data
->partial_defs
.is_empty ())
3515 /* Adjust *ref from the new operands. */
3517 ao_ref_init (&rhs1_ref
, rhs1
);
3518 if (!ao_ref_init_from_vn_reference (&r
,
3520 : ao_ref_alias_set (&rhs1_ref
),
3522 : ao_ref_base_alias_set (&rhs1_ref
),
3523 vr
->type
, vr
->operands
))
3525 /* This can happen with bitfields. */
3526 if (maybe_ne (ref
->size
, r
.size
))
3528 /* If the access lacks some subsetting simply apply that by
3529 shortening it. That in the end can only be successful
3530 if we can pun the lookup result which in turn requires
3532 if (known_eq (r
.size
, r
.max_size
)
3533 && known_lt (ref
->size
, r
.size
))
3534 r
.size
= r
.max_size
= ref
->size
;
3540 /* Do not update last seen VUSE after translating. */
3541 data
->last_vuse_ptr
= NULL
;
3542 /* Invalidate the original access path since it now contains
3544 data
->orig_ref
.ref
= NULL_TREE
;
3545 /* Use the alias-set of this LHS for recording an eventual result. */
3546 if (data
->first_set
== -2)
3548 data
->first_set
= ao_ref_alias_set (&lhs_ref
);
3549 data
->first_base_set
= ao_ref_base_alias_set (&lhs_ref
);
3552 /* Keep looking for the adjusted *REF / VR pair. */
3556 /* 6) For memcpy copies translate the reference through them if the copy
3557 kills ref. But we cannot (easily) do this translation if the memcpy is
3558 a storage order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that
3559 can modify the storage order of objects (see storage_order_barrier_p). */
3560 else if (data
->vn_walk_kind
== VN_WALKREWRITE
3561 && is_gimple_reg_type (vr
->type
)
3562 /* ??? Handle BCOPY as well. */
3563 && (gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMCPY
)
3564 || gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMCPY_CHK
)
3565 || gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMPCPY
)
3566 || gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMPCPY_CHK
)
3567 || gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMMOVE
)
3568 || gimple_call_builtin_p (def_stmt
, BUILT_IN_MEMMOVE_CHK
))
3569 && (TREE_CODE (gimple_call_arg (def_stmt
, 0)) == ADDR_EXPR
3570 || TREE_CODE (gimple_call_arg (def_stmt
, 0)) == SSA_NAME
)
3571 && (TREE_CODE (gimple_call_arg (def_stmt
, 1)) == ADDR_EXPR
3572 || TREE_CODE (gimple_call_arg (def_stmt
, 1)) == SSA_NAME
)
3573 && (poly_int_tree_p (gimple_call_arg (def_stmt
, 2), ©_size
)
3574 || (TREE_CODE (gimple_call_arg (def_stmt
, 2)) == SSA_NAME
3575 && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt
, 2)),
3577 /* Handling this is more complicated, give up for now. */
3578 && data
->partial_defs
.is_empty ())
3582 poly_int64 rhs_offset
, lhs_offset
;
3583 vn_reference_op_s op
;
3584 poly_uint64 mem_offset
;
3585 poly_int64 at
, byte_maxsize
;
3587 /* Only handle non-variable, addressable refs. */
3588 if (maybe_ne (ref
->size
, maxsize
)
3589 || !multiple_p (offset
, BITS_PER_UNIT
, &at
)
3590 || !multiple_p (maxsize
, BITS_PER_UNIT
, &byte_maxsize
))
3593 /* Extract a pointer base and an offset for the destination. */
3594 lhs
= gimple_call_arg (def_stmt
, 0);
3596 if (TREE_CODE (lhs
) == SSA_NAME
)
3598 lhs
= vn_valueize (lhs
);
3599 if (TREE_CODE (lhs
) == SSA_NAME
)
3601 gimple
*def_stmt
= SSA_NAME_DEF_STMT (lhs
);
3602 if (gimple_assign_single_p (def_stmt
)
3603 && gimple_assign_rhs_code (def_stmt
) == ADDR_EXPR
)
3604 lhs
= gimple_assign_rhs1 (def_stmt
);
3607 if (TREE_CODE (lhs
) == ADDR_EXPR
)
3609 if (AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (lhs
)))
3610 && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_TYPE (lhs
))))
3612 tree tem
= get_addr_base_and_unit_offset (TREE_OPERAND (lhs
, 0),
3616 if (TREE_CODE (tem
) == MEM_REF
3617 && poly_int_tree_p (TREE_OPERAND (tem
, 1), &mem_offset
))
3619 lhs
= TREE_OPERAND (tem
, 0);
3620 if (TREE_CODE (lhs
) == SSA_NAME
)
3621 lhs
= vn_valueize (lhs
);
3622 lhs_offset
+= mem_offset
;
3624 else if (DECL_P (tem
))
3625 lhs
= build_fold_addr_expr (tem
);
3629 if (TREE_CODE (lhs
) != SSA_NAME
3630 && TREE_CODE (lhs
) != ADDR_EXPR
)
3633 /* Extract a pointer base and an offset for the source. */
3634 rhs
= gimple_call_arg (def_stmt
, 1);
3636 if (TREE_CODE (rhs
) == SSA_NAME
)
3637 rhs
= vn_valueize (rhs
);
3638 if (TREE_CODE (rhs
) == ADDR_EXPR
)
3640 if (AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (rhs
)))
3641 && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_TYPE (rhs
))))
3643 tree tem
= get_addr_base_and_unit_offset (TREE_OPERAND (rhs
, 0),
3647 if (TREE_CODE (tem
) == MEM_REF
3648 && poly_int_tree_p (TREE_OPERAND (tem
, 1), &mem_offset
))
3650 rhs
= TREE_OPERAND (tem
, 0);
3651 rhs_offset
+= mem_offset
;
3653 else if (DECL_P (tem
)
3654 || TREE_CODE (tem
) == STRING_CST
)
3655 rhs
= build_fold_addr_expr (tem
);
3659 if (TREE_CODE (rhs
) == SSA_NAME
)
3660 rhs
= SSA_VAL (rhs
);
3661 else if (TREE_CODE (rhs
) != ADDR_EXPR
)
3664 /* The bases of the destination and the references have to agree. */
3665 if (TREE_CODE (base
) == MEM_REF
)
3667 if (TREE_OPERAND (base
, 0) != lhs
3668 || !poly_int_tree_p (TREE_OPERAND (base
, 1), &mem_offset
))
3672 else if (!DECL_P (base
)
3673 || TREE_CODE (lhs
) != ADDR_EXPR
3674 || TREE_OPERAND (lhs
, 0) != base
)
3677 /* If the access is completely outside of the memcpy destination
3678 area there is no aliasing. */
3679 if (!ranges_maybe_overlap_p (lhs_offset
, copy_size
, at
, byte_maxsize
))
3681 /* And the access has to be contained within the memcpy destination. */
3682 if (!known_subrange_p (at
, byte_maxsize
, lhs_offset
, copy_size
))
3685 /* Save the operands since we need to use the original ones for
3686 the hash entry we use. */
3687 if (!data
->saved_operands
.exists ())
3688 data
->saved_operands
= vr
->operands
.copy ();
3690 /* Make room for 2 operands in the new reference. */
3691 if (vr
->operands
.length () < 2)
3693 vec
<vn_reference_op_s
> old
= vr
->operands
;
3694 vr
->operands
.safe_grow_cleared (2, true);
3695 if (old
== shared_lookup_references
)
3696 shared_lookup_references
= vr
->operands
;
3699 vr
->operands
.truncate (2);
3701 /* The looked-through reference is a simple MEM_REF. */
3702 memset (&op
, 0, sizeof (op
));
3704 op
.opcode
= MEM_REF
;
3705 op
.op0
= build_int_cst (ptr_type_node
, at
- lhs_offset
+ rhs_offset
);
3706 op
.off
= at
- lhs_offset
+ rhs_offset
;
3707 vr
->operands
[0] = op
;
3708 op
.type
= TREE_TYPE (rhs
);
3709 op
.opcode
= TREE_CODE (rhs
);
3712 vr
->operands
[1] = op
;
3713 vr
->hashcode
= vn_reference_compute_hash (vr
);
3715 /* Try folding the new reference to a constant. */
3716 tree val
= fully_constant_vn_reference_p (vr
);
3718 return data
->finish (0, 0, val
);
3720 /* Adjust *ref from the new operands. */
3721 if (!ao_ref_init_from_vn_reference (&r
, 0, 0, vr
->type
, vr
->operands
))
3723 /* This can happen with bitfields. */
3724 if (maybe_ne (ref
->size
, r
.size
))
3728 /* Do not update last seen VUSE after translating. */
3729 data
->last_vuse_ptr
= NULL
;
3730 /* Invalidate the original access path since it now contains
3732 data
->orig_ref
.ref
= NULL_TREE
;
3733 /* Use the alias-set of this stmt for recording an eventual result. */
3734 if (data
->first_set
== -2)
3736 data
->first_set
= 0;
3737 data
->first_base_set
= 0;
3740 /* Keep looking for the adjusted *REF / VR pair. */
3744 /* Bail out and stop walking. */
/* Return a reference op vector from OP that can be used for
   vn_reference_lookup_pieces.  The caller is responsible for releasing
   the vector.  */

vec<vn_reference_op_s>
vn_reference_operands_for_lookup (tree op)
{
  bool valueized;
  return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
}
/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set,
			    alias_set_type base_set, tree type,
			    vec<vn_reference_op_s> operands,
			    vn_reference_t *vnresult, vn_lookup_kind kind)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;
  tree cst;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse_ssa_val (vuse);
  shared_lookup_references.truncate (0);
  shared_lookup_references.safe_grow (operands.length (), true);
  memcpy (shared_lookup_references.address (),
	  operands.address (),
	  sizeof (vn_reference_op_s)
	  * operands.length ());
  bool valueized_p;
  valueize_refs_1 (&shared_lookup_references, &valueized_p);
  vr1.operands = shared_lookup_references;
  vr1.type = type;
  vr1.set = set;
  vr1.base_set = base_set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  vn_reference_lookup_1 (&vr1, vnresult);
  if (!*vnresult
      && kind != VN_NOWALK
      && vr1.vuse)
    {
      ao_ref r;
      unsigned limit = param_sccvn_max_alias_queries_per_access;
      vn_walk_cb_data data (&vr1, NULL_TREE, NULL, kind, true, NULL_TREE,
			    false);
      vec<vn_reference_op_s> ops_for_ref;
      if (!valueized_p)
	ops_for_ref = vr1.operands;
      else
	{
	  /* For ao_ref_from_mem we have to ensure only available SSA names
	     end up in base and the only convenient way to make this work
	     for PRE is to re-valueize with that in mind.  */
	  ops_for_ref.create (operands.length ());
	  ops_for_ref.quick_grow (operands.length ());
	  memcpy (ops_for_ref.address (),
		  operands.address (),
		  sizeof (vn_reference_op_s)
		  * operands.length ());
	  valueize_refs_1 (&ops_for_ref, &valueized_p, true);
	}
      if (ao_ref_init_from_vn_reference (&r, set, base_set, type,
					 ops_for_ref))
	*vnresult
	  = ((vn_reference_t)
	     walk_non_aliased_vuses (&r, vr1.vuse, true, vn_reference_lookup_2,
				     vn_reference_lookup_3, vuse_valueize,
				     limit, &data));
      if (ops_for_ref != shared_lookup_references)
	ops_for_ref.release ();
      gcc_checking_assert (vr1.operands == shared_lookup_references);
      if (*vnresult
	  && data.same_val
	  && (!(*vnresult)->result
	      || !operand_equal_p ((*vnresult)->result, data.same_val)))
	{
	  *vnresult = NULL;
	  return NULL_TREE;
	}
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}
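/* Hypothetical usage sketch from a pass (names invented): asking whether
   the value of a reference EXPR is already known at VUSE could look like

     vec<vn_reference_op_s> ops = vn_reference_operands_for_lookup (expr);
     vn_reference_t res;
     tree val = vn_reference_lookup_pieces (vuse, set, base_set,
					    TREE_TYPE (expr), ops,
					    &res, VN_WALK);
     ops.release ();

   VAL is then a constant, an SSA name with the same value, or NULL_TREE
   if nothing was found within the alias-walk limit.  */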
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  When TBAA_P is false assume
   we are looking up a store and treat it as having alias-set zero.
   *LAST_VUSE_PTR will be updated with the VUSE the value lookup
   succeeded at.  MASK is either NULL_TREE, or can be an INTEGER_CST if
   the result of the load is bitwise anded with MASK and so we are only
   interested in a subset of the bits and can ignore if the other bits
   are uninitialized or not initialized with constants.  When doing
   redundant store removal the caller has to set
   REDUNDANT_STORE_REMOVAL_P.  */

tree
vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
		     vn_reference_t *vnresult, bool tbaa_p,
		     tree *last_vuse_ptr, tree mask,
		     bool redundant_store_removal_p)
{
  vec<vn_reference_op_s> operands;
  struct vn_reference_s vr1;
  bool valueized_anything;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse_ssa_val (vuse);
  vr1.operands = operands
    = valueize_shared_reference_ops_from_ref (op, &valueized_anything);

  /* Handle &MEM[ptr + 5].b[1].c as POINTER_PLUS_EXPR.  Avoid doing
     this before the pass folding __builtin_object_size had a chance to run.  */
  if ((cfun->curr_properties & PROP_objsz)
      && operands[0].opcode == ADDR_EXPR
      && operands.last ().opcode == SSA_NAME)
    {
      poly_int64 off = 0;
      vn_reference_op_t vro;
      unsigned i;
      for (i = 1; operands.iterate (i, &vro); ++i)
	{
	  if (vro->opcode == SSA_NAME)
	    break;
	  else if (known_eq (vro->off, -1))
	    break;
	  off += vro->off;
	}
      if (i == operands.length () - 1
	  /* Make sure the offset we accumulated in a 64bit int
	     fits the address computation carried out in target
	     offset precision.  */
	  && (off.coeffs[0]
	      == sext_hwi (off.coeffs[0], TYPE_PRECISION (sizetype))))
	{
	  gcc_assert (operands[i-1].opcode == MEM_REF);
	  tree ops[2];
	  ops[0] = operands[i].op0;
	  ops[1] = wide_int_to_tree (sizetype, off);
	  tree res = vn_nary_op_lookup_pieces (2, POINTER_PLUS_EXPR,
					       TREE_TYPE (op), ops, NULL);
	  if (res)
	    return res;
	}
    }

  vr1.type = TREE_TYPE (op);
  ao_ref op_ref;
  ao_ref_init (&op_ref, op);
  vr1.set = ao_ref_alias_set (&op_ref);
  vr1.base_set = ao_ref_base_alias_set (&op_ref);
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if (mask == NULL_TREE)
    if (tree cst = fully_constant_vn_reference_p (&vr1))
      return cst;

  if (kind != VN_NOWALK && vr1.vuse)
    {
      vn_reference_t wvnresult;
      ao_ref r;
      unsigned limit = param_sccvn_max_alias_queries_per_access;
      auto_vec<vn_reference_op_s> ops_for_ref;
      if (valueized_anything)
	{
	  copy_reference_ops_from_ref (op, &ops_for_ref);
	  bool tem;
	  valueize_refs_1 (&ops_for_ref, &tem, true);
	}
      /* Make sure to use a valueized reference if we valueized anything.
	 Otherwise preserve the full reference for advanced TBAA.  */
      if (!valueized_anything
	  || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.base_set,
					     vr1.type, ops_for_ref))
	ao_ref_init (&r, op);
      vn_walk_cb_data data (&vr1, r.ref ? NULL_TREE : op,
			    last_vuse_ptr, kind, tbaa_p, mask,
			    redundant_store_removal_p);
      wvnresult
	= ((vn_reference_t)
	   walk_non_aliased_vuses (&r, vr1.vuse, tbaa_p, vn_reference_lookup_2,
				   vn_reference_lookup_3, vuse_valueize,
				   limit, &data));
      gcc_checking_assert (vr1.operands == shared_lookup_references);
      if (wvnresult)
	{
	  gcc_assert (mask == NULL_TREE);
	  if (data.same_val
	      && (!wvnresult->result
		  || !operand_equal_p (wvnresult->result, data.same_val)))
	    return NULL_TREE;
	  if (vnresult)
	    *vnresult = wvnresult;
	  return wvnresult->result;
	}
      else if (mask)
	return data.masked_result;

      return NULL_TREE;
    }

  if (last_vuse_ptr)
    *last_vuse_ptr = vr1.vuse;
  if (mask)
    return NULL_TREE;
  return vn_reference_lookup_1 (&vr1, vnresult);
}
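/* Illustrative example for the MASK argument (simplified): when only
   _5 = <load> & 0xff is needed, the caller may pass MASK == 0xff so that
   partial definitions covering just the low byte are sufficient; the
   combined value then comes back through data.masked_result rather than
   through a hash table entry, as handled above.  */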
/* Lookup CALL in the current hash table and return the entry in
   *VNRESULT if found.  Populates *VR for the hashtable lookup.  */

void
vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
			  vn_reference_t vr)
{
  if (vnresult)
    *vnresult = NULL;

  tree vuse = gimple_vuse (call);

  vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr->operands = valueize_shared_reference_ops_from_call (call);
  tree lhs = gimple_call_lhs (call);
  /* For non-SSA return values the reference ops contain the LHS.  */
  vr->type = ((lhs && TREE_CODE (lhs) == SSA_NAME)
	      ? TREE_TYPE (lhs) : NULL_TREE);
  vr->punned = false;
  vr->set = 0;
  vr->base_set = 0;
  vr->hashcode = vn_reference_compute_hash (vr);
  vn_reference_lookup_1 (vr, vnresult);
}
/* Insert OP into the current hash table with a value number of RESULT.  */

static void
vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
{
  vn_reference_s **slot;
  vn_reference_t vr1;
  bool tem;

  vec<vn_reference_op_s> operands
    = valueize_shared_reference_ops_from_ref (op, &tem);
  /* Handle &MEM[ptr + 5].b[1].c as POINTER_PLUS_EXPR.  Avoid doing this
     before the pass folding __builtin_object_size had a chance to run.  */
  if ((cfun->curr_properties & PROP_objsz)
      && operands[0].opcode == ADDR_EXPR
      && operands.last ().opcode == SSA_NAME)
    {
      poly_int64 off = 0;
      vn_reference_op_t vro;
      unsigned i;
      for (i = 1; operands.iterate (i, &vro); ++i)
	{
	  if (vro->opcode == SSA_NAME)
	    break;
	  else if (known_eq (vro->off, -1))
	    break;
	  off += vro->off;
	}
      if (i == operands.length () - 1
	  /* Make sure the offset we accumulated in a 64bit int
	     fits the address computation carried out in target
	     offset precision.  */
	  && (off.coeffs[0]
	      == sext_hwi (off.coeffs[0], TYPE_PRECISION (sizetype))))
	{
	  gcc_assert (operands[i-1].opcode == MEM_REF);
	  tree ops[2];
	  ops[0] = operands[i].op0;
	  ops[1] = wide_int_to_tree (sizetype, off);
	  vn_nary_op_insert_pieces (2, POINTER_PLUS_EXPR,
				    TREE_TYPE (op), ops, result,
				    VN_INFO (result)->value_id);
	}
    }

  vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse_ssa_val (vuse);
  vr1->operands = operands.copy ();
  vr1->type = TREE_TYPE (op);
  vr1->punned = false;
  ao_ref op_ref;
  ao_ref_init (&op_ref, op);
  vr1->set = ao_ref_alias_set (&op_ref);
  vr1->base_set = ao_ref_base_alias_set (&op_ref);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
  vr1->result_vdef = vdef;

  slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
						      INSERT);

  /* Because IL walking on reference lookup can end up visiting
     a def that is only to be visited later in iteration order
     when we are about to make an irreducible region reducible
     the def can be effectively processed and its ref being inserted
     by vn_reference_lookup_3 already.  So we cannot assert (!*slot)
     but save a lookup if we deal with already inserted refs here.  */
  if (*slot)
    {
      /* We cannot assert that we have the same value either because
	 when disentangling an irreducible region we may end up visiting
	 a use before the corresponding def.  That's a missed optimization
	 only though.  See gcc.dg/tree-ssa/pr87126.c for example.  */
      if (dump_file && (dump_flags & TDF_DETAILS)
	  && !operand_equal_p ((*slot)->result, vr1->result, 0))
	{
	  fprintf (dump_file, "Keeping old value ");
	  print_generic_expr (dump_file, (*slot)->result);
	  fprintf (dump_file, " because of collision\n");
	}
      free_reference (vr1);
      obstack_free (&vn_tables_obstack, vr1);
      return;
    }

  *slot = vr1;
  vr1->next = last_inserted_ref;
  last_inserted_ref = vr1;
}
/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set,
			    alias_set_type base_set, tree type,
			    vec<vn_reference_op_s> operands,
			    tree result, unsigned int value_id)
{
  vn_reference_s **slot;
  vn_reference_t vr1;

  vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
  vr1->value_id = value_id;
  vr1->vuse = vuse_ssa_val (vuse);
  vr1->operands = operands;
  valueize_refs (&vr1->operands);
  vr1->type = type;
  vr1->punned = false;
  vr1->set = set;
  vr1->base_set = base_set;
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;
  vr1->result_vdef = NULL_TREE;

  slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
						      INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);

  *slot = vr1;
  vr1->next = last_inserted_ref;
  last_inserted_ref = vr1;
  return vr1;
}
/* Compute and return the hash value for nary operation VNO1.  */

static hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  inchash::hash hstate;
  unsigned i;

  if (((vno1->length == 2
	&& commutative_tree_code (vno1->opcode))
       || (vno1->length == 3
	   && commutative_ternary_tree_code (vno1->opcode)))
      && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
    std::swap (vno1->op[0], vno1->op[1]);
  else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
	   && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
    {
      std::swap (vno1->op[0], vno1->op[1]);
      vno1->opcode = swap_tree_comparison (vno1->opcode);
    }

  hstate.add_int (vno1->opcode);
  for (i = 0; i < vno1->length; ++i)
    inchash::add_expr (vno1->op[i], hstate);

  return hstate.end ();
}
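/* Example of the canonicalization above: both orderings of a commutative
   expression such as a_1 + b_2 and b_2 + a_1 are brought into one
   canonical operand order before hashing, and a comparison like
   b_2 < a_1 has its operands swapped together with the opcode adjusted
   via swap_tree_comparison, so equivalent expressions always land in the
   same hash bucket.  (The SSA names a_1/b_2 are made up for the
   example.)  */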
/* Compare nary operations VNO1 and VNO2 and return true if they are
   equivalent.  */

bool
vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
{
  unsigned i;

  if (vno1->hashcode != vno2->hashcode)
    return false;

  if (vno1->length != vno2->length)
    return false;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  /* BIT_INSERT_EXPR has an implicit operand as the type precision
     of op1.  Need to check to make sure they are the same.  */
  if (vno1->opcode == BIT_INSERT_EXPR
      && TREE_CODE (vno1->op[1]) == INTEGER_CST
      && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
	 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
    return false;

  return true;
}
/* Initialize VNO from the pieces provided.  */

static void
init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
			     enum tree_code code, tree type, tree *ops)
{
  vno->opcode = code;
  vno->length = length;
  vno->type = type;
  memcpy (&vno->op[0], ops, sizeof (tree) * length);
}
/* Return the number of operands for a vn_nary ops structure from STMT.  */

unsigned int
vn_nary_length_from_stmt (gimple *stmt)
{
  switch (gimple_assign_rhs_code (stmt))
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      return 1;

    case BIT_FIELD_REF:
      return 3;

    case CONSTRUCTOR:
      return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));

    default:
      return gimple_num_ops (stmt) - 1;
    }
}
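/* For example, a statement like x_3 = a_1 + b_2 yields length 2
   (gimple_num_ops minus the lhs), a REALPART_EXPR or VIEW_CONVERT_EXPR
   counts as a single operand, and a CONSTRUCTOR on the rhs contributes
   one operand slot per element.  */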
/* Initialize VNO from STMT.  */

static void
init_vn_nary_op_from_stmt (vn_nary_op_t vno, gassign *stmt)
{
  unsigned i;

  vno->opcode = gimple_assign_rhs_code (stmt);
  vno->type = TREE_TYPE (gimple_assign_lhs (stmt));
  switch (vno->opcode)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      vno->length = 1;
      vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      break;

    case BIT_FIELD_REF:
      vno->length = 3;
      vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
      vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
      break;

    case CONSTRUCTOR:
      vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
      for (i = 0; i < vno->length; ++i)
	vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
      break;

    default:
      gcc_checking_assert (!gimple_assign_single_p (stmt));
      vno->length = gimple_num_ops (stmt) - 1;
      for (i = 0; i < vno->length; ++i)
	vno->op[i] = gimple_op (stmt, i + 1);
    }
}
/* Compute the hashcode for VNO and look for it in the hash table;
   return the resulting value number if it exists in the hash table.
   Return NULL_TREE if it does not exist in the hash table or if the
   result field of the operation is NULL.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

static tree
vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
{
  vn_nary_op_s **slot;

  if (vnresult)
    *vnresult = NULL;

  for (unsigned i = 0; i < vno->length; ++i)
    if (TREE_CODE (vno->op[i]) == SSA_NAME)
      vno->op[i] = SSA_VAL (vno->op[i]);

  vno->hashcode = vn_nary_op_compute_hash (vno);
  slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = *slot;
  return (*slot)->predicated_values ? NULL_TREE : (*slot)->u.result;
}
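/* Note the lookup valueizes the operands in place first, so a query
   for a_1 + b_2 where a_1 has value number c_4 probes the table with
   c_4 + b_2; the hash therefore has to be computed only after
   valueization.  */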
/* Lookup a n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
			  tree type, tree *ops, vn_nary_op_t *vnresult)
{
  vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
				  sizeof_vn_nary_op (length));
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  return vn_nary_op_lookup_1 (vno1, vnresult);
}
/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

static tree
vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
{
  vn_nary_op_t vno1
    = XALLOCAVAR (struct vn_nary_op_s,
		  sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
  init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (stmt));
  return vn_nary_op_lookup_1 (vno1, vnresult);
}
/* Allocate a vn_nary_op_t with LENGTH operands on STACK.  */

vn_nary_op_t
alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
{
  return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
}

/* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
   obstack.  */

static vn_nary_op_t
alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
{
  vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length, &vn_tables_obstack);

  vno1->value_id = value_id;
  vno1->length = length;
  vno1->predicated_values = 0;
  vno1->u.result = result;

  return vno1;
}
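/* vn_nary_op_s carries a trailing operand array, so allocations are
   sized with sizeof_vn_nary_op (length) rather than
   sizeof (vn_nary_op_s); both the alloca-based lookup temporaries and
   the obstack-allocated table entries rely on that layout.  */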
/* Insert VNO into TABLE.  */

static vn_nary_op_t
vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table)
{
  vn_nary_op_s **slot;

  gcc_assert (! vno->predicated_values
	      || (! vno->u.values->next
		  && vno->u.values->n == 1));

  for (unsigned i = 0; i < vno->length; ++i)
    if (TREE_CODE (vno->op[i]) == SSA_NAME)
      vno->op[i] = SSA_VAL (vno->op[i]);

  vno->hashcode = vn_nary_op_compute_hash (vno);
  slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
  vno->unwind_to = *slot;
  if (*slot)
    {
      /* Prefer non-predicated values.
	 ??? Only if those are constant, otherwise, with constant predicated
	 value, turn them into predicated values with entry-block validity
	 (??? but we always find the first valid result currently).  */
      if ((*slot)->predicated_values
	  && ! vno->predicated_values)
	{
	  /* ??? We cannot remove *slot from the unwind stack list.
	     For the moment we deal with this by skipping not found
	     entries but this isn't ideal ...  */
	  *slot = vno;
	  /* ??? Maintain a stack of states we can unwind in
	     vn_nary_op_s?  But how far do we unwind?  In reality
	     we need to push change records somewhere...  Or not
	     unwind vn_nary_op_s and linking them but instead
	     unwind the results "list", linking that, which also
	     doesn't move on hashtable resize.  */
	  /* We can also have a ->unwind_to recording *slot there.
	     That way we can make u.values a fixed size array with
	     recording the number of entries but of course we then
	     have always N copies for each unwind_to-state.  Or we
	     make sure to only ever append and each unwinding will
	     pop off one entry (but how to deal with predicated
	     replaced with non-predicated here?)  */
	  vno->next = last_inserted_nary;
	  last_inserted_nary = vno;
	  return vno;
	}
      else if (vno->predicated_values
	       && ! (*slot)->predicated_values)
	return *slot;
      else if (vno->predicated_values
	       && (*slot)->predicated_values)
	{
	  /* ??? Factor this all into a insert_single_predicated_value
	     routine.  */
	  gcc_assert (!vno->u.values->next && vno->u.values->n == 1);
	  basic_block vno_bb
	    = BASIC_BLOCK_FOR_FN (cfun, vno->u.values->valid_dominated_by_p[0]);
	  vn_pval *nval = vno->u.values;
	  vn_pval **next = &vno->u.values;
	  bool found = false;
	  for (vn_pval *val = (*slot)->u.values; val; val = val->next)
	    {
	      if (expressions_equal_p (val->result, nval->result))
		{
		  found = true;
		  for (unsigned i = 0; i < val->n; ++i)
		    {
		      basic_block val_bb
			= BASIC_BLOCK_FOR_FN (cfun,
					      val->valid_dominated_by_p[i]);
		      if (dominated_by_p (CDI_DOMINATORS, vno_bb, val_bb))
			/* Value registered with more generic predicate.  */
			return *slot;
		      else if (flag_checking)
			/* Shouldn't happen, we insert in RPO order.  */
			gcc_assert (!dominated_by_p (CDI_DOMINATORS,
						     val_bb, vno_bb));
		    }
		  /* Append the new predicate block to the value.  */
		  *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
						     sizeof (vn_pval)
						     + val->n * sizeof (int));
		  (*next)->next = NULL;
		  (*next)->result = val->result;
		  (*next)->n = val->n + 1;
		  memcpy ((*next)->valid_dominated_by_p,
			  val->valid_dominated_by_p,
			  val->n * sizeof (int));
		  (*next)->valid_dominated_by_p[val->n] = vno_bb->index;
		  next = &(*next)->next;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "Appending predicate to value.\n");
		  continue;
		}
	      /* Copy other predicated values.  */
	      *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
						 sizeof (vn_pval)
						 + (val->n-1) * sizeof (int));
	      memcpy (*next, val, sizeof (vn_pval) + (val->n-1) * sizeof (int));
	      (*next)->next = NULL;
	      next = &(*next)->next;
	    }
	  if (!found)
	    *next = nval;

	  *slot = vno;
	  vno->next = last_inserted_nary;
	  last_inserted_nary = vno;
	  return vno;
	}
      /* While we do not want to insert things twice it's awkward to
	 avoid it in the case where visit_nary_op pattern-matches stuff
	 and ends up simplifying the replacement to itself.  We then
	 get two inserts, one from visit_nary_op and one from
	 vn_nary_build_or_lookup.
	 So allow inserts with the same value number.  */
      else if ((*slot)->u.result == vno->u.result)
	return *slot;

      /* ??? There's also optimistic vs. previous committed state merging
	 that is problematic for the case of unwinding.  */

      /* ??? We should return NULL if we do not use 'vno' and have the
	 caller release it.  */
      gcc_assert (!*slot);
    }

  *slot = vno;
  vno->next = last_inserted_nary;
  last_inserted_nary = vno;
  return vno;
}
/* Insert a n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
			  tree type, tree *ops,
			  tree result, unsigned int value_id)
{
  vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  return vn_nary_op_insert_into (vno1, valid_info->nary);
}
/* Return whether we can track a predicate valid when PRED_E is executed.  */

static bool
can_track_predicate_on_edge (edge pred_e)
{
  /* ??? As we are currently recording a basic-block index in
     vn_pval.valid_dominated_by_p and using dominance for the
     validity check we cannot track predicates on all edges.  */
  if (single_pred_p (pred_e->dest))
    return true;
  /* Never record for backedges.  */
  if (pred_e->flags & EDGE_DFS_BACK)
    return false;
  /* When there's more than one predecessor we cannot track
     predicate validity based on the destination block.  The
     exception is when all other incoming edges are backedges.  */
  edge_iterator ei;
  edge e;
  FOR_EACH_EDGE (e, ei, pred_e->dest->preds)
    if (e != pred_e
	&& ! dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
      return false;
  return true;
}
/* Insert a n-ary operation into the current hash table using its pieces,
   recording RESULT as a predicated value valid when PRED_E is executed.  */

vn_nary_op_t
vn_nary_op_insert_pieces_predicated (unsigned int length, enum tree_code code,
				     tree type, tree *ops,
				     tree result, unsigned int value_id,
				     edge pred_e)
{
  if (!can_track_predicate_on_edge (pred_e))
    return NULL;
  if (dump_file && (dump_flags & TDF_DETAILS)
      /* ??? Fix dumping, but currently we only get comparisons.  */
      && TREE_CODE_CLASS (code) == tcc_comparison)
    {
      fprintf (dump_file, "Recording on edge %d->%d ", pred_e->src->index,
	       pred_e->dest->index);
      print_generic_expr (dump_file, ops[0], TDF_SLIM);
      fprintf (dump_file, " %s ", get_tree_code_name (code));
      print_generic_expr (dump_file, ops[1], TDF_SLIM);
      fprintf (dump_file, " == %s\n",
	       integer_zerop (result) ? "false" : "true");
    }
  vn_nary_op_t vno1 = alloc_vn_nary_op (length, NULL_TREE, value_id);
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  vno1->predicated_values = 1;
  vno1->u.values = (vn_pval *) obstack_alloc (&vn_tables_obstack,
					      sizeof (vn_pval));
  vno1->u.values->next = NULL;
  vno1->u.values->result = result;
  vno1->u.values->n = 1;
  vno1->u.values->valid_dominated_by_p[0] = pred_e->dest->index;
  return vn_nary_op_insert_into (vno1, valid_info->nary);
}
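/* A typical use is value-numbering the condition of
     if (a_1 < b_2) ...
   where a_1 < b_2 is recorded as true on the edge to the then-block and
   as false on the edge to the else-block; the destination block index
   then serves as the dominance-based validity region for the
   predicate.  */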
static bool
dominated_by_p_w_unex (basic_block bb1, basic_block bb2, bool);

/* Return the predicated value of VNO valid in BB or NULL_TREE if none
   of the recorded predicate regions dominates BB.  */

static tree
vn_nary_op_get_predicated_value (vn_nary_op_t vno, basic_block bb)
{
  if (! vno->predicated_values)
    return vno->u.result;
  for (vn_pval *val = vno->u.values; val; val = val->next)
    for (unsigned i = 0; i < val->n; ++i)
      /* Do not handle backedge executability optimistically since
	 when figuring out whether to iterate we do not consider
	 changed predication.  */
      if (dominated_by_p_w_unex
	    (bb, BASIC_BLOCK_FOR_FN (cfun, val->valid_dominated_by_p[i]),
	     false))
	return val->result;
  return NULL_TREE;
}
/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */

static vn_nary_op_t
vn_nary_op_insert_stmt (gimple *stmt, tree result)
{
  vn_nary_op_t vno1
    = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
			result, VN_INFO (result)->value_id);
  init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (stmt));
  return vn_nary_op_insert_into (vno1, valid_info->nary);
}
/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  inchash::hash hstate;
  tree phi1op;
  tree type;
  edge e;
  edge_iterator ei;

  hstate.add_int (EDGE_COUNT (vp1->block->preds));
  switch (EDGE_COUNT (vp1->block->preds))
    {
    case 1:
      break;
    case 2:
      /* When this is a loop header PHI cross-block CSE is ruled out
	 anyway, so the block index can be part of the hash.  */
      if (vp1->block->loop_father->header == vp1->block)
	;
      else
	break;
      /* Fallthru.  */
    default:
      hstate.add_int (vp1->block->index);
    }

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
  type = vp1->type;
  hstate.merge_hash (vn_hash_type (type));

  FOR_EACH_EDGE (e, ei, vp1->block->preds)
    {
      /* Don't hash backedge values they need to be handled as VN_TOP
	 for optimistic value-numbering.  */
      if (e->flags & EDGE_DFS_BACK)
	continue;

      phi1op = vp1->phiargs[e->dest_idx];
      if (phi1op == VN_TOP)
	continue;
      inchash::add_expr (phi1op, hstate);
    }

  return hstate.end ();
}
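/* Backedge arguments and VN_TOP are left out of the hash because they
   are treated optimistically during iteration; hashing them would make
   otherwise congruent loop PHIs compare unequal on the first visit and
   again after the backedge value settles.  */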
/* Return true if COND1 and COND2 represent the same condition, set
   *INVERTED_P if one needs to be inverted to make it the same as
   the other.  */

static bool
cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
		    gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
{
  enum tree_code code1 = gimple_cond_code (cond1);
  enum tree_code code2 = gimple_cond_code (cond2);

  *inverted_p = false;
  if (code1 == code2)
    ;
  else if (code1 == swap_tree_comparison (code2))
    std::swap (lhs2, rhs2);
  else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
    *inverted_p = true;
  else if (code1 == invert_tree_comparison
		      (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
    {
      std::swap (lhs2, rhs2);
      *inverted_p = true;
    }
  else
    return false;

  return ((expressions_equal_p (lhs1, lhs2)
	   && expressions_equal_p (rhs1, rhs2))
	  || (commutative_tree_code (code1)
	      && expressions_equal_p (lhs1, rhs2)
	      && expressions_equal_p (rhs1, lhs2)));
}
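/* For example, a_1 < b_2 matches b_2 > a_1 directly (operands swapped),
   while a_1 < b_2 against a_1 >= b_2 matches with *INVERTED_P set; the
   commutative case at the end additionally lets a_1 == b_2 match
   b_2 == a_1.  */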
/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
{
  if (vp1->hashcode != vp2->hashcode)
    return false;

  if (vp1->block != vp2->block)
    {
      if (EDGE_COUNT (vp1->block->preds) != EDGE_COUNT (vp2->block->preds))
	return false;

      switch (EDGE_COUNT (vp1->block->preds))
	{
	case 1:
	  /* Single-arg PHIs are just copies.  */
	  break;

	case 2:
	  {
	    /* Rule out backedges into the PHI.  */
	    if (vp1->block->loop_father->header == vp1->block
		|| vp2->block->loop_father->header == vp2->block)
	      return false;

	    /* If the PHI nodes do not have compatible types
	       they are not the same.  */
	    if (!types_compatible_p (vp1->type, vp2->type))
	      return false;

	    basic_block idom1
	      = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
	    basic_block idom2
	      = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
	    /* If the immediate dominators end in switch stmts multiple
	       values may end up in the same PHI arg via intermediate
	       CFG merges.  */
	    if (EDGE_COUNT (idom1->succs) != 2
		|| EDGE_COUNT (idom2->succs) != 2)
	      return false;

	    /* Verify the controlling stmt is the same.  */
	    gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
	    gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
	    if (! last1 || ! last2)
	      return false;
	    bool inverted_p;
	    if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
				      last2, vp2->cclhs, vp2->ccrhs,
				      &inverted_p))
	      return false;

	    /* Get at true/false controlled edges into the PHI.  */
	    edge te1, te2, fe1, fe2;
	    if (! extract_true_false_controlled_edges (idom1, vp1->block,
						       &te1, &fe1)
		|| ! extract_true_false_controlled_edges (idom2, vp2->block,
							  &te2, &fe2))
	      return false;

	    /* Swap edges if the second condition is the inverted of the
	       first.  */
	    if (inverted_p)
	      std::swap (te2, fe2);

	    /* Since we do not know which edge will be executed we have
	       to be careful when matching VN_TOP.  Be conservative and
	       only match VN_TOP == VN_TOP for now, we could allow
	       VN_TOP on the not prevailing PHI though.  See for example
	       PR81181.  */
	    if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
				       vp2->phiargs[te2->dest_idx], false)
		|| ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
					  vp2->phiargs[fe2->dest_idx], false))
	      return false;

	    return true;
	  }

	default:
	  return false;
	}
    }

  /* If the PHI nodes do not have compatible types
     they are not the same.  */
  if (!types_compatible_p (vp1->type, vp2->type))
    return false;

  /* Any phi in the same block will have its arguments in the
     same edge order, because of how we store phi nodes.  */
  unsigned nargs = EDGE_COUNT (vp1->block->preds);
  for (unsigned i = 0; i < nargs; ++i)
    {
      tree phi1op = vp1->phiargs[i];
      tree phi2op = vp2->phiargs[i];
      if (phi1op == phi2op)
	continue;
      if (!expressions_equal_p (phi1op, phi2op, false))
	return false;
    }

  return true;
}
/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

static tree
vn_phi_lookup (gimple *phi, bool backedges_varying_p)
{
  vn_phi_s **slot;
  struct vn_phi_s *vp1;
  edge e;
  edge_iterator ei;

  vp1 = XALLOCAVAR (struct vn_phi_s,
		    sizeof (struct vn_phi_s)
		    + (gimple_phi_num_args (phi) - 1) * sizeof (tree));

  /* Canonicalize the SSA_NAME's to their value number.  */
  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
    {
      tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      if (TREE_CODE (def) == SSA_NAME
	  && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
	{
	  if (ssa_undefined_value_p (def, false))
	    def = VN_TOP;
	  else
	    def = SSA_VAL (def);
	}
      vp1->phiargs[e->dest_idx] = def;
    }
  vp1->type = TREE_TYPE (gimple_phi_result (phi));
  vp1->block = gimple_bb (phi);
  /* Extract values of the controlling condition.  */
  vp1->cclhs = NULL_TREE;
  vp1->ccrhs = NULL_TREE;
  basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
  if (EDGE_COUNT (idom1->succs) == 2)
    if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
      {
	/* ??? We want to use SSA_VAL here.  But possibly not
	   allow VN_TOP.  */
	vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
	vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
      }
  vp1->hashcode = vn_phi_compute_hash (vp1);
  slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, NO_INSERT);
  if (!slot)
    return NULL_TREE;
  return (*slot)->result;
}
/* Insert PHI into the current hash table with a value number of
   RESULT.  */

static vn_phi_t
vn_phi_insert (gimple *phi, tree result, bool backedges_varying_p)
{
  vn_phi_s **slot;
  vn_phi_t vp1 = (vn_phi_t) obstack_alloc (&vn_tables_obstack,
					   sizeof (vn_phi_s)
					   + ((gimple_phi_num_args (phi) - 1)
					      * sizeof (tree)));
  edge e;
  edge_iterator ei;

  /* Canonicalize the SSA_NAME's to their value number.  */
  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
    {
      tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      if (TREE_CODE (def) == SSA_NAME
	  && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
	{
	  if (ssa_undefined_value_p (def, false))
	    def = VN_TOP;
	  else
	    def = SSA_VAL (def);
	}
      vp1->phiargs[e->dest_idx] = def;
    }
  vp1->value_id = VN_INFO (result)->value_id;
  vp1->type = TREE_TYPE (gimple_phi_result (phi));
  vp1->block = gimple_bb (phi);
  /* Extract values of the controlling condition.  */
  vp1->cclhs = NULL_TREE;
  vp1->ccrhs = NULL_TREE;
  basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
  if (EDGE_COUNT (idom1->succs) == 2)
    if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
      {
	/* ??? We want to use SSA_VAL here.  But possibly not
	   allow VN_TOP.  */
	vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
	vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
      }
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
  gcc_assert (!*slot);
  *slot = vp1;
  vp1->next = last_inserted_phi;
  last_inserted_phi = vp1;
  return vp1;
}
/* Return true if BB1 is dominated by BB2 taking into account edges
   that are not executable.  When ALLOW_BACK is false consider not
   executable backedges as executable.  */

static bool
dominated_by_p_w_unex (basic_block bb1, basic_block bb2, bool allow_back)
{
  edge_iterator ei;
  edge e;

  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
    return true;

  /* Before iterating we'd like to know if there exists a
     (executable) path from bb2 to bb1 at all, if not we can
     directly return false.  For now simply iterate once.  */

  /* Iterate to the single executable bb1 predecessor.  */
  if (EDGE_COUNT (bb1->preds) > 1)
    {
      edge prede = NULL;
      FOR_EACH_EDGE (e, ei, bb1->preds)
	if ((e->flags & EDGE_EXECUTABLE)
	    || (!allow_back && (e->flags & EDGE_DFS_BACK)))
	  {
	    if (prede)
	      {
		prede = NULL;
		break;
	      }
	    prede = e;
	  }
      if (prede)
	{
	  bb1 = prede->src;

	  /* Re-do the dominance check with changed bb1.  */
	  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
	    return true;
	}
    }

  /* Iterate to the single executable bb2 successor.  */
  if (EDGE_COUNT (bb2->succs) > 1)
    {
      edge succe = NULL;
      FOR_EACH_EDGE (e, ei, bb2->succs)
	if ((e->flags & EDGE_EXECUTABLE)
	    || (!allow_back && (e->flags & EDGE_DFS_BACK)))
	  {
	    if (succe)
	      {
		succe = NULL;
		break;
	      }
	    succe = e;
	  }
      if (succe)
	{
	  /* Verify the reached block is only reached through succe.
	     If there is only one edge we can spare us the dominator
	     check and iterate directly.  */
	  if (EDGE_COUNT (succe->dest->preds) > 1)
	    {
	      FOR_EACH_EDGE (e, ei, succe->dest->preds)
		if (e != succe
		    && ((e->flags & EDGE_EXECUTABLE)
			|| (!allow_back && (e->flags & EDGE_DFS_BACK))))
		  {
		    succe = NULL;
		    break;
		  }
	    }
	  if (succe)
	    {
	      bb2 = succe->dest;

	      /* Re-do the dominance check with changed bb2.  */
	      if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
		return true;
	    }
	}
    }

  /* We could now iterate updating bb1 / bb2.  */
  return false;
}
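/* This is a poor-man's refinement of the dominance query: if bb1 has a
   single executable predecessor, or bb2 a single executable successor
   whose destination is only reached through it, the query is retried
   from there.  That handles the common case where a not-executable edge
   breaks the dominance relation recorded for the unpruned CFG.  */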
/* Set the value number of FROM to TO, return true if it has changed
   as a result.  */

static inline bool
set_ssa_val_to (tree from, tree to)
{
  vn_ssa_aux_t from_info = VN_INFO (from);
  tree currval = from_info->valnum; // SSA_VAL (from)
  poly_int64 toff, coff;
  bool curr_undefined = false;
  bool curr_invariant = false;

  /* The only thing we allow as value numbers are ssa_names
     and invariants.  So assert that here.  We don't allow VN_TOP
     as visiting a stmt should produce a value-number other than
     that.
     ??? Still VN_TOP can happen for unreachable code, so force
     it to varying in that case.  Not all code is prepared to
     get VN_TOP on valueization.  */
  if (to == VN_TOP)
    {
      /* ??? When iterating and visiting PHI <undef, backedge-value>
	 for the first time we rightfully get VN_TOP and we need to
	 preserve that to optimize for example gcc.dg/tree-ssa/ssa-sccvn-2.c.
	 With SCCVN we were simply lucky we iterated the other PHI
	 cycles first and thus visited the backedge-value DEF.  */
      if (currval == VN_TOP)
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Forcing value number to varying on "
		 "receiving VN_TOP\n");
      to = from;
    }

  gcc_checking_assert (to != NULL_TREE
		       && ((TREE_CODE (to) == SSA_NAME
			    && (to == from || SSA_VAL (to) == to))
			   || is_gimple_min_invariant (to)));

  if (from != to)
    {
      if (currval == from)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Not changing value number of ");
	      print_generic_expr (dump_file, from);
	      fprintf (dump_file, " from VARYING to ");
	      print_generic_expr (dump_file, to);
	      fprintf (dump_file, "\n");
	    }
	  return false;
	}
      curr_invariant = is_gimple_min_invariant (currval);
      curr_undefined = (TREE_CODE (currval) == SSA_NAME
			&& ssa_undefined_value_p (currval, false));
      if (currval != VN_TOP
	  && !curr_invariant
	  && !curr_undefined
	  && is_gimple_min_invariant (to))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Forcing VARYING instead of changing "
		       "value number of ");
	      print_generic_expr (dump_file, from);
	      fprintf (dump_file, " from ");
	      print_generic_expr (dump_file, currval);
	      fprintf (dump_file, " (non-constant) to ");
	      print_generic_expr (dump_file, to);
	      fprintf (dump_file, " (constant)\n");
	    }
	  to = from;
	}
      else if (currval != VN_TOP
	       && !curr_undefined
	       && TREE_CODE (to) == SSA_NAME
	       && ssa_undefined_value_p (to, false))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Forcing VARYING instead of changing "
		       "value number of ");
	      print_generic_expr (dump_file, from);
	      fprintf (dump_file, " from ");
	      print_generic_expr (dump_file, currval);
	      fprintf (dump_file, " (non-undefined) to ");
	      print_generic_expr (dump_file, to);
	      fprintf (dump_file, " (undefined)\n");
	    }
	  to = from;
	}
      else if (TREE_CODE (to) == SSA_NAME
	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
	to = from;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to);
    }

  if (currval != to
      && !operand_equal_p (currval, to, 0)
      /* Different undefined SSA names are not actually different.  See
	 PR82320 for a testcase were we'd otherwise not terminate iteration.  */
      && !(curr_undefined
	   && TREE_CODE (to) == SSA_NAME
	   && ssa_undefined_value_p (to, false))
      /* ??? For addresses involving volatile objects or types operand_equal_p
	 does not reliably detect ADDR_EXPRs as equal.  We know we are only
	 getting invariant gimple addresses here, so can use
	 get_addr_base_and_unit_offset to do this comparison.  */
      && !(TREE_CODE (currval) == ADDR_EXPR
	   && TREE_CODE (to) == ADDR_EXPR
	   && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
	       == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
	   && known_eq (coff, toff)))
    {
      if (to != from
	  && currval != VN_TOP
	  && !curr_undefined
	  /* We do not want to allow lattice transitions from one value
	     to another since that may lead to not terminating iteration
	     (see PR95049).  Since there's no convenient way to check
	     for the allowed transition of VAL -> PHI (loop entry value,
	     same on two PHIs, to same PHI result) we restrict the check
	     to invariants.  */
	  && curr_invariant
	  && is_gimple_min_invariant (to))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, " forced VARYING");
	  to = from;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " (changed)\n");
      from_info->valnum = to;
      return true;
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n");
  return false;
}
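/* The effective lattice for an SSA name is VN_TOP -> value -> VARYING
   (the name numbering to itself).  Changing from one invariant to a
   different invariant is forced to VARYING above so that the RPO
   iteration cannot oscillate between values (PR95049).  */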
/* Set all definitions in STMT to value number to themselves.
   Return true if a value number changed.  */

static bool
defs_to_varying (gimple *stmt)
{
  bool changed = false;
  ssa_op_iter iter;
  def_operand_p defp;

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);
      changed |= set_ssa_val_to (def, def);
    }
  return changed;
}

/* Visit a copy between LHS and RHS, return true if the value number
   changed.  */

static bool
visit_copy (tree lhs, tree rhs)
{
  /* Valueize.  */
  rhs = SSA_VAL (rhs);

  return set_ssa_val_to (lhs, rhs);
}
/* Lookup a value for OP in type WIDE_TYPE where the value in type of OP
   is the same.  */

static tree
valueized_wider_op (tree wide_type, tree op, bool allow_truncate)
{
  if (TREE_CODE (op) == SSA_NAME)
    op = vn_valueize (op);

  /* Either we have the op widened available.  */
  tree ops[3] = {};
  ops[0] = op;
  tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
				       wide_type, ops, NULL);
  if (tem)
    return tem;

  /* Or the op is truncated from some existing value.  */
  if (allow_truncate && TREE_CODE (op) == SSA_NAME)
    {
      gimple *def = SSA_NAME_DEF_STMT (op);
      if (is_gimple_assign (def)
	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
	{
	  tem = gimple_assign_rhs1 (def);
	  if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
	    {
	      if (TREE_CODE (tem) == SSA_NAME)
		tem = vn_valueize (tem);
	      return tem;
	    }
	}
    }

  /* For constants simply extend it.  */
  if (TREE_CODE (op) == INTEGER_CST)
    return wide_int_to_tree (wide_type, wi::to_widest (op));

  return NULL_TREE;
}
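/* E.g. when asked for a_1 in a wider type this returns either an
   existing (T)a_1 conversion recorded in the nary table, the original
   wide value if a_1 itself is the truncation of one, or a directly
   extended constant.  */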
/* Visit a nary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_nary_op (tree lhs, gassign *stmt)
{
  vn_nary_op_t vnresult;
  tree result = vn_nary_op_lookup_stmt (stmt, &vnresult);
  if (! result && vnresult)
    result = vn_nary_op_get_predicated_value (vnresult, gimple_bb (stmt));
  if (result)
    return set_ssa_val_to (lhs, result);

  /* Do some special pattern matching for redundancies of operations
     in different types.  */
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  switch (code)
    {
    CASE_CONVERT:
      /* Match arithmetic done in a different type where we can easily
	 substitute the result from some earlier sign-changed or widened
	 operation.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (rhs1) == SSA_NAME
	  /* We only handle sign-changes, zero-extension -> & mask or
	     sign-extension if we know the inner operation doesn't
	     overflow.  */
	  && (((TYPE_UNSIGNED (TREE_TYPE (rhs1))
		|| (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		    && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
	       && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
	      || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
	{
	  gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
	  if (def
	      && (gimple_assign_rhs_code (def) == PLUS_EXPR
		  || gimple_assign_rhs_code (def) == MINUS_EXPR
		  || gimple_assign_rhs_code (def) == MULT_EXPR))
	    {
	      tree ops[3] = {};
	      /* When requiring a sign-extension we cannot model a
		 previous truncation with a single op so don't bother.  */
	      bool allow_truncate = TYPE_UNSIGNED (TREE_TYPE (rhs1));
	      /* Either we have the op widened available.  */
	      ops[0] = valueized_wider_op (type, gimple_assign_rhs1 (def),
					   allow_truncate);
	      if (ops[0])
		ops[1] = valueized_wider_op (type, gimple_assign_rhs2 (def),
					     allow_truncate);
	      if (ops[0] && ops[1])
		{
		  ops[0] = vn_nary_op_lookup_pieces
		      (2, gimple_assign_rhs_code (def), type, ops, NULL);
		  /* We have wider operation available.  */
		  if (ops[0]
		      /* If the leader is a wrapping operation we can
			 insert it for code hoisting w/o introducing
			 undefined overflow.  If it is not it has to
			 be available.  See PR86554.  */
		      && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (ops[0]))
			  || (rpo_avail && vn_context_bb
			      && rpo_avail->eliminate_avail (vn_context_bb,
							     ops[0]))))
		    {
		      unsigned lhs_prec = TYPE_PRECISION (type);
		      unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
		      if (lhs_prec == rhs_prec
			  || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
			      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
			{
			  gimple_match_op match_op (gimple_match_cond::UNCOND,
						    NOP_EXPR, type, ops[0]);
			  result = vn_nary_build_or_lookup (&match_op);
			  if (result)
			    {
			      bool changed = set_ssa_val_to (lhs, result);
			      vn_nary_op_insert_stmt (stmt, result);
			      return changed;
			    }
			}
		      else
			{
			  tree mask = wide_int_to_tree
			      (type, wi::mask (rhs_prec, false, lhs_prec));
			  gimple_match_op match_op (gimple_match_cond::UNCOND,
						    BIT_AND_EXPR,
						    TREE_TYPE (lhs),
						    ops[0], mask);
			  result = vn_nary_build_or_lookup (&match_op);
			  if (result)
			    {
			      bool changed = set_ssa_val_to (lhs, result);
			      vn_nary_op_insert_stmt (stmt, result);
			      return changed;
			    }
			}
		    }
		}
	    }
	}
      break;
    case BIT_AND_EXPR:
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST
	  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)
	  && default_vn_walk_kind != VN_NOWALK
	  && BITS_PER_UNIT == 8
	  && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
	  && !integer_all_onesp (gimple_assign_rhs2 (stmt))
	  && !integer_zerop (gimple_assign_rhs2 (stmt)))
	{
	  gassign *ass = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
	  if (ass
	      && !gimple_has_volatile_ops (ass)
	      && vn_get_stmt_kind (ass) == VN_REFERENCE)
	    {
	      tree last_vuse = gimple_vuse (ass);
	      tree op = gimple_assign_rhs1 (ass);
	      tree result = vn_reference_lookup (op, gimple_vuse (ass),
						 default_vn_walk_kind,
						 NULL, true, &last_vuse,
						 gimple_assign_rhs2 (stmt));
	      if (result
		  && useless_type_conversion_p (TREE_TYPE (result),
						TREE_TYPE (op)))
		return set_ssa_val_to (lhs, result);
	    }
	}
      break;
    case TRUNC_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* Fallthru.  */
    case RDIV_EXPR:
    case MULT_EXPR:
      /* Match up ([-]a){/,*}([-])b with v=a{/,*}b, replacing it with -v.  */
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (type))
	{
	  tree rhs[2];
	  rhs[0] = rhs1;
	  rhs[1] = gimple_assign_rhs2 (stmt);
	  for (unsigned i = 0; i <= 1; ++i)
	    {
	      unsigned j = i == 0 ? 1 : 0;
	      tree ops[2];
	      gimple_match_op match_op (gimple_match_cond::UNCOND,
					NEGATE_EXPR, type, rhs[i]);
	      ops[i] = vn_nary_build_or_lookup_1 (&match_op, false, true);
	      ops[j] = rhs[j];
	      if (ops[i]
		  && (ops[0] = vn_nary_op_lookup_pieces (2, code,
							 type, ops, NULL)))
		{
		  gimple_match_op match_op (gimple_match_cond::UNCOND,
					    NEGATE_EXPR, type, ops[0]);
		  result = vn_nary_build_or_lookup_1 (&match_op, true, false);
		  if (result)
		    {
		      bool changed = set_ssa_val_to (lhs, result);
		      vn_nary_op_insert_stmt (stmt, result);
		      return changed;
		    }
		}
	    }
	}
      break;
    default:
      break;
    }

  bool changed = set_ssa_val_to (lhs, lhs);
  vn_nary_op_insert_stmt (stmt, lhs);
  return changed;
}
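/* The CASE_CONVERT handling above catches sequences like

     short t_3 = a_1 + b_2;
     int   x_4 = (int) t_3;

   where an int addition of the widened operands is already available:
   x_4 can reuse that value (possibly masked for zero-extension) instead
   of keeping the narrow add plus conversion.  */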
/* Visit a call STMT storing into LHS.  Return true if the value number
   of the LHS has changed as a result.  */

static bool
visit_reference_op_call (tree lhs, gcall *stmt)
{
  bool changed = false;
  struct vn_reference_s vr1;
  vn_reference_t vnresult = NULL;
  tree vdef = gimple_vdef (stmt);
  modref_summary *summary;

  /* Non-ssa lhs is handled in copy_reference_ops_from_call.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    lhs = NULL_TREE;

  vn_reference_lookup_call (stmt, &vnresult, &vr1);

  /* If the lookup did not succeed for pure functions try to use
     modref info to find a candidate to CSE to.  */
  const unsigned accesses_limit = 8;
  if (!vnresult
      && !vdef
      && lhs
      && gimple_vuse (stmt)
      && (((summary = get_modref_function_summary (stmt, NULL))
	   && !summary->global_memory_read
	   && summary->load_accesses < accesses_limit)
	  || gimple_call_flags (stmt) & ECF_CONST))
    {
      /* First search if we can do something useful and build a
	 vector of all loads we have to check.  */
      bool unknown_memory_access = false;
      auto_vec<ao_ref, accesses_limit> accesses;
      unsigned load_accesses = summary ? summary->load_accesses : 0;
      if (!unknown_memory_access)
	/* Add loads done as part of setting up the call arguments.
	   That's also necessary for CONST functions which will
	   not have a modref summary.  */
	for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (TREE_CODE (arg) != SSA_NAME
		&& !is_gimple_min_invariant (arg))
	      {
		if (accesses.length () >= accesses_limit - load_accesses)
		  {
		    unknown_memory_access = true;
		    break;
		  }
		accesses.quick_grow (accesses.length () + 1);
		ao_ref_init (&accesses.last (), arg);
	      }
	  }
      if (summary && !unknown_memory_access)
	{
	  /* Add loads as analyzed by IPA modref.  */
	  for (auto base_node : summary->loads->bases)
	    if (unknown_memory_access)
	      break;
	    else for (auto ref_node : base_node->refs)
	      if (unknown_memory_access)
		break;
	      else for (auto access_node : ref_node->accesses)
		{
		  accesses.quick_grow (accesses.length () + 1);
		  ao_ref *r = &accesses.last ();
		  if (!access_node.get_ao_ref (stmt, r))
		    {
		      /* Initialize a ref based on the argument and
			 unknown offset if possible.  */
		      tree arg = access_node.get_call_arg (stmt);
		      if (arg && TREE_CODE (arg) == SSA_NAME)
			arg = SSA_VAL (arg);
		      if (arg
			  && TREE_CODE (arg) == ADDR_EXPR
			  && (arg = get_base_address (arg))
			  && DECL_P (arg))
			{
			  ao_ref_init (r, arg);
			  r->ref = NULL_TREE;
			  r->base = arg;
			}
		      else
			{
			  unknown_memory_access = true;
			  break;
			}
		    }
		  r->base_alias_set = base_node->base;
		  r->ref_alias_set = ref_node->ref;
		}
	}

      /* Walk the VUSE->VDEF chain optimistically trying to find an entry
	 for the call in the hashtable.  */
      unsigned limit = (unknown_memory_access
			? 0
			: (param_sccvn_max_alias_queries_per_access
			   / (accesses.length () + 1)));
      tree saved_vuse = vr1.vuse;
      hashval_t saved_hashcode = vr1.hashcode;
      while (limit > 0 && !vnresult && !SSA_NAME_IS_DEFAULT_DEF (vr1.vuse))
	{
	  vr1.hashcode = vr1.hashcode - SSA_NAME_VERSION (vr1.vuse);
	  gimple *def = SSA_NAME_DEF_STMT (vr1.vuse);
	  /* ??? We could use fancy stuff like in walk_non_aliased_vuses, but
	     do not bother for now.  */
	  if (is_a <gphi *> (def))
	    break;
	  vr1.vuse = vuse_ssa_val (gimple_vuse (def));
	  vr1.hashcode = vr1.hashcode + SSA_NAME_VERSION (vr1.vuse);
	  vn_reference_lookup_1 (&vr1, &vnresult);
	  limit--;
	}

      /* If we found a candidate to CSE to verify it is valid.  */
      if (vnresult && !accesses.is_empty ())
	{
	  tree vuse = vuse_ssa_val (gimple_vuse (stmt));
	  while (vnresult && vuse != vr1.vuse)
	    {
	      gimple *def = SSA_NAME_DEF_STMT (vuse);
	      for (auto &ref : accesses)
		{
		  /* ??? stmt_may_clobber_ref_p_1 does per stmt constant
		     analysis overhead that we might be able to cache.  */
		  if (stmt_may_clobber_ref_p_1 (def, &ref, true))
		    {
		      vnresult = NULL;
		      break;
		    }
		}
	      vuse = vuse_ssa_val (gimple_vuse (def));
	    }
	}
      vr1.vuse = saved_vuse;
      vr1.hashcode = saved_hashcode;
    }

  if (vnresult)
    {
      if (vdef)
	{
	  if (vnresult->result_vdef)
	    changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
	  else if (!lhs && gimple_call_lhs (stmt))
	    /* If stmt has non-SSA_NAME lhs, value number the vdef to itself,
	       as the call still acts as a lhs store.  */
	    changed |= set_ssa_val_to (vdef, vdef);
	  else
	    /* If the call was discovered to be pure or const reflect
	       that as far as possible.  */
	    changed |= set_ssa_val_to (vdef,
				       vuse_ssa_val (gimple_vuse (stmt)));
	}

      if (!vnresult->result && lhs)
	vnresult->result = lhs;

      if (vnresult->result && lhs)
	changed |= set_ssa_val_to (lhs, vnresult->result);
    }
  else
    {
      vn_reference_t vr2;
      vn_reference_s **slot;
      tree vdef_val = vdef;
      if (vdef)
	{
	  /* If we value numbered an indirect function's target to
	     one not clobbering memory, value number its VDEF to its
	     VUSE.  */
	  tree fn = gimple_call_fn (stmt);
	  if (fn && TREE_CODE (fn) == SSA_NAME)
	    {
	      fn = SSA_VAL (fn);
	      if (TREE_CODE (fn) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
		  && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
		      & (ECF_CONST | ECF_PURE))
		  /* If stmt has non-SSA_NAME lhs, value number the
		     vdef to itself, as the call still acts as a lhs
		     store.  */
		  && (lhs || gimple_call_lhs (stmt) == NULL_TREE))
		vdef_val = vuse_ssa_val (gimple_vuse (stmt));
	    }
	  changed |= set_ssa_val_to (vdef, vdef_val);
	}
      if (lhs)
	changed |= set_ssa_val_to (lhs, lhs);
      vr2 = XOBNEW (&vn_tables_obstack, vn_reference_s);
      vr2->vuse = vr1.vuse;
      /* As we are not walking the virtual operand chain we know the
	 shared_lookup_references are still original so we can re-use
	 them here.  */
      vr2->operands = vr1.operands.copy ();
      vr2->type = vr1.type;
      vr2->punned = vr1.punned;
      vr2->set = vr1.set;
      vr2->base_set = vr1.base_set;
      vr2->hashcode = vr1.hashcode;
      vr2->result = lhs;
      vr2->result_vdef = vdef_val;
      vr2->value_id = 0;
      slot = valid_info->references->find_slot_with_hash (vr2, vr2->hashcode,
							  INSERT);
      gcc_assert (!*slot);
      *slot = vr2;
      vr2->next = last_inserted_ref;
      last_inserted_ref = vr2;
    }

  return changed;
}
/* Visit a load from a reference operator RHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_load (tree lhs, tree op, gimple *stmt)
{
  bool changed = false;
  tree result;
  vn_reference_t res;

  tree vuse = gimple_vuse (stmt);
  tree last_vuse = vuse;
  result = vn_reference_lookup (op, vuse, default_vn_walk_kind, &res, true,
				&last_vuse);

  /* We handle type-punning through unions by value-numbering based
     on offset and size of the access.  Be prepared to handle a
     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
  if (result
      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
    {
      /* Avoid the type punning in case the result mode has padding where
	 the op we lookup has not.  */
      if (maybe_lt (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (result))),
		    GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (op)))))
	result = NULL_TREE;
      else
	{
	  /* We will be setting the value number of lhs to the value number
	     of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
	     So first simplify and lookup this expression to see if it
	     is already available.  */
	  gimple_match_op res_op (gimple_match_cond::UNCOND,
				  VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
	  result = vn_nary_build_or_lookup (&res_op);
	  if (result
	      && TREE_CODE (result) == SSA_NAME
	      && VN_INFO (result)->needs_insertion)
	    /* Track whether this is the canonical expression for different
	       typed loads.  We use that as a stopgap measure for code
	       hoisting when dealing with floating point loads.  */
	    res->punned = true;
	}

      /* When building the conversion fails avoid inserting the reference
	 again.  */
      if (!result)
	return set_ssa_val_to (lhs, lhs);
    }

  if (result)
    changed = set_ssa_val_to (lhs, result);
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
      if (vuse && SSA_VAL (last_vuse) != SSA_VAL (vuse))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Using extra use virtual operand ");
	      print_generic_expr (dump_file, last_vuse);
	      fprintf (dump_file, "\n");
	    }
	  vn_reference_insert (op, lhs, vuse, NULL_TREE);
	}
    }

  return changed;
}
/* Visit a store to a reference operator LHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_store (tree lhs, tree op, gimple *stmt)
{
  bool changed = false;
  vn_reference_t vnresult = NULL;
  tree assign;
  bool resultsame = false;
  tree vuse = gimple_vuse (stmt);
  tree vdef = gimple_vdef (stmt);

  if (TREE_CODE (op) == SSA_NAME)
    op = SSA_VAL (op);

  /* First we want to lookup using the *vuses* from the store and see
     if there the last store to this location with the same address
     had the same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store is the same as the
     last store to this location, then this store will produce the
     same memory state as that store.

     In this case the vdef versions for this store are value numbered to those
     vuse versions, since they represent the same memory state after
     this store.

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */

  vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
  if (vnresult
      && vnresult->result)
    {
      tree result = vnresult->result;
      gcc_checking_assert (TREE_CODE (result) != SSA_NAME
			   || result == SSA_VAL (result));
      resultsame = expressions_equal_p (result, op);
      if (resultsame)
	{
	  /* If the TBAA state isn't compatible for downstream reads
	     we cannot value-number the VDEFs the same.  */
	  ao_ref lhs_ref;
	  ao_ref_init (&lhs_ref, lhs);
	  alias_set_type set = ao_ref_alias_set (&lhs_ref);
	  alias_set_type base_set = ao_ref_base_alias_set (&lhs_ref);
	  if ((vnresult->set != set
	       && ! alias_set_subset_of (set, vnresult->set))
	      || (vnresult->base_set != base_set
		  && ! alias_set_subset_of (base_set, vnresult->base_set)))
	    resultsame = false;
	}
    }

  if (!resultsame)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "No store match\n");
	  fprintf (dump_file, "Value numbering store ");
	  print_generic_expr (dump_file, lhs);
	  fprintf (dump_file, " to ");
	  print_generic_expr (dump_file, op);
	  fprintf (dump_file, "\n");
	}
      /* Have to set value numbers before insert, since insert is
	 going to valueize the references in-place.  */
      if (vdef)
	changed |= set_ssa_val_to (vdef, vdef);

      /* Do not insert structure copies into the tables.  */
      if (is_gimple_min_invariant (op)
	  || is_gimple_reg (op))
	vn_reference_insert (lhs, op, vdef, NULL);

      /* Only perform the following when being called from PRE
	 which embeds tail merging.  */
      if (default_vn_walk_kind == VN_WALK)
	{
	  assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
	  vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
	  if (!vnresult)
	    vn_reference_insert (assign, lhs, vuse, vdef);
	}
    }
  else
    {
      /* We had a match, so value number the vdef to have the value
	 number of the vuse it came from.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Store matched earlier value, "
		 "value numbering store vdefs to matching vuses.\n");

      changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
    }

  return changed;
}
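/* So for a pair of stores

     *p_1 = x_2;
     ... no intervening clobber of *p_1 ...
     *p_1 = x_2;

   the second store finds the first via the vuse-based lookup, the
   stored values compare equal and its vdef is value-numbered to its
   vuse, making the second store itself redundant for later
   elimination.  */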
/* Visit and value number PHI, return true if the value number
   changed.  When BACKEDGES_VARYING_P is true then assume all
   backedge values are varying.  When INSERTED is not NULL then
   this is just an ahead query for a possible iteration, set INSERTED
   to true if we'd insert into the hashtable.  */

static bool
visit_phi (gimple *phi, bool *inserted, bool backedges_varying_p)
{
  tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
  tree backedge_val = NULL_TREE;
  bool seen_non_backedge = false;
  tree sameval_base = NULL_TREE;
  poly_int64 soff, doff;
  unsigned n_executable = 0;
  edge_iterator ei;
  edge e;

  /* TODO: We could check for this in initialization, and replace this
     with a gcc_assert.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
    return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));

  /* We track whether a PHI was CSEd to avoid excessive iterations
     that would be necessary only because the PHI changed arguments
     but not value.  */
  if (!inserted)
    gimple_set_plf (phi, GF_PLF_1, false);

  /* See if all non-TOP arguments have the same value.  TOP is
     equivalent to everything, so we can ignore it.  */
  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
    if (e->flags & EDGE_EXECUTABLE)
      {
	tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);

	if (def == PHI_RESULT (phi))
	  continue;
	++n_executable;
	if (TREE_CODE (def) == SSA_NAME)
	  {
	    if (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK))
	      def = SSA_VAL (def);
	    if (e->flags & EDGE_DFS_BACK)
	      backedge_val = def;
	  }
	if (!(e->flags & EDGE_DFS_BACK))
	  seen_non_backedge = true;
	if (def == VN_TOP)
	  ;
	/* Ignore undefined defs for sameval but record one.  */
	else if (TREE_CODE (def) == SSA_NAME
		 && ! virtual_operand_p (def)
		 && ssa_undefined_value_p (def, false))
	  seen_undef = def;
	else if (sameval == VN_TOP)
	  sameval = def;
	else if (!expressions_equal_p (def, sameval))
	  {
	    /* We know we're arriving only with invariant addresses here,
	       try harder comparing them.  We can do some caching here
	       which we cannot do in expressions_equal_p.  */
	    if (TREE_CODE (def) == ADDR_EXPR
		&& TREE_CODE (sameval) == ADDR_EXPR
		&& sameval_base != (void *)-1)
	      {
		if (!sameval_base)
		  sameval_base = get_addr_base_and_unit_offset
				   (TREE_OPERAND (sameval, 0), &soff);
		if (!sameval_base)
		  sameval_base = (tree)(void *)-1;
		else if ((get_addr_base_and_unit_offset
			    (TREE_OPERAND (def, 0), &doff) == sameval_base)
			 && known_eq (soff, doff))
		  continue;
	      }
	    sameval = NULL_TREE;
	    break;
	  }
      }

  /* If the value we want to use is flowing over the backedge and we
     should take it as VARYING but it has a non-VARYING value drop to
     VARYING.
     If we value-number a virtual operand never value-number to the
     value from the backedge as that confuses the alias-walking code.
     See gcc.dg/torture/pr87176.c.  If the value is the same on a
     non-backedge everything is OK though.  */
  bool visited_p;
  if ((backedge_val
       && !seen_non_backedge
       && TREE_CODE (backedge_val) == SSA_NAME
       && sameval == backedge_val
       && (SSA_NAME_IS_VIRTUAL_OPERAND (backedge_val)
	   || SSA_VAL (backedge_val) != backedge_val))
      /* Do not value-number a virtual operand to sth not visited though
	 given that allows us to escape a region in alias walking.  */
      || (sameval
	  && TREE_CODE (sameval) == SSA_NAME
	  && !SSA_NAME_IS_DEFAULT_DEF (sameval)
	  && SSA_NAME_IS_VIRTUAL_OPERAND (sameval)
	  && (SSA_VAL (sameval, &visited_p), !visited_p)))
    /* Note this just drops to VARYING without inserting the PHI into
       the hashes.  */
    result = PHI_RESULT (phi);
  /* If none of the edges was executable keep the value-number at VN_TOP,
     if only a single edge is executable use its value.  */
  else if (n_executable <= 1)
    result = seen_undef ? seen_undef : sameval;
  /* If we saw only undefined values and VN_TOP use one of the
     undefined values.  */
  else if (sameval == VN_TOP)
    result = seen_undef ? seen_undef : sameval;
  /* First see if it is equivalent to a phi node in this block.  We prefer
     this as it allows IV elimination - see PRs 66502 and 67167.  */
  else if ((result = vn_phi_lookup (phi, backedges_varying_p)))
    {
      if (!inserted
	  && TREE_CODE (result) == SSA_NAME
	  && gimple_code (SSA_NAME_DEF_STMT (result)) == GIMPLE_PHI)
	{
	  gimple_set_plf (SSA_NAME_DEF_STMT (result), GF_PLF_1, true);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Marking CSEd to PHI node ");
	      print_gimple_expr (dump_file, SSA_NAME_DEF_STMT (result),
				 0, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }
	}
    }
  /* If all values are the same use that, unless we've seen undefined
     values as well and the value isn't constant.
     CCP/copyprop have the same restriction to not remove uninit warnings.  */
  else if (sameval
	   && (! seen_undef || is_gimple_min_invariant (sameval)))
    result = sameval;
  else
    {
      result = PHI_RESULT (phi);
      /* Only insert PHIs that are varying, for constant value numbers
	 we mess up equivalences otherwise as we are only comparing
	 the immediate controlling predicates.  */
      vn_phi_insert (phi, result, backedges_varying_p);
      if (inserted)
	*inserted = true;
    }

  return set_ssa_val_to (PHI_RESULT (phi), result);
}
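/* E.g. x_3 = PHI <a_1, a_1> value-numbers to a_1, a PHI whose only
   executable argument is a_1 likewise, and a PHI congruent to another
   PHI in the same block (same controlling condition and arguments) is
   CSEd via vn_phi_lookup; everything else gets its own value number
   and is entered into the PHI table.  */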
/* Try to simplify RHS using equivalences and constant folding.  */

static tree
try_to_simplify (gassign *stmt)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree tem;

  /* For stores we can end up simplifying a SSA_NAME rhs.  Just return
     in this case, there is no point in doing extra work.  */
  if (code == SSA_NAME)
    return NULL_TREE;

  /* First try constant folding based on our current lattice.  */
  mprts_hook = vn_lookup_simplify_result;
  tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
  mprts_hook = NULL;
  if (tem
      && (TREE_CODE (tem) == SSA_NAME
	  || is_gimple_min_invariant (tem)))
    return tem;

  return NULL_TREE;
}
/* Visit and value number STMT, return true if the value number of the
   LHS has changed as a result.  */

static bool
visit_stmt (gimple *stmt, bool backedges_varying_p = false)
{
  bool changed = false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbering stmt = ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  if (gimple_code (stmt) == GIMPLE_PHI)
    changed = visit_phi (stmt, NULL, backedges_varying_p);
  else if (gimple_has_volatile_ops (stmt))
    changed = defs_to_varying (stmt);
  else if (gassign *ass = dyn_cast <gassign *> (stmt))
    {
      enum tree_code code = gimple_assign_rhs_code (ass);
      tree lhs = gimple_assign_lhs (ass);
      tree rhs1 = gimple_assign_rhs1 (ass);
      tree simplified;

      /* Shortcut for copies.  Simplifying copies is pointless,
	 since we copy the expression and value they represent.  */
      if (code == SSA_NAME
	  && TREE_CODE (lhs) == SSA_NAME)
	{
	  changed = visit_copy (lhs, rhs1);
	  goto done;
	}
      simplified = try_to_simplify (ass);
      if (simplified)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "RHS ");
	      print_gimple_expr (dump_file, ass, 0);
	      fprintf (dump_file, " simplified to ");
	      print_generic_expr (dump_file, simplified);
	      fprintf (dump_file, "\n");
	    }
	}
      /* Setting value numbers to constants will occasionally
	 screw up phi congruence because constants are not
	 uniquely associated with a single ssa name that can be
	 looked up.  */
      if (simplified
	  && is_gimple_min_invariant (simplified)
	  && TREE_CODE (lhs) == SSA_NAME)
	{
	  changed = set_ssa_val_to (lhs, simplified);
	  goto done;
	}
      else if (simplified
	       && TREE_CODE (simplified) == SSA_NAME
	       && TREE_CODE (lhs) == SSA_NAME)
	{
	  changed = visit_copy (lhs, simplified);
	  goto done;
	}

      if ((TREE_CODE (lhs) == SSA_NAME
	   /* We can substitute SSA_NAMEs that are live over
	      abnormal edges with their constant value.  */
	   && !(gimple_assign_copy_p (ass)
		&& is_gimple_min_invariant (rhs1))
	   && !(simplified
		&& is_gimple_min_invariant (simplified))
	   && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	  /* Stores or copies from SSA_NAMEs that are live over
	     abnormal edges are a problem.  */
	  || (code == SSA_NAME
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
	changed = defs_to_varying (ass);
      else if (REFERENCE_CLASS_P (lhs)
	       || DECL_P (lhs))
	changed = visit_reference_op_store (lhs, rhs1, ass);
      else if (TREE_CODE (lhs) == SSA_NAME)
	{
	  if ((gimple_assign_copy_p (ass)
	       && is_gimple_min_invariant (rhs1))
	      || (simplified
		  && is_gimple_min_invariant (simplified)))
	    {
	      if (simplified)
		changed = set_ssa_val_to (lhs, simplified);
	      else
		changed = set_ssa_val_to (lhs, rhs1);
	    }
	  else
	    {
	      /* Visit the original statement.  */
	      switch (vn_get_stmt_kind (ass))
		{
		case VN_NARY:
		  changed = visit_nary_op (lhs, ass);
		  break;
		case VN_REFERENCE:
		  changed = visit_reference_op_load (lhs, rhs1, ass);
		  break;
		default:
		  changed = defs_to_varying (ass);
		  break;
		}
	    }
	}
      else
	changed = defs_to_varying (ass);
    }
  else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      tree lhs = gimple_call_lhs (call_stmt);
      if (lhs && TREE_CODE (lhs) == SSA_NAME)
	{
	  /* Try constant folding based on our current lattice.  */
	  tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
							    vn_valueize);
	  if (simplified)
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "call ");
		  print_gimple_expr (dump_file, call_stmt, 0);
		  fprintf (dump_file, " simplified to ");
		  print_generic_expr (dump_file, simplified);
		  fprintf (dump_file, "\n");
		}
	    }
	  /* Setting value numbers to constants will occasionally
	     screw up phi congruence because constants are not
	     uniquely associated with a single ssa name that can be
	     looked up.  */
	  if (simplified
	      && is_gimple_min_invariant (simplified))
	    {
	      changed = set_ssa_val_to (lhs, simplified);
	      if (gimple_vdef (call_stmt))
		changed |= set_ssa_val_to (gimple_vdef (call_stmt),
					   SSA_VAL (gimple_vuse (call_stmt)));
	      goto done;
	    }
	  else if (simplified
		   && TREE_CODE (simplified) == SSA_NAME)
	    {
	      changed = visit_copy (lhs, simplified);
	      if (gimple_vdef (call_stmt))
		changed |= set_ssa_val_to (gimple_vdef (call_stmt),
					   SSA_VAL (gimple_vuse (call_stmt)));
	      goto done;
	    }
	  else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	    {
	      changed = defs_to_varying (call_stmt);
	      goto done;
	    }
	}

      /* Pick up flags from a devirtualization target.  */
      tree fn = gimple_call_fn (stmt);
      int extra_fnflags = 0;
      if (fn && TREE_CODE (fn) == SSA_NAME)
	{
	  fn = SSA_VAL (fn);
	  if (TREE_CODE (fn) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
	    extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
	}
      if ((/* Calls to the same function with the same vuse
	      and the same operands do not necessarily return the same
	      value, unless they're pure or const.  */
	   ((gimple_call_flags (call_stmt) | extra_fnflags)
	    & (ECF_PURE | ECF_CONST))
	   /* If calls have a vdef, subsequent calls won't have
	      the same incoming vuse.  So, if 2 calls with vdef have the
	      same vuse, we know they're not subsequent.
	      We can value number 2 calls to the same function with the
	      same vuse and the same operands which are not subsequent
	      the same, because there is no code in the program that can
	      compare the 2 values...  */
	   || (gimple_vdef (call_stmt)
	       /* ... unless the call returns a pointer which does
		  not alias with anything else.  In which case the
		  information that the values are distinct is encoded
		  in the IL.  */
	       && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
	       /* Only perform the following when being called from PRE
		  which embeds tail merging.  */
	       && default_vn_walk_kind == VN_WALK))
	  /* Do not process .DEFERRED_INIT since that confuses uninit
	     analysis.  */
	  && !gimple_call_internal_p (call_stmt, IFN_DEFERRED_INIT))
	changed = visit_reference_op_call (lhs, call_stmt);
      else
	changed = defs_to_varying (call_stmt);
    }
  else
    changed = defs_to_varying (stmt);
 done:
  return changed;
}
/* Allocate a value number table.  */

static void
allocate_vn_table (vn_tables_t table, unsigned size)
{
  table->phis = new vn_phi_table_type (size);
  table->nary = new vn_nary_op_table_type (size);
  table->references = new vn_reference_table_type (size);
}

/* Free a value number table.  */

static void
free_vn_table (vn_tables_t table)
{
  /* Walk over elements and release vectors.  */
  vn_reference_iterator_type hir;
  vn_reference_t vr;
  FOR_EACH_HASH_TABLE_ELEMENT (*table->references, vr, vn_reference_t, hir)
    vr->operands.release ();
  delete table->phis;
  table->phis = NULL;
  delete table->nary;
  table->nary = NULL;
  delete table->references;
  table->references = NULL;
}
/* Set *ID according to RESULT.  */

static void
set_value_id_for_result (tree result, unsigned int *id)
{
  if (result && TREE_CODE (result) == SSA_NAME)
    *id = VN_INFO (result)->value_id;
  else if (result && is_gimple_min_invariant (result))
    *id = get_or_alloc_constant_value_id (result);
  else
    *id = get_next_value_id ();
}

/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  vn_nary_op_iterator_type hin;
  vn_phi_iterator_type hip;
  vn_reference_iterator_type hir;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     table.  */

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
    if (! vno->predicated_values)
      set_value_id_for_result (vno->u.result, &vno->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
    set_value_id_for_result (vp->result, &vp->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
			       hir)
    set_value_id_for_result (vr->result, &vr->value_id);
}
/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the maximum constant value id we have ever seen.  */

unsigned int
get_max_constant_value_id (void)
{
  return -next_constant_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  gcc_checking_assert ((int)next_value_id > 0);
  return next_value_id++;
}

/* Return the next unique value id for constants.  */

unsigned int
get_next_constant_value_id (void)
{
  gcc_checking_assert (next_constant_value_id < 0);
  return next_constant_value_id--;
}
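/* Illustrative note (not from the original sources): value ids for SSA
   names are positive and grow upwards, while constant value ids are
   negative and grow downwards, so the two ranges never collide.  Starting
   from fresh counters:

     get_next_value_id ()          returns 1, 2, 3, ...
     get_next_constant_value_id () returns -1, -2, -3, ...

   After handing out -1, -2 and -3 the constant counter is -4, so
   get_max_constant_value_id () returns 4; likewise get_max_value_id ()
   returns the first id not yet handed out.  */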
/* Compare two expressions E1 and E2 and return true if they are equal.
   If match_vn_top_optimistically is true then VN_TOP is equal to anything,
   otherwise VN_TOP only matches VN_TOP.  */

bool
expressions_equal_p (tree e1, tree e2, bool match_vn_top_optimistically)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If either one is VN_TOP consider them equal.  */
  if (match_vn_top_optimistically
      && (e1 == VN_TOP || e2 == VN_TOP))
    return true;

  /* SSA_NAME compare pointer equal.  */
  if (TREE_CODE (e1) == SSA_NAME || TREE_CODE (e2) == SSA_NAME)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}
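/* Illustrative example (not from the original sources): with
   match_vn_top_optimistically = true the optimistic lattice top compares
   equal to anything, e.g.

     expressions_equal_p (VN_TOP, build_int_cst (integer_type_node, 1), true)
       yields true, while the same call with false as last argument yields
       false because VN_TOP then only matches itself.

   Two distinct SSA names always compare unequal regardless of the flag
   since only pointer equality is used for them.  */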
/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_TRAPS (type))
	honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
				       honor_trapv, honor_nans, honor_snans,
				       rhs2, &handled);
  if (handled && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}
/* Return true if the reference operation REF may trap.  */

bool
vn_reference_may_trap (vn_reference_t ref)
{
  switch (ref->operands[0].opcode)
    {
    case MODIFY_EXPR:
    case CALL_EXPR:
      /* We do not handle calls.  */
      return true;
    case ADDR_EXPR:
      /* And toplevel address computations never trap.  */
      return false;
    default:;
    }

  vn_reference_op_t op;
  unsigned i;
  FOR_EACH_VEC_ELT (ref->operands, i, op)
    {
      switch (op->opcode)
	{
	case WITH_SIZE_EXPR:
	case TARGET_MEM_REF:
	  /* Always variable.  */
	  return true;
	case COMPONENT_REF:
	  if (op->op1 && TREE_CODE (op->op1) == SSA_NAME)
	    return true;
	  break;
	case ARRAY_RANGE_REF:
	  if (TREE_CODE (op->op0) == SSA_NAME)
	    return true;
	  break;
	case ARRAY_REF:
	  {
	    if (TREE_CODE (op->op0) != INTEGER_CST)
	      return true;

	    /* !in_array_bounds   */
	    tree domain_type = TYPE_DOMAIN (ref->operands[i+1].type);
	    if (!domain_type)
	      return true;

	    tree min = op->op1;
	    tree max = TYPE_MAX_VALUE (domain_type);
	    if (!min
		|| !max
		|| TREE_CODE (min) != INTEGER_CST
		|| TREE_CODE (max) != INTEGER_CST)
	      return true;

	    if (tree_int_cst_lt (op->op0, min)
		|| tree_int_cst_lt (max, op->op0))
	      return true;

	    break;
	  }
	case MEM_REF:
	  /* Nothing interesting in itself, the base is separate.  */
	  break;
	/* The following are the address bases.  */
	case SSA_NAME:
	  return false;
	case ADDR_EXPR:
	  if (op->op0)
	    return tree_could_trap_p (TREE_OPERAND (op->op0, 0));
	  return false;
	default:;
	}
    }
  return false;
}
eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
					    bitmap inserted_exprs_)
  : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
    el_todo (0), eliminations (0), insertions (0),
    inserted_exprs (inserted_exprs_)
{
  need_eh_cleanup = BITMAP_ALLOC (NULL);
  need_ab_cleanup = BITMAP_ALLOC (NULL);
}

eliminate_dom_walker::~eliminate_dom_walker ()
{
  BITMAP_FREE (need_eh_cleanup);
  BITMAP_FREE (need_ab_cleanup);
}
/* Return a leader for OP that is available at the current point of the
   eliminate domwalk.  */

tree
eliminate_dom_walker::eliminate_avail (basic_block, tree op)
{
  tree valnum = VN_INFO (op)->valnum;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (valnum))
	return valnum;
      if (avail.length () > SSA_NAME_VERSION (valnum))
	return avail[SSA_NAME_VERSION (valnum)];
    }
  else if (is_gimple_min_invariant (valnum))
    return valnum;
  return NULL_TREE;
}
/* At the current point of the eliminate domwalk make OP available.  */

void
eliminate_dom_walker::eliminate_push_avail (basic_block, tree op)
{
  tree valnum = VN_INFO (op)->valnum;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (avail.length () <= SSA_NAME_VERSION (valnum))
	avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1, true);
      tree pushop = op;
      if (avail[SSA_NAME_VERSION (valnum)])
	pushop = avail[SSA_NAME_VERSION (valnum)];
      avail_stack.safe_push (pushop);
      avail[SSA_NAME_VERSION (valnum)] = op;
    }
}
/* Insert the expression recorded by SCCVN for VAL at *GSI.  Returns
   the leader for the expression if insertion was successful.  */

tree
eliminate_dom_walker::eliminate_insert (basic_block bb,
					gimple_stmt_iterator *gsi, tree val)
{
  /* We can insert a sequence with a single assignment only.  */
  gimple_seq stmts = VN_INFO (val)->expr;
  if (!gimple_seq_singleton_p (stmts))
    return NULL_TREE;
  gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
  if (!stmt
      || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
	  && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
	  && gimple_assign_rhs_code (stmt) != NEGATE_EXPR
	  && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
	  && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
	      || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
    return NULL_TREE;

  tree op = gimple_assign_rhs1 (stmt);
  if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
      || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
    op = TREE_OPERAND (op, 0);
  tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (bb, op) : op;
  if (!leader)
    return NULL_TREE;

  tree res;
  stmts = NULL;
  if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
    res = gimple_build (&stmts, BIT_FIELD_REF,
			TREE_TYPE (val), leader,
			TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
			TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
  else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
    res = gimple_build (&stmts, BIT_AND_EXPR,
			TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
  else
    res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
			TREE_TYPE (val), leader);
  if (TREE_CODE (res) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (res)
      || gimple_bb (SSA_NAME_DEF_STMT (res)))
    {
      gimple_seq_discard (stmts);

      /* During propagation we have to treat SSA info conservatively
	 and thus we can end up simplifying the inserted expression
	 at elimination time to sth not defined in stmts.  */
      /* But then this is a redundancy we failed to detect.  Which means
	 res now has two values.  That doesn't play well with how
	 we track availability here, so give up.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  if (TREE_CODE (res) == SSA_NAME)
	    res = eliminate_avail (bb, res);
	  if (res)
	    {
	      fprintf (dump_file, "Failed to insert expression for value ");
	      print_generic_expr (dump_file, val);
	      fprintf (dump_file, " which is really fully redundant to ");
	      print_generic_expr (dump_file, res);
	      fprintf (dump_file, "\n");
	    }
	}

      return NULL_TREE;
    }

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  vn_ssa_aux_t vn_info = VN_INFO (res);
  vn_info->valnum = val;
  vn_info->visited = true;

  insertions++;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inserted ");
      print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
    }

  return res;
}
void
eliminate_dom_walker::eliminate_stmt (basic_block b, gimple_stmt_iterator *gsi)
{
  tree sprime = NULL_TREE;
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_get_lhs (stmt);
  if (lhs && TREE_CODE (lhs) == SSA_NAME
      && !gimple_has_volatile_ops (stmt)
      /* See PR43491.  Do not replace a global register variable when
	 it is the RHS of an assignment.  Do replace local register
	 variables since gcc does not guarantee a local variable will
	 be allocated in register.
	 ???  The fix isn't effective here.  This should instead
	 be ensured by not value-numbering them the same but treating
	 them like volatiles?  */
      && !(gimple_assign_single_p (stmt)
	   && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
	       && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
	       && is_global_var (gimple_assign_rhs1 (stmt)))))
    {
      sprime = eliminate_avail (b, lhs);
      if (!sprime)
	{
	  /* If there is no existing usable leader but SCCVN thinks
	     it has an expression it wants to use as replacement,
	     insert that.  */
	  tree val = VN_INFO (lhs)->valnum;
	  vn_ssa_aux_t vn_info;
	  if (val != VN_TOP
	      && TREE_CODE (val) == SSA_NAME
	      && (vn_info = VN_INFO (val), true)
	      && vn_info->needs_insertion
	      && vn_info->expr != NULL
	      && (sprime = eliminate_insert (b, gsi, val)) != NULL_TREE)
	    eliminate_push_avail (b, sprime);
	}

      /* If this now constitutes a copy duplicate points-to
	 and range info appropriately.  This is especially
	 important for inserted code.  See tree-ssa-copy.cc
	 for similar code.  */
      if (sprime
	  && TREE_CODE (sprime) == SSA_NAME)
	{
	  basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
	  if (POINTER_TYPE_P (TREE_TYPE (lhs))
	      && SSA_NAME_PTR_INFO (lhs)
	      && ! SSA_NAME_PTR_INFO (sprime))
	    {
	      duplicate_ssa_name_ptr_info (sprime,
					   SSA_NAME_PTR_INFO (lhs));
	      if (b != sprime_b)
		reset_flow_sensitive_info (sprime);
	    }
	  else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
		   && SSA_NAME_RANGE_INFO (lhs)
		   && ! SSA_NAME_RANGE_INFO (sprime)
		   && b == sprime_b)
	    duplicate_ssa_name_range_info (sprime, lhs);
	}

      /* Inhibit the use of an inserted PHI on a loop header when
	 the address of the memory reference is a simple induction
	 variable.  In other cases the vectorizer won't do anything
	 anyway (either it's loop invariant or a complicated
	 expression).  */
      if (sprime
	  && TREE_CODE (sprime) == SSA_NAME
	  && do_pre
	  && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
	  && loop_outer (b->loop_father)
	  && has_zero_uses (sprime)
	  && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
	  && gimple_assign_load_p (stmt))
	{
	  gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
	  basic_block def_bb = gimple_bb (def_stmt);
	  if (gimple_code (def_stmt) == GIMPLE_PHI
	      && def_bb->loop_father->header == def_bb)
	    {
	      loop_p loop = def_bb->loop_father;
	      ssa_op_iter iter;
	      tree op;
	      FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
		{
		  affine_iv iv;
		  def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
		  if (def_bb
		      && flow_bb_inside_loop_p (loop, def_bb)
		      && simple_iv (loop, loop, op, &iv, true))
		    break;
		}
	      if (op != NULL_TREE)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Not replacing ");
		      print_gimple_expr (dump_file, stmt, 0);
		      fprintf (dump_file, " with ");
		      print_generic_expr (dump_file, sprime);
		      fprintf (dump_file, " which would add a loop"
			       " carried dependence to loop %d\n",
			       loop->num);
		    }
		  /* Don't keep sprime available.  */
		  sprime = NULL_TREE;
		}
	    }
	}

      if (sprime)
	{
	  /* If we can propagate the value computed for LHS into
	     all uses don't bother doing anything with this stmt.  */
	  if (may_propagate_copy (lhs, sprime))
	    {
	      /* Mark it for removal.  */
	      to_remove.safe_push (stmt);

	      /* ???  Don't count copy/constant propagations.  */
	      if (gimple_assign_single_p (stmt)
		  && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
		      || gimple_assign_rhs1 (stmt) == sprime))
		return;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "Replaced ");
		  print_gimple_expr (dump_file, stmt, 0);
		  fprintf (dump_file, " with ");
		  print_generic_expr (dump_file, sprime);
		  fprintf (dump_file, " in all uses of ");
		  print_gimple_stmt (dump_file, stmt, 0);
		}

	      eliminations++;
	      return;
	    }

	  /* If this is an assignment from our leader (which
	     happens in the case the value-number is a constant)
	     then there is nothing to do.  Likewise if we run into
	     inserted code that needed a conversion because of
	     our type-agnostic value-numbering of loads.  */
	  if ((gimple_assign_single_p (stmt)
	       || (is_gimple_assign (stmt)
		   && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
		       || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)))
	      && sprime == gimple_assign_rhs1 (stmt))
	    return;

	  /* Else replace its RHS.  */
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Replaced ");
	      print_gimple_expr (dump_file, stmt, 0);
	      fprintf (dump_file, " with ");
	      print_generic_expr (dump_file, sprime);
	      fprintf (dump_file, " in ");
	      print_gimple_stmt (dump_file, stmt, 0);
	    }
	  eliminations++;

	  bool can_make_abnormal_goto = (is_gimple_call (stmt)
					 && stmt_can_make_abnormal_goto (stmt));
	  gimple *orig_stmt = stmt;
	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
					  TREE_TYPE (sprime)))
	    {
	      /* We preserve conversions to but not from function or method
		 types.  This asymmetry makes it necessary to re-instantiate
		 conversions here.  */
	      if (POINTER_TYPE_P (TREE_TYPE (lhs))
		  && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (lhs))))
		sprime = fold_convert (TREE_TYPE (lhs), sprime);
	      else
		gcc_unreachable ();
	    }
	  tree vdef = gimple_vdef (stmt);
	  tree vuse = gimple_vuse (stmt);
	  propagate_tree_value_into_stmt (gsi, sprime);
	  stmt = gsi_stmt (*gsi);
	  update_stmt (stmt);
	  /* In case the VDEF on the original stmt was released, value-number
	     it to the VUSE.  This is to make vuse_ssa_val able to skip
	     released virtual operands.  */
	  if (vdef != gimple_vdef (stmt))
	    {
	      gcc_assert (SSA_NAME_IN_FREE_LIST (vdef));
	      VN_INFO (vdef)->valnum = vuse;
	    }

	  /* If we removed EH side-effects from the statement, clean
	     its EH information.  */
	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
	    {
	      bitmap_set_bit (need_eh_cleanup,
			      gimple_bb (stmt)->index);
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "  Removed EH side-effects.\n");
	    }

	  /* Likewise for AB side-effects.  */
	  if (can_make_abnormal_goto
	      && !stmt_can_make_abnormal_goto (stmt))
	    {
	      bitmap_set_bit (need_ab_cleanup,
			      gimple_bb (stmt)->index);
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "  Removed AB side-effects.\n");
	    }

	  return;
	}
    }

  /* If the statement is a scalar store, see if the expression
     has the same value number as its rhs.  If so, the store is
     dead.  */
  if (gimple_assign_single_p (stmt)
      && !gimple_has_volatile_ops (stmt)
      && !is_gimple_reg (gimple_assign_lhs (stmt))
      && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
	  || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
    {
      tree rhs = gimple_assign_rhs1 (stmt);
      vn_reference_t vnresult;
      /* ???  gcc.dg/torture/pr91445.c shows that we lookup a boolean
	 typed load of a byte known to be 0x11 as 1 so a store of
	 a boolean 1 is detected as redundant.  Because of this we
	 have to make sure to lookup with a ref where its size
	 matches the precision.  */
      tree lookup_lhs = lhs;
      if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
	  && (TREE_CODE (lhs) != COMPONENT_REF
	      || !DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
	  && !type_has_mode_precision_p (TREE_TYPE (lhs)))
	{
	  if (TREE_CODE (lhs) == COMPONENT_REF
	      || TREE_CODE (lhs) == MEM_REF)
	    {
	      tree ltype = build_nonstandard_integer_type
			     (TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (lhs))),
			      TYPE_UNSIGNED (TREE_TYPE (lhs)));
	      if (TREE_CODE (lhs) == COMPONENT_REF)
		{
		  tree foff = component_ref_field_offset (lhs);
		  tree f = TREE_OPERAND (lhs, 1);
		  if (!poly_int_tree_p (foff))
		    lookup_lhs = NULL_TREE;
		  else
		    lookup_lhs = build3 (BIT_FIELD_REF, ltype,
					 TREE_OPERAND (lhs, 0),
					 TYPE_SIZE (TREE_TYPE (lhs)),
					 bit_from_pos
					   (foff, DECL_FIELD_BIT_OFFSET (f)));
		}
	      else
		lookup_lhs = build2 (MEM_REF, ltype,
				     TREE_OPERAND (lhs, 0),
				     TREE_OPERAND (lhs, 1));
	    }
	  else
	    lookup_lhs = NULL_TREE;
	}
      tree val = NULL_TREE;
      if (lookup_lhs)
	val = vn_reference_lookup (lookup_lhs, gimple_vuse (stmt),
				   VN_WALKREWRITE, &vnresult, false,
				   NULL, NULL_TREE, true);
      if (TREE_CODE (rhs) == SSA_NAME)
	rhs = VN_INFO (rhs)->valnum;
      if (val
	  && (operand_equal_p (val, rhs, 0)
	      /* Due to the bitfield lookups above we can get bit
		 interpretations of the same RHS as values here.  Those
		 are redundant as well.  */
	      || (TREE_CODE (val) == SSA_NAME
		  && gimple_assign_single_p (SSA_NAME_DEF_STMT (val))
		  && (val = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (val)))
		  && TREE_CODE (val) == VIEW_CONVERT_EXPR
		  && TREE_OPERAND (val, 0) == rhs)))
	{
	  /* We can only remove the later store if the former aliases
	     at least all accesses the later one does or if the store
	     was to readonly memory storing the same value.  */
	  ao_ref lhs_ref;
	  ao_ref_init (&lhs_ref, lhs);
	  alias_set_type set = ao_ref_alias_set (&lhs_ref);
	  alias_set_type base_set = ao_ref_base_alias_set (&lhs_ref);
	  if (! vnresult->ref
	      || ((vnresult->set == set
		   || alias_set_subset_of (set, vnresult->set))
		  && (vnresult->base_set == base_set
		      || alias_set_subset_of (base_set, vnresult->base_set))))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "Deleted redundant store ");
		  print_gimple_stmt (dump_file, stmt, 0);
		}

	      /* Queue stmt for removal.  */
	      to_remove.safe_push (stmt);
	      return;
	    }
	}
    }

  /* If this is a control statement value numbering left edges
     unexecuted on force the condition in a way consistent with
     that.  */
  if (gcond *cond = dyn_cast <gcond *> (stmt))
    {
      if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
	  ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Removing unexecutable edge from ");
	      print_gimple_stmt (dump_file, stmt, 0);
	    }
	  if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
	      == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
	    gimple_cond_make_true (cond);
	  else
	    gimple_cond_make_false (cond);
	  update_stmt (cond);
	  el_todo |= TODO_cleanup_cfg;
	  return;
	}
    }

  bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
  bool was_noreturn = (is_gimple_call (stmt)
		       && gimple_call_noreturn_p (stmt));
  tree vdef = gimple_vdef (stmt);
  tree vuse = gimple_vuse (stmt);

  /* If we didn't replace the whole stmt (or propagate the result
     into all uses), replace all uses on this stmt with their
     leaders.  */
  bool modified = false;
  use_operand_p use_p;
  ssa_op_iter iter;
  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    {
      tree use = USE_FROM_PTR (use_p);
      /* ???  The call code above leaves stmt operands un-updated.  */
      if (TREE_CODE (use) != SSA_NAME)
	continue;
      tree sprime;
      if (SSA_NAME_IS_DEFAULT_DEF (use))
	/* ???  For default defs BB shouldn't matter, but we have to
	   solve the inconsistency between rpo eliminate and
	   dom eliminate avail valueization first.  */
	sprime = eliminate_avail (b, use);
      else
	/* Look for sth available at the definition block of the argument.
	   This avoids inconsistencies between availability there which
	   decides if the stmt can be removed and availability at the
	   use site.  The SSA property ensures that things available
	   at the definition are also available at uses.  */
	sprime = eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (use)), use);
      if (sprime && sprime != use
	  && may_propagate_copy (use, sprime, true)
	  /* We substitute into debug stmts to avoid excessive
	     debug temporaries created by removed stmts, but we need
	     to avoid doing so for inserted sprimes as we never want
	     to create debug temporaries for them.  */
	  && (!inserted_exprs
	      || TREE_CODE (sprime) != SSA_NAME
	      || !is_gimple_debug (stmt)
	      || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
	{
	  propagate_value (use_p, sprime);
	  modified = true;
	}
    }

  /* Fold the stmt if modified, this canonicalizes MEM_REFs we propagated
     into which is a requirement for the IPA devirt machinery.  */
  gimple *old_stmt = stmt;
  if (modified)
    {
      /* If a formerly non-invariant ADDR_EXPR is turned into an
	 invariant one it was on a separate stmt.  */
      if (gimple_assign_single_p (stmt)
	  && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
	recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
      gimple_stmt_iterator prev = *gsi;
      gsi_prev (&prev);
      if (fold_stmt (gsi, follow_all_ssa_edges))
	{
	  /* fold_stmt may have created new stmts inbetween
	     the previous stmt and the folded stmt.  Mark
	     all defs created there as varying to not confuse
	     the SCCVN machinery as we're using that even during
	     elimination.  */
	  if (gsi_end_p (prev))
	    prev = gsi_start_bb (b);
	  else
	    gsi_next (&prev);
	  if (gsi_stmt (prev) != gsi_stmt (*gsi))
	    do
	      {
		tree def;
		ssa_op_iter dit;
		FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
					   dit, SSA_OP_ALL_DEFS)
		  /* As existing DEFs may move between stmts
		     only process new ones.  */
		  if (! has_VN_INFO (def))
		    {
		      vn_ssa_aux_t vn_info = VN_INFO (def);
		      vn_info->valnum = def;
		      vn_info->visited = true;
		    }
		if (gsi_stmt (prev) == gsi_stmt (*gsi))
		  break;
		gsi_next (&prev);
	      }
	    while (1);
	}
      stmt = gsi_stmt (*gsi);
      /* In case we folded the stmt away schedule the NOP for removal.  */
      if (gimple_nop_p (stmt))
	to_remove.safe_push (stmt);
    }

  /* Visit indirect calls and turn them into direct calls if
     possible using the devirtualization machinery.  Do this before
     checking for required EH/abnormal/noreturn cleanup as devirt
     may expose more of those.  */
  if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      tree fn = gimple_call_fn (call_stmt);
      if (fn
	  && flag_devirtualize
	  && virtual_method_call_p (fn))
	{
	  tree otr_type = obj_type_ref_class (fn);
	  unsigned HOST_WIDE_INT otr_tok
	    = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
	  tree instance;
	  ipa_polymorphic_call_context context (current_function_decl,
						fn, stmt, &instance);
	  context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
				    otr_type, stmt, NULL);
	  bool final;
	  vec <cgraph_node *> targets
	    = possible_polymorphic_call_targets (obj_type_ref_class (fn),
						 otr_tok, context, &final);
	  if (dump_file)
	    dump_possible_polymorphic_call_targets (dump_file,
						    obj_type_ref_class (fn),
						    otr_tok, context);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree fn;
	      if (targets.length () == 1)
		fn = targets[0]->decl;
	      else
		fn = builtin_decl_unreachable ();
	      if (dump_enabled_p ())
		{
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
				   "converting indirect call to "
				   "function %s\n",
				   lang_hooks.decl_printable_name (fn, 2));
		}
	      gimple_call_set_fndecl (call_stmt, fn);
	      /* If changing the call to __builtin_unreachable
		 or similar noreturn function, adjust gimple_call_fntype
		 too.  */
	      if (gimple_call_noreturn_p (call_stmt)
		  && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
		  && TYPE_ARG_TYPES (TREE_TYPE (fn))
		  && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
		      == void_type_node))
		gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
	      maybe_remove_unused_call_args (cfun, call_stmt);
	      modified = true;
	    }
	}
    }

  if (modified)
    {
      /* When changing a call into a noreturn call, cfg cleanup
	 is needed to fix up the noreturn call.  */
      if (!was_noreturn
	  && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
	to_fixup.safe_push (stmt);
      /* When changing a condition or switch into one we know what
	 edge will be executed, schedule a cfg cleanup.  */
      if ((gimple_code (stmt) == GIMPLE_COND
	   && (gimple_cond_true_p (as_a <gcond *> (stmt))
	       || gimple_cond_false_p (as_a <gcond *> (stmt))))
	  || (gimple_code (stmt) == GIMPLE_SWITCH
	      && TREE_CODE (gimple_switch_index
			      (as_a <gswitch *> (stmt))) == INTEGER_CST))
	el_todo |= TODO_cleanup_cfg;
      /* If we removed EH side-effects from the statement, clean
	 its EH information.  */
      if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
	{
	  bitmap_set_bit (need_eh_cleanup,
			  gimple_bb (stmt)->index);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "  Removed EH side-effects.\n");
	}
      /* Likewise for AB side-effects.  */
      if (can_make_abnormal_goto
	  && !stmt_can_make_abnormal_goto (stmt))
	{
	  bitmap_set_bit (need_ab_cleanup,
			  gimple_bb (stmt)->index);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "  Removed AB side-effects.\n");
	}
      update_stmt (stmt);
      /* In case the VDEF on the original stmt was released, value-number
	 it to the VUSE.  This is to make vuse_ssa_val able to skip
	 released virtual operands.  */
      if (vdef && SSA_NAME_IN_FREE_LIST (vdef))
	VN_INFO (vdef)->valnum = vuse;
    }

  /* Make new values available - for fully redundant LHS we
     continue with the next stmt above and skip this.  */
  def_operand_p defp;
  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
    eliminate_push_avail (b, DEF_FROM_PTR (defp));
}
7081 eliminate_dom_walker::before_dom_children (basic_block b
)
7084 avail_stack
.safe_push (NULL_TREE
);
7086 /* Skip unreachable blocks marked unreachable during the SCCVN domwalk. */
7087 if (!(b
->flags
& BB_EXECUTABLE
))
7092 for (gphi_iterator gsi
= gsi_start_phis (b
); !gsi_end_p (gsi
);)
7094 gphi
*phi
= gsi
.phi ();
7095 tree res
= PHI_RESULT (phi
);
7097 if (virtual_operand_p (res
))
7103 tree sprime
= eliminate_avail (b
, res
);
7107 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
7109 fprintf (dump_file
, "Replaced redundant PHI node defining ");
7110 print_generic_expr (dump_file
, res
);
7111 fprintf (dump_file
, " with ");
7112 print_generic_expr (dump_file
, sprime
);
7113 fprintf (dump_file
, "\n");
7116 /* If we inserted this PHI node ourself, it's not an elimination. */
7117 if (! inserted_exprs
7118 || ! bitmap_bit_p (inserted_exprs
, SSA_NAME_VERSION (res
)))
7121 /* If we will propagate into all uses don't bother to do
7123 if (may_propagate_copy (res
, sprime
))
7125 /* Mark the PHI for removal. */
7126 to_remove
.safe_push (phi
);
7131 remove_phi_node (&gsi
, false);
7133 if (!useless_type_conversion_p (TREE_TYPE (res
), TREE_TYPE (sprime
)))
7134 sprime
= fold_convert (TREE_TYPE (res
), sprime
);
7135 gimple
*stmt
= gimple_build_assign (res
, sprime
);
7136 gimple_stmt_iterator gsi2
= gsi_after_labels (b
);
7137 gsi_insert_before (&gsi2
, stmt
, GSI_NEW_STMT
);
7141 eliminate_push_avail (b
, res
);
7145 for (gimple_stmt_iterator gsi
= gsi_start_bb (b
);
7148 eliminate_stmt (b
, &gsi
);
7150 /* Replace destination PHI arguments. */
7153 FOR_EACH_EDGE (e
, ei
, b
->succs
)
7154 if (e
->flags
& EDGE_EXECUTABLE
)
7155 for (gphi_iterator gsi
= gsi_start_phis (e
->dest
);
7159 gphi
*phi
= gsi
.phi ();
7160 use_operand_p use_p
= PHI_ARG_DEF_PTR_FROM_EDGE (phi
, e
);
7161 tree arg
= USE_FROM_PTR (use_p
);
7162 if (TREE_CODE (arg
) != SSA_NAME
7163 || virtual_operand_p (arg
))
7165 tree sprime
= eliminate_avail (b
, arg
);
7166 if (sprime
&& may_propagate_copy (arg
, sprime
))
7167 propagate_value (use_p
, sprime
);
7170 vn_context_bb
= NULL
;
/* Make no longer available leaders no longer available.  */

void
eliminate_dom_walker::after_dom_children (basic_block)
{
  tree entry;
  while ((entry = avail_stack.pop ()) != NULL_TREE)
    {
      tree valnum = VN_INFO (entry)->valnum;
      tree old = avail[SSA_NAME_VERSION (valnum)];
      if (old == entry)
	avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
      else
	avail[SSA_NAME_VERSION (valnum)] = entry;
    }
}
/* Remove queued stmts and perform delayed cleanups.  */

unsigned
eliminate_dom_walker::eliminate_cleanup (bool region_p)
{
  statistics_counter_event (cfun, "Eliminated", eliminations);
  statistics_counter_event (cfun, "Insertions", insertions);

  /* We cannot remove stmts during BB walk, especially not release SSA
     names there as this confuses the VN machinery.  The stmts ending
     up in to_remove are either stores or simple copies.
     Remove stmts in reverse order to make debug stmt creation possible.  */
  while (!to_remove.is_empty ())
    {
      bool do_release_defs = true;
      gimple *stmt = to_remove.pop ();

      /* When we are value-numbering a region we do not require exit PHIs to
	 be present so we have to make sure to deal with uses outside of the
	 region of stmts that we thought are eliminated.
	 ??? Note we may be confused by uses in dead regions we didn't run
	 elimination on.  Rather than checking individual uses we accept
	 dead copies to be generated here (gcc.c-torture/execute/20060905-1.c
	 contains such example).  */
      if (region_p)
	{
	  if (gphi *phi = dyn_cast <gphi *> (stmt))
	    {
	      tree lhs = gimple_phi_result (phi);
	      if (!has_zero_uses (lhs))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "Keeping eliminated stmt live "
			     "as copy because of out-of-region uses\n");
		  tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
		  gimple *copy = gimple_build_assign (lhs, sprime);
		  gimple_stmt_iterator gsi
		    = gsi_after_labels (gimple_bb (stmt));
		  gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
		  do_release_defs = false;
		}
	    }
	  else if (tree lhs = gimple_get_lhs (stmt))
	    if (TREE_CODE (lhs) == SSA_NAME
		&& !has_zero_uses (lhs))
	      {
		if (dump_file && (dump_flags & TDF_DETAILS))
		  fprintf (dump_file, "Keeping eliminated stmt live "
			   "as copy because of out-of-region uses\n");
		tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
		gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
		if (is_gimple_assign (stmt))
		  {
		    gimple_assign_set_rhs_from_tree (&gsi, sprime);
		    stmt = gsi_stmt (gsi);
		    update_stmt (stmt);
		    if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
		      bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
		    continue;
		  }
		else
		  {
		    gimple *copy = gimple_build_assign (lhs, sprime);
		    gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
		    do_release_defs = false;
		  }
	      }
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Removing dead stmt ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_NONE);
	}

      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      if (gimple_code (stmt) == GIMPLE_PHI)
	remove_phi_node (&gsi, do_release_defs);
      else
	{
	  basic_block bb = gimple_bb (stmt);
	  unlink_stmt_vdef (stmt);
	  if (gsi_remove (&gsi, true))
	    bitmap_set_bit (need_eh_cleanup, bb->index);
	  if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
	    bitmap_set_bit (need_ab_cleanup, bb->index);
	  if (do_release_defs)
	    release_defs (stmt);
	}

      /* Removing a stmt may expose a forwarder block.  */
      el_todo |= TODO_cleanup_cfg;
    }

  /* Fixup stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the dominator walk.  Do this
     in reverse order so we don't inadvertently remove a stmt we want to
     fixup by visiting a dominating now noreturn call first.  */
  while (!to_fixup.is_empty ())
    {
      gimple *stmt = to_fixup.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Fixing up noreturn call ");
	  print_gimple_stmt (dump_file, stmt, 0);
	}

      if (fixup_noreturn_call (stmt))
	el_todo |= TODO_cleanup_cfg;
    }

  bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
  bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);

  if (do_eh_cleanup)
    gimple_purge_all_dead_eh_edges (need_eh_cleanup);

  if (do_ab_cleanup)
    gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);

  if (do_eh_cleanup || do_ab_cleanup)
    el_todo |= TODO_cleanup_cfg;

  return el_todo;
}
/* Eliminate fully redundant computations.  */

unsigned
eliminate_with_rpo_vn (bitmap inserted_exprs)
{
  eliminate_dom_walker walker (CDI_DOMINATORS, inserted_exprs);

  eliminate_dom_walker *saved_rpo_avail = rpo_avail;
  rpo_avail = &walker;
  walker.walk (cfun->cfg->x_entry_block_ptr);
  rpo_avail = saved_rpo_avail;

  return walker.eliminate_cleanup ();
}
static unsigned
do_rpo_vn_1 (function *fn, edge entry, bitmap exit_bbs,
	     bool iterate, bool eliminate, vn_lookup_kind kind);

void
run_rpo_vn (vn_lookup_kind kind)
{
  do_rpo_vn_1 (cfun, NULL, NULL, true, false, kind);

  /* ???  Prune requirement of these.  */
  constant_to_value_id = new hash_table<vn_constant_hasher> (23);

  /* Initialize the value ids and prune out remaining VN_TOPs
     from dead code.  */
  tree name;
  unsigned i;
  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      vn_ssa_aux_t info = VN_INFO (name);
      if (!info->visited
	  || info->valnum == VN_TOP)
	info->valnum = name;
      if (info->valnum == name)
	info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
	info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate.  */
  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      vn_ssa_aux_t info = VN_INFO (name);
      if (TREE_CODE (info->valnum) == SSA_NAME
	  && info->valnum != name
	  && info->value_id != VN_INFO (info->valnum)->value_id)
	info->value_id = VN_INFO (info->valnum)->value_id;
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      FOR_EACH_SSA_NAME (i, name, cfun)
	{
	  if (VN_INFO (name)->visited
	      && SSA_VAL (name) != name)
	    {
	      print_generic_expr (dump_file, name);
	      fprintf (dump_file, " = ");
	      print_generic_expr (dump_file, SSA_VAL (name));
	      fprintf (dump_file, " (%04d)\n", VN_INFO (name)->value_id);
	    }
	}
    }
}
/* Free VN associated data structures.  */

void
free_rpo_vn (void)
{
  free_vn_table (valid_info);
  XDELETE (valid_info);
  obstack_free (&vn_tables_obstack, NULL);
  obstack_free (&vn_tables_insert_obstack, NULL);

  vn_ssa_aux_iterator_type it;
  vn_ssa_aux_t info;
  FOR_EACH_HASH_TABLE_ELEMENT (*vn_ssa_aux_hash, info, vn_ssa_aux_t, it)
    if (info->needs_insertion)
      release_ssa_name (info->name);
  obstack_free (&vn_ssa_aux_obstack, NULL);
  delete vn_ssa_aux_hash;

  delete constant_to_value_id;
  constant_to_value_id = NULL;
}
/* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables.  */

static tree
vn_lookup_simplify_result (gimple_match_op *res_op)
{
  if (!res_op->code.is_tree_code ())
    return NULL_TREE;
  tree *ops = res_op->ops;
  unsigned int length = res_op->num_ops;
  if (res_op->code == CONSTRUCTOR
      /* ???  We're arriving here with SCCVNs view, decomposed CONSTRUCTOR
	 and GIMPLEs / match-and-simplifies, CONSTRUCTOR as GENERIC tree.  */
      && TREE_CODE (res_op->ops[0]) == CONSTRUCTOR)
    {
      length = CONSTRUCTOR_NELTS (res_op->ops[0]);
      ops = XALLOCAVEC (tree, length);
      for (unsigned i = 0; i < length; ++i)
	ops[i] = CONSTRUCTOR_ELT (res_op->ops[0], i)->value;
    }
  vn_nary_op_t vnresult = NULL;
  tree res = vn_nary_op_lookup_pieces (length, (tree_code) res_op->code,
				       res_op->type, ops, &vnresult);
  /* If this is used from expression simplification make sure to
     return an available expression.  */
  if (res && TREE_CODE (res) == SSA_NAME && mprts_hook && rpo_avail)
    res = rpo_avail->eliminate_avail (vn_context_bb, res);
  return res;
}
/* Return a leader for OPs value that is valid at BB.  */

tree
rpo_elim::eliminate_avail (basic_block bb, tree op)
{
  bool visited;
  tree valnum = SSA_VAL (op, &visited);
  /* If we didn't visit OP then it must be defined outside of the
     region we process and also dominate it.  So it is available.  */
  if (!visited)
    return op;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (valnum))
	return valnum;
      vn_avail *av = VN_INFO (valnum)->avail;
      if (!av)
	return NULL_TREE;
      if (av->location == bb->index)
	/* On tramp3d 90% of the cases are here.  */
	return ssa_name (av->leader);
      do
	{
	  basic_block abb = BASIC_BLOCK_FOR_FN (cfun, av->location);
	  /* ???  During elimination we have to use availability at the
	     definition site of a use we try to replace.  This
	     is required to not run into inconsistencies because
	     of dominated_by_p_w_unex behavior and removing a definition
	     while not replacing all uses.
	     ???  We could try to consistently walk dominators
	     ignoring non-executable regions.  The nearest common
	     dominator of bb and abb is where we can stop walking.  We
	     may also be able to "pre-compute" (bits of) the next immediate
	     (non-)dominator during the RPO walk when marking edges as
	     executable.  */
	  if (dominated_by_p_w_unex (bb, abb, true))
	    {
	      tree leader = ssa_name (av->leader);
	      /* Prevent eliminations that break loop-closed SSA.  */
	      if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
		  && ! SSA_NAME_IS_DEFAULT_DEF (leader)
		  && ! flow_bb_inside_loop_p (gimple_bb (SSA_NAME_DEF_STMT
							   (leader))->loop_father,
					      bb))
		return NULL_TREE;
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  print_generic_expr (dump_file, leader);
		  fprintf (dump_file, " is available for ");
		  print_generic_expr (dump_file, valnum);
		  fprintf (dump_file, "\n");
		}
	      /* On tramp3d 99% of the _remaining_ cases succeed at
		 the first candidate.  */
	      return leader;
	    }
	  /* ???  Can we somehow skip to the immediate dominator
	     RPO index (bb_to_rpo)?  Again, maybe not worth, on
	     tramp3d the worst number of elements in the vector is 9.  */
	  av = av->next;
	}
      while (av);
    }
  else if (valnum != VN_TOP)
    /* valnum is is_gimple_min_invariant.  */
    return valnum;
  return NULL_TREE;
}
/* Make LEADER a leader for its value at BB.  */

void
rpo_elim::eliminate_push_avail (basic_block bb, tree leader)
{
  tree valnum = VN_INFO (leader)->valnum;
  if (valnum == VN_TOP
      || is_gimple_min_invariant (valnum))
    return;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Making available beyond BB%d ", bb->index);
      print_generic_expr (dump_file, leader);
      fprintf (dump_file, " for value ");
      print_generic_expr (dump_file, valnum);
      fprintf (dump_file, "\n");
    }
  vn_ssa_aux_t value = VN_INFO (valnum);
  vn_avail *av;
  if (m_avail_freelist)
    {
      av = m_avail_freelist;
      m_avail_freelist = m_avail_freelist->next;
    }
  else
    av = XOBNEW (&vn_ssa_aux_obstack, vn_avail);
  av->location = bb->index;
  av->leader = SSA_NAME_VERSION (leader);
  av->next = value->avail;
  av->next_undo = last_pushed_avail;
  last_pushed_avail = value;
  value->avail = av;
}
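/* Illustrative sketch (not from the sources): each value keeps a chain of
   (bb index, leader) records, newest first.  After BB3 and then BB7 push
   leaders a_1 and b_2 for the same value, the chain looks like

     VN_INFO (val)->avail:  { location = 7, leader = b_2 }
			    -> { location = 3, leader = a_1 } -> NULL

   while last_pushed_avail and next_undo thread all pushes globally in
   insertion order, so do_unwind can pop them back in reverse when an SCC
   iteration restarts.  */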
/* Valueization hook for RPO VN plus required state.  */

static tree
rpo_vn_valueize (tree name)
{
  if (TREE_CODE (name) == SSA_NAME)
    {
      vn_ssa_aux_t val = VN_INFO (name);
      if (val)
	{
	  tree tem = val->valnum;
	  if (tem != VN_TOP && tem != name)
	    {
	      if (TREE_CODE (tem) != SSA_NAME)
		return tem;
	      /* For all values we only valueize to an available leader
		 which means we can use SSA name info without restriction.  */
	      tem = rpo_avail->eliminate_avail (vn_context_bb, tem);
	      if (tem)
		return tem;
	    }
	}
    }
  return name;
}
/* Insert on PRED_E predicates derived from CODE OPS being true besides the
   inverted condition.  */

static void
insert_related_predicates_on_edge (enum tree_code code, tree *ops, edge pred_e)
{
  switch (code)
    {
    case LT_EXPR:
      /* a < b -> a {!,<}= b */
      vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
					   ops, boolean_true_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, LE_EXPR, boolean_type_node,
					   ops, boolean_true_node, 0, pred_e);
      /* a < b -> ! a {>,=} b */
      vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
					   ops, boolean_false_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
					   ops, boolean_false_node, 0, pred_e);
      break;
    case GT_EXPR:
      /* a > b -> a {!,>}= b */
      vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
					   ops, boolean_true_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, GE_EXPR, boolean_type_node,
					   ops, boolean_true_node, 0, pred_e);
      /* a > b -> ! a {<,=} b */
      vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
					   ops, boolean_false_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
					   ops, boolean_false_node, 0, pred_e);
      break;
    case EQ_EXPR:
      /* a == b -> ! a {<,>} b */
      vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
					   ops, boolean_false_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
					   ops, boolean_false_node, 0, pred_e);
      break;
    case LE_EXPR:
    case GE_EXPR:
    case NE_EXPR:
      /* Nothing besides inverted condition.  */
      break;
    default:;
    }
}
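/* For example (illustrative only): on the true edge of "if (a_1 < b_2)"
   this records a_1 != b_2 and a_1 <= b_2 as true and a_1 > b_2 and
   a_1 == b_2 as false, so a dominated "if (a_1 > b_2)" or "if (a_1 == b_2)"
   on that path folds to false and "if (a_1 <= b_2)" folds to true without
   re-deriving the relation.  */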
/* Main stmt worker for RPO VN, process BB.  */

static unsigned
process_bb (rpo_elim &avail, basic_block bb,
	    bool bb_visited, bool iterate_phis, bool iterate, bool eliminate,
	    bool do_region, bitmap exit_bbs, bool skip_phis)
{
  unsigned todo = 0;
  edge e;
  edge_iterator ei;

  vn_context_bb = bb;

  /* If we are in loop-closed SSA preserve this state.  This is
     relevant when called on regions from outside of FRE/PRE.  */
  bool lc_phi_nodes = false;
  if (!skip_phis
      && loops_state_satisfies_p (LOOP_CLOSED_SSA))
    FOR_EACH_EDGE (e, ei, bb->preds)
      if (e->src->loop_father != e->dest->loop_father
	  && flow_loop_nested_p (e->dest->loop_father,
				 e->src->loop_father))
	{
	  lc_phi_nodes = true;
	  break;
	}

  /* When we visit a loop header substitute into loop info.  */
  if (!iterate && eliminate && bb->loop_father->header == bb)
    {
      /* Keep fields in sync with substitute_in_loop_info.  */
      if (bb->loop_father->nb_iterations)
	bb->loop_father->nb_iterations
	  = simplify_replace_tree (bb->loop_father->nb_iterations,
				   NULL_TREE, NULL_TREE, &vn_valueize_for_srt);
    }

  /* Value-number all defs in the basic-block.  */
  if (!skip_phis)
    for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	 gsi_next (&gsi))
      {
	gphi *phi = gsi.phi ();
	tree res = PHI_RESULT (phi);
	vn_ssa_aux_t res_info = VN_INFO (res);
	if (!bb_visited)
	  {
	    gcc_assert (!res_info->visited);
	    res_info->valnum = VN_TOP;
	    res_info->visited = true;
	  }

	/* When not iterating force backedge values to varying.  */
	visit_stmt (phi, !iterate_phis);
	if (virtual_operand_p (res))
	  continue;

	/* Eliminate */
	/* The interesting case is gcc.dg/tree-ssa/pr22230.c for correctness
	   how we handle backedges and availability.
	   And gcc.dg/tree-ssa/ssa-sccvn-2.c for optimization.  */
	tree val = res_info->valnum;
	if (res != val && !iterate && eliminate)
	  {
	    if (tree leader = avail.eliminate_avail (bb, res))
	      {
		if (leader != res
		    /* Preserve loop-closed SSA form.  */
		    && (! lc_phi_nodes
			|| is_gimple_min_invariant (leader)))
		  {
		    if (dump_file && (dump_flags & TDF_DETAILS))
		      {
			fprintf (dump_file, "Replaced redundant PHI node "
				 "defining ");
			print_generic_expr (dump_file, res);
			fprintf (dump_file, " with ");
			print_generic_expr (dump_file, leader);
			fprintf (dump_file, "\n");
		      }
		    avail.eliminations++;

		    if (may_propagate_copy (res, leader))
		      {
			/* Schedule for removal.  */
			avail.to_remove.safe_push (phi);
			continue;
		      }
		    /* ???  Else generate a copy stmt.  */
		  }
	      }
	  }
	/* Only make defs available that not already are.  But make
	   sure loop-closed SSA PHI node defs are picked up for
	   downstream uses.  */
	if (lc_phi_nodes
	    || res == val
	    || ! avail.eliminate_avail (bb, res))
	  avail.eliminate_push_avail (bb, res);
      }

  /* For empty BBs mark outgoing edges executable.  For non-empty BBs
     we do this when processing the last stmt as we have to do this
     before elimination which otherwise forces GIMPLE_CONDs to
     if (1 != 0) style when seeing non-executable edges.  */
  if (gsi_end_p (gsi_start_bb (bb)))
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if (!(e->flags & EDGE_EXECUTABLE))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file,
			 "marking outgoing edge %d -> %d executable\n",
			 e->src->index, e->dest->index);
	      e->flags |= EDGE_EXECUTABLE;
	      e->dest->flags |= BB_EXECUTABLE;
	    }
	  else if (!(e->dest->flags & BB_EXECUTABLE))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file,
			 "marking destination block %d reachable\n",
			 e->dest->index);
	      e->dest->flags |= BB_EXECUTABLE;
	    }
	}
    }
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      ssa_op_iter i;
      tree op;
      if (!bb_visited)
	{
	  FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
	    {
	      vn_ssa_aux_t op_info = VN_INFO (op);
	      gcc_assert (!op_info->visited);
	      op_info->valnum = VN_TOP;
	      op_info->visited = true;
	    }

	  /* We somehow have to deal with uses that are not defined
	     in the processed region.  Forcing unvisited uses to
	     varying here doesn't play well with def-use following during
	     expression simplification, so we deal with this by checking
	     the visited flag in SSA_VAL.  */
	}

      visit_stmt (gsi_stmt (gsi));

      gimple *last = gsi_stmt (gsi);
      e = NULL;
      switch (gimple_code (last))
	{
	case GIMPLE_SWITCH:
	  e = find_taken_edge (bb, vn_valueize (gimple_switch_index
						  (as_a <gswitch *> (last))));
	  break;
	case GIMPLE_COND:
	  {
	    tree lhs = vn_valueize (gimple_cond_lhs (last));
	    tree rhs = vn_valueize (gimple_cond_rhs (last));
	    tree val = gimple_simplify (gimple_cond_code (last),
					boolean_type_node, lhs, rhs,
					NULL, vn_valueize);
	    /* If the condition didn't simplify see if we have recorded
	       an expression from so far taken edges.  */
	    if (! val || TREE_CODE (val) != INTEGER_CST)
	      {
		vn_nary_op_t vnresult;
		tree ops[2];
		ops[0] = lhs;
		ops[1] = rhs;
		val = vn_nary_op_lookup_pieces (2, gimple_cond_code (last),
						boolean_type_node, ops,
						&vnresult);
		/* Did we get a predicated value?  */
		if (! val && vnresult && vnresult->predicated_values)
		  {
		    val = vn_nary_op_get_predicated_value (vnresult, bb);
		    if (val && dump_file && (dump_flags & TDF_DETAILS))
		      {
			fprintf (dump_file, "Got predicated value ");
			print_generic_expr (dump_file, val, TDF_NONE);
			fprintf (dump_file, " for ");
			print_gimple_stmt (dump_file, last, TDF_SLIM);
		      }
		  }
	      }
	    if (val)
	      e = find_taken_edge (bb, val);
	    if (! e)
	      {
		/* If we didn't manage to compute the taken edge then
		   push predicated expressions for the condition itself
		   and related conditions to the hashtables.  This allows
		   simplification of redundant conditions which is
		   important as early cleanup.  */
		edge true_e, false_e;
		extract_true_false_edges_from_block (bb, &true_e, &false_e);
		enum tree_code code = gimple_cond_code (last);
		enum tree_code icode
		  = invert_tree_comparison (code, HONOR_NANS (lhs));
		tree ops[2];
		ops[0] = lhs;
		ops[1] = rhs;
		if (do_region
		    && bitmap_bit_p (exit_bbs, true_e->dest->index))
		  true_e = NULL;
		if (do_region
		    && bitmap_bit_p (exit_bbs, false_e->dest->index))
		  false_e = NULL;
		if (true_e)
		  vn_nary_op_insert_pieces_predicated
		    (2, code, boolean_type_node, ops,
		     boolean_true_node, 0, true_e);
		if (false_e)
		  vn_nary_op_insert_pieces_predicated
		    (2, code, boolean_type_node, ops,
		     boolean_false_node, 0, false_e);
		if (icode != ERROR_MARK)
		  {
		    if (true_e)
		      vn_nary_op_insert_pieces_predicated
			(2, icode, boolean_type_node, ops,
			 boolean_false_node, 0, true_e);
		    if (false_e)
		      vn_nary_op_insert_pieces_predicated
			(2, icode, boolean_type_node, ops,
			 boolean_true_node, 0, false_e);
		  }
		/* Relax for non-integers, inverted condition handled
		   above.  */
		if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
		  {
		    if (true_e)
		      insert_related_predicates_on_edge (code, ops, true_e);
		    if (false_e)
		      insert_related_predicates_on_edge (icode, ops, false_e);
		  }
	      }
	    break;
	  }
	case GIMPLE_GOTO:
	  e = find_taken_edge (bb, vn_valueize (gimple_goto_dest (last)));
	  break;
	default:;
	}
      if (e)
	{
	  todo = TODO_cleanup_cfg;
	  if (!(e->flags & EDGE_EXECUTABLE))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file,
			 "marking known outgoing %sedge %d -> %d executable\n",
			 e->flags & EDGE_DFS_BACK ? "back-" : "",
			 e->src->index, e->dest->index);
	      e->flags |= EDGE_EXECUTABLE;
	      e->dest->flags |= BB_EXECUTABLE;
	    }
	  else if (!(e->dest->flags & BB_EXECUTABLE))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file,
			 "marking destination block %d reachable\n",
			 e->dest->index);
	      e->dest->flags |= BB_EXECUTABLE;
	    }
	}
      else if (gsi_one_before_end_p (gsi))
	{
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    {
	      if (!(e->flags & EDGE_EXECUTABLE))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file,
			     "marking outgoing edge %d -> %d executable\n",
			     e->src->index, e->dest->index);
		  e->flags |= EDGE_EXECUTABLE;
		  e->dest->flags |= BB_EXECUTABLE;
		}
	      else if (!(e->dest->flags & BB_EXECUTABLE))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file,
			     "marking destination block %d reachable\n",
			     e->dest->index);
		  e->dest->flags |= BB_EXECUTABLE;
		}
	    }
	}

      /* Eliminate.  That also pushes to avail.  */
      if (eliminate && ! iterate)
	avail.eliminate_stmt (bb, &gsi);
      else
	/* If not eliminating, make all not already available defs
	   available.  */
	FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_DEF)
	  if (! avail.eliminate_avail (bb, op))
	    avail.eliminate_push_avail (bb, op);
    }

  /* Eliminate in destination PHI arguments.  Always substitute in dest
     PHIs, even for non-executable edges.  This handles region
     exits PHIs.  */
  if (!iterate && eliminate)
    FOR_EACH_EDGE (e, ei, bb->succs)
      for (gphi_iterator gsi = gsi_start_phis (e->dest);
	   !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
	  tree arg = USE_FROM_PTR (use_p);
	  if (TREE_CODE (arg) != SSA_NAME
	      || virtual_operand_p (arg))
	    continue;
	  tree sprime;
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      sprime = SSA_VAL (arg);
	      gcc_assert (TREE_CODE (sprime) != SSA_NAME
			  || SSA_NAME_IS_DEFAULT_DEF (sprime));
	    }
	  else
	    /* Look for sth available at the definition block of the argument.
	       This avoids inconsistencies between availability there which
	       decides if the stmt can be removed and availability at the
	       use site.  The SSA property ensures that things available
	       at the definition are also available at uses.  */
	    sprime = avail.eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (arg)),
					    arg);
	  if (sprime != arg
	      && may_propagate_copy (arg, sprime))
	    propagate_value (use_p, sprime);
	}

  vn_context_bb = NULL;
  return todo;
}
/* Unwind state per basic-block.  */

struct unwind_state
{
  /* Times this block has been visited.  */
  unsigned visited;
  /* Whether to handle this as iteration point or whether to treat
     incoming backedge PHI values as varying.  */
  bool iterate;
  /* Maximum RPO index this block is reachable from.  */
  int max_rpo;
  /* Unwind state.  */
  void *ob_top;
  vn_reference_t ref_top;
  vn_phi_t phi_top;
  vn_nary_op_t nary_top;
  vn_avail *avail_top;
};
/* Unwind the RPO VN state for iteration.  */

static void
do_unwind (unwind_state *to, rpo_elim &avail)
{
  gcc_assert (to->iterate);
  for (; last_inserted_nary != to->nary_top;
       last_inserted_nary = last_inserted_nary->next)
    {
      vn_nary_op_t *slot;
      slot = valid_info->nary->find_slot_with_hash
	(last_inserted_nary, last_inserted_nary->hashcode, NO_INSERT);
      /* Predication causes the need to restore previous state.  */
      if ((*slot)->unwind_to)
	*slot = (*slot)->unwind_to;
      else
	valid_info->nary->clear_slot (slot);
    }
  for (; last_inserted_phi != to->phi_top;
       last_inserted_phi = last_inserted_phi->next)
    {
      vn_phi_t *slot;
      slot = valid_info->phis->find_slot_with_hash
	(last_inserted_phi, last_inserted_phi->hashcode, NO_INSERT);
      valid_info->phis->clear_slot (slot);
    }
  for (; last_inserted_ref != to->ref_top;
       last_inserted_ref = last_inserted_ref->next)
    {
      vn_reference_t *slot;
      slot = valid_info->references->find_slot_with_hash
	(last_inserted_ref, last_inserted_ref->hashcode, NO_INSERT);
      (*slot)->operands.release ();
      valid_info->references->clear_slot (slot);
    }
  obstack_free (&vn_tables_obstack, to->ob_top);

  /* Prune [rpo_idx, ] from avail.  */
  for (; last_pushed_avail && last_pushed_avail->avail != to->avail_top;)
    {
      vn_ssa_aux_t val = last_pushed_avail;
      vn_avail *av = val->avail;
      val->avail = av->next;
      last_pushed_avail = av->next_undo;
      av->next = avail.m_avail_freelist;
      avail.m_avail_freelist = av;
    }
}
8034 /* Do VN on a SEME region specified by ENTRY and EXIT_BBS in FN.
8035 If ITERATE is true then treat backedges optimistically as not
8036 executed and iterate. If ELIMINATE is true then perform
8037 elimination, otherwise leave that to the caller. */
8040 do_rpo_vn_1 (function
*fn
, edge entry
, bitmap exit_bbs
,
8041 bool iterate
, bool eliminate
, vn_lookup_kind kind
)
8044 default_vn_walk_kind
= kind
;
8046 /* We currently do not support region-based iteration when
8047 elimination is requested. */
8048 gcc_assert (!entry
|| !iterate
|| !eliminate
);
8049 /* When iterating we need loop info up-to-date. */
8050 gcc_assert (!iterate
|| !loops_state_satisfies_p (LOOPS_NEED_FIXUP
));
8052 bool do_region
= entry
!= NULL
;
8055 entry
= single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fn
));
8056 exit_bbs
= BITMAP_ALLOC (NULL
);
8057 bitmap_set_bit (exit_bbs
, EXIT_BLOCK
);
8060 /* Clear EDGE_DFS_BACK on "all" entry edges, RPO order compute will
8061 re-mark those that are contained in the region. */
8064 FOR_EACH_EDGE (e
, ei
, entry
->dest
->preds
)
8065 e
->flags
&= ~EDGE_DFS_BACK
;
8067 int *rpo
= XNEWVEC (int, n_basic_blocks_for_fn (fn
) - NUM_FIXED_BLOCKS
);
8068 auto_vec
<std::pair
<int, int> > toplevel_scc_extents
;
8069 int n
= rev_post_order_and_mark_dfs_back_seme
8070 (fn
, entry
, exit_bbs
, true, rpo
, !iterate
? &toplevel_scc_extents
: NULL
);
8073 BITMAP_FREE (exit_bbs
);
8075 /* If there are any non-DFS_BACK edges into entry->dest skip
8076 processing PHI nodes for that block. This supports
8077 value-numbering loop bodies w/o the actual loop. */
8078 FOR_EACH_EDGE (e
, ei
, entry
->dest
->preds
)
8080 && !(e
->flags
& EDGE_DFS_BACK
))
8082 bool skip_entry_phis
= e
!= NULL
;
8083 if (skip_entry_phis
&& dump_file
&& (dump_flags
& TDF_DETAILS
))
8084 fprintf (dump_file
, "Region does not contain all edges into "
8085 "the entry block, skipping its PHIs.\n");
8087 int *bb_to_rpo
= XNEWVEC (int, last_basic_block_for_fn (fn
));
8088 for (int i
= 0; i
< n
; ++i
)
8089 bb_to_rpo
[rpo
[i
]] = i
;
8091 unwind_state
*rpo_state
= XNEWVEC (unwind_state
, n
);
8093 rpo_elim
avail (entry
->dest
);
8096 /* Verify we have no extra entries into the region. */
8097 if (flag_checking
&& do_region
)
8099 auto_bb_flag
bb_in_region (fn
);
8100 for (int i
= 0; i
< n
; ++i
)
8102 basic_block bb
= BASIC_BLOCK_FOR_FN (fn
, rpo
[i
]);
8103 bb
->flags
|= bb_in_region
;
8105 /* We can't merge the first two loops because we cannot rely
8106 on EDGE_DFS_BACK for edges not within the region. But if
8107 we decide to always have the bb_in_region flag we can
8108 do the checking during the RPO walk itself (but then it's
8109 also easy to handle MEME conservatively). */
8110 for (int i
= 0; i
< n
; ++i
)
8112 basic_block bb
= BASIC_BLOCK_FOR_FN (fn
, rpo
[i
]);
8115 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
8116 gcc_assert (e
== entry
8117 || (skip_entry_phis
&& bb
== entry
->dest
)
8118 || (e
->src
->flags
& bb_in_region
));
8120 for (int i
= 0; i
< n
; ++i
)
8122 basic_block bb
= BASIC_BLOCK_FOR_FN (fn
, rpo
[i
]);
8123 bb
->flags
&= ~bb_in_region
;
8127 /* Create the VN state. For the initial size of the various hashtables
8128 use a heuristic based on region size and number of SSA names. */
8129 unsigned region_size
= (((unsigned HOST_WIDE_INT
)n
* num_ssa_names
)
8130 / (n_basic_blocks_for_fn (fn
) - NUM_FIXED_BLOCKS
));
8131 VN_TOP
= create_tmp_var_raw (void_type_node
, "vn_top");
8133 next_constant_value_id
= -1;
8135 vn_ssa_aux_hash
= new hash_table
<vn_ssa_aux_hasher
> (region_size
* 2);
8136 gcc_obstack_init (&vn_ssa_aux_obstack
);
8138 gcc_obstack_init (&vn_tables_obstack
);
8139 gcc_obstack_init (&vn_tables_insert_obstack
);
8140 valid_info
= XCNEW (struct vn_tables_s
);
8141 allocate_vn_table (valid_info
, region_size
);
8142 last_inserted_ref
= NULL
;
8143 last_inserted_phi
= NULL
;
8144 last_inserted_nary
= NULL
;
8145 last_pushed_avail
= NULL
;
8147 vn_valueize
= rpo_vn_valueize
;
8149 /* Initialize the unwind state and edge/BB executable state. */
8150 unsigned curr_scc
= 0;
8151 for (int i
= 0; i
< n
; ++i
)
8153 basic_block bb
= BASIC_BLOCK_FOR_FN (fn
, rpo
[i
]);
8154 rpo_state
[i
].visited
= 0;
8155 rpo_state
[i
].max_rpo
= i
;
8156 if (!iterate
&& curr_scc
< toplevel_scc_extents
.length ())
8158 if (i
>= toplevel_scc_extents
[curr_scc
].first
8159 && i
<= toplevel_scc_extents
[curr_scc
].second
)
8160 rpo_state
[i
].max_rpo
= toplevel_scc_extents
[curr_scc
].second
;
8161 if (i
== toplevel_scc_extents
[curr_scc
].second
)
8164 bb
->flags
&= ~BB_EXECUTABLE
;
8165 bool has_backedges
= false;
8168 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
8170 if (e
->flags
& EDGE_DFS_BACK
)
8171 has_backedges
= true;
8172 e
->flags
&= ~EDGE_EXECUTABLE
;
8173 if (iterate
|| e
== entry
|| (skip_entry_phis
&& bb
== entry
->dest
))
8176 rpo_state
[i
].iterate
= iterate
&& has_backedges
;
8178 entry
->flags
|= EDGE_EXECUTABLE
;
8179 entry
->dest
->flags
|= BB_EXECUTABLE
;
  /* As heuristic to improve compile-time we handle only the N innermost
     loops and the outermost one optimistically.  */
  if (iterate)
    {
      unsigned max_depth = param_rpo_vn_max_loop_depth;
      for (auto loop : loops_list (cfun, LI_ONLY_INNERMOST))
        if (loop_depth (loop) > max_depth)
          for (unsigned i = 2;
               i < loop_depth (loop) - max_depth; ++i)
            {
              basic_block header = superloop_at_depth (loop, i)->header;
              bool non_latch_backedge = false;
              edge e;
              edge_iterator ei;
              FOR_EACH_EDGE (e, ei, header->preds)
                if (e->flags & EDGE_DFS_BACK)
                  {
                    /* There can be a non-latch backedge into the header
                       which is part of an outer irreducible region.  We
                       cannot avoid iterating this block then.  */
                    if (!dominated_by_p (CDI_DOMINATORS,
                                         e->src, e->dest))
                      {
                        if (dump_file && (dump_flags & TDF_DETAILS))
                          fprintf (dump_file, "non-latch backedge %d -> %d "
                                   "forces iteration of loop %d\n",
                                   e->src->index, e->dest->index, loop->num);
                        non_latch_backedge = true;
                      }
                    else
                      e->flags |= EDGE_EXECUTABLE;
                  }
              rpo_state[bb_to_rpo[header->index]].iterate = non_latch_backedge;
            }
    }
  uint64_t nblk = 0;
  int idx = 0;
  if (iterate)
    /* Go and process all blocks, iterating as necessary.  */
    do
      {
        basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);

        /* If the block has incoming backedges remember unwind state.  This
           is required even for non-executable blocks since in irreducible
           regions we might reach them via the backedge and re-start iterating
           from there.
           Note we can individually mark blocks with incoming backedges to
           not iterate where we then handle PHIs conservatively.  We do that
           heuristically to reduce compile-time for degenerate cases.  */
        if (rpo_state[idx].iterate)
          {
            rpo_state[idx].ob_top = obstack_alloc (&vn_tables_obstack, 0);
            rpo_state[idx].ref_top = last_inserted_ref;
            rpo_state[idx].phi_top = last_inserted_phi;
            rpo_state[idx].nary_top = last_inserted_nary;
            rpo_state[idx].avail_top
              = last_pushed_avail ? last_pushed_avail->avail : NULL;
          }

        if (!(bb->flags & BB_EXECUTABLE))
          {
            if (dump_file && (dump_flags & TDF_DETAILS))
              fprintf (dump_file, "Block %d: BB%d found not executable\n",
                       idx, bb->index);
            idx++;
            continue;
          }

        if (dump_file && (dump_flags & TDF_DETAILS))
          fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
        nblk++;
        todo |= process_bb (avail, bb,
                            rpo_state[idx].visited != 0,
                            rpo_state[idx].iterate,
                            iterate, eliminate, do_region, exit_bbs, false);
        rpo_state[idx].visited++;
        /* Verify if changed values flow over executable outgoing backedges
           and those change destination PHI values (that's the thing we
           can easily verify).  Reduce over all such edges to the farthest
           away PHI.  */
        int iterate_to = -1;
        edge_iterator ei;
        edge e;
        FOR_EACH_EDGE (e, ei, bb->succs)
          if ((e->flags & (EDGE_DFS_BACK|EDGE_EXECUTABLE))
              == (EDGE_DFS_BACK|EDGE_EXECUTABLE)
              && rpo_state[bb_to_rpo[e->dest->index]].iterate)
            {
              int destidx = bb_to_rpo[e->dest->index];
              if (!rpo_state[destidx].visited)
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "Unvisited destination %d\n",
                             e->dest->index);
                  if (iterate_to == -1 || destidx < iterate_to)
                    iterate_to = destidx;
                  continue;
                }
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "Looking for changed values of backedge"
                         " %d->%d destination PHIs\n",
                         e->src->index, e->dest->index);
              vn_context_bb = e->dest;
              gphi_iterator gsi;
              for (gsi = gsi_start_phis (e->dest);
                   !gsi_end_p (gsi); gsi_next (&gsi))
                {
                  bool inserted = false;
                  /* While we'd ideally just iterate on value changes
                     we CSE PHIs and do that even across basic-block
                     boundaries.  So even hashtable state changes can
                     be important (which is roughly equivalent to
                     PHI argument value changes).  To not excessively
                     iterate because of that we track whether a PHI
                     was CSEd to with GF_PLF_1.  */
                  bool phival_changed;
                  if ((phival_changed = visit_phi (gsi.phi (),
                                                   &inserted, false))
                      || (inserted && gimple_plf (gsi.phi (), GF_PLF_1)))
                    {
                      if (!phival_changed
                          && dump_file && (dump_flags & TDF_DETAILS))
                        fprintf (dump_file, "PHI was CSEd and hashtable "
                                 "state (changed)\n");
                      if (iterate_to == -1 || destidx < iterate_to)
                        iterate_to = destidx;
                      break;
                    }
                }
              vn_context_bb = NULL;
            }
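        /* Unwind the tables to the state recorded for the farthest-away
           destination whose PHIs changed and restart the RPO walk there.  */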
        if (iterate_to != -1)
          {
            do_unwind (&rpo_state[iterate_to], avail);
            idx = iterate_to;
            if (dump_file && (dump_flags & TDF_DETAILS))
              fprintf (dump_file, "Iterating to %d BB%d\n",
                       iterate_to, rpo[iterate_to]);
            continue;
          }

        idx++;
      }
    while (idx < n);
  else /* !iterate */
    {
      /* Process all blocks greedily with a worklist that enforces RPO
         processing of reachable blocks.  */
      auto_bitmap worklist;
      bitmap_set_bit (worklist, 0);
      while (!bitmap_empty_p (worklist))
        {
          int idx = bitmap_first_set_bit (worklist);
          bitmap_clear_bit (worklist, idx);
          basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
          gcc_assert ((bb->flags & BB_EXECUTABLE)
                      && !rpo_state[idx].visited);

          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);

          /* When we run into predecessor edges where we cannot trust its
             executable state mark them executable so PHI processing will
             be conservative.
             ???  Do we need to force arguments flowing over that edge
             to be varying or will they even always be?  */
          edge_iterator ei;
          edge e;
          FOR_EACH_EDGE (e, ei, bb->preds)
            if (!(e->flags & EDGE_EXECUTABLE)
                && (bb == entry->dest
                    || (!rpo_state[bb_to_rpo[e->src->index]].visited
                        && (rpo_state[bb_to_rpo[e->src->index]].max_rpo
                            >= idx))))
              {
                if (dump_file && (dump_flags & TDF_DETAILS))
                  fprintf (dump_file, "Cannot trust state of predecessor "
                           "edge %d -> %d, marking executable\n",
                           e->src->index, e->dest->index);
                e->flags |= EDGE_EXECUTABLE;
              }

          nblk++;
          todo |= process_bb (avail, bb, false, false, false, eliminate,
                              do_region, exit_bbs,
                              skip_entry_phis && bb == entry->dest);
          rpo_state[idx].visited++;

          FOR_EACH_EDGE (e, ei, bb->succs)
            if ((e->flags & EDGE_EXECUTABLE)
                && e->dest->index != EXIT_BLOCK
                && (!do_region || !bitmap_bit_p (exit_bbs, e->dest->index))
                && !rpo_state[bb_to_rpo[e->dest->index]].visited)
              bitmap_set_bit (worklist, bb_to_rpo[e->dest->index]);
        }
    }
  /* If statistics or dump file active.  */
  int nex = 0;
  unsigned max_visited = 1;
  for (int i = 0; i < n; ++i)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
      if (bb->flags & BB_EXECUTABLE)
        nex++;
      statistics_histogram_event (cfun, "RPO block visited times",
                                  rpo_state[i].visited);
      if (rpo_state[i].visited > max_visited)
        max_visited = rpo_state[i].visited;
    }
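  /* Count the tracked values and the total length of their availability
     chains for the statistics below.  */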
  unsigned nvalues = 0, navail = 0;
  for (hash_table<vn_ssa_aux_hasher>::iterator i = vn_ssa_aux_hash->begin ();
       i != vn_ssa_aux_hash->end (); ++i)
    {
      nvalues++;
      vn_avail *av = (*i)->avail;
      while (av)
        {
          navail++;
          av = av->next;
        }
    }
  statistics_counter_event (cfun, "RPO blocks", n);
  statistics_counter_event (cfun, "RPO blocks visited", nblk);
  statistics_counter_event (cfun, "RPO blocks executable", nex);
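  /* nblk / nex is the average number of visits per executable block,
     scaled by ten to preserve one fractional digit.  */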
  statistics_histogram_event (cfun, "RPO iterations", 10*nblk / nex);
  statistics_histogram_event (cfun, "RPO num values", nvalues);
  statistics_histogram_event (cfun, "RPO num avail", navail);
  statistics_histogram_event (cfun, "RPO num lattice",
                              vn_ssa_aux_hash->elements ());
  if (dump_file && (dump_flags & (TDF_DETAILS|TDF_STATS)))
    {
      fprintf (dump_file, "RPO iteration over %d blocks visited %" PRIu64
               " blocks in total discovering %d executable blocks iterating "
               "%d.%d times, a block was visited max. %u times\n",
               n, nblk, nex,
               (int)((10*nblk / nex)/10), (int)((10*nblk / nex)%10),
               max_visited);
      fprintf (dump_file, "RPO tracked %d values available at %d locations "
               "and %" PRIu64 " lattice elements\n",
               nvalues, navail, (uint64_t) vn_ssa_aux_hash->elements ());
    }

  if (eliminate)
    {
      /* When !iterate we already performed elimination during the RPO
         walk.  */
      if (iterate)
        {
          /* Elimination for region-based VN needs to be done within the
             RPO walk.  */
          gcc_assert (! do_region);
          /* Note we can't use avail.walk here because that gets confused
             by the existing availability and it will be less efficient
             as well.  */
          todo |= eliminate_with_rpo_vn (NULL);
        }
      else
        todo |= avail.eliminate_cleanup (do_region);
    }

  vn_valueize = NULL;
  rpo_avail = NULL;

  XDELETEVEC (bb_to_rpo);
  XDELETEVEC (rpo);
  XDELETEVEC (rpo_state);

  return todo;
}
/* Region-based entry for RPO VN.  Performs value-numbering and elimination
   on the SEME region specified by ENTRY and EXIT_BBS.  If ENTRY is not
   the only edge into the region at ENTRY->dest PHI nodes in ENTRY->dest
   are not value-numbered.
   If ITERATE is true then treat backedges optimistically as not
   executed and iterate.  If ELIMINATE is true then perform
   elimination, otherwise leave that to the caller.
   KIND specifies the amount of work done for handling memory operations.  */

unsigned
do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
           bool iterate, bool eliminate, vn_lookup_kind kind)
{
  auto_timevar tv (TV_TREE_RPO_VN);
  unsigned todo = do_rpo_vn_1 (fn, entry, exit_bbs, iterate, eliminate, kind);
  free_rpo_vn ();
  return todo;
}
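/* An illustrative sketch (not part of the original source): a caller
   wanting to value-number just a loop body might set up the region as

     bitmap exit_bbs = BITMAP_ALLOC (NULL);
     bitmap_set_bit (exit_bbs, single_exit (loop)->dest->index);
     unsigned todo = do_rpo_vn (cfun, loop_preheader_edge (loop), exit_bbs,
                                false, true, VN_WALKREWRITE);
     BITMAP_FREE (exit_bbs);

   using the loop preheader edge as the region entry and the block after
   the single exit as the region end.  */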
namespace {

const pass_data pass_data_fre =
{
  GIMPLE_PASS, /* type */
  "fre", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_FRE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};
class pass_fre : public gimple_opt_pass
{
public:
  pass_fre (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fre, ctxt), may_iterate (true)
  {}

  /* opt_pass methods: */
  opt_pass * clone () final override { return new pass_fre (m_ctxt); }
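  /* The single pass parameter distinguishes the iterating FRE instances
     from the late non-iterating one (see the pass_fre entries in
     passes.def).  */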
  void set_pass_param (unsigned int n, bool param) final override
    {
      gcc_assert (n == 0);
      may_iterate = param;
    }
  bool gate (function *) final override
    {
      return flag_tree_fre != 0 && (may_iterate || optimize > 1);
    }
  unsigned int execute (function *) final override;

private:
  bool may_iterate;
}; // class pass_fre
unsigned int
pass_fre::execute (function *fun)
{
  unsigned todo = 0;

  /* At -O[1g] use the cheap non-iterating mode.  */
  bool iterate_p = may_iterate && (optimize > 1);
  calculate_dominance_info (CDI_DOMINATORS);
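  /* Loop info is only needed by the iterating mode which uses it for the
     loop-depth iteration heuristic.  */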
  if (iterate_p)
    loop_optimizer_init (AVOID_CFG_MODIFICATIONS);

  todo = do_rpo_vn_1 (fun, NULL, NULL, iterate_p, true, VN_WALKREWRITE);
  free_rpo_vn ();

  if (iterate_p)
    loop_optimizer_finalize ();

  if (scev_initialized_p ())
    scev_reset_htab ();

  /* For late FRE after IVOPTs and unrolling, see if we can
     remove some TREE_ADDRESSABLE and rewrite stuff into SSA.  */
  if (!may_iterate)
    todo |= TODO_update_address_taken;

  return todo;
}

} // anon namespace
gimple_opt_pass *
make_pass_fre (gcc::context *ctxt)
{
  return new pass_fre (ctxt);
}

#undef BB_EXECUTABLE