/* SCC value numbering for trees
   Copyright (C) 2006-2021 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "splay-tree.h"
#include "insn-config.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "tree-inline.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-ssa-propagate.h"
#include "gimple-iterator.h"
#include "gimple-match.h"
#include "stringpool.h"
#include "tree-pass.h"
#include "statistics.h"
#include "langhooks.h"
#include "ipa-utils.h"
#include "tree-cfgcleanup.h"
#include "tree-ssa-loop.h"
#include "tree-scalar-evolution.h"
#include "tree-ssa-loop-niter.h"
#include "fold-const-call.h"
#include "ipa-modref-tree.h"
#include "ipa-modref.h"
#include "tree-ssa-sccvn.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In straight line code,
   it is equivalent to a regular hash based value numbering that is
   performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming
   from *outside of that SCC*, so we do not need to do anything special
   to ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCCs, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
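
/* For orientation only -- a minimal sketch of alternative 2 above, not
   the actual implementation in this file; scc_components and
   visit_and_value_number are hypothetical names:

     for (scc in scc_components (ssa_graph))  // Tarjan-style DFS pop
       do
	 {
	   changed = false;
	   // Operands defined outside the SCC already have their final
	   // value numbers; only the cycle members are iterated, against
	   // a separate optimistic hashtable.
	   for (name in scc)
	     changed |= visit_and_value_number (name, optimistic_table);
	 }
       while (changed);  // Iterate the cycle to a fixed point.  */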
/* There's no BB_EXECUTABLE but we can use BB_VISITED.  */
#define BB_EXECUTABLE BB_VISITED

static vn_lookup_kind default_vn_walk_kind;
/* vn_nary_op hashtable helpers.  */

struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
{
  typedef vn_nary_op_s *compare_type;
  static inline hashval_t hash (const vn_nary_op_s *);
  static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
};

/* Return the computed hashcode for nary operation P1.  */

inline hashval_t
vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
{
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

inline bool
vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
{
  return vno1 == vno2 || vn_nary_op_eq (vno1, vno2);
}

typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
/* vn_phi hashtable helpers.  */

static bool
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);

struct vn_phi_hasher : nofree_ptr_hash <vn_phi_s>
{
  static inline hashval_t hash (const vn_phi_s *);
  static inline bool equal (const vn_phi_s *, const vn_phi_s *);
};

/* Return the computed hashcode for phi operation P1.  */

inline hashval_t
vn_phi_hasher::hash (const vn_phi_s *vp1)
{
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

inline bool
vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
{
  return vp1 == vp2 || vn_phi_eq (vp1, vp2);
}

typedef hash_table<vn_phi_hasher> vn_phi_table_type;
typedef vn_phi_table_type::iterator vn_phi_iterator_type;
/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
	  /* We do not care for differences in type qualification.  */
	  && (vro1->type == vro2->type
	      || (vro1->type && vro2->type
		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
					 TYPE_MAIN_VARIANT (vro2->type))))
	  && expressions_equal_p (vro1->op0, vro2->op0)
	  && expressions_equal_p (vro1->op1, vro2->op1)
	  && expressions_equal_p (vro1->op2, vro2->op2)
	  && (vro1->opcode != CALL_EXPR || vro1->clique == vro2->clique));
}
/* Free a reference operation structure VP.  */

static inline void
free_reference (vn_reference_s *vr)
{
  vr->operands.release ();
}
/* vn_reference hashtable helpers.  */

struct vn_reference_hasher : nofree_ptr_hash <vn_reference_s>
{
  static inline hashval_t hash (const vn_reference_s *);
  static inline bool equal (const vn_reference_s *, const vn_reference_s *);
};

/* Return the hashcode for a given reference operation P1.  */

inline hashval_t
vn_reference_hasher::hash (const vn_reference_s *vr1)
{
  return vr1->hashcode;
}

inline bool
vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
{
  return v == c || vn_reference_eq (v, c);
}

typedef hash_table<vn_reference_hasher> vn_reference_table_type;
typedef vn_reference_table_type::iterator vn_reference_iterator_type;
/* Pretty-print OPS to OUTFILE.  */

void
print_vn_reference_ops (FILE *outfile, const vec<vn_reference_op_s> ops)
{
  vn_reference_op_t vro;
  unsigned int i;

  fprintf (outfile, "{");
  for (i = 0; ops.iterate (i, &vro); i++)
    {
      bool closebrace = false;
      if (vro->opcode != SSA_NAME
	  && TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
	{
	  fprintf (outfile, "%s", get_tree_code_name (vro->opcode));
	  if (vro->op0 || vro->opcode == CALL_EXPR)
	    {
	      fprintf (outfile, "<");
	      closebrace = true;
	    }
	}
      if (vro->op0 || vro->opcode == CALL_EXPR)
	{
	  if (!vro->op0)
	    fprintf (outfile, internal_fn_name ((internal_fn)vro->clique));
	  else
	    print_generic_expr (outfile, vro->op0);
	  if (vro->op1)
	    {
	      fprintf (outfile, ",");
	      print_generic_expr (outfile, vro->op1);
	    }
	  if (vro->op2)
	    {
	      fprintf (outfile, ",");
	      print_generic_expr (outfile, vro->op2);
	    }
	}
      if (closebrace)
	fprintf (outfile, ">");
      if (i != ops.length () - 1)
	fprintf (outfile, ",");
    }
  fprintf (outfile, "}");
}

DEBUG_FUNCTION void
debug_vn_reference_ops (const vec<vn_reference_op_s> ops)
{
  print_vn_reference_ops (stderr, ops);
  fputc ('\n', stderr);
}
/* The set of VN hashtables.  */

typedef struct vn_tables_s
{
  vn_nary_op_table_type *nary;
  vn_phi_table_type *phis;
  vn_reference_table_type *references;
} *vn_tables_t;
/* vn_constant hashtable helpers.  */

struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
{
  static inline hashval_t hash (const vn_constant_s *);
  static inline bool equal (const vn_constant_s *, const vn_constant_s *);
};

/* Hash table hash function for vn_constant_t.  */

inline hashval_t
vn_constant_hasher::hash (const vn_constant_s *vc1)
{
  return vc1->hashcode;
}

/* Hash table equality function for vn_constant_t.  */

inline bool
vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
{
  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

static hash_table<vn_constant_hasher> *constant_to_value_id;
/* Obstack we allocate the vn-tables elements from.  */
static obstack vn_tables_obstack;
/* Special obstack we never unwind.  */
static obstack vn_tables_insert_obstack;

static vn_reference_t last_inserted_ref;
static vn_phi_t last_inserted_phi;
static vn_nary_op_t last_inserted_nary;
static vn_ssa_aux_t last_pushed_avail;

/* Valid hashtables storing information we have proven to be
   correct.  */
static vn_tables_t valid_info;
/* Valueization hook for simplify_replace_tree.  Valueize NAME if it is
   an SSA name, otherwise just return it.  */
tree (*vn_valueize) (tree);

static tree
vn_valueize_for_srt (tree t, void* context ATTRIBUTE_UNUSED)
{
  basic_block saved_vn_context_bb = vn_context_bb;
  /* Look for sth available at the definition block of the argument.
     This avoids inconsistencies between availability there which
     decides if the stmt can be removed and availability at the
     use site.  The SSA property ensures that things available
     at the definition are also available at uses.  */
  if (!SSA_NAME_IS_DEFAULT_DEF (t))
    vn_context_bb = gimple_bb (SSA_NAME_DEF_STMT (t));
  tree res = vn_valueize (t);
  vn_context_bb = saved_vn_context_bb;
  return res;
}
/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;
static int next_constant_value_id;
/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

struct vn_ssa_aux_hasher : typed_noop_remove <vn_ssa_aux_t>
{
  typedef vn_ssa_aux_t value_type;
  typedef tree compare_type;
  static inline hashval_t hash (const value_type &);
  static inline bool equal (const value_type &, const compare_type &);
  static inline void mark_deleted (value_type &) {}
  static const bool empty_zero_p = true;
  static inline void mark_empty (value_type &e) { e = NULL; }
  static inline bool is_deleted (value_type &) { return false; }
  static inline bool is_empty (value_type &e) { return e == NULL; }
};

hashval_t
vn_ssa_aux_hasher::hash (const value_type &entry)
{
  return SSA_NAME_VERSION (entry->name);
}

bool
vn_ssa_aux_hasher::equal (const value_type &entry, const compare_type &name)
{
  return name == entry->name;
}

static hash_table<vn_ssa_aux_hasher> *vn_ssa_aux_hash;
typedef hash_table<vn_ssa_aux_hasher>::iterator vn_ssa_aux_iterator_type;
static struct obstack vn_ssa_aux_obstack;
static vn_nary_op_t vn_nary_op_insert_stmt (gimple *, tree);
static unsigned int vn_nary_length_from_stmt (gimple *);
static vn_nary_op_t alloc_vn_nary_op_noinit (unsigned int, obstack *);
static vn_nary_op_t vn_nary_op_insert_into (vn_nary_op_t,
					    vn_nary_op_table_type *);
static void init_vn_nary_op_from_stmt (vn_nary_op_t, gassign *);
static void init_vn_nary_op_from_pieces (vn_nary_op_t, unsigned int,
					 enum tree_code, tree, tree *);
static tree vn_lookup_simplify_result (gimple_match_op *);
static vn_reference_t vn_reference_lookup_or_insert_for_pieces
	  (tree, alias_set_type, alias_set_type, tree,
	   vec<vn_reference_op_s, va_heap>, tree);
/* Return whether there is value numbering information for a given SSA name.  */

bool
has_VN_INFO (tree name)
{
  return vn_ssa_aux_hash->find_with_hash (name, SSA_NAME_VERSION (name));
}
vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t *res
    = vn_ssa_aux_hash->find_slot_with_hash (name, SSA_NAME_VERSION (name),
					    INSERT);
  if (*res != NULL)
    return *res;

  vn_ssa_aux_t newinfo = *res = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  newinfo->name = name;
  newinfo->valnum = VN_TOP;
  /* We are using the visited flag to handle uses with defs not within the
     region being value-numbered.  */
  newinfo->visited = false;

  /* Given we create the VN_INFOs on-demand now we have to do initialization
     different than VN_TOP here.  */
  if (SSA_NAME_IS_DEFAULT_DEF (name))
    switch (TREE_CODE (SSA_NAME_VAR (name)))
      {
      case VAR_DECL:
	/* All undefined vars are VARYING.  */
	newinfo->valnum = name;
	newinfo->visited = true;
	break;

      case PARM_DECL:
	/* Parameters are VARYING but we can record a condition
	   if we know it is a non-NULL pointer.  */
	newinfo->visited = true;
	newinfo->valnum = name;
	if (POINTER_TYPE_P (TREE_TYPE (name))
	    && nonnull_arg_p (SSA_NAME_VAR (name)))
	  {
	    tree ops[2];
	    ops[0] = name;
	    ops[1] = build_int_cst (TREE_TYPE (name), 0);
	    vn_nary_op_t nary;
	    /* Allocate from non-unwinding stack.  */
	    nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
	    init_vn_nary_op_from_pieces (nary, 2, NE_EXPR,
					 boolean_type_node, ops);
	    nary->predicated_values = 0;
	    nary->u.result = boolean_true_node;
	    vn_nary_op_insert_into (nary, valid_info->nary);
	    gcc_assert (nary->unwind_to == NULL);
	    /* Also do not link it into the undo chain.  */
	    last_inserted_nary = nary->next;
	    nary->next = (vn_nary_op_t)(void *)-1;
	    nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
	    init_vn_nary_op_from_pieces (nary, 2, EQ_EXPR,
					 boolean_type_node, ops);
	    nary->predicated_values = 0;
	    nary->u.result = boolean_false_node;
	    vn_nary_op_insert_into (nary, valid_info->nary);
	    gcc_assert (nary->unwind_to == NULL);
	    last_inserted_nary = nary->next;
	    nary->next = (vn_nary_op_t)(void *)-1;
	    if (dump_file && (dump_flags & TDF_DETAILS))
	      {
		fprintf (dump_file, "Recording ");
		print_generic_expr (dump_file, name, TDF_SLIM);
		fprintf (dump_file, " != 0\n");
	      }
	  }
	break;

      case RESULT_DECL:
	/* If the result is passed by invisible reference the default
	   def is initialized, otherwise it's uninitialized.  Still
	   undefined is varying.  */
	newinfo->visited = true;
	newinfo->valnum = name;
	break;

      default:
	gcc_unreachable ();
      }
  return newinfo;
}
/* Return the SSA value of X.  */

static inline tree
SSA_VAL (tree x, bool *visited = NULL)
{
  vn_ssa_aux_t tem = vn_ssa_aux_hash->find_with_hash (x, SSA_NAME_VERSION (x));
  if (visited)
    *visited = tem && tem->visited;
  return tem && tem->visited ? tem->valnum : x;
}
/* Return the SSA value of the VUSE x, supporting released VDEFs
   during elimination which will value-number the VDEF to the
   associated VUSE (but not substitute in the whole lattice).  */

static inline tree
vuse_ssa_val (tree x)
{
  if (!x)
    return NULL_TREE;

  do
    {
      x = SSA_VAL (x);
      gcc_assert (x != VN_TOP);
    }
  while (SSA_NAME_IN_FREE_LIST (x));

  return x;
}
/* Similar to the above but used as callback for walk_non_aliased_vuses
   and thus should stop at unvisited VUSE to not walk across region
   boundaries.  */

static tree
vuse_valueize (tree vuse)
{
  do
    {
      bool visited;
      vuse = SSA_VAL (vuse, &visited);
      if (!visited)
	return NULL_TREE;
      gcc_assert (vuse != VN_TOP);
    }
  while (SSA_NAME_IN_FREE_LIST (vuse));
  return vuse;
}
/* Return the vn_kind the expression computed by the stmt should be
   associated with.  */

enum vn_kind
vn_get_stmt_kind (gimple *stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      return VN_REFERENCE;
    case GIMPLE_PHI:
      return VN_PHI;
    case GIMPLE_ASSIGN:
      {
	enum tree_code code = gimple_assign_rhs_code (stmt);
	tree rhs1 = gimple_assign_rhs1 (stmt);
	switch (get_gimple_rhs_class (code))
	  {
	  case GIMPLE_UNARY_RHS:
	  case GIMPLE_BINARY_RHS:
	  case GIMPLE_TERNARY_RHS:
	    return VN_NARY;
	  case GIMPLE_SINGLE_RHS:
	    switch (TREE_CODE_CLASS (code))
	      {
	      case tcc_reference:
		/* VOP-less references can go through unary case.  */
		if ((code == REALPART_EXPR
		     || code == IMAGPART_EXPR
		     || code == VIEW_CONVERT_EXPR
		     || code == BIT_FIELD_REF)
		    && (TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME
			|| is_gimple_min_invariant (TREE_OPERAND (rhs1, 0))))
		  return VN_NARY;

		/* Fallthrough.  */
	      case tcc_declaration:
		return VN_REFERENCE;

	      case tcc_constant:
		return VN_CONSTANT;

	      default:
		if (code == ADDR_EXPR)
		  return (is_gimple_min_invariant (rhs1)
			  ? VN_CONSTANT : VN_REFERENCE);
		else if (code == CONSTRUCTOR)
		  return VN_NARY;
		return VN_NONE;
	      }
	  default:
	    return VN_NONE;
	  }
      }
    default:
      return VN_NONE;
    }
}
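
/* Illustrative examples of the classification above (not exhaustive):
   x_1 = y_2 + z_3 is VN_NARY, x_1 = *p_2 is VN_REFERENCE, x_1 = 5 is
   VN_CONSTANT and x_1 = PHI <y_2, z_3> is VN_PHI.  */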
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
  if (slot)
    return (*slot)->value_id;
  return 0;
}
/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  /* If the hashtable isn't initialized we're not running from PRE and thus
     do not need value-ids.  */
  if (!constant_to_value_id)
    return 0;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, INSERT);
  if (*slot)
    return (*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_constant_value_id ();
  *slot = vcp;
  return vcp->value_id;
}
/* Compute the hash for a reference operand VRO1.  */

static void
vn_reference_op_compute_hash (const vn_reference_op_t vro1,
			      inchash::hash &hstate)
{
  hstate.add_int (vro1->opcode);
  if (vro1->opcode == CALL_EXPR && !vro1->op0)
    hstate.add_int (vro1->clique);
  if (vro1->op0)
    inchash::add_expr (vro1->op0, hstate);
  if (vro1->op1)
    inchash::add_expr (vro1->op1, hstate);
  if (vro1->op2)
    inchash::add_expr (vro1->op2, hstate);
}
/* Compute a hash for the reference operation VR1 and return it.  */

static hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  inchash::hash hstate;
  hashval_t result;
  int i;
  vn_reference_op_t vro;
  poly_int64 off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
	deref = true;
      else if (vro->opcode != ADDR_EXPR)
	deref = false;
      if (maybe_ne (vro->off, -1))
	{
	  if (known_eq (off, -1))
	    off = 0;
	  off += vro->off;
	}
      else
	{
	  if (maybe_ne (off, -1)
	      && maybe_ne (off, 0))
	    hstate.add_poly_int (off);
	  off = -1;
	  if (deref
	      && vro->opcode == ADDR_EXPR)
	    {
	      if (vro->op0)
		{
		  tree op = TREE_OPERAND (vro->op0, 0);
		  hstate.add_int (TREE_CODE (op));
		  inchash::add_expr (op, hstate);
		}
	    }
	  else
	    vn_reference_op_compute_hash (vro, hstate);
	}
    }
  result = hstate.end ();
  /* ??? We would ICE later if we hash instead of adding that in.  */
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
/* Return true if reference operations VR1 and VR2 are equivalent.  This
   means they have the same set of operands and vuses.  */

bool
vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
{
  unsigned i, j;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!vr1->type || !vr2->type)
    {
      if (vr1->type != vr2->type)
	return false;
    }
  else if (vr1->type == vr2->type)
    ;
  else if (COMPLETE_TYPE_P (vr1->type) != COMPLETE_TYPE_P (vr2->type)
	   || (COMPLETE_TYPE_P (vr1->type)
	       && !expressions_equal_p (TYPE_SIZE (vr1->type),
					TYPE_SIZE (vr2->type))))
    return false;
  else if (vr1->operands[0].opcode == CALL_EXPR
	   && !types_compatible_p (vr1->type, vr2->type))
    return false;
  else if (INTEGRAL_TYPE_P (vr1->type)
	   && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
	return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
	   && (TYPE_PRECISION (vr1->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
	   && (TYPE_PRECISION (vr2->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      poly_int64 off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      bool reverse1 = false, reverse2 = false;
      for (; vr1->operands.iterate (i, &vro1); i++)
	{
	  if (vro1->opcode == MEM_REF)
	    deref1 = true;
	  /* Do not look through a storage order barrier.  */
	  else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
	    return false;
	  reverse1 |= vro1->reverse;
	  if (known_eq (vro1->off, -1))
	    break;
	  off1 += vro1->off;
	}
      for (; vr2->operands.iterate (j, &vro2); j++)
	{
	  if (vro2->opcode == MEM_REF)
	    deref2 = true;
	  /* Do not look through a storage order barrier.  */
	  else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
	    return false;
	  reverse2 |= vro2->reverse;
	  if (known_eq (vro2->off, -1))
	    break;
	  off2 += vro2->off;
	}
      if (maybe_ne (off1, off2) || reverse1 != reverse2)
	return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
	{
	  memset (&tem1, 0, sizeof (tem1));
	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
	  tem1.type = TREE_TYPE (tem1.op0);
	  tem1.opcode = TREE_CODE (tem1.op0);
	  vro1 = &tem1;
	  deref1 = false;
	}
      if (deref2 && vro2->opcode == ADDR_EXPR)
	{
	  memset (&tem2, 0, sizeof (tem2));
	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
	  tem2.type = TREE_TYPE (tem2.op0);
	  tem2.opcode = TREE_CODE (tem2.op0);
	  vro2 = &tem2;
	  deref2 = false;
	}
      if (deref1 != deref2)
	return false;
      if (!vn_reference_op_eq (vro1, vro2))
	return false;
      ++j;
      ++i;
    }
  while (vr1->operands.length () != i
	 || vr2->operands.length () != j);

  return true;
}
/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
{
  /* For non-calls, store the information that makes up the address.  */
  tree orig = ref;
  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
	{
	case MODIFY_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  break;
	case WITH_SIZE_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.off = 0;
	  break;
	case MEM_REF:
	  /* The base address gets its own vn_reference_op_s structure.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  if (!mem_ref_offset (ref).to_shwi (&temp.off))
	    temp.off = -1;
	  temp.clique = MR_DEPENDENCE_CLIQUE (ref);
	  temp.base = MR_DEPENDENCE_BASE (ref);
	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
	  break;
	case TARGET_MEM_REF:
	  /* The base address gets its own vn_reference_op_s structure.  */
	  temp.op0 = TMR_INDEX (ref);
	  temp.op1 = TMR_STEP (ref);
	  temp.op2 = TMR_OFFSET (ref);
	  temp.clique = MR_DEPENDENCE_CLIQUE (ref);
	  temp.base = MR_DEPENDENCE_BASE (ref);
	  result->safe_push (temp);
	  memset (&temp, 0, sizeof (temp));
	  temp.type = NULL_TREE;
	  temp.opcode = ERROR_MARK;
	  temp.op0 = TMR_INDEX2 (ref);
	  temp.off = -1;
	  break;
	case BIT_FIELD_REF:
	  /* Record bits, position and storage order.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
	    temp.off = -1;
	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
	  break;
	case COMPONENT_REF:
	  /* The field decl is enough to unambiguously specify the field,
	     so use its type here.  */
	  temp.type = TREE_TYPE (TREE_OPERAND (ref, 1));
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  temp.reverse = (AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (ref, 0)))
			  && TYPE_REVERSE_STORAGE_ORDER
			       (TREE_TYPE (TREE_OPERAND (ref, 0))));
	  {
	    tree this_offset = component_ref_field_offset (ref);
	    if (this_offset
		&& poly_int_tree_p (this_offset))
	      {
		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
		  {
		    poly_offset_int off
		      = (wi::to_poly_offset (this_offset)
			 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
		    /* Prohibit value-numbering zero offset components
		       of addresses the same before the pass folding
		       __builtin_object_size had a chance to run.  */
		    if (TREE_CODE (orig) != ADDR_EXPR
			|| maybe_ne (off, 0)
			|| (cfun->curr_properties & PROP_objsz))
		      off.to_shwi (&temp.off);
		  }
	      }
	  }
	  break;
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  {
	    tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
	    /* Record index as operand.  */
	    temp.op0 = TREE_OPERAND (ref, 1);
	    /* Always record lower bounds and element size.  */
	    temp.op1 = array_ref_low_bound (ref);
	    /* But record element size in units of the type alignment.  */
	    temp.op2 = TREE_OPERAND (ref, 3);
	    temp.align = eltype->type_common.align;
	    if (!temp.op2)
	      temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
				     size_int (TYPE_ALIGN_UNIT (eltype)));
	    if (poly_int_tree_p (temp.op0)
		&& poly_int_tree_p (temp.op1)
		&& TREE_CODE (temp.op2) == INTEGER_CST)
	      {
		poly_offset_int off = ((wi::to_poly_offset (temp.op0)
					- wi::to_poly_offset (temp.op1))
				       * wi::to_offset (temp.op2)
				       * vn_ref_op_align_unit (&temp));
		off.to_shwi (&temp.off);
	      }
	    temp.reverse = (AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (ref, 0)))
			    && TYPE_REVERSE_STORAGE_ORDER
				 (TREE_TYPE (TREE_OPERAND (ref, 0))));
	  }
	  break;
	case VAR_DECL:
	  if (DECL_HARD_REGISTER (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthru.  */
	case PARM_DECL:
	case CONST_DECL:
	case RESULT_DECL:
	  /* Canonicalize decls to MEM[&decl] which is what we end up with
	     when valueizing MEM[ptr] with ptr = &decl.  */
	  temp.opcode = MEM_REF;
	  temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
	  temp.off = 0;
	  result->safe_push (temp);
	  temp.opcode = ADDR_EXPR;
	  temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
	  temp.type = TREE_TYPE (temp.op0);
	  temp.off = -1;
	  break;
	case STRING_CST:
	case INTEGER_CST:
	case POLY_INT_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case FIXED_CST:
	case CONSTRUCTOR:
	case SSA_NAME:
	  temp.op0 = ref;
	  break;
	case ADDR_EXPR:
	  if (is_gimple_min_invariant (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  break;
	  /* These are only interesting for their operands, their
	     existence, and their type.  They will never be the last
	     ref in the chain of references (i.e. they require an
	     operand), so we don't have to put anything
	     for op* as it will be handled by the iteration.  */
	case REALPART_EXPR:
	  temp.off = 0;
	  break;
	case VIEW_CONVERT_EXPR:
	  temp.off = 0;
	  temp.reverse = storage_order_barrier_p (ref);
	  break;
	case IMAGPART_EXPR:
	  /* This is only interesting for its constant offset.  */
	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
	  break;
	default:
	  gcc_unreachable ();
	}
      result->safe_push (temp);

      if (REFERENCE_CLASS_P (ref)
	  || TREE_CODE (ref) == MODIFY_EXPR
	  || TREE_CODE (ref) == WITH_SIZE_EXPR
	  || (TREE_CODE (ref) == ADDR_EXPR
	      && !is_gimple_min_invariant (ref)))
	ref = TREE_OPERAND (ref, 0);
      else
	ref = NULL_TREE;
    }
}
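
/* Illustrative example (field layout details elided): a load from
   p_1->f with p_1 an SSA name is flattened outermost-to-innermost as
     { COMPONENT_REF <f>, MEM_REF [0], SSA_NAME p_1 }
   so the base address ends up last in the operand vector.  */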
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
			       alias_set_type set, alias_set_type base_set,
			       tree type, const vec<vn_reference_op_s> &ops)
{
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  poly_offset_int offset = 0;
  poly_offset_int max_size;
  poly_offset_int size = -1;
  tree size_tree = NULL_TREE;

  /* We don't handle calls.  */
  if (!type)
    return false;

  machine_mode mode = TYPE_MODE (type);
  if (mode == BLKmode)
    size_tree = TYPE_SIZE (type);
  else
    size = GET_MODE_BITSIZE (mode);
  if (size_tree != NULL_TREE
      && poly_int_tree_p (size_tree))
    size = wi::to_poly_offset (size_tree);

  /* Lower the final access size from the outermost expression.  */
  const_vn_reference_op_t cst_op = &ops[0];
  /* Cast away constness for the sake of the const-unsafe
     FOR_EACH_VEC_ELT().  */
  vn_reference_op_t op = const_cast<vn_reference_op_t>(cst_op);
  size_tree = NULL_TREE;
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  if (size_tree != NULL_TREE
      && poly_int_tree_p (size_tree)
      && (!known_size_p (size)
	  || known_lt (wi::to_poly_offset (size_tree), size)))
    size = wi::to_poly_offset (size_tree);

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (ops, i, op)
    {
      switch (op->opcode)
	{
	/* These may be in the reference ops, but we cannot do anything
	   sensible with them here.  */
	case ADDR_EXPR:
	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
	  if (base != NULL_TREE
	      && TREE_CODE (base) == MEM_REF
	      && op->op0
	      && DECL_P (TREE_OPERAND (op->op0, 0)))
	    {
	      const_vn_reference_op_t pop = &ops[i-1];
	      base = TREE_OPERAND (op->op0, 0);
	      if (known_eq (pop->off, -1))
		{
		  max_size = -1;
		  offset = 0;
		}
	      else
		offset += pop->off * BITS_PER_UNIT;
	      op0_p = NULL;
	      break;
	    }
	  /* Fallthru.  */
	case CALL_EXPR:
	  return false;

	/* Record the base objects.  */
	case MEM_REF:
	  *op0_p = build2 (MEM_REF, op->type,
			   NULL_TREE, op->op0);
	  MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
	  MR_DEPENDENCE_BASE (*op0_p) = op->base;
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case VAR_DECL:
	case PARM_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  *op0_p = op->op0;
	  op0_p = NULL;
	  break;

	/* And now the usual component-reference style ops.  */
	case BIT_FIELD_REF:
	  offset += wi::to_poly_offset (op->op1);
	  break;

	case COMPONENT_REF:
	  {
	    tree field = op->op0;
	    /* We do not have a complete COMPONENT_REF tree here so we
	       cannot use component_ref_field_offset.  Do the interesting
	       parts manually.  */
	    tree this_offset = DECL_FIELD_OFFSET (field);

	    if (op->op1 || !poly_int_tree_p (this_offset))
	      max_size = -1;
	    else
	      {
		poly_offset_int woffset = (wi::to_poly_offset (this_offset)
					   << LOG2_BITS_PER_UNIT);
		woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
		offset += woffset;
	      }
	    break;
	  }

	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* We recorded the lower bound and the element size.  */
	  if (!poly_int_tree_p (op->op0)
	      || !poly_int_tree_p (op->op1)
	      || TREE_CODE (op->op2) != INTEGER_CST)
	    max_size = -1;
	  else
	    {
	      poly_offset_int woffset
		= wi::sext (wi::to_poly_offset (op->op0)
			    - wi::to_poly_offset (op->op1),
			    TYPE_PRECISION (sizetype));
	      woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
	      woffset <<= LOG2_BITS_PER_UNIT;
	      offset += woffset;
	    }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  offset += size;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case CONSTRUCTOR:
	case CONST_DECL:
	  return false;

	default:
	  return false;
	}
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->ref_alias_set = set;
  ref->base_alias_set = base_set;
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
    {
      ref->offset = 0;
      ref->size = -1;
      ref->max_size = -1;
      return true;
    }

  if (!offset.to_shwi (&ref->offset))
    {
      ref->offset = 0;
      ref->max_size = -1;
      return true;
    }

  if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
    ref->max_size = -1;

  return true;
}
/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_call (gcall *call,
			      vec<vn_reference_op_s> *result)
{
  vn_reference_op_s temp;
  unsigned i;
  tree lhs = gimple_call_lhs (call);
  int lr;

  /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
     different.  By adding the lhs here in the vector, we ensure that the
     hashcode is different, guaranteeing a different value number.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    {
      memset (&temp, 0, sizeof (temp));
      temp.opcode = MODIFY_EXPR;
      temp.type = TREE_TYPE (lhs);
      temp.op0 = lhs;
      temp.off = -1;
      result->safe_push (temp);
    }

  /* Copy the type, opcode, function, static chain and EH region, if any.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_fntype (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  if (gimple_call_internal_p (call))
    temp.clique = gimple_call_internal_fn (call);
  temp.op1 = gimple_call_chain (call);
  if (stmt_could_throw_p (cfun, call) && (lr = lookup_stmt_eh_lp (call)) > 0)
    temp.op2 = size_int (lr);
  temp.off = -1;
  result->safe_push (temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */

static bool
vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
			    unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  tree addr_base;
  poly_int64 addr_offset = 0;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (op->op0, 0),
					       &addr_offset, vn_valueize);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != TREE_OPERAND (op->op0, 0))
    {
      poly_offset_int off
	= (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
				  SIGNED)
	   + addr_offset);
      mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (tree_fits_shwi_p (mem_op->op0))
	mem_op->off = tree_to_shwi (mem_op->op0);
      else
	mem_op->off = -1;
      return true;
    }
  return false;
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */

static bool
vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
				     unsigned int *i_p)
{
  bool changed = false;
  vn_reference_op_t op;

  do
    {
      unsigned int i = *i_p;
      op = &(*ops)[i];
      vn_reference_op_t mem_op = &(*ops)[i - 1];
      gimple *def_stmt;
      enum tree_code code;
      poly_offset_int off;

      def_stmt = SSA_NAME_DEF_STMT (op->op0);
      if (!is_gimple_assign (def_stmt))
	return changed;

      code = gimple_assign_rhs_code (def_stmt);
      if (code != ADDR_EXPR
	  && code != POINTER_PLUS_EXPR)
	return changed;

      off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);

      /* The only thing we have to do is from &OBJ.foo.bar add the offset
	 from .foo.bar to the preceding MEM_REF offset and replace the
	 address with &OBJ.  */
      if (code == ADDR_EXPR)
	{
	  tree addr, addr_base;
	  poly_int64 addr_offset;

	  addr = gimple_assign_rhs1 (def_stmt);
	  addr_base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (addr, 0),
						       &addr_offset,
						       vn_valueize);
	  /* If that didn't work because the address isn't invariant propagate
	     the reference tree from the address operation in case the current
	     dereference isn't offsetted.  */
	  if (!addr_base
	      && *i_p == ops->length () - 1
	      && known_eq (off, 0)
	      /* This makes us disable this transform for PRE where the
		 reference ops might be also used for code insertion which
		 is invalid.  */
	      && default_vn_walk_kind == VN_WALKREWRITE)
	    {
	      auto_vec<vn_reference_op_s, 32> tem;
	      copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
	      /* Make sure to preserve TBAA info.  The only objects not
		 wrapped in MEM_REFs that can have their address taken are
		 STRING_CSTs.  */
	      if (tem.length () >= 2
		  && tem[tem.length () - 2].opcode == MEM_REF)
		{
		  vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
		  new_mem_op->op0
		    = wide_int_to_tree (TREE_TYPE (mem_op->op0),
					wi::to_poly_wide (new_mem_op->op0));
		}
	      else
		gcc_assert (tem.last ().opcode == STRING_CST);
	      ops->pop ();
	      ops->pop ();
	      ops->safe_splice (tem);
	      --*i_p;
	      return true;
	    }
	  if (!addr_base
	      || TREE_CODE (addr_base) != MEM_REF
	      || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base,
								    0))))
	    return changed;

	  off += addr_offset;
	  off += mem_ref_offset (addr_base);
	  op->op0 = TREE_OPERAND (addr_base, 0);
	}
      else
	{
	  tree ptr, ptroff;
	  ptr = gimple_assign_rhs1 (def_stmt);
	  ptroff = gimple_assign_rhs2 (def_stmt);
	  if (TREE_CODE (ptr) != SSA_NAME
	      || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
	      /* Make sure to not endlessly recurse.
		 See gcc.dg/tree-ssa/20040408-1.c for an example.  Can easily
		 happen when we value-number a PHI to its backedge value.  */
	      || SSA_VAL (ptr) == op->op0
	      || !poly_int_tree_p (ptroff))
	    return changed;

	  off += wi::to_poly_offset (ptroff);
	  op->op0 = ptr;
	}

      mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
      if (tree_fits_shwi_p (mem_op->op0))
	mem_op->off = tree_to_shwi (mem_op->op0);
      else
	mem_op->off = -1;
      /* ???  Can end up with endless recursion here!?
	 gcc.c-torture/execute/strcmp-1.c  */
      if (TREE_CODE (op->op0) == SSA_NAME)
	op->op0 = SSA_VAL (op->op0);
      if (TREE_CODE (op->op0) != SSA_NAME)
	op->opcode = TREE_CODE (op->op0);

      changed = true;
    }
  /* Tail-recurse.  */
  while (TREE_CODE (op->op0) == SSA_NAME);

  /* Fold a remaining *&.  */
  if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);

  return changed;
}
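
/* Illustrative example (hypothetical GIMPLE): given
     p_1 = &a + 4;  ... = MEM[p_1 + 8];
   the constant 4 is folded into the MEM_REF offset and the address
   operand becomes &a, so the access value-numbers the same as
   MEM[&a + 12].  */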
/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  vec<vn_reference_op_s> operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = &operands[0];
  if (op->opcode == CALL_EXPR
      && (!op->op0
	  || (TREE_CODE (op->op0) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
	      && fndecl_built_in_p (TREE_OPERAND (op->op0, 0),
				    BUILT_IN_NORMAL)))
      && operands.length () >= 2
      && operands.length () <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = &operands[1];
      if (operands.length () > 2)
	arg1 = &operands[2];
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
	  || (arg0->opcode == ADDR_EXPR
	      && is_gimple_min_invariant (arg0->op0)))
	anyconst = true;
      if (arg1
	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
	      || (arg1->opcode == ADDR_EXPR
		  && is_gimple_min_invariant (arg1->op0))))
	anyconst = true;
      if (anyconst)
	{
	  combined_fn fn;
	  if (op->op0)
	    fn = as_combined_fn (DECL_FUNCTION_CODE
					(TREE_OPERAND (op->op0, 0)));
	  else
	    fn = as_combined_fn ((internal_fn) op->clique);
	  tree folded;
	  if (arg1)
	    folded = fold_const_call (fn, ref->type, arg0->op0, arg1->op0);
	  else
	    folded = fold_const_call (fn, ref->type, arg0->op0);
	  if (folded
	      && is_gimple_min_invariant (folded))
	    return folded;
	}
    }

  /* Simplify reads from constants or constant initializers.  */
  else if (BITS_PER_UNIT == 8
	   && COMPLETE_TYPE_P (ref->type)
	   && is_gimple_reg_type (ref->type))
    {
      poly_int64 off = 0;
      HOST_WIDE_INT size;
      if (INTEGRAL_TYPE_P (ref->type))
	size = TYPE_PRECISION (ref->type);
      else if (tree_fits_shwi_p (TYPE_SIZE (ref->type)))
	size = tree_to_shwi (TYPE_SIZE (ref->type));
      else
	return NULL_TREE;
      if (size % BITS_PER_UNIT != 0
	  || size > MAX_BITSIZE_MODE_ANY_MODE)
	return NULL_TREE;
      size /= BITS_PER_UNIT;
      unsigned i;
      for (i = 0; i < operands.length (); ++i)
	{
	  if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
	    {
	      ++i;
	      break;
	    }
	  if (known_eq (operands[i].off, -1))
	    return NULL_TREE;
	  off += operands[i].off;
	  if (operands[i].opcode == MEM_REF)
	    {
	      ++i;
	      break;
	    }
	}
      vn_reference_op_t base = &operands[--i];
      tree ctor = error_mark_node;
      tree decl = NULL_TREE;
      if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
	ctor = base->op0;
      else if (base->opcode == MEM_REF
	       && base[1].opcode == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
	{
	  decl = TREE_OPERAND (base[1].op0, 0);
	  if (TREE_CODE (decl) == STRING_CST)
	    ctor = decl;
	  else
	    ctor = ctor_for_folding (decl);
	}
      if (ctor == NULL_TREE)
	return build_zero_cst (ref->type);
      else if (ctor != error_mark_node)
	{
	  HOST_WIDE_INT const_off;
	  if (decl)
	    {
	      tree res = fold_ctor_reference (ref->type, ctor,
					      off * BITS_PER_UNIT,
					      size * BITS_PER_UNIT, decl);
	      if (res)
		{
		  STRIP_USELESS_TYPE_CONVERSION (res);
		  if (is_gimple_min_invariant (res))
		    return res;
		}
	    }
	  else if (off.is_constant (&const_off))
	    {
	      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
	      int len = native_encode_expr (ctor, buf, size, const_off);
	      if (len > 0)
		return native_interpret_expr (ref->type, buf, len);
	    }
	}
    }

  return NULL_TREE;
}
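
/* Illustrative example: with
     static const int tbl[2] = { 1, 2 };
   a load of tbl[1] reaches the initializer through the MEM_REF and
   ADDR_EXPR base operands and folds to the constant 2.  */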
/* Return true if OPS contain a storage order barrier.  */

static bool
contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;

  FOR_EACH_VEC_ELT (ops, i, op)
    if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
      return true;

  return false;
}
/* Return true if OPS represent an access with reverse storage order.  */

static bool
reverse_storage_order_for_component_p (vec<vn_reference_op_s> ops)
{
  unsigned i = 0;
  if (ops[i].opcode == REALPART_EXPR || ops[i].opcode == IMAGPART_EXPR)
    ++i;
  switch (ops[i].opcode)
    {
    case ARRAY_REF:
    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case MEM_REF:
      return ops[i].reverse;
    default:
      return false;
    }
}
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static void
valueize_refs_1 (vec<vn_reference_op_s> *orig, bool *valueized_anything,
		 bool with_avail = false)
{
  *valueized_anything = false;

  for (unsigned i = 0; i < orig->length (); ++i)
    {
      vn_reference_op_t vro = &(*orig)[i];
      if (vro->opcode == SSA_NAME
	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
	{
	  tree tem = with_avail ? vn_valueize (vro->op0) : SSA_VAL (vro->op0);
	  if (tem != vro->op0)
	    {
	      *valueized_anything = true;
	      vro->op0 = tem;
	    }
	  /* If it transforms from an SSA_NAME to a constant, update
	     the opcode.  */
	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
	    vro->opcode = TREE_CODE (vro->op0);
	}
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
	{
	  tree tem = with_avail ? vn_valueize (vro->op1) : SSA_VAL (vro->op1);
	  if (tem != vro->op1)
	    {
	      *valueized_anything = true;
	      vro->op1 = tem;
	    }
	}
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
	{
	  tree tem = with_avail ? vn_valueize (vro->op2) : SSA_VAL (vro->op2);
	  if (tem != vro->op2)
	    {
	      *valueized_anything = true;
	      vro->op2 = tem;
	    }
	}
      /* If it transforms from an SSA_NAME to an address, fold with
	 a preceding indirect reference.  */
      if (i > 0
	  && vro->op0
	  && TREE_CODE (vro->op0) == ADDR_EXPR
	  && (*orig)[i - 1].opcode == MEM_REF)
	{
	  if (vn_reference_fold_indirect (orig, &i))
	    *valueized_anything = true;
	}
      else if (i > 0
	       && vro->opcode == SSA_NAME
	       && (*orig)[i - 1].opcode == MEM_REF)
	{
	  if (vn_reference_maybe_forwprop_address (orig, &i))
	    {
	      *valueized_anything = true;
	      /* Re-valueize the current operand.  */
	      vro = &(*orig)[i];
	    }
	}
      /* If it transforms a non-constant ARRAY_REF into a constant
	 one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
	       && known_eq (vro->off, -1)
	       && poly_int_tree_p (vro->op0)
	       && poly_int_tree_p (vro->op1)
	       && TREE_CODE (vro->op2) == INTEGER_CST)
	{
	  poly_offset_int off = ((wi::to_poly_offset (vro->op0)
				  - wi::to_poly_offset (vro->op1))
				 * wi::to_offset (vro->op2)
				 * vn_ref_op_align_unit (vro));
	  off.to_shwi (&vro->off);
	}
    }
}
static void
valueize_refs (vec<vn_reference_op_s> *orig)
{
  bool tem;
  valueize_refs_1 (orig, &tem);
}

static vec<vn_reference_op_s> shared_lookup_references;
/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  valueize_refs_1 (&shared_lookup_references, valueized_anything);
  return shared_lookup_references;
}
/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_call (gcall *call)
{
  if (!call)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  valueize_refs (&shared_lookup_references);
  return shared_lookup_references;
}
/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  vn_reference_s **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
	*vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}
/* Partial definition tracking support.  */

struct pd_range
{
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
};

struct pd_data
{
  tree rhs;
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
};
/* Context for alias walking.  */

struct vn_walk_cb_data
{
  vn_walk_cb_data (vn_reference_t vr_, tree orig_ref_, tree *last_vuse_ptr_,
		   vn_lookup_kind vn_walk_kind_, bool tbaa_p_, tree mask_)
    : vr (vr_), last_vuse_ptr (last_vuse_ptr_), last_vuse (NULL_TREE),
      mask (mask_), masked_result (NULL_TREE), vn_walk_kind (vn_walk_kind_),
      tbaa_p (tbaa_p_), saved_operands (vNULL), first_set (-2),
      first_base_set (-2), known_ranges (NULL)
  {
    if (!last_vuse_ptr)
      last_vuse_ptr = &last_vuse;
    ao_ref_init (&orig_ref, orig_ref_);
    if (mask)
      {
	wide_int w = wi::to_wide (mask);
	unsigned int pos = 0, prec = w.get_precision ();
	pd_data pd;
	pd.rhs = build_constructor (NULL_TREE, NULL);
	/* When bitwise and with a constant is done on a memory load,
	   we don't really need all the bits to be defined or defined
	   to constants, we don't really care what is in the position
	   corresponding to 0 bits in the mask.
	   So, push the ranges of those 0 bits in the mask as artificial
	   zero stores and let the partial def handling code do the
	   rest.  */
	while (pos < prec)
	  {
	    int tz = wi::ctz (w);
	    if (pos + tz > prec)
	      tz = prec - pos;
	    if (tz)
	      {
		if (BYTES_BIG_ENDIAN)
		  pd.offset = prec - pos - tz;
		else
		  pd.offset = pos;
		pd.size = tz;
		void *r = push_partial_def (pd, 0, 0, 0, prec);
		gcc_assert (r == NULL_TREE);
	      }
	    pos += tz;
	    if (pos == prec)
	      break;
	    w = wi::lrshift (w, tz);
	    tz = wi::ctz (wi::bit_not (w));
	    if (pos + tz > prec)
	      tz = prec - pos;
	    pos += tz;
	    w = wi::lrshift (w, tz);
	  }
      }
  }
  ~vn_walk_cb_data ();
  void *finish (alias_set_type, alias_set_type, tree);
  void *push_partial_def (pd_data pd,
			  alias_set_type, alias_set_type, HOST_WIDE_INT,
			  HOST_WIDE_INT);

  vn_reference_t vr;
  ao_ref orig_ref;
  tree *last_vuse_ptr;
  tree last_vuse;
  tree mask;
  tree masked_result;
  vn_lookup_kind vn_walk_kind;
  bool tbaa_p;
  vec<vn_reference_op_s> saved_operands;

  /* The VDEFs of partial defs we come along.  */
  auto_vec<pd_data, 2> partial_defs;
  /* The first defs range to avoid splay tree setup in most cases.  */
  pd_range first_range;
  alias_set_type first_set;
  alias_set_type first_base_set;
  splay_tree known_ranges;
  obstack ranges_obstack;
};
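
/* Illustrative example for the mask handling in the constructor above:
   for a load masked with 0xff00 only bits 8-15 matter; bits 0-7 (and
   any higher zero-mask bits) are pushed as an artificial zero store,
   so a single partial def covering bits 8-15 already completes the
   value.  */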
vn_walk_cb_data::~vn_walk_cb_data ()
{
  if (known_ranges)
    {
      splay_tree_delete (known_ranges);
      obstack_free (&ranges_obstack, NULL);
    }
  saved_operands.release ();
}
void *
vn_walk_cb_data::finish (alias_set_type set, alias_set_type base_set, tree val)
{
  if (first_set != -2)
    {
      set = first_set;
      base_set = first_base_set;
    }
  if (mask)
    {
      masked_result = val;
      return (void *) -1;
    }
  vec<vn_reference_op_s> &operands
    = saved_operands.exists () ? saved_operands : vr->operands;
  return vn_reference_lookup_or_insert_for_pieces (last_vuse, set, base_set,
						   vr->type, operands, val);
}
/* pd_range splay-tree helpers.  */

static int
pd_range_compare (splay_tree_key offset1p, splay_tree_key offset2p)
{
  HOST_WIDE_INT offset1 = *(HOST_WIDE_INT *)offset1p;
  HOST_WIDE_INT offset2 = *(HOST_WIDE_INT *)offset2p;
  if (offset1 < offset2)
    return -1;
  else if (offset1 > offset2)
    return 1;
  return 0;
}

static void *
pd_tree_alloc (int size, void *data_)
{
  vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
  return obstack_alloc (&data->ranges_obstack, size);
}

static void
pd_tree_dealloc (void *, void *)
{
}
1935 /* Push PD to the vector of partial definitions returning a
1936 value when we are ready to combine things with VUSE, SET and MAXSIZEI,
1937 NULL when we want to continue looking for partial defs or -1
1941 vn_walk_cb_data::push_partial_def (pd_data pd
,
1942 alias_set_type set
, alias_set_type base_set
,
1943 HOST_WIDE_INT offseti
,
1944 HOST_WIDE_INT maxsizei
)
1946 const HOST_WIDE_INT bufsize
= 64;
1947 /* We're using a fixed buffer for encoding so fail early if the object
1948 we want to interpret is bigger. */
1949 if (maxsizei
> bufsize
* BITS_PER_UNIT
1951 || BITS_PER_UNIT
!= 8
1952 /* Not prepared to handle PDP endian. */
1953 || BYTES_BIG_ENDIAN
!= WORDS_BIG_ENDIAN
)
1956 /* Turn too large constant stores into non-constant stores. */
1957 if (CONSTANT_CLASS_P (pd
.rhs
) && pd
.size
> bufsize
* BITS_PER_UNIT
)
1958 pd
.rhs
= error_mark_node
;
1960 /* And for non-constant or CONSTRUCTOR stores shrink them to only keep at
1961 most a partial byte before and/or after the region. */
1962 if (!CONSTANT_CLASS_P (pd
.rhs
))
1964 if (pd
.offset
< offseti
)
1966 HOST_WIDE_INT o
= ROUND_DOWN (offseti
- pd
.offset
, BITS_PER_UNIT
);
1967 gcc_assert (pd
.size
> o
);
1971 if (pd
.size
> maxsizei
)
1972 pd
.size
= maxsizei
+ ((pd
.size
- maxsizei
) % BITS_PER_UNIT
);
1975 pd
.offset
-= offseti
;
1977 bool pd_constant_p
= (TREE_CODE (pd
.rhs
) == CONSTRUCTOR
1978 || CONSTANT_CLASS_P (pd
.rhs
));
1979 if (partial_defs
.is_empty ())
1981 /* If we get a clobber upfront, fail. */
1982 if (TREE_CLOBBER_P (pd
.rhs
))
1986 partial_defs
.safe_push (pd
);
1987 first_range
.offset
= pd
.offset
;
1988 first_range
.size
= pd
.size
;
1990 first_base_set
= base_set
;
1991 last_vuse_ptr
= NULL
;
1992 /* Continue looking for partial defs. */
1998 /* ??? Optimize the case where the 2nd partial def completes things. */
1999 gcc_obstack_init (&ranges_obstack
);
2000 known_ranges
= splay_tree_new_with_allocator (pd_range_compare
, 0, 0,
2002 pd_tree_dealloc
, this);
2003 splay_tree_insert (known_ranges
,
2004 (splay_tree_key
)&first_range
.offset
,
2005 (splay_tree_value
)&first_range
);
2008 pd_range newr
= { pd
.offset
, pd
.size
};
2011 /* Lookup the predecessor of offset + 1 and see if we need to merge. */
2012 HOST_WIDE_INT loffset
= newr
.offset
+ 1;
2013 if ((n
= splay_tree_predecessor (known_ranges
, (splay_tree_key
)&loffset
))
2014 && ((r
= (pd_range
*)n
->value
), true)
2015 && ranges_known_overlap_p (r
->offset
, r
->size
+ 1,
2016 newr
.offset
, newr
.size
))
2018 /* Ignore partial defs already covered. Here we also drop shadowed
2019 clobbers arriving here at the floor. */
2020 if (known_subrange_p (newr
.offset
, newr
.size
, r
->offset
, r
->size
))
2022 r
->size
= MAX (r
->offset
+ r
->size
, newr
.offset
+ newr
.size
) - r
->offset
;
2026 /* newr.offset wasn't covered yet, insert the range. */
2027 r
= XOBNEW (&ranges_obstack
, pd_range
);
2029 splay_tree_insert (known_ranges
, (splay_tree_key
)&r
->offset
,
2030 (splay_tree_value
)r
);
2032 /* Merge r which now contains newr and is a member of the splay tree with
2033 adjacent overlapping ranges. */
2035 while ((n
= splay_tree_successor (known_ranges
, (splay_tree_key
)&r
->offset
))
2036 && ((rafter
= (pd_range
*)n
->value
), true)
2037 && ranges_known_overlap_p (r
->offset
, r
->size
+ 1,
2038 rafter
->offset
, rafter
->size
))
2040 r
->size
= MAX (r
->offset
+ r
->size
,
2041 rafter
->offset
+ rafter
->size
) - r
->offset
;
2042 splay_tree_remove (known_ranges
, (splay_tree_key
)&rafter
->offset
);
2044 /* If we get a clobber, fail. */
2045 if (TREE_CLOBBER_P (pd
.rhs
))
2047 /* Non-constants are OK as long as they are shadowed by a constant. */
2050 partial_defs
.safe_push (pd
);
2052 /* Now we have merged newr into the range tree. When we have covered
2053 [offseti, sizei] then the tree will contain exactly one node which has
2054 the desired properties and it will be 'r'. */
2055 if (!known_subrange_p (0, maxsizei
, r
->offset
, r
->size
))
2056 /* Continue looking for partial defs. */
2059 /* Now simply native encode all partial defs in reverse order. */
2060 unsigned ndefs
= partial_defs
.length ();
2061 /* We support up to 512-bit values (for V8DFmode). */
2062 unsigned char buffer
[bufsize
+ 1];
2063 unsigned char this_buffer
[bufsize
+ 1];
2066 memset (buffer
, 0, bufsize
+ 1);
2067 unsigned needed_len
= ROUND_UP (maxsizei
, BITS_PER_UNIT
) / BITS_PER_UNIT
;
2068 while (!partial_defs
.is_empty ())
2070 pd_data pd
= partial_defs
.pop ();
2072 if (TREE_CODE (pd
.rhs
) == CONSTRUCTOR
)
2074 /* Empty CONSTRUCTOR. */
2075 if (pd
.size
>= needed_len
* BITS_PER_UNIT
)
2078 len
= ROUND_UP (pd
.size
, BITS_PER_UNIT
) / BITS_PER_UNIT
;
2079 memset (this_buffer
, 0, len
);
2083 len
= native_encode_expr (pd
.rhs
, this_buffer
, bufsize
,
2084 MAX (0, -pd
.offset
) / BITS_PER_UNIT
);
2086 || len
< (ROUND_UP (pd
.size
, BITS_PER_UNIT
) / BITS_PER_UNIT
2087 - MAX (0, -pd
.offset
) / BITS_PER_UNIT
))
2089 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2090 fprintf (dump_file
, "Failed to encode %u "
2091 "partial definitions\n", ndefs
);
2096 unsigned char *p
= buffer
;
2097 HOST_WIDE_INT size
= pd
.size
;
2099 size
-= ROUND_DOWN (-pd
.offset
, BITS_PER_UNIT
);
2100 this_buffer
[len
] = 0;
2101 if (BYTES_BIG_ENDIAN
)
2103 /* LSB of this_buffer[len - 1] byte should be at
2104 pd.offset + pd.size - 1 bits in buffer. */
2105 amnt
= ((unsigned HOST_WIDE_INT
) pd
.offset
2106 + pd
.size
) % BITS_PER_UNIT
;
2108 shift_bytes_in_array_right (this_buffer
, len
+ 1, amnt
);
2109 unsigned char *q
= this_buffer
;
2110 unsigned int off
= 0;
2114 off
= pd
.offset
/ BITS_PER_UNIT
;
2115 gcc_assert (off
< needed_len
);
2119 msk
= ((1 << size
) - 1) << (BITS_PER_UNIT
- amnt
);
2120 *p
= (*p
& ~msk
) | (this_buffer
[len
] & msk
);
2125 if (TREE_CODE (pd
.rhs
) != CONSTRUCTOR
)
2126 q
= (this_buffer
+ len
2127 - (ROUND_UP (size
- amnt
, BITS_PER_UNIT
)
2129 if (pd
.offset
% BITS_PER_UNIT
)
2131 msk
= -1U << (BITS_PER_UNIT
2132 - (pd
.offset
% BITS_PER_UNIT
));
2133 *p
= (*p
& msk
) | (*q
& ~msk
);
2137 size
-= BITS_PER_UNIT
- (pd
.offset
% BITS_PER_UNIT
);
2138 gcc_assert (size
>= 0);
2142 else if (TREE_CODE (pd
.rhs
) != CONSTRUCTOR
)
2144 q
= (this_buffer
+ len
2145 - (ROUND_UP (size
- amnt
, BITS_PER_UNIT
)
2147 if (pd
.offset
% BITS_PER_UNIT
)
2150 size
-= BITS_PER_UNIT
- ((unsigned HOST_WIDE_INT
) pd
.offset
2152 gcc_assert (size
>= 0);
2155 if ((unsigned HOST_WIDE_INT
) size
/ BITS_PER_UNIT
+ off
2157 size
= (needed_len
- off
) * BITS_PER_UNIT
;
2158 memcpy (p
, q
, size
/ BITS_PER_UNIT
);
2159 if (size
% BITS_PER_UNIT
)
2162 = -1U << (BITS_PER_UNIT
- (size
% BITS_PER_UNIT
));
2163 p
+= size
/ BITS_PER_UNIT
;
2164 q
+= size
/ BITS_PER_UNIT
;
2165 *p
= (*q
& msk
) | (*p
& ~msk
);
2172 /* LSB of this_buffer[0] byte should be at pd.offset bits
2175 size
= MIN (size
, (HOST_WIDE_INT
) needed_len
* BITS_PER_UNIT
);
2176 amnt
= pd
.offset
% BITS_PER_UNIT
;
2178 shift_bytes_in_array_left (this_buffer
, len
+ 1, amnt
);
2179 unsigned int off
= pd
.offset
/ BITS_PER_UNIT
;
2180 gcc_assert (off
< needed_len
);
2182 (HOST_WIDE_INT
) (needed_len
- off
) * BITS_PER_UNIT
);
2184 if (amnt
+ size
< BITS_PER_UNIT
)
2186 /* Low amnt bits come from *p, then size bits
2187 from this_buffer[0] and the remaining again from
2189 msk
= ((1 << size
) - 1) << amnt
;
2190 *p
= (*p
& ~msk
) | (this_buffer
[0] & msk
);
2196 *p
= (*p
& ~msk
) | (this_buffer
[0] & msk
);
2198 size
-= (BITS_PER_UNIT
- amnt
);
2203 amnt
= (unsigned HOST_WIDE_INT
) pd
.offset
% BITS_PER_UNIT
;
2205 size
-= BITS_PER_UNIT
- amnt
;
2206 size
= MIN (size
, (HOST_WIDE_INT
) needed_len
* BITS_PER_UNIT
);
2208 shift_bytes_in_array_left (this_buffer
, len
+ 1, amnt
);
2210 memcpy (p
, this_buffer
+ (amnt
!= 0), size
/ BITS_PER_UNIT
);
2211 p
+= size
/ BITS_PER_UNIT
;
2212 if (size
% BITS_PER_UNIT
)
2214 unsigned int msk
= -1U << (size
% BITS_PER_UNIT
);
2215 *p
= (this_buffer
[(amnt
!= 0) + size
/ BITS_PER_UNIT
]
2216 & ~msk
) | (*p
& msk
);
  tree type = vr->type;
  /* Make sure to interpret in a type that has a range covering the whole
     access size.  */
  if (INTEGRAL_TYPE_P (vr->type) && maxsizei != TYPE_PRECISION (vr->type))
    type = build_nonstandard_integer_type (maxsizei, TYPE_UNSIGNED (type));
  tree val;
  if (BYTES_BIG_ENDIAN)
    {
      unsigned sz = needed_len;
      if (maxsizei % BITS_PER_UNIT)
	shift_bytes_in_array_right (buffer, needed_len,
				    BITS_PER_UNIT
				    - (maxsizei % BITS_PER_UNIT));
      if (INTEGRAL_TYPE_P (type))
	sz = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
      if (sz > needed_len)
	{
	  memcpy (this_buffer + (sz - needed_len), buffer, needed_len);
	  val = native_interpret_expr (type, this_buffer, sz);
	}
      else
	val = native_interpret_expr (type, buffer, needed_len);
    }
  else
    val = native_interpret_expr (type, buffer, bufsize);
  /* If we chop off bits because the type's precision doesn't match the memory
     access size this is ok when optimizing reads but not when called from
     the DSE code during elimination.  */
  if (val && type != vr->type)
    {
      if (! int_fits_type_p (val, vr->type))
	val = NULL_TREE;
      else
	val = fold_convert (vr->type, val);
    }

  if (val)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "Successfully combined %u partial definitions\n", ndefs);
      /* We are using the alias-set of the first store we encounter which
	 should be appropriate here.  */
      return finish (first_set, first_base_set, val);
    }
  else
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file,
		 "Failed to interpret %u encoded partial definitions\n",
		 ndefs);
      return (void *)-1;
    }
}
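
/* The following is a standalone illustrative sketch, NOT used by the pass:
   it only demonstrates the mask arithmetic employed above when splicing a
   partial definition into the combined buffer.  It assumes 8-bit bytes,
   little-endian bit numbering within a byte, and that SRC was already
   shifted so its first valid bit lines up with BIT_OFF % 8; the function
   name is made up for illustration.  */

static void ATTRIBUTE_UNUSED
sketch_merge_partial_def (unsigned char *dst, const unsigned char *src,
			  unsigned bit_off, unsigned bit_size)
{
  unsigned char *p = dst + bit_off / 8;
  unsigned amnt = bit_off % 8;
  if (amnt != 0)
    {
      if (amnt + bit_size < 8)
	{
	  /* Entirely within one byte: only bits [amnt, amnt + bit_size)
	     come from SRC, just like the msk computation above.  */
	  unsigned msk = ((1u << bit_size) - 1) << amnt;
	  *p = (*p & ~msk) | (*src & msk);
	  return;
	}
      /* Head: keep the low AMNT bits of *p.  */
      unsigned msk = (1u << amnt) - 1;
      *p = (*p & msk) | (*src & ~msk);
      p++, src++;
      bit_size -= 8 - amnt;
    }
  /* Whole bytes in the middle are a plain copy.  */
  memcpy (p, src, bit_size / 8);
  p += bit_size / 8;
  src += bit_size / 8;
  /* Tail: keep the high bits of *p beyond the remaining fragment.  */
  if (bit_size % 8)
    {
      unsigned msk = (1u << (bit_size % 8)) - 1;
      *p = (*src & msk) | (*p & ~msk);
    }
}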
/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *data_)
{
  vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
  vn_reference_t vr = data->vr;
  vn_reference_s **slot;
  hashval_t hash;

  /* If we have partial definitions recorded we have to go through
     vn_reference_lookup_3.  */
  if (!data->partial_defs.is_empty ())
    return NULL;

  if (data->last_vuse_ptr)
    {
      *data->last_vuse_ptr = vuse;
      data->last_vuse = vuse;
    }

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = vuse_ssa_val (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    {
      if ((*slot)->result && data->saved_operands.exists ())
	return data->finish (vr->set, vr->base_set, (*slot)->result);
      return *slot;
    }

  return NULL;
}
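
/* Standalone illustrative sketch, NOT used by the pass: the hash fixup
   above works because the reference hash mixes the vuse's SSA version in
   additively, so re-keying a lookup to a new vuse is one subtract plus
   one add instead of a full re-hash.  The struct and function below are
   stand-ins, not the real VN types.  */

struct sketch_keyed_ref { hashval_t hash; unsigned vuse_version; };

static void ATTRIBUTE_UNUSED
sketch_rekey_to_vuse (struct sketch_keyed_ref *r, unsigned new_version)
{
  r->hash = r->hash - r->vuse_version + new_version;
  r->vuse_version = new_version;
}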
/* Lookup an existing or insert a new vn_reference entry into the
   value table for the VUSE, SET, TYPE, OPERANDS reference which
   has the value VALUE which is either a constant or an SSA name.  */

static vn_reference_t
vn_reference_lookup_or_insert_for_pieces (tree vuse,
					  alias_set_type set,
					  alias_set_type base_set,
					  tree type,
					  vec<vn_reference_op_s,
					      va_heap> operands,
					  tree value)
{
  vn_reference_s vr1;
  vn_reference_t result;
  unsigned value_id;
  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = operands;
  vr1.type = type;
  vr1.set = set;
  vr1.base_set = base_set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if (vn_reference_lookup_1 (&vr1, &result))
    return result;
  if (TREE_CODE (value) == SSA_NAME)
    value_id = VN_INFO (value)->value_id;
  else
    value_id = get_or_alloc_constant_value_id (value);
  return vn_reference_insert_pieces (vuse, set, base_set, type,
				     operands.copy (), value, value_id);
}
/* Return a value-number for RCODE OPS... either by looking up an existing
   value-number for the possibly simplified result or by inserting the
   operation if INSERT is true.  If SIMPLIFY is false, return a value
   number for the unsimplified expression.  */

static tree
vn_nary_build_or_lookup_1 (gimple_match_op *res_op, bool insert,
			   bool simplify)
{
  tree result = NULL_TREE;
  /* We will be creating a value number for
       RCODE (OPS...).
     So first simplify and lookup this expression to see if it
     is already available.  */
  /* For simplification valueize.  */
  bool res = false;
  unsigned i;
  for (i = 0; i < res_op->num_ops; ++i)
    if (TREE_CODE (res_op->ops[i]) == SSA_NAME)
      {
	tree tem = vn_valueize (res_op->ops[i]);
	if (!tem)
	  break;
	res_op->ops[i] = tem;
      }
  /* If valueization of an operand fails (it is not available), skip
     simplification.  */
  if (i == res_op->num_ops)
    {
      mprts_hook = vn_lookup_simplify_result;
      res = res_op->resimplify (NULL, vn_valueize);
      mprts_hook = NULL;
    }
  gimple *new_stmt = NULL;
  if (res
      && gimple_simplified_result_is_gimple_val (res_op))
    {
      /* The expression is already available.  */
      result = res_op->ops[0];
      /* Valueize it, simplification returns sth in AVAIL only.  */
      if (TREE_CODE (result) == SSA_NAME)
	result = SSA_VAL (result);
    }
  else
    {
      tree val = vn_lookup_simplify_result (res_op);
      if (!val && insert)
	{
	  gimple_seq stmts = NULL;
	  result = maybe_push_res_to_seq (res_op, &stmts);
	  if (result)
	    {
	      gcc_assert (gimple_seq_singleton_p (stmts));
	      new_stmt = gimple_seq_first_stmt (stmts);
	    }
	}
      else
	/* The expression is already available.  */
	result = val;
    }
  if (new_stmt)
    {
      /* The expression is not yet available, value-number lhs to
	 the new SSA_NAME we created.  */
      /* Initialize value-number information properly.  */
      vn_ssa_aux_t result_info = VN_INFO (result);
      result_info->valnum = result;
      result_info->value_id = get_next_value_id ();
      result_info->visited = 1;
      gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
					  new_stmt);
      result_info->needs_insertion = true;
      /* ???  PRE phi-translation inserts NARYs without corresponding
	 SSA name result.  Re-use those but set their result according
	 to the stmt we just built.  */
      vn_nary_op_t nary = NULL;
      vn_nary_op_lookup_stmt (new_stmt, &nary);
      if (nary)
	{
	  gcc_assert (! nary->predicated_values
		      && nary->u.result == NULL_TREE);
	  nary->u.result = gimple_assign_lhs (new_stmt);
	}
      /* As all "inserted" statements are singleton SCCs, insert
	 to the valid table.  This is strictly needed to
	 avoid re-generating new value SSA_NAMEs for the same
	 expression during SCC iteration over and over (the
	 optimistic table gets cleared after each iteration).
	 We do not need to insert into the optimistic table, as
	 lookups there will fall back to the valid table.  */
      else
	{
	  unsigned int length = vn_nary_length_from_stmt (new_stmt);
	  vn_nary_op_t vno1
	    = alloc_vn_nary_op_noinit (length, &vn_tables_insert_obstack);
	  vno1->value_id = result_info->value_id;
	  vno1->length = length;
	  vno1->predicated_values = 0;
	  vno1->u.result = result;
	  init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (new_stmt));
	  vn_nary_op_insert_into (vno1, valid_info->nary);
	  /* Also do not link it into the undo chain.  */
	  last_inserted_nary = vno1->next;
	  vno1->next = (vn_nary_op_t)(void *)-1;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Inserting name ");
	  print_generic_expr (dump_file, result);
	  fprintf (dump_file, " for expression ");
	  print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}
    }
  return result;
}
/* Return a value-number for RCODE OPS... either by looking up an existing
   value-number for the simplified result or by inserting the operation.  */

static tree
vn_nary_build_or_lookup (gimple_match_op *res_op)
{
  return vn_nary_build_or_lookup_1 (res_op, true, true);
}

/* Try to simplify the expression RCODE OPS... of type TYPE and return
   its value if present.  */

tree
vn_nary_simplify (vn_nary_op_t nary)
{
  if (nary->length > gimple_match_op::MAX_NUM_OPS)
    return NULL_TREE;
  gimple_match_op op (gimple_match_cond::UNCOND, nary->opcode,
		      nary->type, nary->length);
  memcpy (op.ops, nary->op, sizeof (tree) * nary->length);
  return vn_nary_build_or_lookup_1 (&op, false, true);
}
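
/* Standalone illustrative sketch, NOT used by the pass: the discipline of
   vn_nary_build_or_lookup_1 above is canonicalize/simplify first, then
   consult a table keyed on the simplified form, and only then create a
   fresh value.  Everything below (the struct, the linear table) is a
   stand-in for the real gimple_match_op and hash tables; the caller must
   guarantee capacity for one extra table entry.  */

struct sketch_nary { int opcode; int op0, op1; int value; };

static int ATTRIBUTE_UNUSED
sketch_build_or_lookup (struct sketch_nary *table, unsigned *n,
			int opcode, int op0, int op1, int *next_value)
{
  /* "Simplify": canonicalize commutative operand order.  */
  if (op0 > op1)
    std::swap (op0, op1);
  /* Lookup the canonical form.  */
  for (unsigned i = 0; i < *n; ++i)
    if (table[i].opcode == opcode
	&& table[i].op0 == op0
	&& table[i].op1 == op1)
      return table[i].value;
  /* Not available: insert it with a fresh value number.  */
  int val = (*next_value)++;
  table[*n].opcode = opcode;
  table[*n].op0 = op0;
  table[*n].op1 = op1;
  table[*n].value = val;
  ++*n;
  return val;
}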
/* Elimination engine.  */

class eliminate_dom_walker : public dom_walker
{
public:
  eliminate_dom_walker (cdi_direction, bitmap);
  ~eliminate_dom_walker ();

  virtual edge before_dom_children (basic_block);
  virtual void after_dom_children (basic_block);

  virtual tree eliminate_avail (basic_block, tree op);
  virtual void eliminate_push_avail (basic_block, tree op);
  tree eliminate_insert (basic_block, gimple_stmt_iterator *gsi, tree val);

  void eliminate_stmt (basic_block, gimple_stmt_iterator *);

  unsigned eliminate_cleanup (bool region_p = false);

  bool do_pre;
  unsigned int el_todo;
  unsigned int eliminations;
  unsigned int insertions;

  /* SSA names that had their defs inserted by PRE if do_pre.  */
  bitmap inserted_exprs;

  /* Blocks with statements that have had their EH properties changed.  */
  bitmap need_eh_cleanup;

  /* Blocks with statements that have had their AB properties changed.  */
  bitmap need_ab_cleanup;

  /* Local state for the eliminate domwalk.  */
  auto_vec<gimple *> to_remove;
  auto_vec<gimple *> to_fixup;
  auto_vec<tree> avail;
  auto_vec<tree> avail_stack;
};

/* Adaptor to the elimination engine using RPO availability.  */

class rpo_elim : public eliminate_dom_walker
{
public:
  rpo_elim (basic_block entry_)
    : eliminate_dom_walker (CDI_DOMINATORS, NULL), entry (entry_),
      m_avail_freelist (NULL) {}

  virtual tree eliminate_avail (basic_block, tree op);

  virtual void eliminate_push_avail (basic_block, tree);

  basic_block entry;
  /* Freelist of avail entries which are allocated from the vn_ssa_aux
     obstack.  */
  vn_avail *m_avail_freelist;
};

/* Global RPO state for access from hooks.  */
static eliminate_dom_walker *rpo_avail;
basic_block vn_context_bb;
/* Return true if BASE1 and BASE2 can be adjusted so they have the
   same address and adjust *OFFSET1 and *OFFSET2 accordingly.
   Otherwise return false.  */

static bool
adjust_offsets_for_equal_base_address (tree base1, poly_int64 *offset1,
				       tree base2, poly_int64 *offset2)
{
  poly_int64 soff;
  if (TREE_CODE (base1) == MEM_REF
      && TREE_CODE (base2) == MEM_REF)
    {
      if (mem_ref_offset (base1).to_shwi (&soff))
	{
	  base1 = TREE_OPERAND (base1, 0);
	  *offset1 += soff * BITS_PER_UNIT;
	}
      if (mem_ref_offset (base2).to_shwi (&soff))
	{
	  base2 = TREE_OPERAND (base2, 0);
	  *offset2 += soff * BITS_PER_UNIT;
	}
      return operand_equal_p (base1, base2, 0);
    }
  return operand_equal_p (base1, base2, OEP_ADDRESS_OF);
}
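
/* Standalone illustrative sketch, NOT used by the pass: after the
   adjustment above both offsets are expressed against the same base, so
   range questions reduce to plain interval arithmetic.  BITS_PER_UNIT is
   assumed to be 8 here and the function is made up for illustration.  */

static bool ATTRIBUTE_UNUSED
sketch_subrange_after_adjust (HOST_WIDE_INT off1, HOST_WIDE_INT size1,
			      HOST_WIDE_INT byte_off1,
			      HOST_WIDE_INT off2, HOST_WIDE_INT size2,
			      HOST_WIDE_INT byte_off2)
{
  /* Fold the MEM_REF byte offsets into the bit offsets.  */
  off1 += byte_off1 * 8;
  off2 += byte_off2 * 8;
  /* Is access 2 contained in access 1?  */
  return off2 >= off1 && off2 + size2 <= off1 + size1;
}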
/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  If *DISAMBIGUATE_ONLY is true then do not perform translation
   of *REF and *VR.  If only disambiguation was performed then
   *DISAMBIGUATE_ONLY is set to true.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
		       translate_flags *disambiguate_only)
{
  vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
  vn_reference_t vr = data->vr;
  gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree base = ao_ref_base (ref);
  HOST_WIDE_INT offseti = 0, maxsizei, sizei = 0;
  static vec<vn_reference_op_s> lhs_ops;
  ao_ref lhs_ref;
  bool lhs_ref_ok = false;
  poly_int64 copy_size;

  /* First try to disambiguate after value-replacing in the definition's
     LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      bool valueized_anything = false;
      /* Avoid re-allocation overhead.  */
      lhs_ops.truncate (0);
      basic_block saved_rpo_bb = vn_context_bb;
      vn_context_bb = gimple_bb (def_stmt);
      if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE)
	{
	  copy_reference_ops_from_ref (lhs, &lhs_ops);
	  valueize_refs_1 (&lhs_ops, &valueized_anything, true);
	}
      vn_context_bb = saved_rpo_bb;
      ao_ref_init (&lhs_ref, lhs);
      lhs_ref_ok = true;
      if (valueized_anything
	  && ao_ref_init_from_vn_reference
	       (&lhs_ref, ao_ref_alias_set (&lhs_ref),
		ao_ref_base_alias_set (&lhs_ref), TREE_TYPE (lhs), lhs_ops)
	  && !refs_may_alias_p_1 (ref, &lhs_ref, data->tbaa_p))
	{
	  *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
	  return NULL;
	}

      /* Besides valueizing the LHS we can also use access-path based
	 disambiguation on the original non-valueized ref.  */
      if (!ref->ref
	  && lhs_ref_ok
	  && data->orig_ref.ref)
	{
	  /* We want to use the non-valueized LHS for this, but avoid
	     redundant work.  */
	  ao_ref *lref = &lhs_ref;
	  ao_ref lref_alt;
	  if (valueized_anything)
	    {
	      ao_ref_init (&lref_alt, lhs);
	      lref = &lref_alt;
	    }
	  if (!refs_may_alias_p_1 (&data->orig_ref, lref, data->tbaa_p))
	    {
	      *disambiguate_only = (valueized_anything
				    ? TR_VALUEIZE_AND_DISAMBIGUATE
				    : TR_DISAMBIGUATE);
	      return NULL;
	    }
	}

      /* If we reach a clobbering statement try to skip it and see if
	 we find a VN result with exactly the same value as the
	 possible clobber.  In this case we can ignore the clobber
	 and return the found value.  */
      if (is_gimple_reg_type (TREE_TYPE (lhs))
	  && types_compatible_p (TREE_TYPE (lhs), vr->type)
	  && (ref->ref || data->orig_ref.ref))
	{
	  tree *saved_last_vuse_ptr = data->last_vuse_ptr;
	  /* Do not update last_vuse_ptr in vn_reference_lookup_2.  */
	  data->last_vuse_ptr = NULL;
	  tree saved_vuse = vr->vuse;
	  hashval_t saved_hashcode = vr->hashcode;
	  void *res = vn_reference_lookup_2 (ref, gimple_vuse (def_stmt),
					     data);
	  /* Need to restore vr->vuse and vr->hashcode.  */
	  vr->vuse = saved_vuse;
	  vr->hashcode = saved_hashcode;
	  data->last_vuse_ptr = saved_last_vuse_ptr;
	  if (res && res != (void *)-1)
	    {
	      vn_reference_t vnresult = (vn_reference_t) res;
	      tree rhs = gimple_assign_rhs1 (def_stmt);
	      if (TREE_CODE (rhs) == SSA_NAME)
		rhs = SSA_VAL (rhs);
	      if (vnresult->result
		  && operand_equal_p (vnresult->result, rhs, 0)
		  /* We have to honor our promise about union type punning
		     and also support arbitrary overlaps with
		     -fno-strict-aliasing.  So simply resort to alignment to
		     rule out overlaps.  Do this check last because it is
		     quite expensive compared to the hash-lookup above.  */
		  && multiple_p (get_object_alignment
				   (ref->ref ? ref->ref : data->orig_ref.ref),
				 ref->size)
		  && multiple_p (get_object_alignment (lhs), ref->size))
		return res;
	    }
	}
    }
  else if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE
	   && gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
	   && gimple_call_num_args (def_stmt) <= 4)
    {
      /* For builtin calls valueize its arguments and call the
	 alias oracle again.  Valueization may improve points-to
	 info of pointers and constify size and position arguments.
	 Originally this was motivated by PR61034 which has
	 conditional calls to free falsely clobbering ref because
	 of imprecise points-to info of the argument.  */
      tree oldargs[4];
      bool valueized_anything = false;
      for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
	{
	  oldargs[i] = gimple_call_arg (def_stmt, i);
	  tree val = vn_valueize (oldargs[i]);
	  if (val != oldargs[i])
	    {
	      gimple_call_set_arg (def_stmt, i, val);
	      valueized_anything = true;
	    }
	}
      if (valueized_anything)
	{
	  bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
					       ref, data->tbaa_p);
	  for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
	    gimple_call_set_arg (def_stmt, i, oldargs[i]);
	  if (!res)
	    {
	      *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
	      return NULL;
	    }
	}
    }

  if (*disambiguate_only > TR_TRANSLATE)
    return (void *)-1;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (!ref->max_size_known_p ())
    return (void *)-1;

  poly_int64 offset = ref->offset;
  poly_int64 maxsize = ref->max_size;
  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
	  || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET_CHK))
      && (integer_zerop (gimple_call_arg (def_stmt, 1))
	  || ((TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST
	       || (INTEGRAL_TYPE_P (vr->type) && known_eq (ref->size, 8)))
	      && CHAR_BIT == 8
	      && BITS_PER_UNIT == 8
	      && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
	      && offset.is_constant (&offseti)
	      && ref->size.is_constant (&sizei)
	      && (offseti % BITS_PER_UNIT == 0
		  || TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST)))
      && (poly_int_tree_p (gimple_call_arg (def_stmt, 2))
	  || (TREE_CODE (gimple_call_arg (def_stmt, 2)) == SSA_NAME
	      && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt, 2)))))
      && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
	  || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME))
    {
      tree base2;
      poly_int64 offset2, size2, maxsize2;
      bool reverse;
      tree ref2 = gimple_call_arg (def_stmt, 0);
      if (TREE_CODE (ref2) == SSA_NAME)
	{
	  ref2 = SSA_VAL (ref2);
	  if (TREE_CODE (ref2) == SSA_NAME
	      && (TREE_CODE (base) != MEM_REF
		  || TREE_OPERAND (base, 0) != ref2))
	    {
	      gimple *def_stmt = SSA_NAME_DEF_STMT (ref2);
	      if (gimple_assign_single_p (def_stmt)
		  && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
		ref2 = gimple_assign_rhs1 (def_stmt);
	    }
	}
      if (TREE_CODE (ref2) == ADDR_EXPR)
	{
	  ref2 = TREE_OPERAND (ref2, 0);
	  base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
					   &reverse);
	  if (!known_size_p (maxsize2)
	      || !known_eq (maxsize2, size2)
	      || !operand_equal_p (base, base2, OEP_ADDRESS_OF))
	    return (void *)-1;
	}
      else if (TREE_CODE (ref2) == SSA_NAME)
	{
	  poly_int64 soff;
	  if (TREE_CODE (base) != MEM_REF
	      || !(mem_ref_offset (base)
		   << LOG2_BITS_PER_UNIT).to_shwi (&soff))
	    return (void *)-1;
	  offset += soff;
	  offset2 = 0;
	  if (TREE_OPERAND (base, 0) != ref2)
	    {
	      gimple *def = SSA_NAME_DEF_STMT (ref2);
	      if (is_gimple_assign (def)
		  && gimple_assign_rhs_code (def) == POINTER_PLUS_EXPR
		  && gimple_assign_rhs1 (def) == TREE_OPERAND (base, 0)
		  && poly_int_tree_p (gimple_assign_rhs2 (def)))
		{
		  tree rhs2 = gimple_assign_rhs2 (def);
		  if (!(poly_offset_int::from (wi::to_poly_wide (rhs2),
					       SIGNED)
			<< LOG2_BITS_PER_UNIT).to_shwi (&offset2))
		    return (void *)-1;
		  ref2 = gimple_assign_rhs1 (def);
		  if (TREE_CODE (ref2) == SSA_NAME)
		    ref2 = SSA_VAL (ref2);
		}
	      else
		return (void *)-1;
	    }
	}
      else
	return (void *)-1;
      tree len = gimple_call_arg (def_stmt, 2);
      HOST_WIDE_INT leni, offset2i;
      if (TREE_CODE (len) == SSA_NAME)
	len = SSA_VAL (len);
      /* Sometimes the above trickery is smarter than alias analysis.  Take
	 advantage of that.  */
      if (!ranges_maybe_overlap_p (offset, maxsize, offset2,
				   (wi::to_poly_offset (len)
				    << LOG2_BITS_PER_UNIT)))
	return NULL;
      if (data->partial_defs.is_empty ()
	  && known_subrange_p (offset, maxsize, offset2,
			       wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
	{
	  tree val;
	  if (integer_zerop (gimple_call_arg (def_stmt, 1)))
	    val = build_zero_cst (vr->type);
	  else if (INTEGRAL_TYPE_P (vr->type)
		   && known_eq (ref->size, 8)
		   && offseti % BITS_PER_UNIT == 0)
	    {
	      gimple_match_op res_op (gimple_match_cond::UNCOND, NOP_EXPR,
				      vr->type, gimple_call_arg (def_stmt, 1));
	      val = vn_nary_build_or_lookup (&res_op);
	      if (!val
		  || (TREE_CODE (val) == SSA_NAME
		      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
		return (void *)-1;
	    }
	  else
	    {
	      unsigned buflen
		= TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr->type)) + 1;
	      if (INTEGRAL_TYPE_P (vr->type))
		buflen = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (vr->type)) + 1;
	      unsigned char *buf = XALLOCAVEC (unsigned char, buflen);
	      memset (buf, TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 1)),
		      buflen);
	      if (BYTES_BIG_ENDIAN)
		{
		  unsigned int amnt
		    = (((unsigned HOST_WIDE_INT) offseti + sizei)
		       % BITS_PER_UNIT);
		  if (amnt)
		    {
		      shift_bytes_in_array_right (buf, buflen,
						  BITS_PER_UNIT - amnt);
		      buf++;
		      buflen--;
		    }
		}
	      else if (offseti % BITS_PER_UNIT != 0)
		{
		  unsigned int amnt
		    = BITS_PER_UNIT - ((unsigned HOST_WIDE_INT) offseti
				       % BITS_PER_UNIT);
		  shift_bytes_in_array_left (buf, buflen, amnt);
		  buf++;
		  buflen--;
		}
	      val = native_interpret_expr (vr->type, buf, buflen);
	      if (!val)
		return (void *)-1;
	    }
	  return data->finish (0, 0, val);
	}
      /* For now handle clearing memory with partial defs.  */
      else if (known_eq (ref->size, maxsize)
	       && integer_zerop (gimple_call_arg (def_stmt, 1))
	       && tree_fits_poly_int64_p (len)
	       && tree_to_poly_int64 (len).is_constant (&leni)
	       && leni <= INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT
	       && offset.is_constant (&offseti)
	       && offset2.is_constant (&offset2i)
	       && maxsize.is_constant (&maxsizei)
	       && ranges_known_overlap_p (offseti, maxsizei, offset2i,
					  leni << LOG2_BITS_PER_UNIT))
	{
	  pd_data pd;
	  pd.rhs = build_constructor (NULL_TREE, NULL);
	  pd.offset = offset2i;
	  pd.size = leni << LOG2_BITS_PER_UNIT;
	  return data->push_partial_def (pd, 0, 0, offseti, maxsizei);
	}
    }
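
  /* Standalone illustrative sketch, NOT used by the pass: a load fully
     covered by memset (P, C, N) reads a value whose every byte is C, so
     for C == 0 it is simply zero and otherwise the byte-splat computed
     below.  The function is made up for illustration and assumes a
     byte-sized, byte-aligned load.  */
#if 0
static unsigned HOST_WIDE_INT
sketch_value_from_memset (unsigned char c, unsigned width_bytes)
{
  unsigned HOST_WIDE_INT v = 0;
  for (unsigned i = 0; i < width_bytes; ++i)
    v = (v << 8) | c;	/* Byte order is irrelevant when all bytes match.  */
  return v;
}
#endif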
  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
	   && gimple_assign_single_p (def_stmt)
	   && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
	   && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      poly_int64 offset2, size2, maxsize2;
      HOST_WIDE_INT offset2i, size2i;
      gcc_assert (lhs_ref_ok);
      base2 = ao_ref_base (&lhs_ref);
      offset2 = lhs_ref.offset;
      size2 = lhs_ref.size;
      maxsize2 = lhs_ref.max_size;
      if (known_size_p (maxsize2)
	  && known_eq (maxsize2, size2)
	  && adjust_offsets_for_equal_base_address (base, &offset,
						    base2, &offset2))
	{
	  if (data->partial_defs.is_empty ()
	      && known_subrange_p (offset, maxsize, offset2, size2))
	    {
	      /* While technically undefined behavior do not optimize
		 a full read from a clobber.  */
	      if (gimple_clobber_p (def_stmt))
		return (void *)-1;
	      tree val = build_zero_cst (vr->type);
	      return data->finish (ao_ref_alias_set (&lhs_ref),
				   ao_ref_base_alias_set (&lhs_ref), val);
	    }
	  else if (known_eq (ref->size, maxsize)
		   && maxsize.is_constant (&maxsizei)
		   && offset.is_constant (&offseti)
		   && offset2.is_constant (&offset2i)
		   && size2.is_constant (&size2i)
		   && ranges_known_overlap_p (offseti, maxsizei,
					      offset2i, size2i))
	    {
	      /* Let clobbers be consumed by the partial-def tracker
		 which can choose to ignore them if they are shadowed
		 by a later def.  */
	      pd_data pd;
	      pd.rhs = gimple_assign_rhs1 (def_stmt);
	      pd.offset = offset2i;
	      pd.size = size2i;
	      return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
					     ao_ref_base_alias_set (&lhs_ref),
					     offseti, maxsizei);
	    }
	}
    }
  /* 3) Assignment from a constant.  We can use folds native encode/interpret
     routines to extract the assigned bits.  */
  else if (known_eq (ref->size, maxsize)
	   && is_gimple_reg_type (vr->type)
	   && !reverse_storage_order_for_component_p (vr->operands)
	   && !contains_storage_order_barrier_p (vr->operands)
	   && gimple_assign_single_p (def_stmt)
	   && CHAR_BIT == 8
	   && BITS_PER_UNIT == 8
	   && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
	   /* native_encode and native_decode operate on arrays of bytes
	      and so fundamentally need a compile-time size and offset.  */
	   && maxsize.is_constant (&maxsizei)
	   && offset.is_constant (&offseti)
	   && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
	       || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
		   && is_gimple_min_invariant
			(SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      tree base2;
      poly_int64 offset2, size2, maxsize2;
      HOST_WIDE_INT offset2i, size2i;
      bool reverse;
      gcc_assert (lhs_ref_ok);
      base2 = ao_ref_base (&lhs_ref);
      offset2 = lhs_ref.offset;
      size2 = lhs_ref.size;
      maxsize2 = lhs_ref.max_size;
      reverse = reverse_storage_order_for_component_p (lhs);
      if (base2
	  && !reverse
	  && !storage_order_barrier_p (lhs)
	  && known_eq (maxsize2, size2)
	  && adjust_offsets_for_equal_base_address (base, &offset,
						    base2, &offset2)
	  && offset.is_constant (&offseti)
	  && offset2.is_constant (&offset2i)
	  && size2.is_constant (&size2i))
	{
	  if (data->partial_defs.is_empty ()
	      && known_subrange_p (offseti, maxsizei, offset2, size2))
	    {
	      /* We support up to 512-bit values (for V8DFmode).  */
	      unsigned char buffer[65];
	      int len;

	      tree rhs = gimple_assign_rhs1 (def_stmt);
	      if (TREE_CODE (rhs) == SSA_NAME)
		rhs = SSA_VAL (rhs);
	      len = native_encode_expr (rhs,
					buffer, sizeof (buffer) - 1,
					(offseti - offset2i) / BITS_PER_UNIT);
	      if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
		{
		  tree type = vr->type;
		  unsigned char *buf = buffer;
		  unsigned int amnt = 0;
		  /* Make sure to interpret in a type that has a range
		     covering the whole access size.  */
		  if (INTEGRAL_TYPE_P (vr->type)
		      && maxsizei != TYPE_PRECISION (vr->type))
		    type = build_nonstandard_integer_type (maxsizei,
							   TYPE_UNSIGNED (type));
		  if (BYTES_BIG_ENDIAN)
		    {
		      /* For big-endian native_encode_expr stored the rhs
			 such that the LSB of it is the LSB of buffer[len - 1].
			 That bit is stored into memory at position
			 offset2 + size2 - 1, i.e. in byte
			 base + (offset2 + size2 - 1) / BITS_PER_UNIT.
			 E.g. for offset2 1 and size2 14, rhs -1 and memory
			 previously cleared that is:
			 0        1
			 01111111|11111110
			 Now, if we want to extract offset 2 and size 12 from
			 it using native_interpret_expr (which actually works
			 for integral bitfield types in terms of byte size of
			 the mode), the native_encode_expr stored the value
			 into buffer as
			 XX111111|11111111
			 and returned len 2 (the X bits are outside of
			 precision).
			 Let sz be maxsize / BITS_PER_UNIT if not extracting
			 a bitfield, and GET_MODE_SIZE otherwise.
			 We need to align the LSB of the value we want to
			 extract as the LSB of buf[sz - 1].
			 The LSB from memory we need to read is at position
			 offset + maxsize - 1.  */
		      HOST_WIDE_INT sz = maxsizei / BITS_PER_UNIT;
		      if (INTEGRAL_TYPE_P (type))
			sz = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
		      amnt = ((unsigned HOST_WIDE_INT) offset2i + size2i
			      - offseti - maxsizei) % BITS_PER_UNIT;
		      if (amnt)
			shift_bytes_in_array_right (buffer, len, amnt);
		      amnt = ((unsigned HOST_WIDE_INT) offset2i + size2i
			      - offseti - maxsizei - amnt) / BITS_PER_UNIT;
		      if ((unsigned HOST_WIDE_INT) sz + amnt > (unsigned) len)
			return (void *)-1;
		      buf = buffer + len - sz - amnt;
		      len -= (buf - buffer);
		    }
		  else
		    {
		      amnt = ((unsigned HOST_WIDE_INT) offset2i
			      - offseti) % BITS_PER_UNIT;
		      if (amnt)
			{
			  buffer[len] = 0;
			  shift_bytes_in_array_left (buffer, len + 1, amnt);
			  buf = buffer + 1;
			}
		    }
		  tree val = native_interpret_expr (type, buf, len);
		  /* If we chop off bits because the type's precision doesn't
		     match the memory access size this is ok when optimizing
		     reads but not when called from the DSE code during
		     elimination.  */
		  if (val
		      && type != vr->type)
		    {
		      if (! int_fits_type_p (val, vr->type))
			val = NULL_TREE;
		      else
			val = fold_convert (vr->type, val);
		    }

		  if (val)
		    return data->finish (ao_ref_alias_set (&lhs_ref),
					 ao_ref_base_alias_set (&lhs_ref),
					 val);
		}
	    }
	  else if (ranges_known_overlap_p (offseti, maxsizei, offset2i,
					   size2i))
	    {
	      pd_data pd;
	      tree rhs = gimple_assign_rhs1 (def_stmt);
	      if (TREE_CODE (rhs) == SSA_NAME)
		rhs = SSA_VAL (rhs);
	      pd.rhs = rhs;
	      pd.offset = offset2i;
	      pd.size = size2i;
	      return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
					     ao_ref_base_alias_set (&lhs_ref),
					     offseti, maxsizei);
	    }
	}
    }
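
  /* Standalone illustrative sketch, NOT used by the pass: the core of the
     native encode/interpret trick above, shown on host integers.  The
     store's value is serialized to bytes, then the load's bytes are cut
     out at the relative offset and deserialized.  Assumptions: the load
     is byte-aligned, contained in the store, at most sizeof (unsigned int)
     wide, and host byte order stands in for the target's.  */
#if 0
static unsigned int
sketch_read_through_store (unsigned HOST_WIDE_INT stored,
			   unsigned store_off_bytes,
			   unsigned load_off_bytes, unsigned load_size_bytes)
{
  unsigned char buffer[sizeof (unsigned HOST_WIDE_INT)];
  /* "native_encode": value -> bytes.  */
  memcpy (buffer, &stored, sizeof (buffer));
  unsigned int result = 0;
  /* "native_interpret": bytes at the load's position -> value.  */
  memcpy (&result, buffer + (load_off_bytes - store_off_bytes),
	  load_size_bytes);
  return result;
}
#endif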
  /* 4) Assignment from an SSA name which definition we may be able
     to access pieces from or we can combine to a larger entity.  */
  else if (known_eq (ref->size, maxsize)
	   && is_gimple_reg_type (vr->type)
	   && !reverse_storage_order_for_component_p (vr->operands)
	   && !contains_storage_order_barrier_p (vr->operands)
	   && gimple_assign_single_p (def_stmt)
	   && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      tree base2;
      poly_int64 offset2, size2, maxsize2;
      HOST_WIDE_INT offset2i, size2i, offseti;
      bool reverse;
      gcc_assert (lhs_ref_ok);
      base2 = ao_ref_base (&lhs_ref);
      offset2 = lhs_ref.offset;
      size2 = lhs_ref.size;
      maxsize2 = lhs_ref.max_size;
      reverse = reverse_storage_order_for_component_p (lhs);
      tree def_rhs = gimple_assign_rhs1 (def_stmt);
      if (!reverse
	  && !storage_order_barrier_p (lhs)
	  && known_size_p (maxsize2)
	  && known_eq (maxsize2, size2)
	  && adjust_offsets_for_equal_base_address (base, &offset,
						    base2, &offset2))
	{
	  if (data->partial_defs.is_empty ()
	      && known_subrange_p (offset, maxsize, offset2, size2)
	      /* ???  We can't handle bitfield precision extracts without
		 either using an alternate type for the BIT_FIELD_REF and
		 then doing a conversion or possibly adjusting the offset
		 according to endianness.  */
	      && (! INTEGRAL_TYPE_P (vr->type)
		  || known_eq (ref->size, TYPE_PRECISION (vr->type)))
	      && multiple_p (ref->size, BITS_PER_UNIT))
	    {
	      tree val = NULL_TREE;
	      if (! INTEGRAL_TYPE_P (TREE_TYPE (def_rhs))
		  || type_has_mode_precision_p (TREE_TYPE (def_rhs)))
		{
		  gimple_match_op op (gimple_match_cond::UNCOND,
				      BIT_FIELD_REF, vr->type,
				      SSA_VAL (def_rhs),
				      bitsize_int (ref->size),
				      bitsize_int (offset - offset2));
		  val = vn_nary_build_or_lookup (&op);
		}
	      else if (known_eq (ref->size, size2))
		{
		  gimple_match_op op (gimple_match_cond::UNCOND,
				      VIEW_CONVERT_EXPR, vr->type,
				      SSA_VAL (def_rhs));
		  val = vn_nary_build_or_lookup (&op);
		}
	      if (val
		  && (TREE_CODE (val) != SSA_NAME
		      || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
		return data->finish (ao_ref_alias_set (&lhs_ref),
				     ao_ref_base_alias_set (&lhs_ref), val);
	    }
	  else if (maxsize.is_constant (&maxsizei)
		   && offset.is_constant (&offseti)
		   && offset2.is_constant (&offset2i)
		   && size2.is_constant (&size2i)
		   && ranges_known_overlap_p (offset, maxsize, offset2, size2))
	    {
	      pd_data pd;
	      pd.rhs = SSA_VAL (def_rhs);
	      pd.offset = offset2i;
	      pd.size = size2i;
	      return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
					     ao_ref_base_alias_set (&lhs_ref),
					     offseti, maxsizei);
	    }
	}
    }
  /* 5) For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (data->vn_walk_kind == VN_WALKREWRITE
	   && gimple_assign_single_p (def_stmt)
	   && (DECL_P (gimple_assign_rhs1 (def_stmt))
	       || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
	       || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      int i, j, k;
      auto_vec<vn_reference_op_s> rhs;
      vn_reference_op_t vro;
      ao_ref r;

      gcc_assert (lhs_ref_ok);

      /* See if the assignment kills REF.  */
      base2 = ao_ref_base (&lhs_ref);
      if (!lhs_ref.max_size_known_p ()
	  || (base != base2
	      && (TREE_CODE (base) != MEM_REF
		  || TREE_CODE (base2) != MEM_REF
		  || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
		  || !tree_int_cst_equal (TREE_OPERAND (base, 1),
					  TREE_OPERAND (base2, 1))))
	  || !stmt_kills_ref_p (def_stmt, ref))
	return (void *)-1;

      /* Find the common base of ref and the lhs.  lhs_ops already
	 contains valueized operands for the lhs.  */
      i = vr->operands.length () - 1;
      j = lhs_ops.length () - 1;
      while (j >= 0 && i >= 0
	     && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
	{
	  i--;
	  j--;
	}

      /* ???  The innermost op should always be a MEM_REF and we already
	 checked that the assignment to the lhs kills vr.  Thus for
	 aggregate copies using char[] types the vn_reference_op_eq
	 may fail when comparing types for compatibility.  But we really
	 don't care here - further lookups with the rewritten operands
	 will simply fail if we messed up types too badly.  */
      poly_int64 extra_off = 0;
      if (j == 0 && i >= 0
	  && lhs_ops[0].opcode == MEM_REF
	  && maybe_ne (lhs_ops[0].off, -1))
	{
	  if (known_eq (lhs_ops[0].off, vr->operands[i].off))
	    i--, j--;
	  else if (vr->operands[i].opcode == MEM_REF
		   && maybe_ne (vr->operands[i].off, -1))
	    {
	      extra_off = vr->operands[i].off - lhs_ops[0].off;
	      i--, j--;
	    }
	}

      /* i now points to the first additional op.
	 ???  LHS may not be completely contained in VR, one or more
	 VIEW_CONVERT_EXPRs could be in its way.  We could at least
	 try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
	return (void *)-1;

      /* Punt if the additional ops contain a storage order barrier.  */
      for (k = i; k >= 0; k--)
	{
	  vro = &vr->operands[k];
	  if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
	    return (void *)-1;
	}

      /* Now re-write REF to be based on the rhs of the assignment.  */
      tree rhs1 = gimple_assign_rhs1 (def_stmt);
      copy_reference_ops_from_ref (rhs1, &rhs);

      /* Apply an extra offset to the inner MEM_REF of the RHS.  */
      if (maybe_ne (extra_off, 0))
	{
	  if (rhs.length () < 2)
	    return (void *)-1;
	  int ix = rhs.length () - 2;
	  if (rhs[ix].opcode != MEM_REF
	      || known_eq (rhs[ix].off, -1))
	    return (void *)-1;
	  rhs[ix].off += extra_off;
	  rhs[ix].op0 = int_const_binop (PLUS_EXPR, rhs[ix].op0,
					 build_int_cst (TREE_TYPE (rhs[ix].op0),
							extra_off));
	}

      /* Save the operands since we need to use the original ones for
	 the hash entry we use.  */
      if (!data->saved_operands.exists ())
	data->saved_operands = vr->operands.copy ();

      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      vec<vn_reference_op_s> old = vr->operands;
      if (i + 1 + rhs.length () > vr->operands.length ())
	vr->operands.safe_grow (i + 1 + rhs.length (), true);
      else
	vr->operands.truncate (i + 1 + rhs.length ());
      FOR_EACH_VEC_ELT (rhs, j, vro)
	vr->operands[i + 1 + j] = *vro;
      valueize_refs (&vr->operands);
      if (old == shared_lookup_references)
	shared_lookup_references = vr->operands;
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Try folding the new reference to a constant.  */
      tree val = fully_constant_vn_reference_p (vr);
      if (val)
	{
	  if (data->partial_defs.is_empty ())
	    return data->finish (ao_ref_alias_set (&lhs_ref),
				 ao_ref_base_alias_set (&lhs_ref), val);
	  /* This is the only interesting case for partial-def handling
	     coming from targets that like to gimplify init-ctors as
	     aggregate copies from constant data like aarch64 for
	     PR83518.  */
	  if (maxsize.is_constant (&maxsizei) && known_eq (ref->size, maxsize))
	    {
	      pd_data pd;
	      pd.rhs = val;
	      pd.offset = 0;
	      pd.size = maxsizei;
	      return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
					     ao_ref_base_alias_set (&lhs_ref),
					     0, maxsizei);
	    }
	}

      /* Continuing with partial defs isn't easily possible here, we
	 have to find a full def from further lookups from here.  Probably
	 not worth the special-casing everywhere.  */
      if (!data->partial_defs.is_empty ())
	return (void *)-1;

      /* Adjust *ref from the new operands.  */
      ao_ref rhs1_ref;
      ao_ref_init (&rhs1_ref, rhs1);
      if (!ao_ref_init_from_vn_reference (&r, ao_ref_alias_set (&rhs1_ref),
					  ao_ref_base_alias_set (&rhs1_ref),
					  vr->type, vr->operands))
	return (void *)-1;
      /* This can happen with bitfields.  */
      if (maybe_ne (ref->size, r.size))
	{
	  /* If the access lacks some subsetting simply apply that by
	     shortening it.  That in the end can only be successful
	     if we can pun the lookup result which in turn requires
	     exact offsets.  */
	  if (known_eq (r.size, r.max_size)
	      && known_lt (ref->size, r.size))
	    r.size = r.max_size = ref->size;
	  else
	    return (void *)-1;
	}
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      data->last_vuse_ptr = NULL;
      /* Invalidate the original access path since it now contains
	 the wrong base.  */
      data->orig_ref.ref = NULL_TREE;
      /* Use the alias-set of this LHS for recording an eventual result.  */
      if (data->first_set == -2)
	{
	  data->first_set = ao_ref_alias_set (&lhs_ref);
	  data->first_base_set = ao_ref_base_alias_set (&lhs_ref);
	}

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }
  /* 6) For memcpy copies translate the reference through them if the copy
     kills ref.  But we cannot (easily) do this translation if the memcpy is
     a storage order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that
     can modify the storage order of objects (see storage_order_barrier_p).  */
  else if (data->vn_walk_kind == VN_WALKREWRITE
	   && is_gimple_reg_type (vr->type)
	   /* ???  Handle BCOPY as well.  */
	   && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY_CHK)
	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY_CHK)
	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE)
	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE_CHK))
	   && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
	       || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
	   && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
	       || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
	   && (poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size)
	       || (TREE_CODE (gimple_call_arg (def_stmt, 2)) == SSA_NAME
		   && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt, 2)),
				       &copy_size)))
	   /* Handling this is more complicated, give up for now.  */
	   && data->partial_defs.is_empty ())
    {
      tree lhs, rhs;
      ao_ref r;
      poly_int64 rhs_offset, lhs_offset;
      vn_reference_op_s op;
      poly_uint64 mem_offset;
      poly_int64 at, byte_maxsize;

      /* Only handle non-variable, addressable refs.  */
      if (maybe_ne (ref->size, maxsize)
	  || !multiple_p (offset, BITS_PER_UNIT, &at)
	  || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
	return (void *)-1;

      /* Extract a pointer base and an offset for the destination.  */
      lhs = gimple_call_arg (def_stmt, 0);
      lhs_offset = 0;
      if (TREE_CODE (lhs) == SSA_NAME)
	{
	  lhs = vn_valueize (lhs);
	  if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
	      if (gimple_assign_single_p (def_stmt)
		  && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
		lhs = gimple_assign_rhs1 (def_stmt);
	    }
	}
      if (TREE_CODE (lhs) == ADDR_EXPR)
	{
	  if (AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (lhs)))
	      && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_TYPE (lhs))))
	    return (void *)-1;
	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
						    &lhs_offset);
	  if (!tem)
	    return (void *)-1;
	  if (TREE_CODE (tem) == MEM_REF
	      && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
	    {
	      lhs = TREE_OPERAND (tem, 0);
	      if (TREE_CODE (lhs) == SSA_NAME)
		lhs = vn_valueize (lhs);
	      lhs_offset += mem_offset;
	    }
	  else if (DECL_P (tem))
	    lhs = build_fold_addr_expr (tem);
	  else
	    return (void *)-1;
	}
      if (TREE_CODE (lhs) != SSA_NAME
	  && TREE_CODE (lhs) != ADDR_EXPR)
	return (void *)-1;

      /* Extract a pointer base and an offset for the source.  */
      rhs = gimple_call_arg (def_stmt, 1);
      rhs_offset = 0;
      if (TREE_CODE (rhs) == SSA_NAME)
	rhs = vn_valueize (rhs);
      if (TREE_CODE (rhs) == ADDR_EXPR)
	{
	  if (AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (rhs)))
	      && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_TYPE (rhs))))
	    return (void *)-1;
	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
						    &rhs_offset);
	  if (!tem)
	    return (void *)-1;
	  if (TREE_CODE (tem) == MEM_REF
	      && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
	    {
	      rhs = TREE_OPERAND (tem, 0);
	      rhs_offset += mem_offset;
	    }
	  else if (DECL_P (tem)
		   || TREE_CODE (tem) == STRING_CST)
	    rhs = build_fold_addr_expr (tem);
	  else
	    return (void *)-1;
	}
      if (TREE_CODE (rhs) == SSA_NAME)
	rhs = SSA_VAL (rhs);
      else if (TREE_CODE (rhs) != ADDR_EXPR)
	return (void *)-1;

      /* The bases of the destination and the references have to agree.  */
      if (TREE_CODE (base) == MEM_REF)
	{
	  if (TREE_OPERAND (base, 0) != lhs
	      || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
	    return (void *)-1;
	  at += mem_offset;
	}
      else if (!DECL_P (base)
	       || TREE_CODE (lhs) != ADDR_EXPR
	       || TREE_OPERAND (lhs, 0) != base)
	return (void *)-1;

      /* If the access is completely outside of the memcpy destination
	 area there is no aliasing.  */
      if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
	return NULL;
      /* And the access has to be contained within the memcpy destination.  */
      if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
	return (void *)-1;

      /* Save the operands since we need to use the original ones for
	 the hash entry we use.  */
      if (!data->saved_operands.exists ())
	data->saved_operands = vr->operands.copy ();

      /* Make room for 2 operands in the new reference.  */
      if (vr->operands.length () < 2)
	{
	  vec<vn_reference_op_s> old = vr->operands;
	  vr->operands.safe_grow_cleared (2, true);
	  if (old == shared_lookup_references)
	    shared_lookup_references = vr->operands;
	}
      else
	vr->operands.truncate (2);

      /* The looked-through reference is a simple MEM_REF.  */
      memset (&op, 0, sizeof (op));
      op.type = vr->type;
      op.opcode = MEM_REF;
      op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
      op.off = at - lhs_offset + rhs_offset;
      vr->operands[0] = op;
      op.type = TREE_TYPE (rhs);
      op.opcode = TREE_CODE (rhs);
      op.op0 = rhs;
      op.off = -1;
      vr->operands[1] = op;
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Try folding the new reference to a constant.  */
      tree val = fully_constant_vn_reference_p (vr);
      if (val)
	return data->finish (0, 0, val);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, 0, 0, vr->type, vr->operands))
	return (void *)-1;
      /* This can happen with bitfields.  */
      if (maybe_ne (ref->size, r.size))
	return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      data->last_vuse_ptr = NULL;
      /* Invalidate the original access path since it now contains
	 the wrong base.  */
      data->orig_ref.ref = NULL_TREE;
      /* Use the alias-set of this stmt for recording an eventual result.  */
      if (data->first_set == -2)
	{
	  data->first_set = 0;
	  data->first_base_set = 0;
	}

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}
/* Return a reference op vector from OP that can be used for
   vn_reference_lookup_pieces.  The caller is responsible for releasing
   the vector.  */

vec<vn_reference_op_s>
vn_reference_operands_for_lookup (tree op)
{
  bool valueized;
  return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
}
/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set,
			    alias_set_type base_set, tree type,
			    vec<vn_reference_op_s> operands,
			    vn_reference_t *vnresult, vn_lookup_kind kind)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;
  tree cst;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse_ssa_val (vuse);
  shared_lookup_references.truncate (0);
  shared_lookup_references.safe_grow (operands.length (), true);
  memcpy (shared_lookup_references.address (),
	  operands.address (),
	  sizeof (vn_reference_op_s)
	  * operands.length ());
  bool valueized_p;
  valueize_refs_1 (&shared_lookup_references, &valueized_p);
  vr1.operands = shared_lookup_references;
  vr1.type = type;
  vr1.set = set;
  vr1.base_set = base_set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  vn_reference_lookup_1 (&vr1, vnresult);
  if (!*vnresult
      && kind != VN_NOWALK
      && vr1.vuse)
    {
      ao_ref r;
      unsigned limit = param_sccvn_max_alias_queries_per_access;
      vn_walk_cb_data data (&vr1, NULL_TREE, NULL, kind, true, NULL_TREE);
      vec<vn_reference_op_s> ops_for_ref;
      if (!valueized_p)
	ops_for_ref = vr1.operands;
      else
	{
	  /* For ao_ref_from_mem we have to ensure only available SSA names
	     end up in base and the only convenient way to make this work
	     for PRE is to re-valueize with that in mind.  */
	  ops_for_ref.create (operands.length ());
	  ops_for_ref.quick_grow (operands.length ());
	  memcpy (ops_for_ref.address (),
		  operands.address (),
		  sizeof (vn_reference_op_s)
		  * operands.length ());
	  valueize_refs_1 (&ops_for_ref, &valueized_p, true);
	}
      if (ao_ref_init_from_vn_reference (&r, set, base_set, type,
					 ops_for_ref))
	*vnresult
	  = ((vn_reference_t)
	     walk_non_aliased_vuses (&r, vr1.vuse, true, vn_reference_lookup_2,
				     vn_reference_lookup_3, vuse_valueize,
				     limit, &data));
      if (ops_for_ref != shared_lookup_references)
	ops_for_ref.release ();
      gcc_checking_assert (vr1.operands == shared_lookup_references);
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  When TBAA_P is false assume
   we are looking up a store and treat it as having alias-set zero.
   *LAST_VUSE_PTR will be updated with the VUSE the value lookup succeeded.
   MASK is either NULL_TREE, or can be an INTEGER_CST if the result of the
   load is bitwise anded with MASK and so we are only interested in a subset
   of the bits and can ignore if the other bits are uninitialized or
   not initialized with constants.  */

tree
vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
		     vn_reference_t *vnresult, bool tbaa_p,
		     tree *last_vuse_ptr, tree mask)
{
  vec<vn_reference_op_s> operands;
  struct vn_reference_s vr1;
  bool valueized_anything;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse_ssa_val (vuse);
  vr1.operands = operands
    = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
  vr1.type = TREE_TYPE (op);
  ao_ref op_ref;
  ao_ref_init (&op_ref, op);
  vr1.set = ao_ref_alias_set (&op_ref);
  vr1.base_set = ao_ref_base_alias_set (&op_ref);
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if (mask == NULL_TREE)
    if (tree cst = fully_constant_vn_reference_p (&vr1))
      return cst;

  if (kind != VN_NOWALK && vr1.vuse)
    {
      vn_reference_t wvnresult;
      ao_ref r;
      unsigned limit = param_sccvn_max_alias_queries_per_access;
      auto_vec<vn_reference_op_s> ops_for_ref;
      if (valueized_anything)
	{
	  copy_reference_ops_from_ref (op, &ops_for_ref);
	  bool tem;
	  valueize_refs_1 (&ops_for_ref, &tem, true);
	}
      /* Make sure to use a valueized reference if we valueized anything.
	 Otherwise preserve the full reference for advanced TBAA.  */
      if (!valueized_anything
	  || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.base_set,
					     vr1.type, ops_for_ref))
	ao_ref_init (&r, op);
      vn_walk_cb_data data (&vr1, r.ref ? NULL_TREE : op,
			    last_vuse_ptr, kind, tbaa_p, mask);

      wvnresult
	= ((vn_reference_t)
	   walk_non_aliased_vuses (&r, vr1.vuse, tbaa_p, vn_reference_lookup_2,
				   vn_reference_lookup_3, vuse_valueize, limit,
				   &data));
      gcc_checking_assert (vr1.operands == shared_lookup_references);
      if (wvnresult)
	{
	  gcc_assert (mask == NULL_TREE);
	  if (vnresult)
	    *vnresult = wvnresult;
	  return wvnresult->result;
	}
      else if (mask)
	return data.masked_result;

      return NULL_TREE;
    }

  if (last_vuse_ptr)
    *last_vuse_ptr = vr1.vuse;
  if (mask)
    return NULL_TREE;
  return vn_reference_lookup_1 (&vr1, vnresult);
}
/* Lookup CALL in the current hash table and return the entry in
   *VNRESULT if found.  Populates *VR for the hashtable lookup.  */

void
vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
			  vn_reference_t vr)
{
  if (vnresult)
    *vnresult = NULL;

  tree vuse = gimple_vuse (call);

  vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr->operands = valueize_shared_reference_ops_from_call (call);
  tree lhs = gimple_call_lhs (call);
  /* For non-SSA return values the reference ops contain the LHS.  */
  vr->type = ((lhs && TREE_CODE (lhs) == SSA_NAME)
	      ? TREE_TYPE (lhs) : NULL_TREE);
  vr->punned = false;
  vr->set = 0;
  vr->base_set = 0;
  vr->hashcode = vn_reference_compute_hash (vr);
  vn_reference_lookup_1 (vr, vnresult);
}
/* Insert OP into the current hash table with a value number of RESULT.  */

static void
vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
{
  vn_reference_s **slot;
  vn_reference_t vr1;
  bool tem;

  vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse_ssa_val (vuse);
  vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
  vr1->type = TREE_TYPE (op);
  vr1->punned = false;
  ao_ref op_ref;
  ao_ref_init (&op_ref, op);
  vr1->set = ao_ref_alias_set (&op_ref);
  vr1->base_set = ao_ref_base_alias_set (&op_ref);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
  vr1->result_vdef = vdef;

  slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
						      INSERT);

  /* Because IL walking on reference lookup can end up visiting
     a def that is only to be visited later in iteration order
     when we are about to make an irreducible region reducible
     the def can be effectively processed and its ref being inserted
     by vn_reference_lookup_3 already.  So we cannot assert (!*slot)
     but save a lookup if we deal with already inserted refs here.  */
  if (*slot)
    {
      /* We cannot assert that we have the same value either because
	 when disentangling an irreducible region we may end up visiting
	 a use before the corresponding def.  That's a missed optimization
	 only though.  See gcc.dg/tree-ssa/pr87126.c for example.  */
      if (dump_file && (dump_flags & TDF_DETAILS)
	  && !operand_equal_p ((*slot)->result, vr1->result, 0))
	{
	  fprintf (dump_file, "Keeping old value ");
	  print_generic_expr (dump_file, (*slot)->result);
	  fprintf (dump_file, " because of collision\n");
	}
      free_reference (vr1);
      obstack_free (&vn_tables_obstack, vr1);
      return;
    }

  *slot = vr1;
  vr1->next = last_inserted_ref;
  last_inserted_ref = vr1;
}
/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set,
			    alias_set_type base_set, tree type,
			    vec<vn_reference_op_s> operands,
			    tree result, unsigned int value_id)
{
  vn_reference_s **slot;
  vn_reference_t vr1;

  vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
  vr1->value_id = value_id;
  vr1->vuse = vuse_ssa_val (vuse);
  vr1->operands = operands;
  valueize_refs (&vr1->operands);
  vr1->type = type;
  vr1->punned = false;
  vr1->set = set;
  vr1->base_set = base_set;
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;
  vr1->result_vdef = NULL_TREE;

  slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
						      INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  *slot = vr1;
  vr1->next = last_inserted_ref;
  last_inserted_ref = vr1;
  return vr1;
}
/* Compute and return the hash value for nary operation VBO1.  */

static hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  inchash::hash hstate;
  unsigned i;

  if (((vno1->length == 2
	&& commutative_tree_code (vno1->opcode))
       || (vno1->length == 3
	   && commutative_ternary_tree_code (vno1->opcode)))
      && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
    std::swap (vno1->op[0], vno1->op[1]);
  else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
	   && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
    {
      std::swap (vno1->op[0], vno1->op[1]);
      vno1->opcode = swap_tree_comparison (vno1->opcode);
    }

  hstate.add_int (vno1->opcode);
  for (i = 0; i < vno1->length; ++i)
    inchash::add_expr (vno1->op[i], hstate);

  return hstate.end ();
}
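
/* Standalone illustrative sketch, NOT used by the pass: the
   canonicalization above guarantees a + b and b + a hash identically,
   and a < b re-keys as b > a by swapping the comparison code together
   with the operands.  The ordering predicate and mixing function below
   are stand-ins for tree_swap_operands_p and inchash.  */

static hashval_t ATTRIBUTE_UNUSED
sketch_commutative_hash (int opcode, int op0, int op1)
{
  if (op0 > op1)		/* Stand-in for tree_swap_operands_p.  */
    std::swap (op0, op1);
  hashval_t h = (hashval_t) opcode;
  h = h * 0x9e3779b9u + (hashval_t) op0;
  h = h * 0x9e3779b9u + (hashval_t) op1;
  return h;
}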
/* Compare nary operations VNO1 and VNO2 and return true if they are
   equivalent.  */

bool
vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
{
  unsigned i;

  if (vno1->hashcode != vno2->hashcode)
    return false;

  if (vno1->length != vno2->length)
    return false;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  /* BIT_INSERT_EXPR has an implicit operand as the type precision
     of op1.  Need to check to make sure they are the same.  */
  if (vno1->opcode == BIT_INSERT_EXPR
      && TREE_CODE (vno1->op[1]) == INTEGER_CST
      && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
	 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
    return false;

  return true;
}
/* Initialize VNO from the pieces provided.  */

static void
init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
			     enum tree_code code, tree type, tree *ops)
{
  vno->opcode = code;
  vno->length = length;
  vno->type = type;
  memcpy (&vno->op[0], ops, sizeof (tree) * length);
}
/* Return the number of operands for a vn_nary ops structure from STMT.  */

static unsigned int
vn_nary_length_from_stmt (gimple *stmt)
{
  switch (gimple_assign_rhs_code (stmt))
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      return 1;

    case BIT_FIELD_REF:
      return 3;

    case CONSTRUCTOR:
      return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));

    default:
      return gimple_num_ops (stmt) - 1;
    }
}
/* Initialize VNO from STMT.  */

static void
init_vn_nary_op_from_stmt (vn_nary_op_t vno, gassign *stmt)
{
  unsigned i;

  vno->opcode = gimple_assign_rhs_code (stmt);
  vno->type = TREE_TYPE (gimple_assign_lhs (stmt));
  switch (vno->opcode)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      vno->length = 1;
      vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      break;

    case BIT_FIELD_REF:
      vno->length = 3;
      vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
      vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
      break;

    case CONSTRUCTOR:
      vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
      for (i = 0; i < vno->length; ++i)
	vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
      break;

    default:
      gcc_checking_assert (!gimple_assign_single_p (stmt));
      vno->length = gimple_num_ops (stmt) - 1;
      for (i = 0; i < vno->length; ++i)
	vno->op[i] = gimple_op (stmt, i + 1);
    }
}
/* Compute the hashcode for VNO and look for it in the hash table;
   return the resulting value number if it exists in the hash table.
   Return NULL_TREE if it does not exist in the hash table or if the
   result field of the operation is NULL.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

static tree
vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
{
  vn_nary_op_s **slot;

  if (vnresult)
    *vnresult = NULL;

  for (unsigned i = 0; i < vno->length; ++i)
    if (TREE_CODE (vno->op[i]) == SSA_NAME)
      vno->op[i] = SSA_VAL (vno->op[i]);

  vno->hashcode = vn_nary_op_compute_hash (vno);
  slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = *slot;
  return (*slot)->predicated_values ? NULL_TREE : (*slot)->u.result;
}
/* Lookup a n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
			  tree type, tree *ops, vn_nary_op_t *vnresult)
{
  vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
				  sizeof_vn_nary_op (length));
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  return vn_nary_op_lookup_1 (vno1, vnresult);
}
/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

static tree
vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
{
  vn_nary_op_t vno1
    = XALLOCAVAR (struct vn_nary_op_s,
		  sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
  init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (stmt));
  return vn_nary_op_lookup_1 (vno1, vnresult);
}
/* Allocate a vn_nary_op_t with LENGTH operands on STACK.  */

static vn_nary_op_t
alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
{
  return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
}
/* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
   obstack.  */

static vn_nary_op_t
alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
{
  vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length, &vn_tables_obstack);

  vno1->value_id = value_id;
  vno1->length = length;
  vno1->predicated_values = 0;
  vno1->u.result = result;

  return vno1;
}
/* Insert VNO into TABLE.  */

static vn_nary_op_t
vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table)
{
  vn_nary_op_s **slot;

  gcc_assert (! vno->predicated_values
              || (! vno->u.values->next
                  && vno->u.values->n == 1));

  for (unsigned i = 0; i < vno->length; ++i)
    if (TREE_CODE (vno->op[i]) == SSA_NAME)
      vno->op[i] = SSA_VAL (vno->op[i]);

  vno->hashcode = vn_nary_op_compute_hash (vno);
  slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
  vno->unwind_to = *slot;
  if (*slot)
    {
      /* Prefer non-predicated values.
         ??? Only if those are constant, otherwise, with constant predicated
         value, turn them into predicated values with entry-block validity
         (??? but we always find the first valid result currently).  */
      if ((*slot)->predicated_values
          && ! vno->predicated_values)
        {
          /* ??? We cannot remove *slot from the unwind stack list.
             For the moment we deal with this by skipping not found
             entries but this isn't ideal ...  */
          *slot = vno;
          /* ??? Maintain a stack of states we can unwind in
             vn_nary_op_s?  But how far do we unwind?  In reality
             we need to push change records somewhere...  Or not
             unwind vn_nary_op_s and linking them but instead
             unwind the results "list", linking that, which also
             doesn't move on hashtable resize.  */
          /* We can also have a ->unwind_to recording *slot there.
             That way we can make u.values a fixed size array with
             recording the number of entries but of course we then
             have always N copies for each unwind_to-state.  Or we
             make sure to only ever append and each unwinding will
             pop off one entry (but how to deal with predicated
             replaced with non-predicated here?)  */
          vno->next = last_inserted_nary;
          last_inserted_nary = vno;
          return vno;
        }
      else if (vno->predicated_values
               && ! (*slot)->predicated_values)
        return *slot;
      else if (vno->predicated_values
               && (*slot)->predicated_values)
        {
          /* ??? Factor this all into an insert_single_predicated_value
             routine.  */
          gcc_assert (!vno->u.values->next && vno->u.values->n == 1);
          basic_block vno_bb
            = BASIC_BLOCK_FOR_FN (cfun, vno->u.values->valid_dominated_by_p[0]);
          vn_pval *nval = vno->u.values;
          vn_pval **next = &vno->u.values;
          bool found = false;
          for (vn_pval *val = (*slot)->u.values; val; val = val->next)
            {
              if (expressions_equal_p (val->result, nval->result))
                {
                  found = true;
                  for (unsigned i = 0; i < val->n; ++i)
                    {
                      basic_block val_bb
                        = BASIC_BLOCK_FOR_FN (cfun,
                                              val->valid_dominated_by_p[i]);
                      if (dominated_by_p (CDI_DOMINATORS, vno_bb, val_bb))
                        /* Value registered with more generic predicate.  */
                        return *slot;
                      else if (dominated_by_p (CDI_DOMINATORS, val_bb, vno_bb))
                        /* Shouldn't happen, we insert in RPO order.  */
                        gcc_unreachable ();
                    }
                  /* Append the new block to the predicate list.  */
                  *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
                                                     sizeof (vn_pval)
                                                     + val->n * sizeof (int));
                  (*next)->next = NULL;
                  (*next)->result = val->result;
                  (*next)->n = val->n + 1;
                  memcpy ((*next)->valid_dominated_by_p,
                          val->valid_dominated_by_p,
                          val->n * sizeof (int));
                  (*next)->valid_dominated_by_p[val->n] = vno_bb->index;
                  next = &(*next)->next;
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "Appending predicate to value.\n");
                  continue;
                }
              /* Copy other predicated values.  */
              *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
                                                 sizeof (vn_pval)
                                                 + (val->n-1) * sizeof (int));
              memcpy (*next, val, sizeof (vn_pval) + (val->n-1) * sizeof (int));
              (*next)->next = NULL;
              next = &(*next)->next;
            }
          if (!found)
            *next = nval;

          *slot = vno;
          vno->next = last_inserted_nary;
          last_inserted_nary = vno;
          return vno;
        }

      /* While we do not want to insert things twice it's awkward to
         avoid it in the case where visit_nary_op pattern-matches stuff
         and ends up simplifying the replacement to itself.  We then
         get two inserts, one from visit_nary_op and one from
         vn_nary_build_or_lookup.
         So allow inserts with the same value number.  */
      if ((*slot)->u.result == vno->u.result)
        return *slot;
    }

  /* ??? There's also optimistic vs. previous committed state merging
     that is problematic for the case of unwinding.  */

  /* ??? We should return NULL if we do not use 'vno' and have the
     caller release it.  */
  gcc_assert (!*slot);

  *slot = vno;
  vno->next = last_inserted_nary;
  last_inserted_nary = vno;
  return vno;
}
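/* Illustrative sketch (not from the original sources): a predicated
   entry recorded for the comparison a_1 != 0 on the true edge of

       if (a_1 != 0) goto bb3; else goto bb4;

   is stored as u.values -> { result = boolean_true_node, n = 1,
   valid_dominated_by_p[0] = 3 }, i.e. the "true" result is usable in
   any block dominated by bb3.  When the same expression is later
   inserted as valid in another block, the merge loop above appends
   that block index to the existing vn_pval instead of creating a
   second hash entry.  */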
/* Insert an n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
                          tree type, tree *ops,
                          tree result, unsigned int value_id)
{
  vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  return vn_nary_op_insert_into (vno1, valid_info->nary);
}
static vn_nary_op_t
vn_nary_op_insert_pieces_predicated (unsigned int length, enum tree_code code,
                                     tree type, tree *ops,
                                     tree result, unsigned int value_id,
                                     edge pred_e)
{
  /* ??? Currently tracking BBs.  */
  if (! single_pred_p (pred_e->dest))
    {
      /* Never record for backedges.  */
      if (pred_e->flags & EDGE_DFS_BACK)
        return NULL;
      edge_iterator ei;
      edge e;
      int cnt = 0;
      /* Ignore backedges.  */
      FOR_EACH_EDGE (e, ei, pred_e->dest->preds)
        if (! dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
          cnt++;
      if (cnt != 1)
        return NULL;
    }
  if (dump_file && (dump_flags & TDF_DETAILS)
      /* ??? Fix dumping, but currently we only get comparisons.  */
      && TREE_CODE_CLASS (code) == tcc_comparison)
    {
      fprintf (dump_file, "Recording on edge %d->%d ", pred_e->src->index,
               pred_e->dest->index);
      print_generic_expr (dump_file, ops[0], TDF_SLIM);
      fprintf (dump_file, " %s ", get_tree_code_name (code));
      print_generic_expr (dump_file, ops[1], TDF_SLIM);
      fprintf (dump_file, " == %s\n",
               integer_zerop (result) ? "false" : "true");
    }
  vn_nary_op_t vno1 = alloc_vn_nary_op (length, NULL_TREE, value_id);
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  vno1->predicated_values = 1;
  vno1->u.values = (vn_pval *) obstack_alloc (&vn_tables_obstack,
                                              sizeof (vn_pval));
  vno1->u.values->next = NULL;
  vno1->u.values->result = result;
  vno1->u.values->n = 1;
  vno1->u.values->valid_dominated_by_p[0] = pred_e->dest->index;
  return vn_nary_op_insert_into (vno1, valid_info->nary);
}
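/* As an example of the intended use (hypothetical caller, shown for
   illustration only): when the RPO walk processes the true edge E of
   a GIMPLE_COND "if (x_2 > 7)" it can record the comparison result

     tree ops[2] = { x_2, build_int_cst (TREE_TYPE (x_2), 7) };
     vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
                                          ops, boolean_true_node,
                                          value_id, e);

   so that a later lookup of x_2 > 7 in a block dominated by E->dest
   yields true via vn_nary_op_get_predicated_value.  */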
static bool
dominated_by_p_w_unex (basic_block bb1, basic_block bb2, bool);

static tree
vn_nary_op_get_predicated_value (vn_nary_op_t vno, basic_block bb)
{
  if (! vno->predicated_values)
    return vno->u.result;
  for (vn_pval *val = vno->u.values; val; val = val->next)
    for (unsigned i = 0; i < val->n; ++i)
      /* Do not handle backedge executability optimistically since
         when figuring out whether to iterate we do not consider
         changed predication.  */
      if (dominated_by_p_w_unex
            (bb, BASIC_BLOCK_FOR_FN (cfun, val->valid_dominated_by_p[i]),
             false))
        return val->result;
  return NULL_TREE;
}
/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */

static vn_nary_op_t
vn_nary_op_insert_stmt (gimple *stmt, tree result)
{
  vn_nary_op_t vno1
    = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
                        result, VN_INFO (result)->value_id);
  init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (stmt));
  return vn_nary_op_insert_into (vno1, valid_info->nary);
}
/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  inchash::hash hstate;
  tree type;
  tree phi1op;
  edge e;
  edge_iterator ei;

  hstate.add_int (EDGE_COUNT (vp1->block->preds));
  switch (EDGE_COUNT (vp1->block->preds))
    {
    case 1:
      break;
    case 2:
      /* Only hash the block index for loop-header PHIs; other
         two-argument PHIs may be CSEd across blocks.  */
      if (vp1->block->loop_father->header == vp1->block)
        ;
      else
        break;
      /* Fallthru.  */
    default:
      hstate.add_int (vp1->block->index);
    }

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
  type = vp1->type;
  hstate.merge_hash (vn_hash_type (type));

  FOR_EACH_EDGE (e, ei, vp1->block->preds)
    {
      /* Don't hash backedge values, they need to be handled as VN_TOP
         for optimistic value-numbering.  */
      if (e->flags & EDGE_DFS_BACK)
        continue;

      phi1op = vp1->phiargs[e->dest_idx];
      if (phi1op == VN_TOP)
        continue;
      inchash::add_expr (phi1op, hstate);
    }

  return hstate.end ();
}
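/* For instance (illustrative): in a loop header

     i_1 = PHI <0(preheader), i_4(latch)>
     j_2 = PHI <0(preheader), j_5(latch)>

   both PHIs hash only the preheader argument 0 because the latch edge
   is a DFS backedge, which is what allows them to be optimistically
   value-numbered the same and then iterated to a fixpoint.  */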
/* Return true if COND1 and COND2 represent the same condition, set
   *INVERTED_P if one needs to be inverted to make it the same as
   the other.  */

static bool
cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
                    gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
{
  enum tree_code code1 = gimple_cond_code (cond1);
  enum tree_code code2 = gimple_cond_code (cond2);

  *inverted_p = false;
  if (code1 == code2)
    ;
  else if (code1 == swap_tree_comparison (code2))
    std::swap (lhs2, rhs2);
  else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
    *inverted_p = true;
  else if (code1 == invert_tree_comparison
                      (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
    {
      std::swap (lhs2, rhs2);
      *inverted_p = true;
    }
  else
    return false;

  return ((expressions_equal_p (lhs1, lhs2)
           && expressions_equal_p (rhs1, rhs2))
          || (commutative_tree_code (code1)
              && expressions_equal_p (lhs1, rhs2)
              && expressions_equal_p (rhs1, lhs2)));
}
/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
{
  if (vp1->hashcode != vp2->hashcode)
    return false;

  if (vp1->block != vp2->block)
    {
      if (EDGE_COUNT (vp1->block->preds) != EDGE_COUNT (vp2->block->preds))
        return false;

      switch (EDGE_COUNT (vp1->block->preds))
        {
        case 1:
          /* Single-arg PHIs are just copies.  */
          break;

        case 2:
          {
            /* Rule out backedges into the PHI.  */
            if (vp1->block->loop_father->header == vp1->block
                || vp2->block->loop_father->header == vp2->block)
              return false;

            /* If the PHI nodes do not have compatible types
               they are not the same.  */
            if (!types_compatible_p (vp1->type, vp2->type))
              return false;

            basic_block idom1
              = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
            basic_block idom2
              = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
            /* If the immediate dominators end in switch stmts multiple
               values may end up in the same PHI arg via intermediate
               CFG merges.  */
            if (EDGE_COUNT (idom1->succs) != 2
                || EDGE_COUNT (idom2->succs) != 2)
              return false;

            /* Verify the controlling stmt is the same.  */
            gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
            gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
            if (! last1 || ! last2)
              return false;
            bool inverted_p;
            if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
                                      last2, vp2->cclhs, vp2->ccrhs,
                                      &inverted_p))
              return false;

            /* Get at true/false controlled edges into the PHI.  */
            edge te1, te2, fe1, fe2;
            if (! extract_true_false_controlled_edges (idom1, vp1->block,
                                                       &te1, &fe1)
                || ! extract_true_false_controlled_edges (idom2, vp2->block,
                                                          &te2, &fe2))
              return false;

            /* Swap edges if the second condition is the inverted of the
               first.  */
            if (inverted_p)
              std::swap (te2, fe2);

            /* Since we do not know which edge will be executed we have
               to be careful when matching VN_TOP.  Be conservative and
               only match VN_TOP == VN_TOP for now, we could allow
               VN_TOP on the not prevailing PHI though.  See for example
               PR81210.  */
            if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
                                       vp2->phiargs[te2->dest_idx], false)
                || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
                                          vp2->phiargs[fe2->dest_idx], false))
              return false;

            return true;
          }

        default:
          return false;
        }
    }

  /* If the PHI nodes do not have compatible types
     they are not the same.  */
  if (!types_compatible_p (vp1->type, vp2->type))
    return false;

  /* Any phi in the same block will have its arguments in the
     same edge order, because of how we store phi nodes.  */
  unsigned nargs = EDGE_COUNT (vp1->block->preds);
  for (unsigned i = 0; i < nargs; ++i)
    {
      tree phi1op = vp1->phiargs[i];
      tree phi2op = vp2->phiargs[i];
      if (phi1op == phi2op)
        continue;
      if (!expressions_equal_p (phi1op, phi2op, false))
        return false;
    }

  return true;
}
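/* Illustrative example of the cross-block case: with a duplicated
   diamond

     if (a_1 < b_2)  ...  x_3 = PHI <c_4(then1), d_5(else1)>
     if (a_1 < b_2)  ...  y_6 = PHI <c_4(then2), d_5(else2)>

   the two PHIs live in different blocks but compare equal because the
   controlling conditions match via cond_stmts_equal_p and the true/
   false controlled edges carry equal arguments; an inverted second
   condition is handled by swapping te2/fe2 above.  */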
/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

static tree
vn_phi_lookup (gimple *phi, bool backedges_varying_p)
{
  vn_phi_s **slot;
  struct vn_phi_s *vp1;
  edge e;
  edge_iterator ei;

  vp1 = XALLOCAVAR (struct vn_phi_s,
                    sizeof (struct vn_phi_s)
                    + (gimple_phi_num_args (phi) - 1) * sizeof (tree));

  /* Canonicalize the SSA_NAME's to their value number.  */
  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
    {
      tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      if (TREE_CODE (def) == SSA_NAME
          && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
        {
          if (ssa_undefined_value_p (def, false))
            def = VN_TOP;
          else
            def = SSA_VAL (def);
        }
      vp1->phiargs[e->dest_idx] = def;
    }
  vp1->type = TREE_TYPE (gimple_phi_result (phi));
  vp1->block = gimple_bb (phi);
  /* Extract values of the controlling condition.  */
  vp1->cclhs = NULL_TREE;
  vp1->ccrhs = NULL_TREE;
  basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
  if (EDGE_COUNT (idom1->succs) == 2)
    if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
      {
        /* ??? We want to use SSA_VAL here.  But possibly not
           allow VN_TOP.  */
        vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
        vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
      }
  vp1->hashcode = vn_phi_compute_hash (vp1);
  slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, NO_INSERT);
  if (!slot)
    return NULL_TREE;
  return (*slot)->result;
}
/* Insert PHI into the current hash table with a value number of
   RESULT.  */

static vn_phi_t
vn_phi_insert (gimple *phi, tree result, bool backedges_varying_p)
{
  vn_phi_s **slot;
  vn_phi_t vp1 = (vn_phi_t) obstack_alloc (&vn_tables_obstack,
                                           sizeof (vn_phi_s)
                                           + ((gimple_phi_num_args (phi) - 1)
                                              * sizeof (tree)));
  edge e;
  edge_iterator ei;

  /* Canonicalize the SSA_NAME's to their value number.  */
  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
    {
      tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      if (TREE_CODE (def) == SSA_NAME
          && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
        {
          if (ssa_undefined_value_p (def, false))
            def = VN_TOP;
          else
            def = SSA_VAL (def);
        }
      vp1->phiargs[e->dest_idx] = def;
    }
  vp1->value_id = VN_INFO (result)->value_id;
  vp1->type = TREE_TYPE (gimple_phi_result (phi));
  vp1->block = gimple_bb (phi);
  /* Extract values of the controlling condition.  */
  vp1->cclhs = NULL_TREE;
  vp1->ccrhs = NULL_TREE;
  basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
  if (EDGE_COUNT (idom1->succs) == 2)
    if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
      {
        /* ??? We want to use SSA_VAL here.  But possibly not
           allow VN_TOP.  */
        vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
        vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
      }
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
  gcc_assert (!*slot);

  *slot = vp1;
  vp1->next = last_inserted_phi;
  last_inserted_phi = vp1;
  return vp1;
}
/* Return true if BB1 is dominated by BB2 taking into account edges
   that are not executable.  When ALLOW_BACK is false consider not
   executable backedges as executable.  */

static bool
dominated_by_p_w_unex (basic_block bb1, basic_block bb2, bool allow_back)
{
  edge_iterator ei;
  edge e;

  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
    return true;

  /* Before iterating we'd like to know if there exists a
     (executable) path from bb2 to bb1 at all, if not we can
     directly return false.  For now simply iterate once.  */

  /* Iterate to the single executable bb1 predecessor.  */
  if (EDGE_COUNT (bb1->preds) > 1)
    {
      edge prede = NULL;
      FOR_EACH_EDGE (e, ei, bb1->preds)
        if ((e->flags & EDGE_EXECUTABLE)
            || (!allow_back && (e->flags & EDGE_DFS_BACK)))
          {
            if (prede)
              {
                prede = NULL;
                break;
              }
            prede = e;
          }
      if (! prede)
        return false;
      bb1 = prede->src;

      /* Re-do the dominance check with changed bb1.  */
      if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
        return true;
    }

  if (EDGE_COUNT (bb1->preds) == 1)
    {
      /* Iterate to the single executable bb2 successor.  */
      edge succe = NULL;
      FOR_EACH_EDGE (e, ei, bb2->succs)
        if ((e->flags & EDGE_EXECUTABLE)
            || (!allow_back && (e->flags & EDGE_DFS_BACK)))
          {
            if (succe)
              {
                succe = NULL;
                break;
              }
            succe = e;
          }
      if (succe)
        {
          /* Verify the reached block is only reached through succe.
             If there is only one edge we can spare us the dominator
             check and iterate directly.  */
          if (EDGE_COUNT (succe->dest->preds) > 1)
            {
              FOR_EACH_EDGE (e, ei, succe->dest->preds)
                if (e != succe
                    && ((e->flags & EDGE_EXECUTABLE)
                        || (!allow_back && (e->flags & EDGE_DFS_BACK))))
                  {
                    succe = NULL;
                    break;
                  }
            }
          if (succe)
            {
              bb2 = succe->dest;

              /* Re-do the dominance check with changed bb2.  */
              if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
                return true;
            }
        }
    }

  /* We could now iterate updating bb1 / bb2.  */
  return false;
}
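/* Sketch of the case this handles (illustrative): if bb2 has two
   successors but only bb3 is executable,

          bb2
         /   \
       bb3   bb4 (edge not executable)
         \   /
          bb1

   then a value recorded as valid "dominated by bb3" is also valid in
   bb1 even though bb3 does not dominate bb1 in the static CFG.
   Iterating bb1 to its single executable predecessor, or bb2 to its
   single executable successor, lets the plain dominator query
   succeed.  */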
/* Set the value number of FROM to TO, return true if it has changed
   as a result.  */

static inline bool
set_ssa_val_to (tree from, tree to)
{
  vn_ssa_aux_t from_info = VN_INFO (from);
  tree currval = from_info->valnum; // SSA_VAL (from)
  poly_int64 toff, coff;
  bool curr_undefined = false;
  bool curr_invariant = false;

  /* The only thing we allow as value numbers are ssa_names
     and invariants.  So assert that here.  We don't allow VN_TOP
     as visiting a stmt should produce a value-number other than
     that.
     ??? Still VN_TOP can happen for unreachable code, so force
     it to varying in that case.  Not all code is prepared to
     get VN_TOP on valueization.  */
  if (to == VN_TOP)
    {
      /* ??? When iterating and visiting PHI <undef, backedge-value>
         for the first time we rightfully get VN_TOP and we need to
         preserve that to optimize for example gcc.dg/tree-ssa/ssa-sccvn-2.c.
         With SCCVN we were simply lucky we iterated the other PHI
         cycles first and thus visited the backedge-value DEF.  */
      if (currval == VN_TOP)
        return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Forcing value number to varying on "
                 "receiving VN_TOP\n");
      to = from;
    }

  gcc_checking_assert (to != NULL_TREE
                       && ((TREE_CODE (to) == SSA_NAME
                            && (to == from || SSA_VAL (to) == to))
                           || is_gimple_min_invariant (to)));

  if (from != to)
    {
      if (currval == from)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Not changing value number of ");
              print_generic_expr (dump_file, from);
              fprintf (dump_file, " from VARYING to ");
              print_generic_expr (dump_file, to);
              fprintf (dump_file, "\n");
            }
          return false;
        }
      curr_invariant = is_gimple_min_invariant (currval);
      curr_undefined = (TREE_CODE (currval) == SSA_NAME
                        && ssa_undefined_value_p (currval, false));
      if (currval != VN_TOP
          && !curr_invariant
          && !curr_undefined
          && is_gimple_min_invariant (to))
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Forcing VARYING instead of changing "
                       "value number of ");
              print_generic_expr (dump_file, from);
              fprintf (dump_file, " from ");
              print_generic_expr (dump_file, currval);
              fprintf (dump_file, " (non-constant) to ");
              print_generic_expr (dump_file, to);
              fprintf (dump_file, " (constant)\n");
            }
          to = from;
        }
      else if (currval != VN_TOP
               && !curr_undefined
               && TREE_CODE (to) == SSA_NAME
               && ssa_undefined_value_p (to, false))
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Forcing VARYING instead of changing "
                       "value number of ");
              print_generic_expr (dump_file, from);
              fprintf (dump_file, " from ");
              print_generic_expr (dump_file, currval);
              fprintf (dump_file, " (non-undefined) to ");
              print_generic_expr (dump_file, to);
              fprintf (dump_file, " (undefined)\n");
            }
          to = from;
        }
      else if (TREE_CODE (to) == SSA_NAME
               && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
        to = from;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to);
    }

  if (currval != to
      && !operand_equal_p (currval, to, 0)
      /* Different undefined SSA names are not actually different.  See
         PR82320 for a testcase where we'd otherwise not terminate iteration.  */
      && !(curr_undefined
           && TREE_CODE (to) == SSA_NAME
           && ssa_undefined_value_p (to, false))
      /* ??? For addresses involving volatile objects or types operand_equal_p
         does not reliably detect ADDR_EXPRs as equal.  We know we are only
         getting invariant gimple addresses here, so can use
         get_addr_base_and_unit_offset to do this comparison.  */
      && !(TREE_CODE (currval) == ADDR_EXPR
           && TREE_CODE (to) == ADDR_EXPR
           && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
               == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
           && known_eq (coff, toff)))
    {
      if (to != from
          && currval != VN_TOP
          && !curr_undefined
          /* We do not want to allow lattice transitions from one value
             to another since that may lead to not terminating iteration
             (see PR95049).  Since there's no convenient way to check
             for the allowed transition of VAL -> PHI (loop entry value,
             same on two PHIs, to same PHI result) we restrict the check
             to invariants.  */
          && curr_invariant
          && is_gimple_min_invariant (to))
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, " forced VARYING");
          to = from;
        }
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " (changed)\n");
      from_info->valnum = to;
      return true;
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n");
  return false;
}
/* Set all definitions in STMT to value number themselves.
   Return true if a value number changed.  */

static bool
defs_to_varying (gimple *stmt)
{
  bool changed = false;
  ssa_op_iter iter;
  def_operand_p defp;

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);
      changed |= set_ssa_val_to (def, def);
    }
  return changed;
}
/* Visit a copy between LHS and RHS, return true if the value number
   changed.  */

static bool
visit_copy (tree lhs, tree rhs)
{
  /* Valueize.  */
  rhs = SSA_VAL (rhs);

  return set_ssa_val_to (lhs, rhs);
}
/* Lookup a value for OP in type WIDE_TYPE where the value in type of OP
   is the same.  */

static tree
valueized_wider_op (tree wide_type, tree op, bool allow_truncate)
{
  if (TREE_CODE (op) == SSA_NAME)
    op = vn_valueize (op);

  /* Either we have the op widened available.  */
  tree ops[3] = {};
  ops[0] = op;
  tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
                                       wide_type, ops, NULL);
  if (tem)
    return tem;

  /* Or the op is truncated from some existing value.  */
  if (allow_truncate && TREE_CODE (op) == SSA_NAME)
    {
      gimple *def = SSA_NAME_DEF_STMT (op);
      if (is_gimple_assign (def)
          && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
        {
          tem = gimple_assign_rhs1 (def);
          if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
            {
              if (TREE_CODE (tem) == SSA_NAME)
                tem = vn_valueize (tem);
              return tem;
            }
        }
    }

  /* For constants simply extend it.  */
  if (TREE_CODE (op) == INTEGER_CST)
    return wide_int_to_tree (wide_type, wi::to_wide (op));

  return NULL_TREE;
}
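/* For example (illustrative): looking up a value for a short s_1 in
   type int returns an existing i_2 = (int) s_1 if one is recorded in
   the nary table, the wider source w_3 if s_1 = (short) w_3 and
   truncation is allowed, or simply the extended constant when s_1
   valueizes to an INTEGER_CST.  */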
/* Visit a nary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_nary_op (tree lhs, gassign *stmt)
{
  vn_nary_op_t vnresult;
  tree result = vn_nary_op_lookup_stmt (stmt, &vnresult);
  if (! result && vnresult)
    result = vn_nary_op_get_predicated_value (vnresult, gimple_bb (stmt));
  if (result)
    return set_ssa_val_to (lhs, result);

  /* Do some special pattern matching for redundancies of operations
     in different types.  */
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  switch (code)
    {
    CASE_CONVERT:
      /* Match arithmetic done in a different type where we can easily
         substitute the result from some earlier sign-changed or widened
         operation.  */
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (rhs1) == SSA_NAME
          /* We only handle sign-changes, zero-extension -> & mask or
             sign-extension if we know the inner operation doesn't
             overflow.  */
          && (((TYPE_UNSIGNED (TREE_TYPE (rhs1))
                || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
                    && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
               && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
              || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
        {
          gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
          if (def
              && (gimple_assign_rhs_code (def) == PLUS_EXPR
                  || gimple_assign_rhs_code (def) == MINUS_EXPR
                  || gimple_assign_rhs_code (def) == MULT_EXPR))
            {
              tree ops[3] = {};
              /* When requiring a sign-extension we cannot model a
                 previous truncation with a single op so don't bother.  */
              bool allow_truncate = TYPE_UNSIGNED (TREE_TYPE (rhs1));
              /* Either we have the op widened available.  */
              ops[0] = valueized_wider_op (type, gimple_assign_rhs1 (def),
                                           allow_truncate);
              if (ops[0])
                ops[1] = valueized_wider_op (type, gimple_assign_rhs2 (def),
                                             allow_truncate);
              if (ops[0] && ops[1])
                {
                  ops[0] = vn_nary_op_lookup_pieces
                      (2, gimple_assign_rhs_code (def), type, ops, NULL);
                  /* We have wider operation available.  */
                  if (ops[0]
                      /* If the leader is a wrapping operation we can
                         insert it for code hoisting w/o introducing
                         undefined overflow.  If it is not it has to
                         be available.  See PR86554.  */
                      && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (ops[0]))
                          || (rpo_avail && vn_context_bb
                              && rpo_avail->eliminate_avail (vn_context_bb,
                                                             ops[0]))))
                    {
                      unsigned lhs_prec = TYPE_PRECISION (type);
                      unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
                      if (lhs_prec == rhs_prec
                          || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
                              && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
                        {
                          gimple_match_op match_op (gimple_match_cond::UNCOND,
                                                    NOP_EXPR, type, ops[0]);
                          result = vn_nary_build_or_lookup (&match_op);
                          if (result)
                            {
                              bool changed = set_ssa_val_to (lhs, result);
                              vn_nary_op_insert_stmt (stmt, result);
                              return changed;
                            }
                        }
                      else
                        {
                          tree mask = wide_int_to_tree
                              (type, wi::mask (rhs_prec, false, lhs_prec));
                          gimple_match_op match_op (gimple_match_cond::UNCOND,
                                                    BIT_AND_EXPR,
                                                    TREE_TYPE (lhs),
                                                    ops[0], mask);
                          result = vn_nary_build_or_lookup (&match_op);
                          if (result)
                            {
                              bool changed = set_ssa_val_to (lhs, result);
                              vn_nary_op_insert_stmt (stmt, result);
                              return changed;
                            }
                        }
                    }
                }
            }
        }
      break;
    case BIT_AND_EXPR:
      if (INTEGRAL_TYPE_P (type)
          && TREE_CODE (rhs1) == SSA_NAME
          && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST
          && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)
          && default_vn_walk_kind != VN_NOWALK
          && CHAR_BIT == 8
          && BITS_PER_UNIT == 8
          && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
          && !integer_all_onesp (gimple_assign_rhs2 (stmt))
          && !integer_zerop (gimple_assign_rhs2 (stmt)))
        {
          gassign *ass = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
          if (ass
              && !gimple_has_volatile_ops (ass)
              && vn_get_stmt_kind (ass) == VN_REFERENCE)
            {
              tree last_vuse = gimple_vuse (ass);
              tree op = gimple_assign_rhs1 (ass);
              tree result = vn_reference_lookup (op, gimple_vuse (ass),
                                                 default_vn_walk_kind,
                                                 NULL, true, &last_vuse,
                                                 gimple_assign_rhs2 (stmt));
              if (result
                  && useless_type_conversion_p (TREE_TYPE (result),
                                                TREE_TYPE (op)))
                return set_ssa_val_to (lhs, result);
            }
        }
      break;
    case TRUNC_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
        break;
      /* Fallthru.  */
    case RDIV_EXPR:
    case MULT_EXPR:
      /* Match up ([-]a){/,*}([-])b with v=a{/,*}b, replacing it with -v.  */
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (type))
        {
          tree rhs[2];
          rhs[0] = rhs1;
          rhs[1] = gimple_assign_rhs2 (stmt);
          for (unsigned i = 0; i <= 1; ++i)
            {
              unsigned j = i == 0 ? 1 : 0;
              tree ops[2];
              gimple_match_op match_op (gimple_match_cond::UNCOND,
                                        NEGATE_EXPR, type, rhs[i]);
              ops[i] = vn_nary_build_or_lookup_1 (&match_op, false, true);
              ops[j] = rhs[j];
              if (ops[i]
                  && (ops[0] = vn_nary_op_lookup_pieces (2, code,
                                                         type, ops, NULL)))
                {
                  gimple_match_op match_op (gimple_match_cond::UNCOND,
                                            NEGATE_EXPR, type, ops[0]);
                  result = vn_nary_build_or_lookup_1 (&match_op, true, false);
                  if (result)
                    {
                      bool changed = set_ssa_val_to (lhs, result);
                      vn_nary_op_insert_stmt (stmt, result);
                      return changed;
                    }
                }
            }
        }
      break;
    default:
      break;
    }

  bool changed = set_ssa_val_to (lhs, lhs);
  vn_nary_op_insert_stmt (stmt, lhs);
  return changed;
}
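/* The conversion case above catches redundancies like (illustrative)

     _1 = a_2 + b_3;            <unsigned short arithmetic>
     _4 = (unsigned int) _1;

   when _5 = (unsigned int) a_2; _6 = (unsigned int) b_3; _7 = _5 + _6;
   already exist: the zero-extension of the narrow sum equals the wide
   sum masked with 0xffff, so _4 can be value-numbered to _7 & 0xffff
   without redoing the arithmetic.  */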
/* Visit a call STMT storing into LHS.  Return true if the value number
   of the LHS has changed as a result.  */

static bool
visit_reference_op_call (tree lhs, gcall *stmt)
{
  bool changed = false;
  struct vn_reference_s vr1;
  vn_reference_t vnresult = NULL;
  tree vdef = gimple_vdef (stmt);
  modref_summary *summary;

  /* Non-ssa lhs is handled in copy_reference_ops_from_call.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    lhs = NULL_TREE;

  vn_reference_lookup_call (stmt, &vnresult, &vr1);

  /* If the lookup did not succeed for pure functions try to use
     modref info to find a candidate to CSE to.  */
  const unsigned accesses_limit = 8;
  if (!vnresult
      && !vdef
      && lhs
      && gimple_vuse (stmt)
      && (((summary = get_modref_function_summary (stmt, NULL))
           && !summary->global_memory_read
           && summary->load_accesses < accesses_limit)
          || gimple_call_flags (stmt) & ECF_CONST))
    {
      /* First search if we can do something useful and build a
         vector of all loads we have to check.  */
      bool unknown_memory_access = false;
      auto_vec<ao_ref, accesses_limit> accesses;
      unsigned load_accesses = summary ? summary->load_accesses : 0;
      if (!unknown_memory_access)
        /* Add loads done as part of setting up the call arguments.
           That's also necessary for CONST functions which will
           not have a modref summary.  */
        for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
          {
            tree arg = gimple_call_arg (stmt, i);
            if (TREE_CODE (arg) != SSA_NAME
                && !is_gimple_min_invariant (arg))
              {
                if (accesses.length () >= accesses_limit - load_accesses)
                  {
                    unknown_memory_access = true;
                    break;
                  }
                accesses.quick_grow (accesses.length () + 1);
                ao_ref_init (&accesses.last (), arg);
              }
          }
      if (summary && !unknown_memory_access)
        {
          /* Add loads as analyzed by IPA modref.  */
          for (auto base_node : summary->loads->bases)
            if (unknown_memory_access)
              break;
            else for (auto ref_node : base_node->refs)
              if (unknown_memory_access)
                break;
              else for (auto access_node : ref_node->accesses)
                {
                  accesses.quick_grow (accesses.length () + 1);
                  ao_ref *r = &accesses.last ();
                  if (!access_node.get_ao_ref (stmt, r))
                    {
                      /* Initialize a ref based on the argument and
                         unknown offset if possible.  */
                      tree arg = access_node.get_call_arg (stmt);
                      if (arg && TREE_CODE (arg) == SSA_NAME)
                        arg = SSA_VAL (arg);
                      if (arg
                          && TREE_CODE (arg) == ADDR_EXPR
                          && (arg = get_base_address (arg))
                          && DECL_P (arg))
                        ao_ref_init (r, arg);
                      else
                        {
                          unknown_memory_access = true;
                          break;
                        }
                    }
                  r->base_alias_set = base_node->base;
                  r->ref_alias_set = ref_node->ref;
                }
        }

      /* Walk the VUSE->VDEF chain optimistically trying to find an entry
         for the call in the hashtable.  */
      unsigned limit = (unknown_memory_access
                        ? 0
                        : (param_sccvn_max_alias_queries_per_access
                           / (accesses.length () + 1)));
      tree saved_vuse = vr1.vuse;
      hashval_t saved_hashcode = vr1.hashcode;
      while (limit > 0 && !vnresult && !SSA_NAME_IS_DEFAULT_DEF (vr1.vuse))
        {
          vr1.hashcode = vr1.hashcode - SSA_NAME_VERSION (vr1.vuse);
          gimple *def = SSA_NAME_DEF_STMT (vr1.vuse);
          /* ??? We could use fancy stuff like in walk_non_aliased_vuses, but
             do not bother for now.  */
          if (is_a <gphi *> (def))
            break;
          vr1.vuse = vuse_ssa_val (gimple_vuse (def));
          vr1.hashcode = vr1.hashcode + SSA_NAME_VERSION (vr1.vuse);
          vn_reference_lookup_1 (&vr1, &vnresult);
          limit--;
        }

      /* If we found a candidate to CSE to, verify it is valid.  */
      if (vnresult && !accesses.is_empty ())
        {
          tree vuse = vuse_ssa_val (gimple_vuse (stmt));
          while (vnresult && vuse != vr1.vuse)
            {
              gimple *def = SSA_NAME_DEF_STMT (vuse);
              for (auto &ref : accesses)
                {
                  /* ??? stmt_may_clobber_ref_p_1 does per stmt constant
                     analysis overhead that we might be able to cache.  */
                  if (stmt_may_clobber_ref_p_1 (def, &ref, true))
                    {
                      vnresult = NULL;
                      break;
                    }
                }
              vuse = vuse_ssa_val (gimple_vuse (def));
            }
        }
      vr1.vuse = saved_vuse;
      vr1.hashcode = saved_hashcode;
    }

  if (vnresult)
    {
      if (vnresult->result_vdef && vdef)
        changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
      else if (vdef)
        /* If the call was discovered to be pure or const reflect
           that as far as possible.  */
        changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));

      if (!vnresult->result && lhs)
        vnresult->result = lhs;

      if (vnresult->result && lhs)
        changed |= set_ssa_val_to (lhs, vnresult->result);
    }
  else
    {
      vn_reference_t vr2;
      vn_reference_s **slot;
      tree vdef_val = vdef;
      if (vdef)
        {
          /* If we value-numbered an indirect function's callee to
             one not clobbering memory, value-number its VDEF to its
             VUSE.  */
          tree fn = gimple_call_fn (stmt);
          if (fn && TREE_CODE (fn) == SSA_NAME)
            {
              fn = SSA_VAL (fn);
              if (TREE_CODE (fn) == ADDR_EXPR
                  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
                  && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
                      & (ECF_CONST | ECF_PURE)))
                vdef_val = vuse_ssa_val (gimple_vuse (stmt));
            }
          changed |= set_ssa_val_to (vdef, vdef_val);
        }
      if (lhs)
        changed |= set_ssa_val_to (lhs, lhs);
      vr2 = XOBNEW (&vn_tables_obstack, vn_reference_s);
      vr2->vuse = vr1.vuse;
      /* As we are not walking the virtual operand chain we know the
         shared_lookup_references are still original so we can re-use
         them here.  */
      vr2->operands = vr1.operands.copy ();
      vr2->type = vr1.type;
      vr2->punned = vr1.punned;
      vr2->set = vr1.set;
      vr2->base_set = vr1.base_set;
      vr2->hashcode = vr1.hashcode;
      vr2->result = lhs;
      vr2->result_vdef = vdef_val;
      slot = valid_info->references->find_slot_with_hash (vr2, vr2->hashcode,
                                                          INSERT);
      gcc_assert (!*slot);
      *slot = vr2;
      vr2->next = last_inserted_ref;
      last_inserted_ref = vr2;
    }

  return changed;
}
/* Visit a load from a reference operator RHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_load (tree lhs, tree op, gimple *stmt)
{
  bool changed = false;
  tree result;
  vn_reference_t res;

  tree vuse = gimple_vuse (stmt);
  tree last_vuse = vuse;
  result = vn_reference_lookup (op, vuse, default_vn_walk_kind, &res, true, &last_vuse);

  /* We handle type-punning through unions by value-numbering based
     on offset and size of the access.  Be prepared to handle a
     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
  if (result
      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
    {
      /* Avoid the type punning in case the result mode has padding where
         the op we lookup has not.  */
      if (maybe_lt (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (result))),
                    GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (op)))))
        result = NULL_TREE;
      else
        {
          /* We will be setting the value number of lhs to the value number
             of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
             So first simplify and lookup this expression to see if it
             is already available.  */
          gimple_match_op res_op (gimple_match_cond::UNCOND,
                                  VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
          result = vn_nary_build_or_lookup (&res_op);
          if (result
              && TREE_CODE (result) == SSA_NAME
              && VN_INFO (result)->needs_insertion)
            /* Track whether this is the canonical expression for different
               typed loads.  We use that as a stopgap measure for code
               hoisting when dealing with floating point loads.  */
            res->punned = true;
        }

      /* When building the conversion fails avoid inserting the reference
         again.  */
      if (!result)
        return set_ssa_val_to (lhs, lhs);
    }

  if (result)
    changed = set_ssa_val_to (lhs, result);
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
      if (vuse && SSA_VAL (last_vuse) != SSA_VAL (vuse))
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Using extra use virtual operand ");
              print_generic_expr (dump_file, last_vuse);
              fprintf (dump_file, "\n");
            }
          vn_reference_insert (op, lhs, vuse, NULL_TREE);
        }
    }

  return changed;
}
/* Visit a store to a reference operator LHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_store (tree lhs, tree op, gimple *stmt)
{
  bool changed = false;
  vn_reference_t vnresult = NULL;
  tree assign;
  bool resultsame = false;
  tree vuse = gimple_vuse (stmt);
  tree vdef = gimple_vdef (stmt);

  if (TREE_CODE (op) == SSA_NAME)
    op = SSA_VAL (op);

  /* First we want to lookup using the *vuses* from the store and see
     if the last store to this location with the same address had the
     same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store is the same as the
     last store to this location, then this store will produce the
     same memory state as that store.

     In this case the vdef versions for this store are value numbered to those
     vuse versions, since they represent the same memory state after
     this store.

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */

  vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
  if (vnresult
      && vnresult->result)
    {
      tree result = vnresult->result;
      gcc_checking_assert (TREE_CODE (result) != SSA_NAME
                           || result == SSA_VAL (result));
      resultsame = expressions_equal_p (result, op);
      if (resultsame)
        {
          /* If the TBAA state isn't compatible for downstream reads
             we cannot value-number the VDEFs the same.  */
          ao_ref lhs_ref;
          ao_ref_init (&lhs_ref, lhs);
          alias_set_type set = ao_ref_alias_set (&lhs_ref);
          alias_set_type base_set = ao_ref_base_alias_set (&lhs_ref);
          if ((vnresult->set != set
               && ! alias_set_subset_of (set, vnresult->set))
              || (vnresult->base_set != base_set
                  && ! alias_set_subset_of (base_set, vnresult->base_set)))
            resultsame = false;
        }
    }

  if (!resultsame)
    {
      /* Only perform the following when being called from PRE
         which embeds tail merging.  */
      if (default_vn_walk_kind == VN_WALK)
        {
          assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
          vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
          if (vnresult)
            {
              VN_INFO (vdef)->visited = true;
              return set_ssa_val_to (vdef, vnresult->result_vdef);
            }
        }

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "No store match\n");
          fprintf (dump_file, "Value numbering store ");
          print_generic_expr (dump_file, lhs);
          fprintf (dump_file, " to ");
          print_generic_expr (dump_file, op);
          fprintf (dump_file, "\n");
        }
      /* Have to set value numbers before insert, since insert is
         going to valueize the references in-place.  */
      if (vdef)
        changed |= set_ssa_val_to (vdef, vdef);

      /* Do not insert structure copies into the tables.  */
      if (is_gimple_min_invariant (op)
          || is_gimple_reg (op))
        vn_reference_insert (lhs, op, vdef, NULL);

      /* Only perform the following when being called from PRE
         which embeds tail merging.  */
      if (default_vn_walk_kind == VN_WALK)
        {
          assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
          vn_reference_insert (assign, lhs, vuse, vdef);
        }
    }
  else
    {
      /* We had a match, so value number the vdef to have the value
         number of the vuse it came from.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "Store matched earlier value, "
                 "value numbering store vdefs to matching vuses.\n");

      changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
    }

  return changed;
}
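/* Example of a store this makes redundant (illustrative):

     # .MEM_3 = VDEF <.MEM_2>
     *p_1 = 5;
     ...
     # .MEM_5 = VDEF <.MEM_4>
     *p_1 = 5;

   if the lookup shows the location still contains 5, with compatible
   TBAA, the second store changes nothing and .MEM_5 is value-numbered
   to .MEM_4, which lets later DSE/elimination remove the store.  */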
/* Visit and value number PHI, return true if the value number
   changed.  When BACKEDGES_VARYING_P is true then assume all
   backedge values are varying.  When INSERTED is not NULL then
   this is just an ahead query for a possible iteration, set INSERTED
   to true if we'd insert into the hashtable.  */

static bool
visit_phi (gimple *phi, bool *inserted, bool backedges_varying_p)
{
  tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
  tree backedge_val = NULL_TREE;
  bool seen_non_backedge = false;
  tree sameval_base = NULL_TREE;
  poly_int64 soff, doff;
  unsigned n_executable = 0;
  edge_iterator ei;
  edge e;

  /* TODO: We could check for this in initialization, and replace this
     with a gcc_assert.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
    return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));

  /* We track whether a PHI was CSEd to, to avoid excessive iterations
     that would be necessary only because the PHI changed arguments
     but not value.  */
  if (!inserted)
    gimple_set_plf (phi, GF_PLF_1, false);

  /* See if all non-TOP arguments have the same value.  TOP is
     equivalent to everything, so we can ignore it.  */
  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
    if (e->flags & EDGE_EXECUTABLE)
      {
        tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);

        if (def == PHI_RESULT (phi))
          continue;
        ++n_executable;
        if (TREE_CODE (def) == SSA_NAME)
          {
            if (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK))
              def = SSA_VAL (def);
            if (e->flags & EDGE_DFS_BACK)
              backedge_val = def;
          }
        if (!(e->flags & EDGE_DFS_BACK))
          seen_non_backedge = true;
        if (def == VN_TOP)
          ;
        /* Ignore undefined defs for sameval but record one.  */
        else if (TREE_CODE (def) == SSA_NAME
                 && ! virtual_operand_p (def)
                 && ssa_undefined_value_p (def, false))
          seen_undef = def;
        else if (sameval == VN_TOP)
          sameval = def;
        else if (!expressions_equal_p (def, sameval))
          {
            /* We know we're arriving only with invariant addresses here,
               try harder comparing them.  We can do some caching here
               which we cannot do in expressions_equal_p.  */
            if (TREE_CODE (def) == ADDR_EXPR
                && TREE_CODE (sameval) == ADDR_EXPR
                && sameval_base != (void *)-1)
              {
                if (!sameval_base)
                  sameval_base = get_addr_base_and_unit_offset
                                   (TREE_OPERAND (sameval, 0), &soff);
                if (!sameval_base)
                  sameval_base = (tree)(void *)-1;
                else if ((get_addr_base_and_unit_offset
                            (TREE_OPERAND (def, 0), &doff) == sameval_base)
                         && known_eq (soff, doff))
                  continue;
              }
            sameval = NULL_TREE;
            break;
          }
      }

  /* If the value we want to use is flowing over the backedge and we
     should take it as VARYING but it has a non-VARYING value drop to
     VARYING.
     If we value-number a virtual operand never value-number to the
     value from the backedge as that confuses the alias-walking code.
     See gcc.dg/torture/pr87176.c.  If the value is the same on a
     non-backedge everything is OK though.  */
  bool visited_p;
  if ((backedge_val
       && !seen_non_backedge
       && TREE_CODE (backedge_val) == SSA_NAME
       && sameval == backedge_val
       && (SSA_NAME_IS_VIRTUAL_OPERAND (backedge_val)
           || SSA_VAL (backedge_val) != backedge_val))
      /* Do not value-number a virtual operand to sth not visited though
         given that allows us to escape a region in alias walking.  */
      || (sameval
          && TREE_CODE (sameval) == SSA_NAME
          && !SSA_NAME_IS_DEFAULT_DEF (sameval)
          && SSA_NAME_IS_VIRTUAL_OPERAND (sameval)
          && (SSA_VAL (sameval, &visited_p), !visited_p)))
    /* Note this just drops to VARYING without inserting the PHI into
       the hashes.  */
    result = PHI_RESULT (phi);
  /* If none of the edges was executable keep the value-number at VN_TOP,
     if only a single edge is executable use its value.  */
  else if (n_executable <= 1)
    result = seen_undef ? seen_undef : sameval;
  /* If we saw only undefined values and VN_TOP use one of the
     undefined values.  */
  else if (sameval == VN_TOP)
    result = seen_undef ? seen_undef : sameval;
  /* First see if it is equivalent to a phi node in this block.  We prefer
     this as it allows IV elimination - see PRs 66502 and 67167.  */
  else if ((result = vn_phi_lookup (phi, backedges_varying_p)))
    {
      if (!inserted
          && TREE_CODE (result) == SSA_NAME
          && gimple_code (SSA_NAME_DEF_STMT (result)) == GIMPLE_PHI)
        {
          gimple_set_plf (SSA_NAME_DEF_STMT (result), GF_PLF_1, true);
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Marking CSEd to PHI node ");
              print_gimple_expr (dump_file, SSA_NAME_DEF_STMT (result),
                                 0, TDF_SLIM);
              fprintf (dump_file, "\n");
            }
        }
    }
  /* If all values are the same use that, unless we've seen undefined
     values as well and the value isn't constant.
     CCP/copyprop have the same restriction to not remove uninit warnings.  */
  else if (sameval
           && (! seen_undef || is_gimple_min_invariant (sameval)))
    result = sameval;
  else
    {
      result = PHI_RESULT (phi);
      /* Only insert PHIs that are varying, for constant value numbers
         we mess up equivalences otherwise as we are only comparing
         the immediate controlling predicates.  */
      vn_phi_insert (phi, result, backedges_varying_p);
      if (inserted)
        *inserted = true;
    }

  return set_ssa_val_to (PHI_RESULT (phi), result);
}
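/* Two common outcomes, sketched:  a PHI whose executable arguments all
   valueize to the same thing, e.g.  x_3 = PHI <5(bb2), 5(bb4)>, gets
   value number 5; a PHI that is congruent to another PHI via
   vn_phi_lookup is CSEd to it, which is what enables induction
   variable elimination (PRs 66502 and 67167).  */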
/* Try to simplify RHS using equivalences and constant folding.  */

static tree
try_to_simplify (gassign *stmt)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree tem;

  /* For stores we can end up simplifying a SSA_NAME rhs.  Just return
     in this case, there is no point in doing extra work.  */
  if (code == SSA_NAME)
    return NULL_TREE;

  /* First try constant folding based on our current lattice.  */
  mprts_hook = vn_lookup_simplify_result;
  tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
  mprts_hook = NULL;
  if (tem
      && (TREE_CODE (tem) == SSA_NAME
          || is_gimple_min_invariant (tem)))
    return tem;

  return NULL_TREE;
}
/* Visit and value number STMT, return true if the value number
   changed.  */

static bool
visit_stmt (gimple *stmt, bool backedges_varying_p = false)
{
  bool changed = false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbering stmt = ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  if (gimple_code (stmt) == GIMPLE_PHI)
    changed = visit_phi (stmt, NULL, backedges_varying_p);
  else if (gimple_has_volatile_ops (stmt))
    changed = defs_to_varying (stmt);
  else if (gassign *ass = dyn_cast <gassign *> (stmt))
    {
      enum tree_code code = gimple_assign_rhs_code (ass);
      tree lhs = gimple_assign_lhs (ass);
      tree rhs1 = gimple_assign_rhs1 (ass);
      tree simplified;

      /* Shortcut for copies.  Simplifying copies is pointless,
         since we copy the expression and value they represent.  */
      if (code == SSA_NAME
          && TREE_CODE (lhs) == SSA_NAME)
        {
          changed = visit_copy (lhs, rhs1);
          goto done;
        }
      simplified = try_to_simplify (ass);
      if (simplified)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "RHS ");
              print_gimple_expr (dump_file, ass, 0);
              fprintf (dump_file, " simplified to ");
              print_generic_expr (dump_file, simplified);
              fprintf (dump_file, "\n");
            }
        }
      /* Setting value numbers to constants will occasionally
         screw up phi congruence because constants are not
         uniquely associated with a single ssa name that can be
         looked up.  */
      if (simplified
          && is_gimple_min_invariant (simplified)
          && TREE_CODE (lhs) == SSA_NAME)
        {
          changed = set_ssa_val_to (lhs, simplified);
          goto done;
        }
      else if (simplified
               && TREE_CODE (simplified) == SSA_NAME
               && TREE_CODE (lhs) == SSA_NAME)
        {
          changed = visit_copy (lhs, simplified);
          goto done;
        }

      if ((TREE_CODE (lhs) == SSA_NAME
           /* We can substitute SSA_NAMEs that are live over
              abnormal edges with their constant value.  */
           && !(gimple_assign_copy_p (ass)
                && is_gimple_min_invariant (rhs1))
           && !(simplified
                && is_gimple_min_invariant (simplified))
           && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
          /* Stores or copies from SSA_NAMEs that are live over
             abnormal edges are a problem.  */
          || (code == SSA_NAME
              && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
        changed = defs_to_varying (ass);
      else if (REFERENCE_CLASS_P (lhs)
               || DECL_P (lhs))
        changed = visit_reference_op_store (lhs, rhs1, ass);
      else if (TREE_CODE (lhs) == SSA_NAME)
        {
          if ((gimple_assign_copy_p (ass)
               && is_gimple_min_invariant (rhs1))
              || (simplified
                  && is_gimple_min_invariant (simplified)))
            {
              if (simplified)
                changed = set_ssa_val_to (lhs, simplified);
              else
                changed = set_ssa_val_to (lhs, rhs1);
            }
          else
            {
              /* Visit the original statement.  */
              switch (vn_get_stmt_kind (ass))
                {
                case VN_NARY:
                  changed = visit_nary_op (lhs, ass);
                  break;
                case VN_REFERENCE:
                  changed = visit_reference_op_load (lhs, rhs1, ass);
                  break;
                default:
                  changed = defs_to_varying (ass);
                  break;
                }
            }
        }
      else
        changed = defs_to_varying (ass);
    }
  else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      tree lhs = gimple_call_lhs (call_stmt);
      if (lhs && TREE_CODE (lhs) == SSA_NAME)
        {
          /* Try constant folding based on our current lattice.  */
          tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
                                                            vn_valueize);
          if (simplified)
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                {
                  fprintf (dump_file, "call ");
                  print_gimple_expr (dump_file, call_stmt, 0);
                  fprintf (dump_file, " simplified to ");
                  print_generic_expr (dump_file, simplified);
                  fprintf (dump_file, "\n");
                }
            }
          /* Setting value numbers to constants will occasionally
             screw up phi congruence because constants are not
             uniquely associated with a single ssa name that can be
             looked up.  */
          if (simplified
              && is_gimple_min_invariant (simplified))
            {
              changed = set_ssa_val_to (lhs, simplified);
              if (gimple_vdef (call_stmt))
                changed |= set_ssa_val_to (gimple_vdef (call_stmt),
                                           SSA_VAL (gimple_vuse (call_stmt)));
              goto done;
            }
          else if (simplified
                   && TREE_CODE (simplified) == SSA_NAME)
            {
              changed = visit_copy (lhs, simplified);
              if (gimple_vdef (call_stmt))
                changed |= set_ssa_val_to (gimple_vdef (call_stmt),
                                           SSA_VAL (gimple_vuse (call_stmt)));
              goto done;
            }
          else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
            {
              changed = defs_to_varying (call_stmt);
              goto done;
            }
        }

      /* Pick up flags from a devirtualization target.  */
      tree fn = gimple_call_fn (stmt);
      int extra_fnflags = 0;
      if (fn && TREE_CODE (fn) == SSA_NAME)
        {
          fn = SSA_VAL (fn);
          if (TREE_CODE (fn) == ADDR_EXPR
              && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
            extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
        }
      if ((/* Calls to the same function with the same vuse
              and the same operands do not necessarily return the same
              value, unless they're pure or const.  */
           ((gimple_call_flags (call_stmt) | extra_fnflags)
            & (ECF_PURE | ECF_CONST))
           /* If calls have a vdef, subsequent calls won't have
              the same incoming vuse.  So, if 2 calls with vdef have the
              same vuse, we know they're not subsequent.
              We can value number 2 calls to the same function with the
              same vuse and the same operands which are not subsequent
              the same, because there is no code in the program that can
              compare the 2 values...  */
           || (gimple_vdef (call_stmt)
               /* ... unless the call returns a pointer which does
                  not alias with anything else.  In which case the
                  information that the values are distinct are encoded
                  in the IL.  */
               && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
               /* Only perform the following when being called from PRE
                  which embeds tail merging.  */
               && default_vn_walk_kind == VN_WALK))
          /* Do not process .DEFERRED_INIT since that confuses uninit
             analysis.  */
          && !gimple_call_internal_p (call_stmt, IFN_DEFERRED_INIT))
        changed = visit_reference_op_call (lhs, call_stmt);
      else
        changed = defs_to_varying (call_stmt);
    }
  else
    changed = defs_to_varying (stmt);
done:
  return changed;
}
/* Allocate a value number table.  */

static void
allocate_vn_table (vn_tables_t table, unsigned size)
{
  table->phis = new vn_phi_table_type (size);
  table->nary = new vn_nary_op_table_type (size);
  table->references = new vn_reference_table_type (size);
}

/* Free a value number table.  */

static void
free_vn_table (vn_tables_t table)
{
  /* Walk over elements and release vectors.  */
  vn_reference_iterator_type hir;
  vn_reference_t vr;
  FOR_EACH_HASH_TABLE_ELEMENT (*table->references, vr, vn_reference_t, hir)
    vr->operands.release ();
  delete table->phis;
  table->phis = NULL;
  delete table->nary;
  table->nary = NULL;
  delete table->references;
  table->references = NULL;
}
/* Set *ID according to RESULT.  */

static void
set_value_id_for_result (tree result, unsigned int *id)
{
  if (result && TREE_CODE (result) == SSA_NAME)
    *id = VN_INFO (result)->value_id;
  else if (result && is_gimple_min_invariant (result))
    *id = get_or_alloc_constant_value_id (result);
  else
    *id = get_next_value_id ();
}

/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  vn_nary_op_iterator_type hin;
  vn_phi_iterator_type hip;
  vn_reference_iterator_type hir;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     table.  */

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
    if (! vno->predicated_values)
      set_value_id_for_result (vno->u.result, &vno->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
    set_value_id_for_result (vp->result, &vp->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
                               hir)
    set_value_id_for_result (vr->result, &vr->value_id);
}
/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the maximum constant value id we have ever seen.  */

unsigned int
get_max_constant_value_id (void)
{
  return -next_constant_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  gcc_checking_assert ((int)next_value_id > 0);
  return next_value_id++;
}

/* Return the next unique value id for constants.  */

unsigned int
get_next_constant_value_id (void)
{
  gcc_checking_assert (next_constant_value_id < 0);
  return next_constant_value_id--;
}
/* Compare two expressions E1 and E2 and return true if they are equal.
   If MATCH_VN_TOP_OPTIMISTICALLY is true then VN_TOP is equal to anything,
   otherwise VN_TOP only matches VN_TOP.  */

bool
expressions_equal_p (tree e1, tree e2, bool match_vn_top_optimistically)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If either one is VN_TOP consider them equal.  */
  if (match_vn_top_optimistically
      && (e1 == VN_TOP || e2 == VN_TOP))
    return true;

  /* SSA_NAMEs compare pointer-equal.  */
  if (TREE_CODE (e1) == SSA_NAME || TREE_CODE (e2) == SSA_NAME)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}
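/* Note that SSA_NAMEs deliberately only compare pointer-equal here:
   two distinct names are distinct values even if their defining
   statements look alike.  So, for instance, two separately built
   INTEGER_CSTs of value 42 compare equal via operand_equal_p, while
   a_1 and b_2 never do.  */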
/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
        {
          honor_nans = flag_trapping_math && !flag_finite_math_only;
          honor_snans = flag_signaling_nans != 0;
        }
      else if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_TRAPS (type))
        honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
                                       honor_trapv, honor_nans, honor_snans,
                                       rhs2, &handled);
  if (handled && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}
/* Return true if the reference operation REF may trap.  */

bool
vn_reference_may_trap (vn_reference_t ref)
{
  switch (ref->operands[0].opcode)
    {
    case MODIFY_EXPR:
    case CALL_EXPR:
      /* We do not handle calls.  */
      return true;
    case ADDR_EXPR:
      /* And toplevel address computations never trap.  */
      return false;
    default:;
    }

  vn_reference_op_t op;
  unsigned i;
  FOR_EACH_VEC_ELT (ref->operands, i, op)
    {
      switch (op->opcode)
        {
        case WITH_SIZE_EXPR:
        case TARGET_MEM_REF:
          /* Always variable.  */
          return true;
        case COMPONENT_REF:
          if (op->op1 && TREE_CODE (op->op1) == SSA_NAME)
            return true;
          break;
        case ARRAY_RANGE_REF:
          if (TREE_CODE (op->op0) == SSA_NAME)
            return true;
          break;
        case ARRAY_REF:
          {
            if (TREE_CODE (op->op0) != INTEGER_CST)
              return true;

            /* !in_array_bounds   */
            tree domain_type = TYPE_DOMAIN (ref->operands[i+1].type);
            if (!domain_type)
              return true;

            tree min = op->op1;
            tree max = TYPE_MAX_VALUE (domain_type);
            if (!min
                || !max
                || TREE_CODE (min) != INTEGER_CST
                || TREE_CODE (max) != INTEGER_CST)
              return true;

            if (tree_int_cst_lt (op->op0, min)
                || tree_int_cst_lt (max, op->op0))
              return true;

            break;
          }
        case MEM_REF:
          /* Nothing interesting in itself, the base is separate.  */
          break;
        /* The following are the address bases.  */
        case SSA_NAME:
          return false;
        case ADDR_EXPR:
          if (op->op0)
            return tree_could_trap_p (TREE_OPERAND (op->op0, 0));
          return false;
        default:;
        }
    }
  return false;
}
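/* For instance (illustrative): a[i_1] with an SSA index may trap,
   while a[3] for  int a[10]  is provably in bounds via the
   TYPE_DOMAIN [0, 9] and does not; plain *p dereferences are judged
   via the separately handled address base.  */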
eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
                                            bitmap inserted_exprs_)
  : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
    el_todo (0), eliminations (0), insertions (0),
    inserted_exprs (inserted_exprs_)
{
  need_eh_cleanup = BITMAP_ALLOC (NULL);
  need_ab_cleanup = BITMAP_ALLOC (NULL);
}

eliminate_dom_walker::~eliminate_dom_walker ()
{
  BITMAP_FREE (need_eh_cleanup);
  BITMAP_FREE (need_ab_cleanup);
}
/* Return a leader for OP that is available at the current point of the
   eliminate domwalk.  */

tree
eliminate_dom_walker::eliminate_avail (basic_block, tree op)
{
  tree valnum = VN_INFO (op)->valnum;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (valnum))
        return valnum;
      if (avail.length () > SSA_NAME_VERSION (valnum))
        return avail[SSA_NAME_VERSION (valnum)];
    }
  else if (is_gimple_min_invariant (valnum))
    return valnum;
  return NULL_TREE;
}
/* At the current point of the eliminate domwalk make OP available.  */

void
eliminate_dom_walker::eliminate_push_avail (basic_block, tree op)
{
  tree valnum = VN_INFO (op)->valnum;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (avail.length () <= SSA_NAME_VERSION (valnum))
        avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1, true);
      tree pushop = op;
      if (avail[SSA_NAME_VERSION (valnum)])
        pushop = avail[SSA_NAME_VERSION (valnum)];
      avail_stack.safe_push (pushop);
      avail[SSA_NAME_VERSION (valnum)] = op;
    }
}
/* Insert the expression recorded by SCCVN for VAL at *GSI.  Returns
   the leader for the expression if insertion was successful.  */

tree
eliminate_dom_walker::eliminate_insert (basic_block bb,
					gimple_stmt_iterator *gsi, tree val)
{
  /* We can insert a sequence with a single assignment only.  */
  gimple_seq stmts = VN_INFO (val)->expr;
  if (!gimple_seq_singleton_p (stmts))
    return NULL_TREE;
  gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
  if (!stmt
      || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
	  && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
	  && gimple_assign_rhs_code (stmt) != NEGATE_EXPR
	  && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
	  && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
	      || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
    return NULL_TREE;

  tree op = gimple_assign_rhs1 (stmt);
  if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
      || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
    op = TREE_OPERAND (op, 0);
  tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (bb, op) : op;
  if (!leader)
    return NULL_TREE;

  tree res;
  stmts = NULL;
  if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
    res = gimple_build (&stmts, BIT_FIELD_REF,
			TREE_TYPE (val), leader,
			TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
			TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
  else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
    res = gimple_build (&stmts, BIT_AND_EXPR,
			TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
  else
    res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
			TREE_TYPE (val), leader);
  if (TREE_CODE (res) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (res)
      || gimple_bb (SSA_NAME_DEF_STMT (res)))
    {
      gimple_seq_discard (stmts);

      /* During propagation we have to treat SSA info conservatively
	 and thus we can end up simplifying the inserted expression
	 at elimination time to something not defined in stmts.  */
      /* But then this is a redundancy we failed to detect.  Which means
	 res now has two values.  That doesn't play well with how
	 we track availability here, so give up.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  if (TREE_CODE (res) == SSA_NAME)
	    res = eliminate_avail (bb, res);
	  if (res)
	    {
	      fprintf (dump_file, "Failed to insert expression for value ");
	      print_generic_expr (dump_file, val);
	      fprintf (dump_file, " which is really fully redundant to ");
	      print_generic_expr (dump_file, res);
	      fprintf (dump_file, "\n");
	    }
	}

      return NULL_TREE;
    }
  else
    {
      gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
      vn_ssa_aux_t vn_info = VN_INFO (res);
      vn_info->valnum = val;
      vn_info->visited = true;
    }

  insertions++;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inserted ");
      print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
    }

  return res;
}
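/* For instance (illustrative GIMPLE): if VAL was recorded with the
   single-assignment expression  _7 = (unsigned int) _3  and _3 has an
   available leader t_9, the sequence inserted before *GSI amounts to
     newtmp_11 = (unsigned int) t_9;
   and newtmp_11 becomes the leader for VAL.  */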
void
eliminate_dom_walker::eliminate_stmt (basic_block b, gimple_stmt_iterator *gsi)
{
  tree sprime = NULL_TREE;
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_get_lhs (stmt);
  if (lhs && TREE_CODE (lhs) == SSA_NAME
      && !gimple_has_volatile_ops (stmt)
      /* See PR43491.  Do not replace a global register variable when
	 it is the RHS of an assignment.  Do replace local register
	 variables since gcc does not guarantee a local variable will
	 be allocated in a register.
	 ???  The fix isn't effective here.  This should instead
	 be ensured by not value-numbering them the same but treating
	 them like volatiles?  */
      && !(gimple_assign_single_p (stmt)
	   && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
	       && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
	       && is_global_var (gimple_assign_rhs1 (stmt)))))
    {
      sprime = eliminate_avail (b, lhs);
      if (!sprime)
	{
	  /* If there is no existing usable leader but SCCVN thinks
	     it has an expression it wants to use as replacement,
	     insert that.  */
	  tree val = VN_INFO (lhs)->valnum;
	  vn_ssa_aux_t vn_info;
	  if (val != VN_TOP
	      && TREE_CODE (val) == SSA_NAME
	      && (vn_info = VN_INFO (val), true)
	      && vn_info->needs_insertion
	      && vn_info->expr != NULL
	      && (sprime = eliminate_insert (b, gsi, val)) != NULL_TREE)
	    eliminate_push_avail (b, sprime);
	}

      /* If this now constitutes a copy duplicate points-to
	 and range info appropriately.  This is especially
	 important for inserted code.  See tree-ssa-copy.c
	 for similar code.  */
      if (sprime
	  && TREE_CODE (sprime) == SSA_NAME)
	{
	  basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
	  if (POINTER_TYPE_P (TREE_TYPE (lhs))
	      && SSA_NAME_PTR_INFO (lhs)
	      && ! SSA_NAME_PTR_INFO (sprime))
	    {
	      duplicate_ssa_name_ptr_info (sprime,
					   SSA_NAME_PTR_INFO (lhs));
	      if (b != sprime_b)
		reset_flow_sensitive_info (sprime);
	    }
	  else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
		   && SSA_NAME_RANGE_INFO (lhs)
		   && ! SSA_NAME_RANGE_INFO (sprime)
		   && b == sprime_b)
	    duplicate_ssa_name_range_info (sprime,
					   SSA_NAME_RANGE_TYPE (lhs),
					   SSA_NAME_RANGE_INFO (lhs));
	}

      /* Inhibit the use of an inserted PHI on a loop header when
	 the address of the memory reference is a simple induction
	 variable.  In other cases the vectorizer won't do anything
	 anyway (either it's loop invariant or a complicated
	 expression).  */
      if (sprime
	  && TREE_CODE (sprime) == SSA_NAME
	  && do_pre
	  && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
	  && loop_outer (b->loop_father)
	  && has_zero_uses (sprime)
	  && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
	  && gimple_assign_load_p (stmt))
	{
	  gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
	  basic_block def_bb = gimple_bb (def_stmt);
	  if (gimple_code (def_stmt) == GIMPLE_PHI
	      && def_bb->loop_father->header == def_bb)
	    {
	      loop_p loop = def_bb->loop_father;
	      ssa_op_iter iter;
	      tree op;
	      bool found = false;
	      FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
		{
		  affine_iv iv;
		  def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
		  if (def_bb
		      && flow_bb_inside_loop_p (loop, def_bb)
		      && simple_iv (loop, loop, op, &iv, true))
		    {
		      found = true;
		      break;
		    }
		}
	      if (found)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Not replacing ");
		      print_gimple_expr (dump_file, stmt, 0);
		      fprintf (dump_file, " with ");
		      print_generic_expr (dump_file, sprime);
		      fprintf (dump_file, " which would add a loop"
			       " carried dependence to loop %d\n",
			       loop->num);
		    }
		  /* Don't keep sprime available.  */
		  sprime = NULL_TREE;
		}
	    }
	}

      if (sprime)
	{
	  /* If we can propagate the value computed for LHS into
	     all uses don't bother doing anything with this stmt.  */
	  if (may_propagate_copy (lhs, sprime))
	    {
	      /* Mark it for removal.  */
	      to_remove.safe_push (stmt);

	      /* ???  Don't count copy/constant propagations.  */
	      if (gimple_assign_single_p (stmt)
		  && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
		      || gimple_assign_rhs1 (stmt) == sprime))
		return;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "Replaced ");
		  print_gimple_expr (dump_file, stmt, 0);
		  fprintf (dump_file, " with ");
		  print_generic_expr (dump_file, sprime);
		  fprintf (dump_file, " in all uses of ");
		  print_gimple_stmt (dump_file, stmt, 0);
		}

	      eliminations++;
	      return;
	    }

	  /* If this is an assignment from our leader (which
	     happens in the case the value-number is a constant)
	     then there is nothing to do.  Likewise if we run into
	     inserted code that needed a conversion because of
	     our type-agnostic value-numbering of loads.  */
	  if ((gimple_assign_single_p (stmt)
	       || (is_gimple_assign (stmt)
		   && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
		       || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)))
	      && sprime == gimple_assign_rhs1 (stmt))
	    return;

	  /* Else replace its RHS.  */
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Replaced ");
	      print_gimple_expr (dump_file, stmt, 0);
	      fprintf (dump_file, " with ");
	      print_generic_expr (dump_file, sprime);
	      fprintf (dump_file, " in ");
	      print_gimple_stmt (dump_file, stmt, 0);
	    }
	  eliminations++;

	  bool can_make_abnormal_goto = (is_gimple_call (stmt)
					 && stmt_can_make_abnormal_goto (stmt));
	  gimple *orig_stmt = stmt;
	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
					  TREE_TYPE (sprime)))
	    {
	      /* We preserve conversions to but not from function or method
		 types.  This asymmetry makes it necessary to re-instantiate
		 conversions here.  */
	      if (POINTER_TYPE_P (TREE_TYPE (lhs))
		  && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (lhs))))
		sprime = fold_convert (TREE_TYPE (lhs), sprime);
	      else
		gcc_unreachable ();
	    }
	  tree vdef = gimple_vdef (stmt);
	  tree vuse = gimple_vuse (stmt);
	  propagate_tree_value_into_stmt (gsi, sprime);
	  stmt = gsi_stmt (*gsi);
	  update_stmt (stmt);
	  /* In case the VDEF on the original stmt was released, value-number
	     it to the VUSE.  This is to make vuse_ssa_val able to skip
	     released virtual operands.  */
	  if (vdef != gimple_vdef (stmt))
	    {
	      gcc_assert (SSA_NAME_IN_FREE_LIST (vdef));
	      VN_INFO (vdef)->valnum = vuse;
	    }

	  /* If we removed EH side-effects from the statement, clean
	     its EH information.  */
	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
	    {
	      bitmap_set_bit (need_eh_cleanup,
			      gimple_bb (stmt)->index);
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "  Removed EH side-effects.\n");
	    }

	  /* Likewise for AB side-effects.  */
	  if (can_make_abnormal_goto
	      && !stmt_can_make_abnormal_goto (stmt))
	    {
	      bitmap_set_bit (need_ab_cleanup,
			      gimple_bb (stmt)->index);
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "  Removed AB side-effects.\n");
	    }

	  return;
	}
    }

  /* If the statement is a scalar store, see if the expression
     has the same value number as its rhs.  If so, the store is
     dead.  */
  if (gimple_assign_single_p (stmt)
      && !gimple_has_volatile_ops (stmt)
      && !is_gimple_reg (gimple_assign_lhs (stmt))
      && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
	  || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
    {
      tree rhs = gimple_assign_rhs1 (stmt);
      vn_reference_t vnresult;
      /* ???  gcc.dg/torture/pr91445.c shows that we lookup a boolean
	 typed load of a byte known to be 0x11 as 1 so a store of
	 a boolean 1 is detected as redundant.  Because of this we
	 have to make sure to lookup with a ref where its size
	 matches the precision.  */
      tree lookup_lhs = lhs;
      if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
	  && (TREE_CODE (lhs) != COMPONENT_REF
	      || !DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
	  && !type_has_mode_precision_p (TREE_TYPE (lhs)))
	{
	  if (TREE_CODE (lhs) == COMPONENT_REF
	      || TREE_CODE (lhs) == MEM_REF)
	    {
	      tree ltype = build_nonstandard_integer_type
			     (TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (lhs))),
			      TYPE_UNSIGNED (TREE_TYPE (lhs)));
	      if (TREE_CODE (lhs) == COMPONENT_REF)
		{
		  tree foff = component_ref_field_offset (lhs);
		  tree f = TREE_OPERAND (lhs, 1);
		  if (!poly_int_tree_p (foff))
		    lookup_lhs = NULL_TREE;
		  else
		    lookup_lhs = build3 (BIT_FIELD_REF, ltype,
					 TREE_OPERAND (lhs, 0),
					 TYPE_SIZE (TREE_TYPE (lhs)),
					 bit_from_pos
					   (foff, DECL_FIELD_BIT_OFFSET (f)));
		}
	      else
		lookup_lhs = build2 (MEM_REF, ltype,
				     TREE_OPERAND (lhs, 0),
				     TREE_OPERAND (lhs, 1));
	    }
	  else
	    lookup_lhs = NULL_TREE;
	}
      tree val = NULL_TREE;
      if (lookup_lhs)
	val = vn_reference_lookup (lookup_lhs, gimple_vuse (stmt),
				   VN_WALKREWRITE, &vnresult, false);
      if (TREE_CODE (rhs) == SSA_NAME)
	rhs = VN_INFO (rhs)->valnum;
      if (val
	  && (operand_equal_p (val, rhs, 0)
	      /* Due to the bitfield lookups above we can get bit
		 interpretations of the same RHS as values here.  Those
		 are redundant as well.  */
	      || (TREE_CODE (val) == SSA_NAME
		  && gimple_assign_single_p (SSA_NAME_DEF_STMT (val))
		  && (val = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (val)))
		  && TREE_CODE (val) == VIEW_CONVERT_EXPR
		  && TREE_OPERAND (val, 0) == rhs)))
	{
	  /* We can only remove the later store if the former aliases
	     at least all accesses the later one does or if the store
	     was to readonly memory storing the same value.  */
	  ao_ref lhs_ref;
	  ao_ref_init (&lhs_ref, lhs);
	  alias_set_type set = ao_ref_alias_set (&lhs_ref);
	  alias_set_type base_set = ao_ref_base_alias_set (&lhs_ref);
	  if (! vnresult
	      || ((vnresult->set == set
		   || alias_set_subset_of (set, vnresult->set))
		  && (vnresult->base_set == base_set
		      || alias_set_subset_of (base_set, vnresult->base_set))))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "Deleted redundant store ");
		  print_gimple_stmt (dump_file, stmt, 0);
		}

	      /* Queue stmt for removal.  */
	      to_remove.safe_push (stmt);
	      return;
	    }
	}
    }

  /* If this is a control statement for which value numbering left edges
     unexecuted, force the condition in a way consistent with that.  */
  if (gcond *cond = dyn_cast <gcond *> (stmt))
    {
      if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
	  ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Removing unexecutable edge from ");
	      print_gimple_stmt (dump_file, stmt, 0);
	    }
	  if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
	      == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
	    gimple_cond_make_true (cond);
	  else
	    gimple_cond_make_false (cond);
	  update_stmt (cond);
	  el_todo |= TODO_cleanup_cfg;
	  return;
	}
    }

  bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
  bool was_noreturn = (is_gimple_call (stmt)
		       && gimple_call_noreturn_p (stmt));
  tree vdef = gimple_vdef (stmt);
  tree vuse = gimple_vuse (stmt);

  /* If we didn't replace the whole stmt (or propagate the result
     into all uses), replace all uses on this stmt with their
     leaders.  */
  bool modified = false;
  use_operand_p use_p;
  ssa_op_iter iter;
  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    {
      tree use = USE_FROM_PTR (use_p);
      /* ???  The call code above leaves stmt operands un-updated.  */
      if (TREE_CODE (use) != SSA_NAME)
	continue;
      tree sprime;
      if (SSA_NAME_IS_DEFAULT_DEF (use))
	/* ???  For default defs BB shouldn't matter, but we have to
	   solve the inconsistency between rpo eliminate and
	   dom eliminate avail valueization first.  */
	sprime = eliminate_avail (b, use);
      else
	/* Look for something available at the definition block of the
	   argument.  This avoids inconsistencies between availability
	   there which decides if the stmt can be removed and availability
	   at the use site.  The SSA property ensures that things available
	   at the definition are also available at uses.  */
	sprime = eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (use)), use);
      if (sprime && sprime != use
	  && may_propagate_copy (use, sprime, true)
	  /* We substitute into debug stmts to avoid excessive
	     debug temporaries created by removed stmts, but we need
	     to avoid doing so for inserted sprimes as we never want
	     to create debug temporaries for them.  */
	  && (!inserted_exprs
	      || TREE_CODE (sprime) != SSA_NAME
	      || !is_gimple_debug (stmt)
	      || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
	{
	  propagate_value (use_p, sprime);
	  modified = true;
	}
    }

  /* Fold the stmt if modified, this canonicalizes MEM_REFs we propagated
     into which is a requirement for the IPA devirt machinery.  */
  gimple *old_stmt = stmt;
  if (modified)
    {
      /* If a formerly non-invariant ADDR_EXPR is turned into an
	 invariant one it was on a separate stmt.  */
      if (gimple_assign_single_p (stmt)
	  && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
	recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
      gimple_stmt_iterator prev = *gsi;
      gsi_prev (&prev);
      if (fold_stmt (gsi, follow_all_ssa_edges))
	{
	  /* fold_stmt may have created new stmts in between
	     the previous stmt and the folded stmt.  Mark
	     all defs created there as varying to not confuse
	     the SCCVN machinery as we're using that even during
	     elimination.  */
	  if (gsi_end_p (prev))
	    prev = gsi_start_bb (b);
	  else
	    gsi_next (&prev);
	  if (gsi_stmt (prev) != gsi_stmt (*gsi))
	    do
	      {
		tree def;
		ssa_op_iter dit;
		FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
					   dit, SSA_OP_ALL_DEFS)
		  /* As existing DEFs may move between stmts
		     only process new ones.  */
		  if (! has_VN_INFO (def))
		    {
		      vn_ssa_aux_t vn_info = VN_INFO (def);
		      vn_info->valnum = def;
		      vn_info->visited = true;
		    }
		if (gsi_stmt (prev) == gsi_stmt (*gsi))
		  break;
		gsi_next (&prev);
	      }
	    while (1);
	}
      stmt = gsi_stmt (*gsi);
      /* In case we folded the stmt away schedule the NOP for removal.  */
      if (gimple_nop_p (stmt))
	to_remove.safe_push (stmt);
    }

  /* Visit indirect calls and turn them into direct calls if
     possible using the devirtualization machinery.  Do this before
     checking for required EH/abnormal/noreturn cleanup as devirt
     may expose more of those.  */
  if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      tree fn = gimple_call_fn (call_stmt);
      if (fn
	  && flag_devirtualize
	  && virtual_method_call_p (fn))
	{
	  tree otr_type = obj_type_ref_class (fn);
	  unsigned HOST_WIDE_INT otr_tok
	    = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
	  tree instance;
	  ipa_polymorphic_call_context context (current_function_decl,
						fn, stmt, &instance);
	  context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
				    otr_type, stmt, NULL);
	  bool final;
	  vec <cgraph_node *> targets
	    = possible_polymorphic_call_targets (obj_type_ref_class (fn),
						 otr_tok, context, &final);
	  if (dump_file)
	    dump_possible_polymorphic_call_targets (dump_file,
						    obj_type_ref_class (fn),
						    otr_tok, context);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree fn;
	      if (targets.length () == 1)
		fn = targets[0]->decl;
	      else
		fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
	      if (dump_enabled_p ())
		{
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
				   "converting indirect call to "
				   "function %s\n",
				   lang_hooks.decl_printable_name (fn, 2));
		}
	      gimple_call_set_fndecl (call_stmt, fn);
	      /* If changing the call to __builtin_unreachable
		 or similar noreturn function, adjust gimple_call_fntype
		 too.  */
	      if (gimple_call_noreturn_p (call_stmt)
		  && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
		  && TYPE_ARG_TYPES (TREE_TYPE (fn))
		  && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
		      == void_type_node))
		gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
	      maybe_remove_unused_call_args (cfun, call_stmt);
	      modified = true;
	    }
	}
    }

  if (modified)
    {
      /* When changing a call into a noreturn call, cfg cleanup
	 is needed to fix up the noreturn call.  */
      if (!was_noreturn
	  && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
	to_fixup.safe_push (stmt);
      /* When changing a condition or switch into one we know what
	 edge will be executed, schedule a cfg cleanup.  */
      if ((gimple_code (stmt) == GIMPLE_COND
	   && (gimple_cond_true_p (as_a <gcond *> (stmt))
	       || gimple_cond_false_p (as_a <gcond *> (stmt))))
	  || (gimple_code (stmt) == GIMPLE_SWITCH
	      && TREE_CODE (gimple_switch_index
			      (as_a <gswitch *> (stmt))) == INTEGER_CST))
	el_todo |= TODO_cleanup_cfg;
      /* If we removed EH side-effects from the statement, clean
	 its EH information.  */
      if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
	{
	  bitmap_set_bit (need_eh_cleanup,
			  gimple_bb (stmt)->index);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "  Removed EH side-effects.\n");
	}
      /* Likewise for AB side-effects.  */
      if (can_make_abnormal_goto
	  && !stmt_can_make_abnormal_goto (stmt))
	{
	  bitmap_set_bit (need_ab_cleanup,
			  gimple_bb (stmt)->index);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "  Removed AB side-effects.\n");
	}
      update_stmt (stmt);
      /* In case the VDEF on the original stmt was released, value-number
	 it to the VUSE.  This is to make vuse_ssa_val able to skip
	 released virtual operands.  */
      if (vdef && SSA_NAME_IN_FREE_LIST (vdef))
	VN_INFO (vdef)->valnum = vuse;
    }

  /* Make new values available - for fully redundant LHS we
     continue with the next stmt above and skip this.  */
  def_operand_p defp;
  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
    eliminate_push_avail (b, DEF_FROM_PTR (defp));
}
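/* Example of the redundant-store elimination above (illustrative GIMPLE):
     *p_1 = x_2;
     ...            <- no intervening aliasing store
     *p_1 = x_2;    <- vn_reference_lookup of *p_1 yields x_2's value,
		       so the second store is queued in to_remove.  */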
/* Perform elimination for the basic-block B during the domwalk.  */

edge
eliminate_dom_walker::before_dom_children (basic_block b)
{
  /* Mark new bb.  */
  avail_stack.safe_push (NULL_TREE);

  /* Skip unreachable blocks marked unreachable during the SCCVN domwalk.  */
  if (!(b->flags & BB_EXECUTABLE))
    return NULL;

  vn_context_bb = b;

  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
    {
      gphi *phi = gsi.phi ();
      tree res = PHI_RESULT (phi);

      if (virtual_operand_p (res))
	{
	  gsi_next (&gsi);
	  continue;
	}

      tree sprime = eliminate_avail (b, res);
      if (sprime
	  && sprime != res)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Replaced redundant PHI node defining ");
	      print_generic_expr (dump_file, res);
	      fprintf (dump_file, " with ");
	      print_generic_expr (dump_file, sprime);
	      fprintf (dump_file, "\n");
	    }

	  /* If we inserted this PHI node ourself, it's not an elimination.  */
	  if (! inserted_exprs
	      || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
	    eliminations++;

	  /* If we will propagate into all uses don't bother to do
	     anything.  */
	  if (may_propagate_copy (res, sprime))
	    {
	      /* Mark the PHI for removal.  */
	      to_remove.safe_push (phi);
	      gsi_next (&gsi);
	      continue;
	    }

	  remove_phi_node (&gsi, false);

	  if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
	    sprime = fold_convert (TREE_TYPE (res), sprime);
	  gimple *stmt = gimple_build_assign (res, sprime);
	  gimple_stmt_iterator gsi2 = gsi_after_labels (b);
	  gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
	  continue;
	}

      eliminate_push_avail (b, res);
      gsi_next (&gsi);
    }

  for (gimple_stmt_iterator gsi = gsi_start_bb (b);
       !gsi_end_p (gsi); gsi_next (&gsi))
    eliminate_stmt (b, &gsi);

  /* Replace destination PHI arguments.  */
  edge e;
  edge_iterator ei;
  FOR_EACH_EDGE (e, ei, b->succs)
    if (e->flags & EDGE_EXECUTABLE)
      for (gphi_iterator gsi = gsi_start_phis (e->dest);
	   !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
	  tree arg = USE_FROM_PTR (use_p);
	  if (TREE_CODE (arg) != SSA_NAME
	      || virtual_operand_p (arg))
	    continue;
	  tree sprime = eliminate_avail (b, arg);
	  if (sprime && may_propagate_copy (arg, sprime))
	    propagate_value (use_p, sprime);
	}

  vn_context_bb = NULL;

  return NULL;
}
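/* Illustrative example: a PHI  res_4 = PHI <x_1(2), x_1(3)>  value-numbers
   to x_1; if x_1 is available at B the PHI is eliminated, either by
   propagating x_1 into all uses of res_4 or, when propagation is not
   possible, by a copy  res_4 = x_1  inserted after the labels of B.  */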
/* Make no longer available leaders no longer available.  */

void
eliminate_dom_walker::after_dom_children (basic_block)
{
  tree entry;
  while ((entry = avail_stack.pop ()) != NULL_TREE)
    {
      tree valnum = VN_INFO (entry)->valnum;
      tree old = avail[SSA_NAME_VERSION (valnum)];
      if (old == entry)
	avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
      else
	avail[SSA_NAME_VERSION (valnum)] = entry;
    }
}
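/* Popping until the NULL_TREE marker undoes exactly the
   eliminate_push_avail calls done while processing this block: entries
   whose previous leader was saved on the stack are restored to it,
   entries without a previous leader are cleared.  */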
/* Remove queued stmts and perform delayed cleanups.  */

unsigned
eliminate_dom_walker::eliminate_cleanup (bool region_p)
{
  statistics_counter_event (cfun, "Eliminated", eliminations);
  statistics_counter_event (cfun, "Insertions", insertions);

  /* We cannot remove stmts during BB walk, especially not release SSA
     names there as this confuses the VN machinery.  The stmts ending
     up in to_remove are either stores or simple copies.
     Remove stmts in reverse order to make debug stmt creation possible.  */
  while (!to_remove.is_empty ())
    {
      bool do_release_defs = true;
      gimple *stmt = to_remove.pop ();

      /* When we are value-numbering a region we do not require exit PHIs to
	 be present so we have to make sure to deal with uses outside of the
	 region of stmts that we thought are eliminated.
	 ??? Note we may be confused by uses in dead regions we didn't run
	 elimination on.  Rather than checking individual uses we accept
	 dead copies to be generated here (gcc.c-torture/execute/20060905-1.c
	 contains such example).  */
      if (region_p)
	{
	  if (gphi *phi = dyn_cast <gphi *> (stmt))
	    {
	      tree lhs = gimple_phi_result (phi);
	      if (!has_zero_uses (lhs))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "Keeping eliminated stmt live "
			     "as copy because of out-of-region uses\n");
		  tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
		  gimple *copy = gimple_build_assign (lhs, sprime);
		  gimple_stmt_iterator gsi
		    = gsi_after_labels (gimple_bb (stmt));
		  gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
		  do_release_defs = false;
		}
	    }
	  else if (tree lhs = gimple_get_lhs (stmt))
	    if (TREE_CODE (lhs) == SSA_NAME
		&& !has_zero_uses (lhs))
	      {
		if (dump_file && (dump_flags & TDF_DETAILS))
		  fprintf (dump_file, "Keeping eliminated stmt live "
			   "as copy because of out-of-region uses\n");
		tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
		gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
		if (is_gimple_assign (stmt))
		  {
		    gimple_assign_set_rhs_from_tree (&gsi, sprime);
		    stmt = gsi_stmt (gsi);
		    update_stmt (stmt);
		    if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
		      bitmap_set_bit (need_eh_cleanup,
				      gimple_bb (stmt)->index);
		    continue;
		  }
		else
		  {
		    gimple *copy = gimple_build_assign (lhs, sprime);
		    gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
		    do_release_defs = false;
		  }
	      }
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Removing dead stmt ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_NONE);
	}

      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      if (gimple_code (stmt) == GIMPLE_PHI)
	remove_phi_node (&gsi, do_release_defs);
      else
	{
	  basic_block bb = gimple_bb (stmt);
	  unlink_stmt_vdef (stmt);
	  if (gsi_remove (&gsi, true))
	    bitmap_set_bit (need_eh_cleanup, bb->index);
	  if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
	    bitmap_set_bit (need_ab_cleanup, bb->index);
	  if (do_release_defs)
	    release_defs (stmt);
	}

      /* Removing a stmt may expose a forwarder block.  */
      el_todo |= TODO_cleanup_cfg;
    }

  /* Fixup stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the dominator walk.  Do this
     in reverse order so we don't inadvertently remove a stmt we want to
     fixup by visiting a dominating now noreturn call first.  */
  while (!to_fixup.is_empty ())
    {
      gimple *stmt = to_fixup.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Fixing up noreturn call ");
	  print_gimple_stmt (dump_file, stmt, 0);
	}

      if (fixup_noreturn_call (stmt))
	el_todo |= TODO_cleanup_cfg;
    }

  bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
  bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);

  if (do_eh_cleanup)
    gimple_purge_all_dead_eh_edges (need_eh_cleanup);

  if (do_ab_cleanup)
    gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);

  if (do_eh_cleanup || do_ab_cleanup)
    el_todo |= TODO_cleanup_cfg;

  return el_todo;
}
/* Eliminate fully redundant computations.  */

unsigned
eliminate_with_rpo_vn (bitmap inserted_exprs)
{
  eliminate_dom_walker walker (CDI_DOMINATORS, inserted_exprs);

  eliminate_dom_walker *saved_rpo_avail = rpo_avail;
  rpo_avail = &walker;
  walker.walk (cfun->cfg->x_entry_block_ptr);
  rpo_avail = saved_rpo_avail;

  return walker.eliminate_cleanup ();
}

static unsigned
do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
	   bool iterate, bool eliminate);
void
run_rpo_vn (vn_lookup_kind kind)
{
  default_vn_walk_kind = kind;
  do_rpo_vn (cfun, NULL, NULL, true, false);

  /* ???  Prune requirement of these.  */
  constant_to_value_id = new hash_table<vn_constant_hasher> (23);

  /* Initialize the value ids and prune out remaining VN_TOPs
     from dead code.  */
  tree name;
  unsigned i;
  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      vn_ssa_aux_t info = VN_INFO (name);
      if (!info->visited
	  || info->valnum == VN_TOP)
	info->valnum = name;
      if (info->valnum == name)
	info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
	info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate.  */
  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      vn_ssa_aux_t info = VN_INFO (name);
      if (TREE_CODE (info->valnum) == SSA_NAME
	  && info->valnum != name
	  && info->value_id != VN_INFO (info->valnum)->value_id)
	info->value_id = VN_INFO (info->valnum)->value_id;
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      FOR_EACH_SSA_NAME (i, name, cfun)
	{
	  if (VN_INFO (name)->visited
	      && SSA_VAL (name) != name)
	    {
	      print_generic_expr (dump_file, name);
	      fprintf (dump_file, " = ");
	      print_generic_expr (dump_file, SSA_VAL (name));
	      fprintf (dump_file, " (%04d)\n", VN_INFO (name)->value_id);
	    }
	}
    }
}
/* Free VN associated data structures.  */

void
free_rpo_vn (void)
{
  free_vn_table (valid_info);
  XDELETE (valid_info);
  obstack_free (&vn_tables_obstack, NULL);
  obstack_free (&vn_tables_insert_obstack, NULL);

  vn_ssa_aux_iterator_type it;
  vn_ssa_aux_t info;
  FOR_EACH_HASH_TABLE_ELEMENT (*vn_ssa_aux_hash, info, vn_ssa_aux_t, it)
    if (info->needs_insertion)
      release_ssa_name (info->name);
  obstack_free (&vn_ssa_aux_obstack, NULL);
  delete vn_ssa_aux_hash;

  delete constant_to_value_id;
  constant_to_value_id = NULL;
}
/* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables.  */

static tree
vn_lookup_simplify_result (gimple_match_op *res_op)
{
  if (!res_op->code.is_tree_code ())
    return NULL_TREE;
  tree *ops = res_op->ops;
  unsigned int length = res_op->num_ops;
  if (res_op->code == CONSTRUCTOR
      /* ???  We're arriving here with SCCVNs view, decomposed CONSTRUCTOR
	 and GIMPLEs / match-and-simplifies, CONSTRUCTOR as GENERIC tree.  */
      && TREE_CODE (res_op->ops[0]) == CONSTRUCTOR)
    {
      length = CONSTRUCTOR_NELTS (res_op->ops[0]);
      ops = XALLOCAVEC (tree, length);
      for (unsigned i = 0; i < length; ++i)
	ops[i] = CONSTRUCTOR_ELT (res_op->ops[0], i)->value;
    }
  vn_nary_op_t vnresult = NULL;
  tree res = vn_nary_op_lookup_pieces (length, (tree_code) res_op->code,
				       res_op->type, ops, &vnresult);
  /* If this is used from expression simplification make sure to
     return an available expression.  */
  if (res && TREE_CODE (res) == SSA_NAME && mprts_hook && rpo_avail)
    res = rpo_avail->eliminate_avail (vn_context_bb, res);
  return res;
}
/* Return a leader for OPs value that is valid at BB.  */

tree
rpo_elim::eliminate_avail (basic_block bb, tree op)
{
  bool visited;
  tree valnum = SSA_VAL (op, &visited);
  /* If we didn't visit OP then it must be defined outside of the
     region we process and also dominate it.  So it is available.  */
  if (!visited)
    return op;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (valnum))
	return valnum;
      vn_avail *av = VN_INFO (valnum)->avail;
      if (!av)
	return NULL_TREE;
      if (av->location == bb->index)
	/* On tramp3d 90% of the cases are here.  */
	return ssa_name (av->leader);
      do
	{
	  basic_block abb = BASIC_BLOCK_FOR_FN (cfun, av->location);
	  /* ???  During elimination we have to use availability at the
	     definition site of a use we try to replace.  This
	     is required to not run into inconsistencies because
	     of dominated_by_p_w_unex behavior and removing a definition
	     while not replacing all uses.
	     ???  We could try to consistently walk dominators
	     ignoring non-executable regions.  The nearest common
	     dominator of bb and abb is where we can stop walking.  We
	     may also be able to "pre-compute" (bits of) the next immediate
	     (non-)dominator during the RPO walk when marking edges as
	     executable.  */
	  if (dominated_by_p_w_unex (bb, abb, true))
	    {
	      tree leader = ssa_name (av->leader);
	      /* Prevent eliminations that break loop-closed SSA.  */
	      if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
		  && ! SSA_NAME_IS_DEFAULT_DEF (leader)
		  && ! flow_bb_inside_loop_p (gimple_bb (SSA_NAME_DEF_STMT
							   (leader))->loop_father,
					      bb))
		return NULL_TREE;
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  print_generic_expr (dump_file, leader);
		  fprintf (dump_file, " is available for ");
		  print_generic_expr (dump_file, valnum);
		  fprintf (dump_file, "\n");
		}
	      /* On tramp3d 99% of the _remaining_ cases succeed at
		 the first entry.  */
	      return leader;
	    }
	  /* ???  Can we somehow skip to the immediate dominator
	     RPO index (bb_to_rpo)?  Again, maybe not worth it, on
	     tramp3d the worst number of elements in the vector is 9.  */
	  av = av->next;
	}
      while (av);
    }
  else if (valnum != VN_TOP)
    /* valnum is is_gimple_min_invariant.  */
    return valnum;
  return NULL_TREE;
}
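/* The avail chain of a value is ordered most-recently-pushed first, so
   the common case of a leader registered in BB itself is caught before
   any dominance query; only otherwise do we walk the chain testing
   dominance (with non-executable regions taken into account).  */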
/* Make LEADER a leader for its value at BB.  */

void
rpo_elim::eliminate_push_avail (basic_block bb, tree leader)
{
  tree valnum = VN_INFO (leader)->valnum;
  if (valnum == VN_TOP
      || is_gimple_min_invariant (valnum))
    return;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Making available beyond BB%d ", bb->index);
      print_generic_expr (dump_file, leader);
      fprintf (dump_file, " for value ");
      print_generic_expr (dump_file, valnum);
      fprintf (dump_file, "\n");
    }
  vn_ssa_aux_t value = VN_INFO (valnum);
  vn_avail *av;
  if (m_avail_freelist)
    {
      av = m_avail_freelist;
      m_avail_freelist = m_avail_freelist->next;
    }
  else
    av = XOBNEW (&vn_ssa_aux_obstack, vn_avail);
  av->location = bb->index;
  av->leader = SSA_NAME_VERSION (leader);
  av->next = value->avail;
  av->next_undo = last_pushed_avail;
  last_pushed_avail = value;
  value->avail = av;
}
/* Valueization hook for RPO VN plus required state.  */

tree
rpo_vn_valueize (tree name)
{
  if (TREE_CODE (name) == SSA_NAME)
    {
      vn_ssa_aux_t val = VN_INFO (name);
      if (val)
	{
	  tree tem = val->valnum;
	  if (tem != VN_TOP && tem != name)
	    {
	      if (TREE_CODE (tem) != SSA_NAME)
		return tem;
	      /* For all values we only valueize to an available leader
		 which means we can use SSA name info without restriction.  */
	      tem = rpo_avail->eliminate_avail (vn_context_bb, tem);
	      if (tem)
		return tem;
	    }
	}
    }
  return name;
}
/* Insert on PRED_E predicates derived from CODE OPS being true besides the
   inverted condition.  */

static void
insert_related_predicates_on_edge (enum tree_code code, tree *ops, edge pred_e)
{
  switch (code)
    {
    case LT_EXPR:
      /* a < b -> a {!,<}= b */
      vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
					   ops, boolean_true_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, LE_EXPR, boolean_type_node,
					   ops, boolean_true_node, 0, pred_e);
      /* a < b -> ! a {>,=} b */
      vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
					   ops, boolean_false_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
					   ops, boolean_false_node, 0, pred_e);
      break;
    case GT_EXPR:
      /* a > b -> a {!,>}= b */
      vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
					   ops, boolean_true_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, GE_EXPR, boolean_type_node,
					   ops, boolean_true_node, 0, pred_e);
      /* a > b -> ! a {<,=} b */
      vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
					   ops, boolean_false_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
					   ops, boolean_false_node, 0, pred_e);
      break;
    case EQ_EXPR:
      /* a == b -> ! a {<,>} b */
      vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
					   ops, boolean_false_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
					   ops, boolean_false_node, 0, pred_e);
      break;
    case LE_EXPR:
    case GE_EXPR:
    case NE_EXPR:
      /* Nothing besides inverted condition.  */
      break;
    default:;
    }
}
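/* Example: for  if (a_1 < b_2)  the true edge gets a_1 != b_2 and
   a_1 <= b_2 recorded as true and a_1 > b_2 and a_1 == b_2 recorded as
   false, so a dominated  if (a_1 != b_2)  can later be simplified via a
   predicated value lookup.  */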
/* Main stmt worker for RPO VN, process BB.  */

static unsigned
process_bb (rpo_elim &avail, basic_block bb,
	    bool bb_visited, bool iterate_phis, bool iterate, bool eliminate,
	    bool do_region, bitmap exit_bbs, bool skip_phis)
{
  unsigned todo = 0;
  edge e;
  edge_iterator ei;

  vn_context_bb = bb;

  /* If we are in loop-closed SSA preserve this state.  This is
     relevant when called on regions from outside of FRE/PRE.  */
  bool lc_phi_nodes = false;
  if (!skip_phis
      && loops_state_satisfies_p (LOOP_CLOSED_SSA))
    FOR_EACH_EDGE (e, ei, bb->preds)
      if (e->src->loop_father != e->dest->loop_father
	  && flow_loop_nested_p (e->dest->loop_father,
				 e->src->loop_father))
	{
	  lc_phi_nodes = true;
	  break;
	}

  /* When we visit a loop header substitute into loop info.  */
  if (!iterate && eliminate && bb->loop_father->header == bb)
    {
      /* Keep fields in sync with substitute_in_loop_info.  */
      if (bb->loop_father->nb_iterations)
	bb->loop_father->nb_iterations
	  = simplify_replace_tree (bb->loop_father->nb_iterations,
				   NULL_TREE, NULL_TREE, &vn_valueize_for_srt);
    }

  /* Value-number all defs in the basic-block.  */
  if (!skip_phis)
    for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	 gsi_next (&gsi))
      {
	gphi *phi = gsi.phi ();
	tree res = PHI_RESULT (phi);
	vn_ssa_aux_t res_info = VN_INFO (res);
	if (!bb_visited)
	  {
	    gcc_assert (!res_info->visited);
	    res_info->valnum = VN_TOP;
	    res_info->visited = true;
	  }

	/* When not iterating force backedge values to varying.  */
	visit_stmt (phi, !iterate_phis);
	if (virtual_operand_p (res))
	  continue;

	/* Eliminate.  */
	/* The interesting case is gcc.dg/tree-ssa/pr22230.c for correctness
	   how we handle backedges and availability.
	   And gcc.dg/tree-ssa/ssa-sccvn-2.c for optimization.  */
	tree val = res_info->valnum;
	if (res != val && !iterate && eliminate)
	  {
	    if (tree leader = avail.eliminate_avail (bb, res))
	      {
		if (leader != res
		    /* Preserve loop-closed SSA form.  */
		    && (! lc_phi_nodes
			|| is_gimple_min_invariant (leader)))
		  {
		    if (dump_file && (dump_flags & TDF_DETAILS))
		      {
			fprintf (dump_file, "Replaced redundant PHI node "
				 "defining ");
			print_generic_expr (dump_file, res);
			fprintf (dump_file, " with ");
			print_generic_expr (dump_file, leader);
			fprintf (dump_file, "\n");
		      }
		    avail.eliminations++;

		    if (may_propagate_copy (res, leader))
		      {
			/* Schedule for removal.  */
			avail.to_remove.safe_push (phi);
			continue;
		      }
		    /* ???  Else generate a copy stmt.  */
		  }
	      }
	  }
	/* Only make defs available that not already are.  But make
	   sure loop-closed SSA PHI node defs are picked up for
	   downstream uses.  */
	if (lc_phi_nodes
	    || res == val
	    || ! avail.eliminate_avail (bb, res))
	  avail.eliminate_push_avail (bb, res);
      }

  /* For empty BBs mark outgoing edges executable.  For non-empty BBs
     we do this when processing the last stmt as we have to do this
     before elimination which otherwise forces GIMPLE_CONDs to
     if (1 != 0) style when seeing non-executable edges.  */
  if (gsi_end_p (gsi_start_bb (bb)))
    FOR_EACH_EDGE (e, ei, bb->succs)
      {
	if (!(e->flags & EDGE_EXECUTABLE))
	  {
	    if (dump_file && (dump_flags & TDF_DETAILS))
	      fprintf (dump_file,
		       "marking outgoing edge %d -> %d executable\n",
		       e->src->index, e->dest->index);
	    e->flags |= EDGE_EXECUTABLE;
	    e->dest->flags |= BB_EXECUTABLE;
	  }
	else if (!(e->dest->flags & BB_EXECUTABLE))
	  {
	    if (dump_file && (dump_flags & TDF_DETAILS))
	      fprintf (dump_file,
		       "marking destination block %d reachable\n",
		       e->dest->index);
	    e->dest->flags |= BB_EXECUTABLE;
	  }
      }
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      ssa_op_iter i;
      tree op;
      if (!bb_visited)
	{
	  FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
	    {
	      vn_ssa_aux_t op_info = VN_INFO (op);
	      gcc_assert (!op_info->visited);
	      op_info->valnum = VN_TOP;
	      op_info->visited = true;
	    }

	  /* We somehow have to deal with uses that are not defined
	     in the processed region.  Forcing unvisited uses to
	     varying here doesn't play well with def-use following during
	     expression simplification, so we deal with this by checking
	     the visited flag in SSA_VAL.  */
	}

      visit_stmt (gsi_stmt (gsi));

      gimple *last = gsi_stmt (gsi);
      e = NULL;
      switch (gimple_code (last))
	{
	case GIMPLE_SWITCH:
	  e = find_taken_edge (bb, vn_valueize (gimple_switch_index
						  (as_a <gswitch *> (last))));
	  break;
	case GIMPLE_COND:
	  {
	    tree lhs = vn_valueize (gimple_cond_lhs (last));
	    tree rhs = vn_valueize (gimple_cond_rhs (last));
	    tree val = gimple_simplify (gimple_cond_code (last),
					boolean_type_node, lhs, rhs,
					NULL, vn_valueize);
	    /* If the condition didn't simplify, see if we have recorded
	       an expression from edges taken so far.  */
	    if (! val || TREE_CODE (val) != INTEGER_CST)
	      {
		vn_nary_op_t vnresult;
		tree ops[2];
		ops[0] = lhs;
		ops[1] = rhs;
		val = vn_nary_op_lookup_pieces (2, gimple_cond_code (last),
						boolean_type_node, ops,
						&vnresult);
		/* Did we get a predicated value?  */
		if (! val && vnresult && vnresult->predicated_values)
		  {
		    val = vn_nary_op_get_predicated_value (vnresult, bb);
		    if (val && dump_file && (dump_flags & TDF_DETAILS))
		      {
			fprintf (dump_file, "Got predicated value ");
			print_generic_expr (dump_file, val, TDF_NONE);
			fprintf (dump_file, " for ");
			print_gimple_stmt (dump_file, last, TDF_SLIM);
		      }
		  }
	      }
	    if (val)
	      e = find_taken_edge (bb, val);
	    if (! e)
	      {
		/* If we didn't manage to compute the taken edge then
		   push predicated expressions for the condition itself
		   and related conditions to the hashtables.  This allows
		   simplification of redundant conditions which is
		   important as early cleanup.  */
		edge true_e, false_e;
		extract_true_false_edges_from_block (bb, &true_e, &false_e);
		enum tree_code code = gimple_cond_code (last);
		enum tree_code icode
		  = invert_tree_comparison (code, HONOR_NANS (lhs));
		tree ops[2];
		ops[0] = lhs;
		ops[1] = rhs;
		if (do_region
		    && bitmap_bit_p (exit_bbs, true_e->dest->index))
		  true_e = NULL;
		if (do_region
		    && bitmap_bit_p (exit_bbs, false_e->dest->index))
		  false_e = NULL;
		if (true_e)
		  vn_nary_op_insert_pieces_predicated
		    (2, code, boolean_type_node, ops,
		     boolean_true_node, 0, true_e);
		if (false_e)
		  vn_nary_op_insert_pieces_predicated
		    (2, code, boolean_type_node, ops,
		     boolean_false_node, 0, false_e);
		if (icode != ERROR_MARK)
		  {
		    if (true_e)
		      vn_nary_op_insert_pieces_predicated
			(2, icode, boolean_type_node, ops,
			 boolean_false_node, 0, true_e);
		    if (false_e)
		      vn_nary_op_insert_pieces_predicated
			(2, icode, boolean_type_node, ops,
			 boolean_true_node, 0, false_e);
		  }
		/* Relax for non-integers, inverted condition handled
		   above.  */
		if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
		  {
		    if (true_e)
		      insert_related_predicates_on_edge (code, ops, true_e);
		    if (false_e)
		      insert_related_predicates_on_edge (icode, ops, false_e);
		  }
	      }
	    break;
	  }
	case GIMPLE_GOTO:
	  e = find_taken_edge (bb, vn_valueize (gimple_goto_dest (last)));
	  break;
	default:
	  e = NULL;
	}
      if (e)
	{
	  todo = TODO_cleanup_cfg;
	  if (!(e->flags & EDGE_EXECUTABLE))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file,
			 "marking known outgoing %sedge %d -> %d executable\n",
			 e->flags & EDGE_DFS_BACK ? "back-" : "",
			 e->src->index, e->dest->index);
	      e->flags |= EDGE_EXECUTABLE;
	      e->dest->flags |= BB_EXECUTABLE;
	    }
	  else if (!(e->dest->flags & BB_EXECUTABLE))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file,
			 "marking destination block %d reachable\n",
			 e->dest->index);
	      e->dest->flags |= BB_EXECUTABLE;
	    }
	}
      else if (gsi_one_before_end_p (gsi))
	{
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    {
	      if (!(e->flags & EDGE_EXECUTABLE))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file,
			     "marking outgoing edge %d -> %d executable\n",
			     e->src->index, e->dest->index);
		  e->flags |= EDGE_EXECUTABLE;
		  e->dest->flags |= BB_EXECUTABLE;
		}
	      else if (!(e->dest->flags & BB_EXECUTABLE))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file,
			     "marking destination block %d reachable\n",
			     e->dest->index);
		  e->dest->flags |= BB_EXECUTABLE;
		}
	    }
	}

      /* Eliminate.  That also pushes to avail.  */
      if (eliminate && ! iterate)
	avail.eliminate_stmt (bb, &gsi);
      else
	/* If not eliminating, make all not already available defs
	   available.  */
	FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_DEF)
	  if (! avail.eliminate_avail (bb, op))
	    avail.eliminate_push_avail (bb, op);
    }

  /* Eliminate in destination PHI arguments.  Always substitute in dest
     PHIs, even for non-executable edges.  This handles region
     exit PHIs.  */
  if (!iterate && eliminate)
    FOR_EACH_EDGE (e, ei, bb->succs)
      for (gphi_iterator gsi = gsi_start_phis (e->dest);
	   !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
	  tree arg = USE_FROM_PTR (use_p);
	  if (TREE_CODE (arg) != SSA_NAME
	      || virtual_operand_p (arg))
	    continue;
	  tree sprime;
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      sprime = SSA_VAL (arg);
	      gcc_assert (TREE_CODE (sprime) != SSA_NAME
			  || SSA_NAME_IS_DEFAULT_DEF (sprime));
	    }
	  else
	    /* Look for something available at the definition block of the
	       argument.  This avoids inconsistencies between availability
	       there which decides if the stmt can be removed and
	       availability at the use site.  The SSA property ensures that
	       things available at the definition are also available at
	       uses.  */
	    sprime = avail.eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (arg)),
					    arg);
	  if (sprime
	      && sprime != arg
	      && may_propagate_copy (arg, sprime))
	    propagate_value (use_p, sprime);
	}

  vn_context_bb = NULL;
  return todo;
}
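/* Example of the predicated-value lookup above (illustrative GIMPLE):
   for  if (i_1 < n_2)  in a block reached only via edges on which
   i_1 < n_2 was recorded as true, vn_nary_op_get_predicated_value
   yields boolean_true_node, the taken edge is computed and the false
   edge is never marked executable.  */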
/* Unwind state per basic-block.  */

struct unwind_state
{
  /* Times this block has been visited.  */
  unsigned visited;
  /* Whether to handle this as iteration point or whether to treat
     incoming backedge PHI values as varying.  */
  bool iterate;
  /* Maximum RPO index this block is reachable from.  */
  int max_rpo;
  /* Unwind state.  */
  void *ob_top;
  vn_reference_t ref_top;
  vn_phi_t phi_top;
  vn_nary_op_t nary_top;
  vn_avail *avail_top;
};
/* Unwind the RPO VN state for iteration.  */

static void
do_unwind (unwind_state *to, rpo_elim &avail)
{
  gcc_assert (to->iterate);
  for (; last_inserted_nary != to->nary_top;
       last_inserted_nary = last_inserted_nary->next)
    {
      vn_nary_op_t *slot;
      slot = valid_info->nary->find_slot_with_hash
	(last_inserted_nary, last_inserted_nary->hashcode, NO_INSERT);
      /* Predication causes the need to restore previous state.  */
      if ((*slot)->unwind_to)
	*slot = (*slot)->unwind_to;
      else
	valid_info->nary->clear_slot (slot);
    }
  for (; last_inserted_phi != to->phi_top;
       last_inserted_phi = last_inserted_phi->next)
    {
      vn_phi_t *slot;
      slot = valid_info->phis->find_slot_with_hash
	(last_inserted_phi, last_inserted_phi->hashcode, NO_INSERT);
      valid_info->phis->clear_slot (slot);
    }
  for (; last_inserted_ref != to->ref_top;
       last_inserted_ref = last_inserted_ref->next)
    {
      vn_reference_t *slot;
      slot = valid_info->references->find_slot_with_hash
	(last_inserted_ref, last_inserted_ref->hashcode, NO_INSERT);
      (*slot)->operands.release ();
      valid_info->references->clear_slot (slot);
    }
  obstack_free (&vn_tables_obstack, to->ob_top);

  /* Prune [rpo_idx, ] from avail.  */
  for (; last_pushed_avail && last_pushed_avail->avail != to->avail_top;)
    {
      vn_ssa_aux_t val = last_pushed_avail;
      vn_avail *av = val->avail;
      val->avail = av->next;
      last_pushed_avail = av->next_undo;
      av->next = avail.m_avail_freelist;
      avail.m_avail_freelist = av;
    }
}
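/* Illustrative scenario: when iteration restarts at the head of an SCC,
   everything the previous pass over the SCC body inserted into the nary,
   phi and reference tables is dropped again (predicated nary entries are
   un-shadowed via their unwind_to link), and avail entries pushed since
   that point are returned to the freelist via the next_undo chain.  */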
/* Do VN on a SEME region specified by ENTRY and EXIT_BBS in FN.
   If ITERATE is true then treat backedges optimistically as not
   executed and iterate.  If ELIMINATE is true then perform
   elimination, otherwise leave that to the caller.  */

static unsigned
do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
	   bool iterate, bool eliminate)
{
  unsigned todo = 0;

  /* We currently do not support region-based iteration when
     elimination is requested.  */
  gcc_assert (!entry || !iterate || !eliminate);
  /* When iterating we need loop info up-to-date.  */
  gcc_assert (!iterate || !loops_state_satisfies_p (LOOPS_NEED_FIXUP));

  bool do_region = entry != NULL;
  if (!do_region)
    {
      entry = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fn));
      exit_bbs = BITMAP_ALLOC (NULL);
      bitmap_set_bit (exit_bbs, EXIT_BLOCK);
    }

  /* Clear EDGE_DFS_BACK on "all" entry edges, RPO order compute will
     re-mark those that are contained in the region.  */
  edge_iterator ei;
  edge e;
  FOR_EACH_EDGE (e, ei, entry->dest->preds)
    e->flags &= ~EDGE_DFS_BACK;

  int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS);
  auto_vec<std::pair<int, int> > toplevel_scc_extents;
  int n = rev_post_order_and_mark_dfs_back_seme
    (fn, entry, exit_bbs, true, rpo, !iterate ? &toplevel_scc_extents : NULL);

  if (!do_region)
    BITMAP_FREE (exit_bbs);

  /* If there are any non-DFS_BACK edges into entry->dest skip
     processing PHI nodes for that block.  This supports
     value-numbering loop bodies w/o the actual loop.  */
  FOR_EACH_EDGE (e, ei, entry->dest->preds)
    if (e != entry
	&& !(e->flags & EDGE_DFS_BACK))
      break;
  bool skip_entry_phis = e != NULL;
  if (skip_entry_phis && dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Region does not contain all edges into "
	     "the entry block, skipping its PHIs.\n");

  int *bb_to_rpo = XNEWVEC (int, last_basic_block_for_fn (fn));
  for (int i = 0; i < n; ++i)
    bb_to_rpo[rpo[i]] = i;

  unwind_state *rpo_state = XNEWVEC (unwind_state, n);

  rpo_elim avail (entry->dest);
  rpo_avail = &avail;

  /* Verify we have no extra entries into the region.  */
  if (flag_checking && do_region)
    {
      auto_bb_flag bb_in_region (fn);
      for (int i = 0; i < n; ++i)
	{
	  basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
	  bb->flags |= bb_in_region;
	}
      /* We can't merge the first two loops because we cannot rely
	 on EDGE_DFS_BACK for edges not within the region.  But if
	 we decide to always have the bb_in_region flag we can
	 do the checking during the RPO walk itself (but then it's
	 also easy to handle MEME conservatively).  */
      for (int i = 0; i < n; ++i)
	{
	  basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
	  edge e;
	  edge_iterator ei;
	  FOR_EACH_EDGE (e, ei, bb->preds)
	    gcc_assert (e == entry
			|| (skip_entry_phis && bb == entry->dest)
			|| (e->src->flags & bb_in_region));
	}
      for (int i = 0; i < n; ++i)
	{
	  basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
	  bb->flags &= ~bb_in_region;
	}
    }

  /* Create the VN state.  For the initial size of the various hashtables
     use a heuristic based on region size and number of SSA names.  */
  unsigned region_size = (((unsigned HOST_WIDE_INT)n * num_ssa_names)
			  / (n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS));
  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
  next_value_id = 1;
  next_constant_value_id = -1;

  vn_ssa_aux_hash = new hash_table <vn_ssa_aux_hasher> (region_size * 2);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  gcc_obstack_init (&vn_tables_obstack);
  gcc_obstack_init (&vn_tables_insert_obstack);
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info, region_size);
  last_inserted_ref = NULL;
  last_inserted_phi = NULL;
  last_inserted_nary = NULL;
  last_pushed_avail = NULL;

  vn_valueize = rpo_vn_valueize;

  /* Initialize the unwind state and edge/BB executable state.  */
  unsigned curr_scc = 0;
  for (int i = 0; i < n; ++i)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
      rpo_state[i].visited = 0;
      rpo_state[i].max_rpo = i;
      if (!iterate && curr_scc < toplevel_scc_extents.length ())
	{
	  if (i >= toplevel_scc_extents[curr_scc].first
	      && i <= toplevel_scc_extents[curr_scc].second)
	    rpo_state[i].max_rpo = toplevel_scc_extents[curr_scc].second;
	  if (i == toplevel_scc_extents[curr_scc].second)
	    curr_scc++;
	}
      bb->flags &= ~BB_EXECUTABLE;
      bool has_backedges = false;
      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  if (e->flags & EDGE_DFS_BACK)
	    has_backedges = true;
	  e->flags &= ~EDGE_EXECUTABLE;
	  if (iterate || e == entry || (skip_entry_phis && bb == entry->dest))
	    continue;
	}
      rpo_state[i].iterate = iterate && has_backedges;
    }
  entry->flags |= EDGE_EXECUTABLE;
  entry->dest->flags |= BB_EXECUTABLE;

  /* As heuristic to improve compile-time we handle only the N innermost
     loops and the outermost one optimistically.  */
  if (iterate)
    {
      unsigned max_depth = param_rpo_vn_max_loop_depth;
      for (auto loop : loops_list (cfun, LI_ONLY_INNERMOST))
	if (loop_depth (loop) > max_depth)
	  for (unsigned i = 2;
	       i < loop_depth (loop) - max_depth; ++i)
	    {
	      basic_block header = superloop_at_depth (loop, i)->header;
	      bool non_latch_backedge = false;
	      edge e;
	      edge_iterator ei;
	      FOR_EACH_EDGE (e, ei, header->preds)
		if (e->flags & EDGE_DFS_BACK)
		  {
		    /* There can be a non-latch backedge into the header
		       which is part of an outer irreducible region.  We
		       cannot avoid iterating this block then.  */
		    if (!dominated_by_p (CDI_DOMINATORS,
					 e->src, e->dest))
		      {
			if (dump_file && (dump_flags & TDF_DETAILS))
			  fprintf (dump_file, "non-latch backedge %d -> %d "
				   "forces iteration of loop %d\n",
				   e->src->index, e->dest->index, loop->num);
			non_latch_backedge = true;
		      }
		    else
		      e->flags |= EDGE_EXECUTABLE;
		  }
	      rpo_state[bb_to_rpo[header->index]].iterate = non_latch_backedge;
	    }
    }

  uint64_t nblk = 0;
  int idx = 0;
  if (iterate)
    /* Go and process all blocks, iterating as necessary.  */
    do
      {
	basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);

	/* If the block has incoming backedges remember unwind state.  This
	   is required even for non-executable blocks since in irreducible
	   regions we might reach them via the backedge and re-start iterating
	   from there.
	   Note we can individually mark blocks with incoming backedges to
	   not iterate where we then handle PHIs conservatively.  We do that
	   heuristically to reduce compile-time for degenerate cases.  */
	if (rpo_state[idx].iterate)
	  {
	    rpo_state[idx].ob_top = obstack_alloc (&vn_tables_obstack, 0);
	    rpo_state[idx].ref_top = last_inserted_ref;
	    rpo_state[idx].phi_top = last_inserted_phi;
	    rpo_state[idx].nary_top = last_inserted_nary;
	    rpo_state[idx].avail_top
	      = last_pushed_avail ? last_pushed_avail->avail : NULL;
	  }

	if (!(bb->flags & BB_EXECUTABLE))
	  {
	    if (dump_file && (dump_flags & TDF_DETAILS))
	      fprintf (dump_file, "Block %d: BB%d found not executable\n",
		       idx, bb->index);
	    idx++;
	    continue;
	  }

	if (dump_file && (dump_flags & TDF_DETAILS))
	  fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
	nblk++;
	todo |= process_bb (avail, bb,
			    rpo_state[idx].visited != 0,
			    rpo_state[idx].iterate,
			    iterate, eliminate, do_region, exit_bbs, false);
	rpo_state[idx].visited++;

	/* Verify if changed values flow over executable outgoing backedges
	   and those change destination PHI values (that's the thing we
	   can easily verify).  Reduce over all such edges to the farthest
	   away PHI.  */
	int iterate_to = -1;
	edge_iterator ei;
	edge e;
	FOR_EACH_EDGE (e, ei, bb->succs)
	  if ((e->flags & (EDGE_DFS_BACK|EDGE_EXECUTABLE))
	      == (EDGE_DFS_BACK|EDGE_EXECUTABLE)
	      && rpo_state[bb_to_rpo[e->dest->index]].iterate)
	    {
	      int destidx = bb_to_rpo[e->dest->index];
	      if (!rpo_state[destidx].visited)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "Unvisited destination %d\n",
			     e->dest->index);
		  if (iterate_to == -1 || destidx < iterate_to)
		    iterate_to = destidx;
		  continue;
		}
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "Looking for changed values of backedge"
			 " %d->%d destination PHIs\n",
			 e->src->index, e->dest->index);
	      vn_context_bb = e->dest;
	      gphi_iterator gsi;
	      for (gsi = gsi_start_phis (e->dest);
		   !gsi_end_p (gsi); gsi_next (&gsi))
		{
		  bool inserted = false;
		  /* While we'd ideally just iterate on value changes
		     we CSE PHIs and do that even across basic-block
		     boundaries.  So even hashtable state changes can
		     be important (which is roughly equivalent to
		     PHI argument value changes).  To not excessively
		     iterate because of that we track whether a PHI
		     was CSEd to with GF_PLF_1.  */
		  bool phival_changed;
		  if ((phival_changed = visit_phi (gsi.phi (),
						   &inserted, false))
		      || (inserted && gimple_plf (gsi.phi (), GF_PLF_1)))
		    {
		      if (!phival_changed
			  && dump_file && (dump_flags & TDF_DETAILS))
			fprintf (dump_file, "PHI was CSEd and hashtable "
				 "state (changed)\n");
		      if (iterate_to == -1 || destidx < iterate_to)
			iterate_to = destidx;
		      break;
		    }
		}
	      vn_context_bb = NULL;
	    }
	if (iterate_to != -1)
	  {
	    do_unwind (&rpo_state[iterate_to], avail);
	    idx = iterate_to;
	    if (dump_file && (dump_flags & TDF_DETAILS))
	      fprintf (dump_file, "Iterating to %d BB%d\n",
		       iterate_to, rpo[iterate_to]);
	    continue;
	  }

	idx++;
      }
    while (idx < n);
  else /* !iterate */
    {
      /* Process all blocks greedily with a worklist that enforces RPO
	 processing of reachable blocks.  */
      auto_bitmap worklist;
      bitmap_set_bit (worklist, 0);
      while (!bitmap_empty_p (worklist))
	{
	  int idx = bitmap_first_set_bit (worklist);
	  bitmap_clear_bit (worklist, idx);
	  basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
	  gcc_assert ((bb->flags & BB_EXECUTABLE)
		      && !rpo_state[idx].visited);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);

	  /* When we run into predecessor edges where we cannot trust their
	     executable state, mark them executable so PHI processing will
	     be conservative.
	     ???  Do we need to force arguments flowing over that edge
	     to be varying or will they even always be?  */
	  edge_iterator ei;
	  edge e;
	  FOR_EACH_EDGE (e, ei, bb->preds)
	    if (!(e->flags & EDGE_EXECUTABLE)
		&& (bb == entry->dest
		    || (!rpo_state[bb_to_rpo[e->src->index]].visited
			&& (rpo_state[bb_to_rpo[e->src->index]].max_rpo
			    >= (int)idx))))
	      {
		if (dump_file && (dump_flags & TDF_DETAILS))
		  fprintf (dump_file, "Cannot trust state of predecessor "
			   "edge %d -> %d, marking executable\n",
			   e->src->index, e->dest->index);
		e->flags |= EDGE_EXECUTABLE;
	      }

	  nblk++;
	  todo |= process_bb (avail, bb, false, false, false, eliminate,
			      do_region, exit_bbs,
			      skip_entry_phis && bb == entry->dest);
	  rpo_state[idx].visited++;

	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if ((e->flags & EDGE_EXECUTABLE)
		&& e->dest->index != EXIT_BLOCK
		&& (!do_region || !bitmap_bit_p (exit_bbs, e->dest->index))
		&& !rpo_state[bb_to_rpo[e->dest->index]].visited)
	      bitmap_set_bit (worklist, bb_to_rpo[e->dest->index]);
	}
    }

  /* If statistics or dump file active.  */
  int nex = 0;
  unsigned max_visited = 1;
  for (int i = 0; i < n; ++i)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
      if (bb->flags & BB_EXECUTABLE)
	nex++;
      statistics_histogram_event (cfun, "RPO block visited times",
				  rpo_state[i].visited);
      if (rpo_state[i].visited > max_visited)
	max_visited = rpo_state[i].visited;
    }
  unsigned nvalues = 0, navail = 0;
  for (hash_table <vn_ssa_aux_hasher>::iterator i = vn_ssa_aux_hash->begin ();
       i != vn_ssa_aux_hash->end (); ++i)
    {
      nvalues++;
      vn_avail *av = (*i)->avail;
      while (av)
	{
	  navail++;
	  av = av->next;
	}
    }
  statistics_counter_event (cfun, "RPO blocks", n);
  statistics_counter_event (cfun, "RPO blocks visited", nblk);
  statistics_counter_event (cfun, "RPO blocks executable", nex);
  statistics_histogram_event (cfun, "RPO iterations", 10*nblk / nex);
  statistics_histogram_event (cfun, "RPO num values", nvalues);
  statistics_histogram_event (cfun, "RPO num avail", navail);
  statistics_histogram_event (cfun, "RPO num lattice",
			      vn_ssa_aux_hash->elements ());
  if (dump_file && (dump_flags & (TDF_DETAILS|TDF_STATS)))
    {
      fprintf (dump_file, "RPO iteration over %d blocks visited %" PRIu64
	       " blocks in total discovering %d executable blocks iterating "
	       "%d.%d times, a block was visited max. %u times\n",
	       n, nblk, nex,
	       (int)((10*nblk / nex)/10), (int)((10*nblk / nex)%10),
	       max_visited);
      fprintf (dump_file, "RPO tracked %d values available at %d locations "
	       "and %" PRIu64 " lattice elements\n",
	       nvalues, navail, (uint64_t) vn_ssa_aux_hash->elements ());
    }

  if (eliminate)
    {
      /* When !iterate we already performed elimination during the RPO
	 walk.  */
      if (iterate)
	{
	  /* Elimination for region-based VN needs to be done within the
	     RPO walk.  */
	  gcc_assert (! do_region);
	  /* Note we can't use avail.walk here because that gets confused
	     by the existing availability and it will be less efficient
	     as well.  */
	  todo |= eliminate_with_rpo_vn (NULL);
	}
      else
	todo |= avail.eliminate_cleanup (do_region);
    }

  vn_valueize = NULL;
  rpo_avail = NULL;

  XDELETEVEC (bb_to_rpo);
  XDELETEVEC (rpo);
  XDELETEVEC (rpo_state);

  return todo;
}
/* Region-based entry for RPO VN.  Performs value-numbering and elimination
   on the SEME region specified by ENTRY and EXIT_BBS.  If ENTRY is not
   the only edge into the region at ENTRY->dest, PHI nodes in ENTRY->dest
   are not considered.  */

unsigned
do_rpo_vn (function *fn, edge entry, bitmap exit_bbs)
{
  default_vn_walk_kind = VN_WALKREWRITE;
  unsigned todo = do_rpo_vn (fn, entry, exit_bbs, false, true);
  free_rpo_vn ();
  return todo;
}
namespace {

const pass_data pass_data_fre =
{
  GIMPLE_PASS, /* type */
  "fre", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_FRE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fre : public gimple_opt_pass
{
public:
  pass_fre (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fre, ctxt), may_iterate (true)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_fre (m_ctxt); }
  void set_pass_param (unsigned int n, bool param)
    {
      gcc_assert (n == 0);
      may_iterate = param;
    }
  virtual bool gate (function *)
    {
      return flag_tree_fre != 0 && (may_iterate || optimize > 1);
    }
  virtual unsigned int execute (function *);

private:
  bool may_iterate;
}; // class pass_fre

unsigned int
pass_fre::execute (function *fun)
{
  unsigned todo = 0;

  /* At -O[1g] use the cheap non-iterating mode.  */
  bool iterate_p = may_iterate && (optimize > 1);
  calculate_dominance_info (CDI_DOMINATORS);
  if (iterate_p)
    loop_optimizer_init (AVOID_CFG_MODIFICATIONS);

  default_vn_walk_kind = VN_WALKREWRITE;
  todo = do_rpo_vn (fun, NULL, NULL, iterate_p, true);
  free_rpo_vn ();

  if (iterate_p)
    loop_optimizer_finalize ();

  if (scev_initialized_p ())
    scev_reset_htab ();

  /* For late FRE after IVOPTs and unrolling, see if we can
     remove some TREE_ADDRESSABLE and rewrite stuff into SSA.  */
  if (!may_iterate)
    todo |= TODO_update_address_taken;

  return todo;
}

} // anon namespace

gimple_opt_pass *
make_pass_fre (gcc::context *ctxt)
{
  return new pass_fre (ctxt);
}

#undef BB_EXECUTABLE