/* SCC value numbering for trees
   Copyright (C) 2006-2024 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "splay-tree.h"
#include "insn-config.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "tree-inline.h"
#include "internal-fn.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "tree-ssa-propagate.h"
#include "gimple-match.h"
#include "stringpool.h"
#include "tree-pass.h"
#include "statistics.h"
#include "langhooks.h"
#include "ipa-utils.h"
#include "tree-cfgcleanup.h"
#include "tree-ssa-loop.h"
#include "tree-scalar-evolution.h"
#include "tree-ssa-loop-niter.h"
#include "fold-const-call.h"
#include "ipa-modref-tree.h"
#include "ipa-modref.h"
#include "tree-ssa-sccvn.h"
#include "alloc-pool.h"
#include "symbol-summary.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   members).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCC's, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
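
/* Illustrative sketch (not part of the algorithm description above):
   consider value numbering the loop body

     i_1 = PHI <0 (preheader), i_3 (latch)>
     j_2 = PHI <0 (preheader), j_4 (latch)>
     i_3 = i_1 + 1;
     j_4 = j_2 + 1;

   The names i_1, j_2, i_3 and j_4 form one SCC of the SSA graph.
   Optimistically assuming the two PHIs are equal lets the iteration
   prove i_1 == j_2 and i_3 == j_4, something a single RPO pass with a
   pessimistic initial assumption cannot conclude.  */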
/* There's no BB_EXECUTABLE but we can use BB_VISITED.  */
#define BB_EXECUTABLE BB_VISITED

static vn_lookup_kind default_vn_walk_kind;
/* vn_nary_op hashtable helpers.  */

struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
{
  typedef vn_nary_op_s *compare_type;
  static inline hashval_t hash (const vn_nary_op_s *);
  static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
};

/* Return the computed hashcode for nary operation P1.  */

inline hashval_t
vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
{
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

inline bool
vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
{
  return vno1 == vno2 || vn_nary_op_eq (vno1, vno2);
}

typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
/* vn_phi hashtable helpers.  */

static bool
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);

struct vn_phi_hasher : nofree_ptr_hash <vn_phi_s>
{
  static inline hashval_t hash (const vn_phi_s *);
  static inline bool equal (const vn_phi_s *, const vn_phi_s *);
};

/* Return the computed hashcode for phi operation P1.  */

inline hashval_t
vn_phi_hasher::hash (const vn_phi_s *vp1)
{
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

inline bool
vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
{
  return vp1 == vp2 || vn_phi_eq (vp1, vp2);
}

typedef hash_table<vn_phi_hasher> vn_phi_table_type;
typedef vn_phi_table_type::iterator vn_phi_iterator_type;
/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
	  /* We do not care for differences in type qualification.  */
	  && (vro1->type == vro2->type
	      || (vro1->type && vro2->type
		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
					 TYPE_MAIN_VARIANT (vro2->type))))
	  && expressions_equal_p (vro1->op0, vro2->op0)
	  && expressions_equal_p (vro1->op1, vro2->op1)
	  && expressions_equal_p (vro1->op2, vro2->op2)
	  && (vro1->opcode != CALL_EXPR || vro1->clique == vro2->clique));
}

/* Free a reference operation structure VP.  */

static inline void
free_reference (vn_reference_s *vr)
{
  vr->operands.release ();
}
/* vn_reference hashtable helpers.  */

struct vn_reference_hasher : nofree_ptr_hash <vn_reference_s>
{
  static inline hashval_t hash (const vn_reference_s *);
  static inline bool equal (const vn_reference_s *, const vn_reference_s *);
};

/* Return the hashcode for a given reference operation P1.  */

inline hashval_t
vn_reference_hasher::hash (const vn_reference_s *vr1)
{
  return vr1->hashcode;
}

inline bool
vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
{
  return v == c || vn_reference_eq (v, c);
}

typedef hash_table<vn_reference_hasher> vn_reference_table_type;
typedef vn_reference_table_type::iterator vn_reference_iterator_type;
/* Pretty-print OPS to OUTFILE.  */

void
print_vn_reference_ops (FILE *outfile, const vec<vn_reference_op_s> ops)
{
  vn_reference_op_t vro;
  unsigned int i;
  fprintf (outfile, "{");
  for (i = 0; ops.iterate (i, &vro); i++)
    {
      bool closebrace = false;
      if (vro->opcode != SSA_NAME
	  && TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
	{
	  fprintf (outfile, "%s", get_tree_code_name (vro->opcode));
	  if (vro->op0 || vro->opcode == CALL_EXPR)
	    {
	      fprintf (outfile, "<");
	      closebrace = true;
	    }
	}
      if (vro->op0 || vro->opcode == CALL_EXPR)
	{
	  if (!vro->op0)
	    fprintf (outfile, internal_fn_name ((internal_fn)vro->clique));
	  else
	    print_generic_expr (outfile, vro->op0);
	  if (vro->op1)
	    {
	      fprintf (outfile, ",");
	      print_generic_expr (outfile, vro->op1);
	    }
	  if (vro->op2)
	    {
	      fprintf (outfile, ",");
	      print_generic_expr (outfile, vro->op2);
	    }
	}
      if (closebrace)
	fprintf (outfile, ">");
      if (i != ops.length () - 1)
	fprintf (outfile, ",");
    }
  fprintf (outfile, "}");
}

DEBUG_FUNCTION void
debug_vn_reference_ops (const vec<vn_reference_op_s> ops)
{
  print_vn_reference_ops (stderr, ops);
  fputc ('\n', stderr);
}
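
/* Illustrative output sketch (hypothetical, the exact rendering depends
   on the tree pretty-printer): a load from a.f dumps roughly as

     {component_ref<f>,mem_ref<0>,addr_expr<&a>}

   with one comma-separated entry per vn_reference_op_s.  */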
/* The set of VN hashtables.  */

typedef struct vn_tables_s
{
  vn_nary_op_table_type *nary;
  vn_phi_table_type *phis;
  vn_reference_table_type *references;
} *vn_tables_t;
/* vn_constant hashtable helpers.  */

struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
{
  static inline hashval_t hash (const vn_constant_s *);
  static inline bool equal (const vn_constant_s *, const vn_constant_s *);
};

/* Hash table hash function for vn_constant_t.  */

inline hashval_t
vn_constant_hasher::hash (const vn_constant_s *vc1)
{
  return vc1->hashcode;
}

/* Hash table equality function for vn_constant_t.  */

inline bool
vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
{
  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

static hash_table<vn_constant_hasher> *constant_to_value_id;
/* Obstack we allocate the vn-tables elements from.  */
static obstack vn_tables_obstack;
/* Special obstack we never unwind.  */
static obstack vn_tables_insert_obstack;

static vn_reference_t last_inserted_ref;
static vn_phi_t last_inserted_phi;
static vn_nary_op_t last_inserted_nary;
static vn_ssa_aux_t last_pushed_avail;

/* Valid hashtables storing information we have proven to be
   correct.  */
static vn_tables_t valid_info;
/* Valueization hook for simplify_replace_tree.  Valueize NAME if it is
   an SSA name, otherwise just return it.  */
tree (*vn_valueize) (tree);

static tree
vn_valueize_for_srt (tree t, void* context ATTRIBUTE_UNUSED)
{
  basic_block saved_vn_context_bb = vn_context_bb;
  /* Look for something available at the definition block of the argument.
     This avoids inconsistencies between availability there which
     decides if the stmt can be removed and availability at the
     use site.  The SSA property ensures that things available
     at the definition are also available at uses.  */
  if (!SSA_NAME_IS_DEFAULT_DEF (t))
    vn_context_bb = gimple_bb (SSA_NAME_DEF_STMT (t));
  tree res = vn_valueize (t);
  vn_context_bb = saved_vn_context_bb;
  return res;
}
/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;
static int next_constant_value_id;
/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

struct vn_ssa_aux_hasher : typed_noop_remove <vn_ssa_aux_t>
{
  typedef vn_ssa_aux_t value_type;
  typedef tree compare_type;
  static inline hashval_t hash (const value_type &);
  static inline bool equal (const value_type &, const compare_type &);
  static inline void mark_deleted (value_type &) {}
  static const bool empty_zero_p = true;
  static inline void mark_empty (value_type &e) { e = NULL; }
  static inline bool is_deleted (value_type &) { return false; }
  static inline bool is_empty (value_type &e) { return e == NULL; }
};

hashval_t
vn_ssa_aux_hasher::hash (const value_type &entry)
{
  return SSA_NAME_VERSION (entry->name);
}

bool
vn_ssa_aux_hasher::equal (const value_type &entry, const compare_type &name)
{
  return name == entry->name;
}

static hash_table<vn_ssa_aux_hasher> *vn_ssa_aux_hash;
typedef hash_table<vn_ssa_aux_hasher>::iterator vn_ssa_aux_iterator_type;
static struct obstack vn_ssa_aux_obstack;
static vn_nary_op_t vn_nary_op_insert_stmt (gimple *, tree);
static vn_nary_op_t vn_nary_op_insert_into (vn_nary_op_t,
					    vn_nary_op_table_type *);
static void init_vn_nary_op_from_pieces (vn_nary_op_t, unsigned int,
					 enum tree_code, tree, tree *);
static tree vn_lookup_simplify_result (gimple_match_op *);
static vn_reference_t vn_reference_lookup_or_insert_for_pieces
	  (tree, alias_set_type, alias_set_type, tree,
	   vec<vn_reference_op_s, va_heap>, tree);
/* Return whether there is value numbering information for a given SSA name.  */

bool
has_VN_INFO (tree name)
{
  return vn_ssa_aux_hash->find_with_hash (name, SSA_NAME_VERSION (name));
}

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t *res
    = vn_ssa_aux_hash->find_slot_with_hash (name, SSA_NAME_VERSION (name),
					    INSERT);
  if (*res != NULL)
    return *res;

  vn_ssa_aux_t newinfo = *res = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  newinfo->name = name;
  newinfo->valnum = VN_TOP;
  /* We are using the visited flag to handle uses with defs not within the
     region being value-numbered.  */
  newinfo->visited = false;

  /* Given we create the VN_INFOs on-demand now we have to do initialization
     different than VN_TOP here.  */
  if (SSA_NAME_IS_DEFAULT_DEF (name))
    switch (TREE_CODE (SSA_NAME_VAR (name)))
      {
      case VAR_DECL:
	/* All undefined vars are VARYING.  */
	newinfo->valnum = name;
	newinfo->visited = true;
	break;

      case PARM_DECL:
	/* Parameters are VARYING but we can record a condition
	   if we know it is a non-NULL pointer.  */
	newinfo->visited = true;
	newinfo->valnum = name;
	if (POINTER_TYPE_P (TREE_TYPE (name))
	    && nonnull_arg_p (SSA_NAME_VAR (name)))
	  {
	    tree ops[2];
	    ops[0] = name;
	    ops[1] = build_int_cst (TREE_TYPE (name), 0);
	    vn_nary_op_t nary;
	    /* Allocate from non-unwinding stack.  */
	    nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
	    init_vn_nary_op_from_pieces (nary, 2, NE_EXPR,
					 boolean_type_node, ops);
	    nary->predicated_values = 0;
	    nary->u.result = boolean_true_node;
	    vn_nary_op_insert_into (nary, valid_info->nary);
	    gcc_assert (nary->unwind_to == NULL);
	    /* Also do not link it into the undo chain.  */
	    last_inserted_nary = nary->next;
	    nary->next = (vn_nary_op_t)(void *)-1;
	    nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
	    init_vn_nary_op_from_pieces (nary, 2, EQ_EXPR,
					 boolean_type_node, ops);
	    nary->predicated_values = 0;
	    nary->u.result = boolean_false_node;
	    vn_nary_op_insert_into (nary, valid_info->nary);
	    gcc_assert (nary->unwind_to == NULL);
	    last_inserted_nary = nary->next;
	    nary->next = (vn_nary_op_t)(void *)-1;
	    if (dump_file && (dump_flags & TDF_DETAILS))
	      {
		fprintf (dump_file, "Recording ");
		print_generic_expr (dump_file, name, TDF_SLIM);
		fprintf (dump_file, " != 0\n");
	      }
	  }
	break;

      case RESULT_DECL:
	/* If the result is passed by invisible reference the default
	   def is initialized, otherwise it's uninitialized.  Still
	   undefined is varying.  */
	newinfo->visited = true;
	newinfo->valnum = name;
	break;

      default:
	gcc_unreachable ();
      }
  return newinfo;
}
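
/* Sketch of the PARM_DECL handling above: for

     void f (int *p) __attribute__((nonnull))

   the default definition of p gets the expressions p != 0 (valued
   true) and p == 0 (valued false) pre-seeded into the valid nary
   table, so later NULL-pointer tests against the unmodified parameter
   fold without further work.  */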
/* Return the SSA value of X.  */

static inline tree
SSA_VAL (tree x, bool *visited = NULL)
{
  vn_ssa_aux_t tem = vn_ssa_aux_hash->find_with_hash (x, SSA_NAME_VERSION (x));
  if (visited)
    *visited = tem && tem->visited;
  return tem && tem->visited ? tem->valnum : x;
}

/* Return the SSA value of the VUSE x, supporting released VDEFs
   during elimination which will value-number the VDEF to the
   associated VUSE (but not substitute in the whole lattice).  */

static inline tree
vuse_ssa_val (tree x)
{
  if (!x)
    return NULL_TREE;
  do
    {
      x = SSA_VAL (x);
      gcc_assert (x != VN_TOP);
    }
  while (SSA_NAME_IN_FREE_LIST (x));
  return x;
}

/* Similar to the above but used as callback for walk_non_aliased_vuses
   and thus should stop at unvisited VUSE to not walk across region
   boundaries.  */

static tree
vuse_valueize (tree vuse)
{
  do
    {
      bool visited;
      vuse = SSA_VAL (vuse, &visited);
      if (!visited)
	return NULL_TREE;
      gcc_assert (vuse != VN_TOP);
    }
  while (SSA_NAME_IN_FREE_LIST (vuse));
  return vuse;
}
/* Return the vn_kind the expression computed by the stmt should be
   associated with.  */

enum vn_kind
vn_get_stmt_kind (gimple *stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      return VN_REFERENCE;
    case GIMPLE_PHI:
      return VN_PHI;
    case GIMPLE_ASSIGN:
      {
	enum tree_code code = gimple_assign_rhs_code (stmt);
	tree rhs1 = gimple_assign_rhs1 (stmt);
	switch (get_gimple_rhs_class (code))
	  {
	  case GIMPLE_UNARY_RHS:
	  case GIMPLE_BINARY_RHS:
	  case GIMPLE_TERNARY_RHS:
	    return VN_NARY;
	  case GIMPLE_SINGLE_RHS:
	    switch (TREE_CODE_CLASS (code))
	      {
	      case tcc_reference:
		/* VOP-less references can go through unary case.  */
		if ((code == REALPART_EXPR
		     || code == IMAGPART_EXPR
		     || code == VIEW_CONVERT_EXPR
		     || code == BIT_FIELD_REF)
		    && (TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME
			|| is_gimple_min_invariant (TREE_OPERAND (rhs1, 0))))
		  return VN_NARY;

		/* Fallthrough.  */
	      case tcc_declaration:
		return VN_REFERENCE;

	      case tcc_constant:
		return VN_CONSTANT;

	      default:
		if (code == ADDR_EXPR)
		  return (is_gimple_min_invariant (rhs1)
			  ? VN_CONSTANT : VN_REFERENCE);
		else if (code == CONSTRUCTOR)
		  return VN_NARY;
		return VN_NONE;
	      }
	  default:
	    return VN_NONE;
	  }
      }
    default:
      return VN_NONE;
    }
}
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
  if (slot)
    return (*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  /* If the hashtable isn't initialized we're not running from PRE and thus
     do not need value-ids.  */
  if (!constant_to_value_id)
    return 0;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, INSERT);
  if (*slot)
    return (*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_constant_value_id ();
  *slot = vcp;
  return vcp->value_id;
}
/* Compute the hash for a reference operand VRO1.  */

static void
vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
{
  hstate.add_int (vro1->opcode);
  if (vro1->opcode == CALL_EXPR && !vro1->op0)
    hstate.add_int (vro1->clique);
  if (vro1->op0)
    inchash::add_expr (vro1->op0, hstate);
  if (vro1->op1)
    inchash::add_expr (vro1->op1, hstate);
  if (vro1->op2)
    inchash::add_expr (vro1->op2, hstate);
}

/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  inchash::hash hstate;
  hashval_t result;
  int i;
  vn_reference_op_t vro;
  poly_int64 off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
	deref = true;
      else if (vro->opcode != ADDR_EXPR)
	deref = false;
      if (maybe_ne (vro->off, -1))
	{
	  if (known_eq (off, -1))
	    off = 0;
	  off += vro->off;
	}
      else
	{
	  if (maybe_ne (off, -1)
	      && maybe_ne (off, 0))
	    hstate.add_poly_int (off);
	  off = -1;
	  if (deref
	      && vro->opcode == ADDR_EXPR)
	    {
	      if (vro->op0)
		{
		  tree op = TREE_OPERAND (vro->op0, 0);
		  hstate.add_int (TREE_CODE (op));
		  inchash::add_expr (op, hstate);
		}
	    }
	  else
	    vn_reference_op_compute_hash (vro, hstate);
	}
    }
  result = hstate.end ();
  /* ??? We would ICE later if we hash instead of adding that in.  */
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
/* Return true if reference operations VR1 and VR2 are equivalent.  This
   means they have the same set of operands and vuses.  */

bool
vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
{
  unsigned i, j;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!vr1->type || !vr2->type)
    {
      if (vr1->type != vr2->type)
	return false;
    }
  else if (vr1->type == vr2->type)
    ;
  else if (COMPLETE_TYPE_P (vr1->type) != COMPLETE_TYPE_P (vr2->type)
	   || (COMPLETE_TYPE_P (vr1->type)
	       && !expressions_equal_p (TYPE_SIZE (vr1->type),
					TYPE_SIZE (vr2->type))))
    return false;
  else if (vr1->operands[0].opcode == CALL_EXPR
	   && !types_compatible_p (vr1->type, vr2->type))
    return false;
  else if (INTEGRAL_TYPE_P (vr1->type)
	   && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
	return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
	   && (TYPE_PRECISION (vr1->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
	   && (TYPE_PRECISION (vr2->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;
  else if (VECTOR_BOOLEAN_TYPE_P (vr1->type)
	   && VECTOR_BOOLEAN_TYPE_P (vr2->type))
    {
      /* Vector boolean types can have padding, verify we are dealing with
	 the same number of elements, aka the precision of the types.
	 For example, on most architectures the precision_size of vbool*_t
	 types is calculated like below:
	 precision_size = type_size * 8

	 Unfortunately, RISC-V will adjust the precision_size for the
	 vbool*_t in order to align the ISA as below:
	 type_size      = [1, 1, 1, 1, 2, 4, 8]
	 precision_size = [1, 2, 4, 8, 16, 32, 64]

	 Then the precision_size of RISC-V vbool*_t will not be a multiple
	 of the type_size.  We take care of this case consolidated here.  */
      if (maybe_ne (TYPE_VECTOR_SUBPARTS (vr1->type),
		    TYPE_VECTOR_SUBPARTS (vr2->type)))
	return false;
    }

  i = 0;
  j = 0;
  do
    {
      poly_int64 off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      bool reverse1 = false, reverse2 = false;
      for (; vr1->operands.iterate (i, &vro1); i++)
	{
	  if (vro1->opcode == MEM_REF)
	    deref1 = true;
	  /* Do not look through a storage order barrier.  */
	  else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
	    return false;
	  reverse1 |= vro1->reverse;
	  if (known_eq (vro1->off, -1))
	    break;
	  off1 += vro1->off;
	}
      for (; vr2->operands.iterate (j, &vro2); j++)
	{
	  if (vro2->opcode == MEM_REF)
	    deref2 = true;
	  /* Do not look through a storage order barrier.  */
	  else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
	    return false;
	  reverse2 |= vro2->reverse;
	  if (known_eq (vro2->off, -1))
	    break;
	  off2 += vro2->off;
	}
      if (maybe_ne (off1, off2) || reverse1 != reverse2)
	return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
	{
	  memset (&tem1, 0, sizeof (tem1));
	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
	  tem1.type = TREE_TYPE (tem1.op0);
	  tem1.opcode = TREE_CODE (tem1.op0);
	  vro1 = &tem1;
	  deref1 = false;
	}
      if (deref2 && vro2->opcode == ADDR_EXPR)
	{
	  memset (&tem2, 0, sizeof (tem2));
	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
	  tem2.type = TREE_TYPE (tem2.op0);
	  tem2.opcode = TREE_CODE (tem2.op0);
	  vro2 = &tem2;
	  deref2 = false;
	}
      if (deref1 != deref2)
	return false;
      if (!vn_reference_op_eq (vro1, vro2))
	return false;
      ++j;
      ++i;
    }
  while (vr1->operands.length () != i
	 || vr2->operands.length () != j);

  return true;
}
/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
{
  /* For non-calls, store the information that makes up the address.  */
  tree orig = ref;
  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
	{
	case MODIFY_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  break;
	case WITH_SIZE_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.off = 0;
	  break;
	case MEM_REF:
	  /* The base address gets its own vn_reference_op_s structure.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  if (!mem_ref_offset (ref).to_shwi (&temp.off))
	    temp.off = -1;
	  temp.clique = MR_DEPENDENCE_CLIQUE (ref);
	  temp.base = MR_DEPENDENCE_BASE (ref);
	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
	  break;
	case TARGET_MEM_REF:
	  /* The base address gets its own vn_reference_op_s structure.  */
	  temp.op0 = TMR_INDEX (ref);
	  temp.op1 = TMR_STEP (ref);
	  temp.op2 = TMR_OFFSET (ref);
	  temp.clique = MR_DEPENDENCE_CLIQUE (ref);
	  temp.base = MR_DEPENDENCE_BASE (ref);
	  result->safe_push (temp);
	  memset (&temp, 0, sizeof (temp));
	  temp.type = NULL_TREE;
	  temp.opcode = ERROR_MARK;
	  temp.op0 = TMR_INDEX2 (ref);
	  temp.off = -1;
	  break;
	case BIT_FIELD_REF:
	  /* Record bits, position and storage order.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
	    temp.off = -1;
	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
	  break;
	case COMPONENT_REF:
	  /* The field decl is enough to unambiguously specify the field,
	     so use its type here.  */
	  temp.type = TREE_TYPE (TREE_OPERAND (ref, 1));
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  temp.reverse = (AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (ref, 0)))
			  && TYPE_REVERSE_STORAGE_ORDER
			       (TREE_TYPE (TREE_OPERAND (ref, 0))));
	  {
	    tree this_offset = component_ref_field_offset (ref);
	    if (this_offset
		&& poly_int_tree_p (this_offset))
	      {
		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
		  {
		    poly_offset_int off
		      = (wi::to_poly_offset (this_offset)
			 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
		    /* Prohibit value-numbering zero offset components
		       of addresses the same before the pass folding
		       __builtin_object_size had a chance to run.  */
		    if (TREE_CODE (orig) != ADDR_EXPR
			|| maybe_ne (off, 0)
			|| (cfun->curr_properties & PROP_objsz))
		      off.to_shwi (&temp.off);
		  }
	      }
	  }
	  break;
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  {
	    tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
	    /* Record index as operand.  */
	    temp.op0 = TREE_OPERAND (ref, 1);
	    /* Always record lower bounds and element size.  */
	    temp.op1 = array_ref_low_bound (ref);
	    /* But record element size in units of the type alignment.  */
	    temp.op2 = TREE_OPERAND (ref, 3);
	    temp.align = eltype->type_common.align;
	    if (!temp.op2)
	      temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
				     size_int (TYPE_ALIGN_UNIT (eltype)));
	    if (poly_int_tree_p (temp.op0)
		&& poly_int_tree_p (temp.op1)
		&& TREE_CODE (temp.op2) == INTEGER_CST)
	      {
		poly_offset_int off = ((wi::to_poly_offset (temp.op0)
					- wi::to_poly_offset (temp.op1))
				       * wi::to_offset (temp.op2)
				       * vn_ref_op_align_unit (&temp));
		off.to_shwi (&temp.off);
	      }
	    temp.reverse = (AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (ref, 0)))
			    && TYPE_REVERSE_STORAGE_ORDER
				 (TREE_TYPE (TREE_OPERAND (ref, 0))));
	  }
	  break;
	case VAR_DECL:
	  if (DECL_HARD_REGISTER (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthru.  */
	case PARM_DECL:
	case CONST_DECL:
	case RESULT_DECL:
	  /* Canonicalize decls to MEM[&decl] which is what we end up with
	     when valueizing MEM[ptr] with ptr = &decl.  */
	  temp.opcode = MEM_REF;
	  temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
	  temp.off = 0;
	  result->safe_push (temp);
	  temp.opcode = ADDR_EXPR;
	  temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
	  temp.type = TREE_TYPE (temp.op0);
	  temp.off = -1;
	  break;
	case ADDR_EXPR:
	  if (is_gimple_min_invariant (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  break;
	  /* These are only interesting for their operands, their
	     existence, and their type.  They will never be the last
	     ref in the chain of references (IE they require an
	     operand), so we don't have to put anything
	     for op* as it will be handled by the iteration  */
	case REALPART_EXPR:
	  temp.off = 0;
	  break;
	case VIEW_CONVERT_EXPR:
	  temp.off = 0;
	  temp.reverse = storage_order_barrier_p (ref);
	  break;
	case IMAGPART_EXPR:
	  /* This is only interesting for its constant offset.  */
	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
	  break;
	default:
	  gcc_unreachable ();
	}
      result->safe_push (temp);

      if (REFERENCE_CLASS_P (ref)
	  || TREE_CODE (ref) == MODIFY_EXPR
	  || TREE_CODE (ref) == WITH_SIZE_EXPR
	  || (TREE_CODE (ref) == ADDR_EXPR
	      && !is_gimple_min_invariant (ref)))
	ref = TREE_OPERAND (ref, 0);
      else
	ref = NULL_TREE;
    }
}
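
/* Illustrative sketch (hypothetical layout): for a load from a.f with
   a being a local variable, the ops vector roughly contains

     component_ref (op0 = FIELD_DECL f)
     mem_ref       (op0 = 0)
     addr_expr     (op0 = &a)

   i.e. the decl a is canonicalized to MEM[&a] as described above.  */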
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
			       alias_set_type set, alias_set_type base_set,
			       tree type, const vec<vn_reference_op_s> &ops)
{
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  poly_offset_int offset = 0;
  poly_offset_int max_size;
  poly_offset_int size = -1;
  tree size_tree = NULL_TREE;

  /* We don't handle calls.  */
  if (!type)
    return false;

  machine_mode mode = TYPE_MODE (type);
  if (mode == BLKmode)
    size_tree = TYPE_SIZE (type);
  else
    size = GET_MODE_BITSIZE (mode);
  if (size_tree != NULL_TREE
      && poly_int_tree_p (size_tree))
    size = wi::to_poly_offset (size_tree);

  /* Lower the final access size from the outermost expression.  */
  const_vn_reference_op_t cst_op = &ops[0];
  /* Cast away constness for the sake of the const-unsafe
     FOR_EACH_VEC_ELT().  */
  vn_reference_op_t op = const_cast<vn_reference_op_t> (cst_op);
  size_tree = NULL_TREE;
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  if (size_tree != NULL_TREE
      && poly_int_tree_p (size_tree)
      && (!known_size_p (size)
	  || known_lt (wi::to_poly_offset (size_tree), size)))
    size = wi::to_poly_offset (size_tree);

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (ops, i, op)
    {
      switch (op->opcode)
	{
	/* These may be in the reference ops, but we cannot do anything
	   sensible with them here.  */
	case ADDR_EXPR:
	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
	  if (base != NULL_TREE
	      && TREE_CODE (base) == MEM_REF
	      && op->op0
	      && DECL_P (TREE_OPERAND (op->op0, 0)))
	    {
	      const_vn_reference_op_t pop = &ops[i-1];
	      base = TREE_OPERAND (op->op0, 0);
	      if (known_eq (pop->off, -1))
		{
		  max_size = -1;
		  offset = 0;
		}
	      else
		offset += pop->off * BITS_PER_UNIT;
	      op0_p = NULL;
	      break;
	    }
	  /* Fallthru.  */
	case CALL_EXPR:
	  return false;

	/* Record the base objects.  */
	case MEM_REF:
	  *op0_p = build2 (MEM_REF, op->type,
			   NULL_TREE, op->op0);
	  MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
	  MR_DEPENDENCE_BASE (*op0_p) = op->base;
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	/* And now the usual component-reference style ops.  */
	case BIT_FIELD_REF:
	  offset += wi::to_poly_offset (op->op1);
	  break;

	case COMPONENT_REF:
	  {
	    tree field = op->op0;
	    /* We do not have a complete COMPONENT_REF tree here so we
	       cannot use component_ref_field_offset.  Do the interesting
	       parts manually.  */
	    tree this_offset = DECL_FIELD_OFFSET (field);

	    if (op->op1 || !poly_int_tree_p (this_offset))
	      max_size = -1;
	    else
	      {
		poly_offset_int woffset = (wi::to_poly_offset (this_offset)
					   << LOG2_BITS_PER_UNIT);
		woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
		offset += woffset;
	      }
	    break;
	  }

	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* We recorded the lower bound and the element size.  */
	  if (!poly_int_tree_p (op->op0)
	      || !poly_int_tree_p (op->op1)
	      || TREE_CODE (op->op2) != INTEGER_CST)
	    max_size = -1;
	  else
	    {
	      poly_offset_int woffset
		= wi::sext (wi::to_poly_offset (op->op0)
			    - wi::to_poly_offset (op->op1),
			    TYPE_PRECISION (sizetype));
	      woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
	      woffset <<= LOG2_BITS_PER_UNIT;
	      offset += woffset;
	    }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  offset += size;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	default:
	  return false;
	}
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->ref_alias_set = set;
  ref->base_alias_set = base_set;
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
    {
      ref->offset = 0;
      ref->size = -1;
      ref->max_size = -1;
      return true;
    }

  if (!offset.to_shwi (&ref->offset))
    {
      ref->offset = 0;
      ref->max_size = -1;
      return true;
    }

  if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
    ref->max_size = -1;

  return true;
}
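
/* Sketch of the produced abstraction: assuming a.f is a 32-bit field
   at byte offset 8 of a, the resulting ao_ref has base MEM[&a],
   offset 64 bits and size == max_size == 32 bits, which is what the
   alias oracle needs to disambiguate the access against other
   references.  */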
/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_call (gcall *call,
			      vec<vn_reference_op_s> *result)
{
  vn_reference_op_s temp;
  unsigned i;
  tree lhs = gimple_call_lhs (call);
  int lr;

  /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
     different.  By adding the lhs here in the vector, we ensure that the
     hashcode is different, guaranteeing a different value number.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    {
      memset (&temp, 0, sizeof (temp));
      temp.opcode = MODIFY_EXPR;
      temp.type = TREE_TYPE (lhs);
      temp.op0 = lhs;
      temp.off = -1;
      result->safe_push (temp);
    }

  /* Copy the type, opcode, function, static chain and EH region, if any.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_fntype (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  if (gimple_call_internal_p (call))
    temp.clique = gimple_call_internal_fn (call);
  temp.op1 = gimple_call_chain (call);
  if (stmt_could_throw_p (cfun, call) && (lr = lookup_stmt_eh_lp (call)) > 0)
    temp.op2 = size_int (lr);
  temp.off = -1;
  result->safe_push (temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */

static bool
vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
			    unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  tree addr_base;
  poly_int64 addr_offset = 0;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (op->op0, 0),
					       &addr_offset, vn_valueize);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != TREE_OPERAND (op->op0, 0))
    {
      poly_offset_int off
	= (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
				  SIGNED)
	   + addr_offset);
      mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (tree_fits_shwi_p (mem_op->op0))
	mem_op->off = tree_to_shwi (mem_op->op0);
      else
	mem_op->off = -1;
      return true;
    }
  return false;
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */

static bool
vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
				     unsigned int *i_p)
{
  bool changed = false;
  vn_reference_op_t op;

  do
    {
      unsigned int i = *i_p;
      op = &(*ops)[i];
      vn_reference_op_t mem_op = &(*ops)[i - 1];
      gimple *def_stmt;
      enum tree_code code;
      poly_offset_int off;

      def_stmt = SSA_NAME_DEF_STMT (op->op0);
      if (!is_gimple_assign (def_stmt))
	return changed;

      code = gimple_assign_rhs_code (def_stmt);
      if (code != ADDR_EXPR
	  && code != POINTER_PLUS_EXPR)
	return changed;

      off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);

      /* The only thing we have to do is from &OBJ.foo.bar add the offset
	 from .foo.bar to the preceding MEM_REF offset and replace the
	 address with &OBJ.  */
      if (code == ADDR_EXPR)
	{
	  tree addr, addr_base;
	  poly_int64 addr_offset;

	  addr = gimple_assign_rhs1 (def_stmt);
	  addr_base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (addr, 0),
						       &addr_offset,
						       vn_valueize);
	  /* If that didn't work because the address isn't invariant propagate
	     the reference tree from the address operation in case the current
	     dereference isn't offsetted.  */
	  if (!addr_base
	      && *i_p == ops->length () - 1
	      && known_eq (off, 0)
	      /* This makes us disable this transform for PRE where the
		 reference ops might be also used for code insertion which
		 is invalid.  */
	      && default_vn_walk_kind == VN_WALKREWRITE)
	    {
	      auto_vec<vn_reference_op_s, 32> tem;
	      copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
	      /* Make sure to preserve TBAA info.  The only objects not
		 wrapped in MEM_REFs that can have their address taken are
		 STRING_CSTs.  */
	      if (tem.length () >= 2
		  && tem[tem.length () - 2].opcode == MEM_REF)
		{
		  vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
		  new_mem_op->op0
		    = wide_int_to_tree (TREE_TYPE (mem_op->op0),
					wi::to_poly_wide (new_mem_op->op0));
		}
	      else
		gcc_assert (tem.last ().opcode == STRING_CST);
	      ops->pop ();
	      ops->pop ();
	      ops->safe_splice (tem);
	      --*i_p;
	      return true;
	    }
	  if (!addr_base
	      || TREE_CODE (addr_base) != MEM_REF
	      || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base,
								    0))))
	    return changed;

	  off += addr_offset;
	  off += mem_ref_offset (addr_base);
	  op->op0 = TREE_OPERAND (addr_base, 0);
	}
      else
	{
	  tree ptr, ptroff;
	  ptr = gimple_assign_rhs1 (def_stmt);
	  ptroff = gimple_assign_rhs2 (def_stmt);
	  if (TREE_CODE (ptr) != SSA_NAME
	      || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
	      /* Make sure to not endlessly recurse.
		 See gcc.dg/tree-ssa/20040408-1.c for an example.  Can easily
		 happen when we value-number a PHI to its backedge value.  */
	      || SSA_VAL (ptr) == op->op0
	      || !poly_int_tree_p (ptroff))
	    return changed;

	  off += wi::to_poly_offset (ptroff);
	  op->op0 = ptr;
	}

      mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
      if (tree_fits_shwi_p (mem_op->op0))
	mem_op->off = tree_to_shwi (mem_op->op0);
      else
	mem_op->off = -1;
      /* ??? Can end up with endless recursion here!?
	 gcc.c-torture/execute/strcmp-1.c  */
      if (TREE_CODE (op->op0) == SSA_NAME)
	op->op0 = SSA_VAL (op->op0);
      if (TREE_CODE (op->op0) != SSA_NAME)
	op->opcode = TREE_CODE (op->op0);

      changed = true;
    }
  /* Tail-recurse.  */
  while (TREE_CODE (op->op0) == SSA_NAME);

  /* Fold a remaining *&.  */
  if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);

  return changed;
}
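
/* Sketch of the forward propagation above: given

     p_1 = &a.b;
     x_2 = MEM[p_1 + 4];

   the MEM_REF operand is rewritten to be based on &a with the offset
   of field b added to the constant 4, so accesses through the pointer
   value-number the same as direct accesses to a.  */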
/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  vec<vn_reference_op_s> operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = &operands[0];
  if (op->opcode == CALL_EXPR
      && (!op->op0
	  || (TREE_CODE (op->op0) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
	      && fndecl_built_in_p (TREE_OPERAND (op->op0, 0),
				    BUILT_IN_NORMAL)))
      && operands.length () >= 2
      && operands.length () <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = &operands[1];
      if (operands.length () > 2)
	arg1 = &operands[2];
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
	  || (arg0->opcode == ADDR_EXPR
	      && is_gimple_min_invariant (arg0->op0)))
	anyconst = true;
      if (arg1
	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
	      || (arg1->opcode == ADDR_EXPR
		  && is_gimple_min_invariant (arg1->op0))))
	anyconst = true;
      if (anyconst)
	{
	  combined_fn fn;
	  if (op->op0)
	    fn = as_combined_fn (DECL_FUNCTION_CODE
					(TREE_OPERAND (op->op0, 0)));
	  else
	    fn = as_combined_fn ((internal_fn) op->clique);
	  tree folded;
	  if (arg1)
	    folded = fold_const_call (fn, ref->type, arg0->op0, arg1->op0);
	  else
	    folded = fold_const_call (fn, ref->type, arg0->op0);
	  if (folded
	      && is_gimple_min_invariant (folded))
	    return folded;
	}
    }

  /* Simplify reads from constants or constant initializers.  */
  else if (BITS_PER_UNIT == 8
	   && ref->type
	   && COMPLETE_TYPE_P (ref->type)
	   && is_gimple_reg_type (ref->type))
    {
      poly_int64 off = 0;
      HOST_WIDE_INT size;
      if (INTEGRAL_TYPE_P (ref->type))
	size = TYPE_PRECISION (ref->type);
      else if (tree_fits_shwi_p (TYPE_SIZE (ref->type)))
	size = tree_to_shwi (TYPE_SIZE (ref->type));
      else
	return NULL_TREE;
      if (size % BITS_PER_UNIT != 0
	  || size > MAX_BITSIZE_MODE_ANY_MODE)
	return NULL_TREE;
      size /= BITS_PER_UNIT;
      unsigned i;
      for (i = 0; i < operands.length (); ++i)
	{
	  if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
	    {
	      ++i;
	      break;
	    }
	  if (known_eq (operands[i].off, -1))
	    return NULL_TREE;
	  off += operands[i].off;
	  if (operands[i].opcode == MEM_REF)
	    {
	      ++i;
	      break;
	    }
	}
      vn_reference_op_t base = &operands[--i];
      tree ctor = error_mark_node;
      tree decl = NULL_TREE;
      if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
	ctor = base->op0;
      else if (base->opcode == MEM_REF
	       && base[1].opcode == ADDR_EXPR
	       && (VAR_P (TREE_OPERAND (base[1].op0, 0))
		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
	{
	  decl = TREE_OPERAND (base[1].op0, 0);
	  if (TREE_CODE (decl) == STRING_CST)
	    ctor = decl;
	  else
	    ctor = ctor_for_folding (decl);
	}
      if (ctor == NULL_TREE)
	return build_zero_cst (ref->type);
      else if (ctor != error_mark_node)
	{
	  HOST_WIDE_INT const_off;
	  if (decl)
	    {
	      tree res = fold_ctor_reference (ref->type, ctor,
					      off * BITS_PER_UNIT,
					      size * BITS_PER_UNIT, decl);
	      if (res)
		{
		  STRIP_USELESS_TYPE_CONVERSION (res);
		  if (is_gimple_min_invariant (res))
		    return res;
		}
	    }
	  else if (off.is_constant (&const_off))
	    {
	      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
	      int len = native_encode_expr (ctor, buf, size, const_off);
	      if (len > 0)
		return native_interpret_expr (ref->type, buf, len);
	    }
	}
    }

  return NULL_TREE;
}
/* Return true if OPS contain a storage order barrier.  */

static bool
contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;

  FOR_EACH_VEC_ELT (ops, i, op)
    if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
      return true;

  return false;
}

/* Return true if OPS represent an access with reverse storage order.  */

static bool
reverse_storage_order_for_component_p (vec<vn_reference_op_s> ops)
{
  unsigned i = 0;
  if (ops[i].opcode == REALPART_EXPR || ops[i].opcode == IMAGPART_EXPR)
    ++i;
  switch (ops[i].opcode)
    {
    case ARRAY_REF:
    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case MEM_REF:
      return ops[i].reverse;
    default:
      return false;
    }
}
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static void
valueize_refs_1 (vec<vn_reference_op_s> *orig, bool *valueized_anything,
		 bool with_avail = false)
{
  *valueized_anything = false;

  for (unsigned i = 0; i < orig->length (); ++i)
    {
    re_valueize:
      vn_reference_op_t vro = &(*orig)[i];
      if (vro->opcode == SSA_NAME
	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
	{
	  tree tem = with_avail ? vn_valueize (vro->op0) : SSA_VAL (vro->op0);
	  if (tem != vro->op0)
	    {
	      *valueized_anything = true;
	      vro->op0 = tem;
	    }
	  /* If it transforms from an SSA_NAME to a constant, update
	     the opcode.  */
	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
	    vro->opcode = TREE_CODE (vro->op0);
	}
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
	{
	  tree tem = with_avail ? vn_valueize (vro->op1) : SSA_VAL (vro->op1);
	  if (tem != vro->op1)
	    {
	      *valueized_anything = true;
	      vro->op1 = tem;
	    }
	}
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
	{
	  tree tem = with_avail ? vn_valueize (vro->op2) : SSA_VAL (vro->op2);
	  if (tem != vro->op2)
	    {
	      *valueized_anything = true;
	      vro->op2 = tem;
	    }
	}
      /* If it transforms from an SSA_NAME to an address, fold with
	 a preceding indirect reference.  */
      if (i > 0
	  && vro->op0
	  && TREE_CODE (vro->op0) == ADDR_EXPR
	  && (*orig)[i - 1].opcode == MEM_REF)
	{
	  if (vn_reference_fold_indirect (orig, &i))
	    *valueized_anything = true;
	}
      else if (i > 0
	       && vro->opcode == SSA_NAME
	       && (*orig)[i - 1].opcode == MEM_REF)
	{
	  if (vn_reference_maybe_forwprop_address (orig, &i))
	    {
	      *valueized_anything = true;
	      /* Re-valueize the current operand.  */
	      goto re_valueize;
	    }
	}
      /* If it transforms a non-constant ARRAY_REF into a constant
	 one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
	       && known_eq (vro->off, -1)
	       && poly_int_tree_p (vro->op0)
	       && poly_int_tree_p (vro->op1)
	       && TREE_CODE (vro->op2) == INTEGER_CST)
	{
	  poly_offset_int off = ((wi::to_poly_offset (vro->op0)
				  - wi::to_poly_offset (vro->op1))
				 * wi::to_offset (vro->op2)
				 * vn_ref_op_align_unit (vro));
	  off.to_shwi (&vro->off);
	}
    }
}
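
/* Sketch of the effect of valueization: if i_2 is known to have the
   constant value 3, the recorded ops for a[i_2] are rewritten in place
   to use 3, and the ARRAY_REF's off field becomes
   (3 - low-bound) * element-size, so the access can now hash and
   compare equal to an explicit a[3].  */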
static void
valueize_refs (vec<vn_reference_op_s> *orig)
{
  bool tem;
  valueize_refs_1 (orig, &tem);
}

static vec<vn_reference_op_s> shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  valueize_refs_1 (&shared_lookup_references, valueized_anything);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_call (gcall *call)
{
  if (!call)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  valueize_refs (&shared_lookup_references);
  return shared_lookup_references;
}
/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  vn_reference_s **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
	*vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}
/* Partial definition tracking support.  */

struct pd_range
{
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
};

struct pd_data
{
  tree rhs;
  HOST_WIDE_INT rhs_off;
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
};
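
/* Sketch of the encoding: a store "x.b = 7" hitting bits 16..31 of a
   looked-up 64-bit read would be tracked as a pd_data with rhs being
   the constant 7, rhs_off 0, offset 16 and size 16 (bit positions
   relative to the start of the lookup).  */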
/* Context for alias walking.  */

struct vn_walk_cb_data
{
  vn_walk_cb_data (vn_reference_t vr_, tree orig_ref_, tree *last_vuse_ptr_,
		   vn_lookup_kind vn_walk_kind_, bool tbaa_p_, tree mask_,
		   bool redundant_store_removal_p_)
    : vr (vr_), last_vuse_ptr (last_vuse_ptr_), last_vuse (NULL_TREE),
      mask (mask_), masked_result (NULL_TREE), same_val (NULL_TREE),
      vn_walk_kind (vn_walk_kind_),
      tbaa_p (tbaa_p_), redundant_store_removal_p (redundant_store_removal_p_),
      saved_operands (vNULL), first_set (-2), first_base_set (-2),
      known_ranges (NULL)
  {
    if (!last_vuse_ptr)
      last_vuse_ptr = &last_vuse;
    ao_ref_init (&orig_ref, orig_ref_);
    if (mask)
      {
	wide_int w = wi::to_wide (mask);
	unsigned int pos = 0, prec = w.get_precision ();
	pd_data pd;
	pd.rhs = build_constructor (NULL_TREE, NULL);
	pd.rhs_off = 0;
	/* When bitwise and with a constant is done on a memory load,
	   we don't really need all the bits to be defined or defined
	   to constants, we don't really care what is in the position
	   corresponding to 0 bits in the mask.
	   So, push the ranges of those 0 bits in the mask as artificial
	   zero stores and let the partial def handling code do the
	   rest.  */
	while (pos < prec)
	  {
	    int tz = wi::ctz (w);
	    if (pos + tz > prec)
	      tz = prec - pos;
	    if (tz)
	      {
		if (BYTES_BIG_ENDIAN)
		  pd.offset = prec - pos - tz;
		else
		  pd.offset = pos;
		pd.size = tz;
		void *r = push_partial_def (pd, 0, 0, 0, prec);
		gcc_assert (r == NULL_TREE);
	      }
	    pos += tz;
	    if (pos == prec)
	      break;
	    w = wi::lrshift (w, tz);
	    tz = wi::ctz (wi::bit_not (w));
	    if (pos + tz > prec)
	      tz = prec - pos;
	    pos += tz;
	    w = wi::lrshift (w, tz);
	  }
      }
  }
  ~vn_walk_cb_data ();
  void *finish (alias_set_type, alias_set_type, tree);
  void *push_partial_def (pd_data pd,
			  alias_set_type, alias_set_type, HOST_WIDE_INT,
			  HOST_WIDE_INT);

  vn_reference_t vr;
  ao_ref orig_ref;
  tree *last_vuse_ptr;
  tree last_vuse;
  tree mask;
  tree masked_result;
  tree same_val;
  vn_lookup_kind vn_walk_kind;
  bool tbaa_p;
  bool redundant_store_removal_p;
  vec<vn_reference_op_s> saved_operands;

  /* The VDEFs of partial defs we come along.  */
  auto_vec<pd_data, 2> partial_defs;
  /* The first defs range to avoid splay tree setup in most cases.  */
  pd_range first_range;
  alias_set_type first_set;
  alias_set_type first_base_set;
  splay_tree known_ranges;
  obstack ranges_obstack;
  static constexpr HOST_WIDE_INT bufsize = 64;
};
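
/* Sketch of the mask seeding in the constructor above: for a load
   combined with a mask of 0xff00, an artificial zero partial def is
   pushed for bits 0-7, so only bits 8-15 have to be provided by real
   (partial) stores for the combined lookup to succeed.  */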
vn_walk_cb_data::~vn_walk_cb_data ()
{
  if (known_ranges)
    {
      splay_tree_delete (known_ranges);
      obstack_free (&ranges_obstack, NULL);
    }
  saved_operands.release ();
}

void *
vn_walk_cb_data::finish (alias_set_type set, alias_set_type base_set, tree val)
{
  if (first_set != -2)
    {
      set = first_set;
      base_set = first_base_set;
    }
  if (mask)
    {
      masked_result = val;
      return (void *) -1;
    }
  if (same_val && !operand_equal_p (val, same_val))
    return (void *) -1;
  vec<vn_reference_op_s> &operands
    = saved_operands.exists () ? saved_operands : vr->operands;
  return vn_reference_lookup_or_insert_for_pieces (last_vuse, set, base_set,
						   vr->type, operands, val);
}
/* pd_range splay-tree helpers.  */

static int
pd_range_compare (splay_tree_key offset1p, splay_tree_key offset2p)
{
  HOST_WIDE_INT offset1 = *(HOST_WIDE_INT *)offset1p;
  HOST_WIDE_INT offset2 = *(HOST_WIDE_INT *)offset2p;
  if (offset1 < offset2)
    return -1;
  else if (offset1 > offset2)
    return 1;
  return 0;
}

static void *
pd_tree_alloc (int size, void *data_)
{
  vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
  return obstack_alloc (&data->ranges_obstack, size);
}

static void
pd_tree_dealloc (void *, void *)
{
}
1966 /* Push PD to the vector of partial definitions returning a
1967 value when we are ready to combine things with VUSE, SET and MAXSIZEI,
1968 NULL when we want to continue looking for partial defs or -1
1972 vn_walk_cb_data::push_partial_def (pd_data pd
,
1973 alias_set_type set
, alias_set_type base_set
,
1974 HOST_WIDE_INT offseti
,
1975 HOST_WIDE_INT maxsizei
)
1977 /* We're using a fixed buffer for encoding so fail early if the object
1978 we want to interpret is bigger. */
1979 if (maxsizei
> bufsize
* BITS_PER_UNIT
1981 || BITS_PER_UNIT
!= 8
1982 /* Not prepared to handle PDP endian. */
1983 || BYTES_BIG_ENDIAN
!= WORDS_BIG_ENDIAN
)
1986 /* Turn too large constant stores into non-constant stores. */
1987 if (CONSTANT_CLASS_P (pd
.rhs
) && pd
.size
> bufsize
* BITS_PER_UNIT
)
1988 pd
.rhs
= error_mark_node
;
1990 /* And for non-constant or CONSTRUCTOR stores shrink them to only keep at
1991 most a partial byte before and/or after the region. */
1992 if (!CONSTANT_CLASS_P (pd
.rhs
))
1994 if (pd
.offset
< offseti
)
1996 HOST_WIDE_INT o
= ROUND_DOWN (offseti
- pd
.offset
, BITS_PER_UNIT
);
1997 gcc_assert (pd
.size
> o
);
2001 if (pd
.size
> maxsizei
)
2002 pd
.size
= maxsizei
+ ((pd
.size
- maxsizei
) % BITS_PER_UNIT
);
2005 pd
.offset
-= offseti
;
2007 bool pd_constant_p
= (TREE_CODE (pd
.rhs
) == CONSTRUCTOR
2008 || CONSTANT_CLASS_P (pd
.rhs
));
2010 if (partial_defs
.is_empty ())
2012 /* If we get a clobber upfront, fail. */
2013 if (TREE_CLOBBER_P (pd
.rhs
))
2017 partial_defs
.safe_push (pd
);
2018 first_range
.offset
= pd
.offset
;
2019 first_range
.size
= pd
.size
;
2021 first_base_set
= base_set
;
2022 last_vuse_ptr
= NULL
;
2024 /* Go check if the first partial definition was a full one in case
2025 the caller didn't optimize for this. */
2031 /* ??? Optimize the case where the 2nd partial def completes
2033 gcc_obstack_init (&ranges_obstack
);
2034 known_ranges
= splay_tree_new_with_allocator (pd_range_compare
, 0, 0,
2036 pd_tree_dealloc
, this);
2037 splay_tree_insert (known_ranges
,
2038 (splay_tree_key
)&first_range
.offset
,
2039 (splay_tree_value
)&first_range
);
2042 pd_range newr
= { pd
.offset
, pd
.size
};
2044 /* Lookup the predecessor of offset + 1 and see if we need to merge. */
2045 HOST_WIDE_INT loffset
= newr
.offset
+ 1;
2046 if ((n
= splay_tree_predecessor (known_ranges
, (splay_tree_key
)&loffset
))
2047 && ((r
= (pd_range
*)n
->value
), true)
2048 && ranges_known_overlap_p (r
->offset
, r
->size
+ 1,
2049 newr
.offset
, newr
.size
))
2051 /* Ignore partial defs already covered. Here we also drop shadowed
2052 clobbers arriving here at the floor. */
2053 if (known_subrange_p (newr
.offset
, newr
.size
, r
->offset
, r
->size
))
2056 = MAX (r
->offset
+ r
->size
, newr
.offset
+ newr
.size
) - r
->offset
;
2060 /* newr.offset wasn't covered yet, insert the range. */
2061 r
= XOBNEW (&ranges_obstack
, pd_range
);
2063 splay_tree_insert (known_ranges
, (splay_tree_key
)&r
->offset
,
2064 (splay_tree_value
)r
);
2066 /* Merge r which now contains newr and is a member of the splay tree with
2067 adjacent overlapping ranges. */
2069 while ((n
= splay_tree_successor (known_ranges
,
2070 (splay_tree_key
)&r
->offset
))
2071 && ((rafter
= (pd_range
*)n
->value
), true)
2072 && ranges_known_overlap_p (r
->offset
, r
->size
+ 1,
2073 rafter
->offset
, rafter
->size
))
2075 r
->size
= MAX (r
->offset
+ r
->size
,
2076 rafter
->offset
+ rafter
->size
) - r
->offset
;
2077 splay_tree_remove (known_ranges
, (splay_tree_key
)&rafter
->offset
);
2079 /* If we get a clobber, fail. */
2080 if (TREE_CLOBBER_P (pd
.rhs
))
2082 /* Non-constants are OK as long as they are shadowed by a constant. */
2085 partial_defs
.safe_push (pd
);
2088 /* Now we have merged newr into the range tree. When we have covered
2089 [offseti, sizei] then the tree will contain exactly one node which has
2090 the desired properties and it will be 'r'. */
2091 if (!known_subrange_p (0, maxsizei
, r
->offset
, r
->size
))
2092 /* Continue looking for partial defs. */
2095 /* Now simply native encode all partial defs in reverse order. */
2096 unsigned ndefs
= partial_defs
.length ();
2097 /* We support up to 512-bit values (for V8DFmode). */
2098 unsigned char buffer
[bufsize
+ 1];
2099 unsigned char this_buffer
[bufsize
+ 1];
2102 memset (buffer
, 0, bufsize
+ 1);
2103 unsigned needed_len
= ROUND_UP (maxsizei
, BITS_PER_UNIT
) / BITS_PER_UNIT
;
2104 while (!partial_defs
.is_empty ())
2106 pd_data pd
= partial_defs
.pop ();
2108 if (TREE_CODE (pd
.rhs
) == CONSTRUCTOR
)
2110 /* Empty CONSTRUCTOR. */
2111 if (pd
.size
>= needed_len
* BITS_PER_UNIT
)
2114 len
= ROUND_UP (pd
.size
, BITS_PER_UNIT
) / BITS_PER_UNIT
;
2115 memset (this_buffer
, 0, len
);
2117 else if (pd
.rhs_off
>= 0)
2119 len
= native_encode_expr (pd
.rhs
, this_buffer
, bufsize
,
2120 (MAX (0, -pd
.offset
)
2121 + pd
.rhs_off
) / BITS_PER_UNIT
);
2123 || len
< (ROUND_UP (pd
.size
, BITS_PER_UNIT
) / BITS_PER_UNIT
2124 - MAX (0, -pd
.offset
) / BITS_PER_UNIT
))
2126 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2127 fprintf (dump_file
, "Failed to encode %u "
2128 "partial definitions\n", ndefs
);
2132 else /* negative pd.rhs_off indicates we want to chop off first bits */
2134 if (-pd
.rhs_off
>= bufsize
)
2136 len
= native_encode_expr (pd
.rhs
,
2137 this_buffer
+ -pd
.rhs_off
/ BITS_PER_UNIT
,
2138 bufsize
- -pd
.rhs_off
/ BITS_PER_UNIT
,
2139 MAX (0, -pd
.offset
) / BITS_PER_UNIT
);
2141 || len
< (ROUND_UP (pd
.size
, BITS_PER_UNIT
) / BITS_PER_UNIT
2142 - MAX (0, -pd
.offset
) / BITS_PER_UNIT
))
2144 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2145 fprintf (dump_file
, "Failed to encode %u "
2146 "partial definitions\n", ndefs
);
2151 unsigned char *p
= buffer
;
2152 HOST_WIDE_INT size
= pd
.size
;
2154 size
-= ROUND_DOWN (-pd
.offset
, BITS_PER_UNIT
);
2155 this_buffer
[len
] = 0;
2156 if (BYTES_BIG_ENDIAN
)
2158 /* LSB of this_buffer[len - 1] byte should be at
2159 pd.offset + pd.size - 1 bits in buffer. */
2160 amnt
= ((unsigned HOST_WIDE_INT
) pd
.offset
2161 + pd
.size
) % BITS_PER_UNIT
;
2163 shift_bytes_in_array_right (this_buffer
, len
+ 1, amnt
);
2164 unsigned char *q
= this_buffer
;
2165 unsigned int off
= 0;
2169 off
= pd
.offset
/ BITS_PER_UNIT
;
2170 gcc_assert (off
< needed_len
);
2174 msk
= ((1 << size
) - 1) << (BITS_PER_UNIT
- amnt
);
2175 *p
= (*p
& ~msk
) | (this_buffer
[len
] & msk
);
2180 if (TREE_CODE (pd
.rhs
) != CONSTRUCTOR
)
2181 q
= (this_buffer
+ len
2182 - (ROUND_UP (size
- amnt
, BITS_PER_UNIT
)
2184 if (pd
.offset
% BITS_PER_UNIT
)
2186 msk
= -1U << (BITS_PER_UNIT
2187 - (pd
.offset
% BITS_PER_UNIT
));
2188 *p
= (*p
& msk
) | (*q
& ~msk
);
2192 size
-= BITS_PER_UNIT
- (pd
.offset
% BITS_PER_UNIT
);
2193 gcc_assert (size
>= 0);
2197 else if (TREE_CODE (pd
.rhs
) != CONSTRUCTOR
)
2199 q
= (this_buffer
+ len
2200 - (ROUND_UP (size
- amnt
, BITS_PER_UNIT
)
2202 if (pd
.offset
% BITS_PER_UNIT
)
2205 size
-= BITS_PER_UNIT
- ((unsigned HOST_WIDE_INT
) pd
.offset
2207 gcc_assert (size
>= 0);
2210 if ((unsigned HOST_WIDE_INT
) size
/ BITS_PER_UNIT
+ off
2212 size
= (needed_len
- off
) * BITS_PER_UNIT
;
2213 memcpy (p
, q
, size
/ BITS_PER_UNIT
);
2214 if (size
% BITS_PER_UNIT
)
2217 = -1U << (BITS_PER_UNIT
- (size
% BITS_PER_UNIT
));
2218 p
+= size
/ BITS_PER_UNIT
;
2219 q
+= size
/ BITS_PER_UNIT
;
2220 *p
= (*q
& msk
) | (*p
& ~msk
);
	    }
	  else
	    {
	      if (pd.offset >= 0)
		{
		  /* LSB of this_buffer[0] byte should be at pd.offset bits
		     in buffer.  */
		  unsigned int msk;
		  size = MIN (size, (HOST_WIDE_INT) needed_len * BITS_PER_UNIT);
		  amnt = pd.offset % BITS_PER_UNIT;
		  if (amnt)
		    shift_bytes_in_array_left (this_buffer, len + 1, amnt);
		  unsigned int off = pd.offset / BITS_PER_UNIT;
		  gcc_assert (off < needed_len);
		  size = MIN (size,
			      (HOST_WIDE_INT) (needed_len - off) * BITS_PER_UNIT);
		  p = buffer + off;
		  if (amnt + size < BITS_PER_UNIT)
		    {
		      /* Low amnt bits come from *p, then size bits
			 from this_buffer[0] and the remaining again from
			 *p.  */
		      msk = ((1 << size) - 1) << amnt;
		      *p = (*p & ~msk) | (this_buffer[0] & msk);
		      size = 0;
		    }
		  else if (amnt)
		    {
		      msk = -1U << amnt;
		      *p = (*p & ~msk) | (this_buffer[0] & msk);
		      p++;
		      size -= (BITS_PER_UNIT - amnt);
		    }
		}
	      else
		{
		  amnt = (unsigned HOST_WIDE_INT) pd.offset % BITS_PER_UNIT;
		  if (amnt)
		    size -= BITS_PER_UNIT - amnt;
		  size = MIN (size, (HOST_WIDE_INT) needed_len * BITS_PER_UNIT);
		  if (amnt)
		    shift_bytes_in_array_left (this_buffer, len + 1, amnt);
		}
	      memcpy (p, this_buffer + (amnt != 0), size / BITS_PER_UNIT);
	      p += size / BITS_PER_UNIT;
	      if (size % BITS_PER_UNIT)
		{
		  unsigned int msk = -1U << (size % BITS_PER_UNIT);
		  *p = (this_buffer[(amnt != 0) + size / BITS_PER_UNIT]
			& ~msk) | (*p & msk);
		}
	    }
	}
      tree type = vr->type;
      /* Make sure to interpret in a type that has a range covering the whole
	 access size.  */
      if (INTEGRAL_TYPE_P (vr->type) && maxsizei != TYPE_PRECISION (vr->type))
	type = build_nonstandard_integer_type (maxsizei, TYPE_UNSIGNED (type));
      tree val;
      if (BYTES_BIG_ENDIAN)
	{
	  unsigned sz = needed_len;
	  if (maxsizei % BITS_PER_UNIT)
	    shift_bytes_in_array_right (buffer, needed_len,
					BITS_PER_UNIT
					- (maxsizei % BITS_PER_UNIT));
	  if (INTEGRAL_TYPE_P (type))
	    {
	      if (TYPE_MODE (type) != BLKmode)
		sz = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
	      else
		sz = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (type));
	    }
	  if (sz > needed_len)
	    {
	      memcpy (this_buffer + (sz - needed_len), buffer, needed_len);
	      val = native_interpret_expr (type, this_buffer, sz);
	    }
	  else
	    val = native_interpret_expr (type, buffer, needed_len);
	}
      else
	val = native_interpret_expr (type, buffer, bufsize);
      /* If we chop off bits because the type's precision doesn't match the
	 memory access size this is ok when optimizing reads but not when
	 called from the DSE code during elimination.  */
      if (val && type != vr->type)
	{
	  if (! int_fits_type_p (val, vr->type))
	    val = NULL_TREE;
	  else
	    val = fold_convert (vr->type, val);
	}

      if (val)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "Successfully combined %u partial definitions\n", ndefs);
	  /* We are using the alias-set of the first store we encounter which
	     should be appropriate here.  */
	  return finish (first_set, first_base_set, val);
	}
      else
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "Failed to interpret %u encoded partial definitions\n",
		     ndefs);
	  return (void *)-1;
	}
    }
/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op, tree vuse, void *data_)
{
  vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
  vn_reference_t vr = data->vr;
  vn_reference_s **slot;
  hashval_t hash;

  /* If we have partial definitions recorded we have to go through
     vn_reference_lookup_3.  */
  if (!data->partial_defs.is_empty ())
    return NULL;

  if (data->last_vuse_ptr)
    {
      *data->last_vuse_ptr = vuse;
      data->last_vuse = vuse;
    }

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = vuse_ssa_val (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    {
      if ((*slot)->result && data->saved_operands.exists ())
	return data->finish (vr->set, vr->base_set, (*slot)->result);
      return *slot;
    }

  if (SSA_NAME_IS_DEFAULT_DEF (vuse))
    {
      HOST_WIDE_INT op_offset, op_size;
      tree v = NULL_TREE;
      tree base = ao_ref_base (op);

      if (base
	  && op->offset.is_constant (&op_offset)
	  && op->size.is_constant (&op_size)
	  && op->max_size_known_p ()
	  && known_eq (op->size, op->max_size))
	{
	  if (TREE_CODE (base) == PARM_DECL)
	    v = ipcp_get_aggregate_const (cfun, base, false, op_offset,
					  op_size);
	  else if (TREE_CODE (base) == MEM_REF
		   && integer_zerop (TREE_OPERAND (base, 1))
		   && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
		   && SSA_NAME_IS_DEFAULT_DEF (TREE_OPERAND (base, 0))
		   && (TREE_CODE (SSA_NAME_VAR (TREE_OPERAND (base, 0)))
		       == PARM_DECL))
	    v = ipcp_get_aggregate_const (cfun,
					  SSA_NAME_VAR (TREE_OPERAND (base, 0)),
					  true, op_offset, op_size);
	}
      if (v)
	return data->finish (vr->set, vr->base_set, v);
    }

  return NULL;
}
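
/* Note on the hash fixup above: vn_reference_compute_hash folds the SSA
   version of the VUSE into the hash additively, which is what allows
   swapping in a new VUSE by subtracting the old SSA_NAME_VERSION and
   adding the new one instead of recomputing the hash of the whole
   reference.  */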
/* Lookup an existing or insert a new vn_reference entry into the
   value table for the VUSE, SET, TYPE, OPERANDS reference which
   has the value VALUE which is either a constant or an SSA name.  */

static vn_reference_t
vn_reference_lookup_or_insert_for_pieces (tree vuse,
					  alias_set_type set,
					  alias_set_type base_set,
					  tree type,
					  vec<vn_reference_op_s,
					      va_heap> operands,
					  tree value)
{
  struct vn_reference_s vr1;
  vn_reference_t result;
  unsigned value_id;
  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = operands;
  vr1.type = type;
  vr1.set = set;
  vr1.base_set = base_set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if (vn_reference_lookup_1 (&vr1, &result))
    return result;
  if (TREE_CODE (value) == SSA_NAME)
    value_id = VN_INFO (value)->value_id;
  else
    value_id = get_or_alloc_constant_value_id (value);
  return vn_reference_insert_pieces (vuse, set, base_set, type,
				     operands.copy (), value, value_id);
}
/* Return a value-number for RCODE OPS... either by looking up an existing
   value-number for the possibly simplified result or by inserting the
   operation if INSERT is true.  If SIMPLIFY is false, return a value
   number for the unsimplified expression.  */

static tree
vn_nary_build_or_lookup_1 (gimple_match_op *res_op, bool insert,
			   bool simplify)
{
  tree result = NULL_TREE;
  /* We will be creating a value number for
       RCODE (OPS...).
     So first simplify and lookup this expression to see if it
     is already available.  */
  /* For simplification valueize.  */
  bool res = false;
  unsigned int i;
  for (i = 0; i < res_op->num_ops; ++i)
    if (TREE_CODE (res_op->ops[i]) == SSA_NAME)
      {
	tree tem = vn_valueize (res_op->ops[i]);
	if (!tem)
	  break;
	res_op->ops[i] = tem;
      }
  /* If valueization of an operand fails (it is not available), skip
     simplification.  */
  if (i == res_op->num_ops)
    {
      mprts_hook = vn_lookup_simplify_result;
      res = res_op->resimplify (NULL, vn_valueize);
      mprts_hook = NULL;
    }
  gimple *new_stmt = NULL;
  if (res
      && gimple_simplified_result_is_gimple_val (res_op))
    {
      /* The expression is already available.  */
      result = res_op->ops[0];
      /* Valueize it, simplification returns sth in AVAIL only.  */
      if (TREE_CODE (result) == SSA_NAME)
	result = SSA_VAL (result);
    }
  else
    {
      tree val = vn_lookup_simplify_result (res_op);
      if (!val && insert)
	{
	  gimple_seq stmts = NULL;
	  result = maybe_push_res_to_seq (res_op, &stmts);
	  if (result)
	    {
	      gcc_assert (gimple_seq_singleton_p (stmts));
	      new_stmt = gimple_seq_first_stmt (stmts);
	    }
	}
      else
	/* The expression is already available.  */
	result = val;
    }
  if (new_stmt)
    {
      /* The expression is not yet available, value-number lhs to
	 the new SSA_NAME we created.  */
      /* Initialize value-number information properly.  */
      vn_ssa_aux_t result_info = VN_INFO (result);
      result_info->valnum = result;
      result_info->value_id = get_next_value_id ();
      result_info->visited = 1;
      gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
					  new_stmt);
      result_info->needs_insertion = true;
      /* ??? PRE phi-translation inserts NARYs without corresponding
	 SSA name result.  Re-use those but set their result according
	 to the stmt we just built.  */
      vn_nary_op_t nary = NULL;
      vn_nary_op_lookup_stmt (new_stmt, &nary);
      if (nary)
	{
	  gcc_assert (! nary->predicated_values
		      && nary->u.result == NULL_TREE);
	  nary->u.result = gimple_assign_lhs (new_stmt);
	}
      /* As all "inserted" statements are singleton SCCs, insert
	 to the valid table.  This is strictly needed to
	 avoid re-generating new value SSA_NAMEs for the same
	 expression during SCC iteration over and over (the
	 optimistic table gets cleared after each iteration).
	 We do not need to insert into the optimistic table, as
	 lookups there will fall back to the valid table.  */
      else
	{
	  unsigned int length = vn_nary_length_from_stmt (new_stmt);
	  vn_nary_op_t vno1
	    = alloc_vn_nary_op_noinit (length, &vn_tables_insert_obstack);
	  vno1->value_id = result_info->value_id;
	  vno1->length = length;
	  vno1->predicated_values = 0;
	  vno1->u.result = result;
	  init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (new_stmt));
	  vn_nary_op_insert_into (vno1, valid_info->nary);
	  /* Also do not link it into the undo chain.  */
	  last_inserted_nary = vno1->next;
	  vno1->next = (vn_nary_op_t)(void *)-1;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Inserting name ");
	  print_generic_expr (dump_file, result);
	  fprintf (dump_file, " for expression ");
	  print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}
    }
  return result;
}
/* Return a value-number for RCODE OPS... either by looking up an existing
   value-number for the simplified result or by inserting the operation.  */

static tree
vn_nary_build_or_lookup (gimple_match_op *res_op)
{
  return vn_nary_build_or_lookup_1 (res_op, true, true);
}

/* Try to simplify the expression RCODE OPS... of type TYPE and return
   its value if present.  */

tree
vn_nary_simplify (vn_nary_op_t nary)
{
  if (nary->length > gimple_match_op::MAX_NUM_OPS)
    return NULL_TREE;
  gimple_match_op op (gimple_match_cond::UNCOND, nary->opcode,
		      nary->type, nary->length);
  memcpy (op.ops, nary->op, sizeof (tree) * nary->length);
  return vn_nary_build_or_lookup_1 (&op, false, true);
}
/* Elimination engine.  */

class eliminate_dom_walker : public dom_walker
{
public:
  eliminate_dom_walker (cdi_direction, bitmap);
  ~eliminate_dom_walker ();

  edge before_dom_children (basic_block) final override;
  void after_dom_children (basic_block) final override;

  virtual tree eliminate_avail (basic_block, tree op);
  virtual void eliminate_push_avail (basic_block, tree op);
  tree eliminate_insert (basic_block, gimple_stmt_iterator *gsi, tree val);

  void eliminate_stmt (basic_block, gimple_stmt_iterator *);

  unsigned eliminate_cleanup (bool region_p = false);

  bool do_pre;
  unsigned int el_todo;
  unsigned int eliminations;
  unsigned int insertions;

  /* SSA names that had their defs inserted by PRE if do_pre.  */
  bitmap inserted_exprs;

  /* Blocks with statements that have had their EH properties changed.  */
  bitmap need_eh_cleanup;

  /* Blocks with statements that have had their AB properties changed.  */
  bitmap need_ab_cleanup;

  /* Local state for the eliminate domwalk.  */
  auto_vec<gimple *> to_remove;
  auto_vec<gimple *> to_fixup;
  auto_vec<tree> avail;
  auto_vec<tree> avail_stack;
};

/* Adaptor to the elimination engine using RPO availability.  */

class rpo_elim : public eliminate_dom_walker
{
public:
  rpo_elim (basic_block entry_)
    : eliminate_dom_walker (CDI_DOMINATORS, NULL), entry (entry_),
      m_avail_freelist (NULL) {}

  tree eliminate_avail (basic_block, tree op) final override;

  void eliminate_push_avail (basic_block, tree) final override;

  basic_block entry;
  /* Freelist of avail entries which are allocated from the vn_ssa_aux
     obstack.  */
  vn_avail *m_avail_freelist;
};

/* Global RPO state for access from hooks.  */
static eliminate_dom_walker *rpo_avail;
basic_block vn_context_bb;
/* Return true if BASE1 and BASE2 can be adjusted so they have the
   same address and adjust *OFFSET1 and *OFFSET2 accordingly.
   Otherwise return false.  */

static bool
adjust_offsets_for_equal_base_address (tree base1, poly_int64 *offset1,
				       tree base2, poly_int64 *offset2)
{
  if (TREE_CODE (base1) == MEM_REF
      && TREE_CODE (base2) == MEM_REF)
    {
      HOST_WIDE_INT soff;
      if (mem_ref_offset (base1).to_shwi (&soff))
	{
	  base1 = TREE_OPERAND (base1, 0);
	  *offset1 += soff * BITS_PER_UNIT;
	}
      if (mem_ref_offset (base2).to_shwi (&soff))
	{
	  base2 = TREE_OPERAND (base2, 0);
	  *offset2 += soff * BITS_PER_UNIT;
	}
      return operand_equal_p (base1, base2, 0);
    }
  return operand_equal_p (base1, base2, OEP_ADDRESS_OF);
}
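
/* For example, with BASE1 == MEM_REF[p_1, 4] and BASE2 == MEM_REF[p_1, 8]
   the constant MEM_REF offsets are folded into *OFFSET1 and *OFFSET2
   (scaled to bits, so 32 resp. 64 with 8-bit units) and the remaining
   bases p_1 compare equal.  */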
/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  If *DISAMBIGUATE_ONLY is true then do not perform translation
   of *REF and *VR.  If only disambiguation was performed then
   *DISAMBIGUATE_ONLY is set to true.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
		       translate_flags *disambiguate_only)
{
  vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
  vn_reference_t vr = data->vr;
  gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree base = ao_ref_base (ref);
  HOST_WIDE_INT offseti = 0, maxsizei, sizei = 0;
  static vec<vn_reference_op_s> lhs_ops;
  ao_ref lhs_ref;
  bool lhs_ref_ok = false;
  poly_int64 copy_size;
  /* First try to disambiguate after value-replacing in the definition's
     LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      bool valueized_anything = false;
      /* Avoid re-allocation overhead.  */
      lhs_ops.truncate (0);
      basic_block saved_rpo_bb = vn_context_bb;
      vn_context_bb = gimple_bb (def_stmt);
      if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE)
	{
	  copy_reference_ops_from_ref (lhs, &lhs_ops);
	  valueize_refs_1 (&lhs_ops, &valueized_anything, true);
	}
      vn_context_bb = saved_rpo_bb;
      ao_ref_init (&lhs_ref, lhs);
      lhs_ref_ok = true;
      if (valueized_anything
	  && ao_ref_init_from_vn_reference
	       (&lhs_ref, ao_ref_alias_set (&lhs_ref),
		ao_ref_base_alias_set (&lhs_ref), TREE_TYPE (lhs), lhs_ops)
	  && !refs_may_alias_p_1 (ref, &lhs_ref, data->tbaa_p))
	{
	  *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
	  return NULL;
	}

      /* When the def is a CLOBBER we can optimistically disambiguate
	 against it since any overlap would be undefined behavior.
	 Avoid this for obvious must aliases to save compile-time though.
	 We also may not do this when the query is used for redundant
	 store removal.  */
      if (!data->redundant_store_removal_p
	  && gimple_clobber_p (def_stmt)
	  && !operand_equal_p (ao_ref_base (&lhs_ref), base, OEP_ADDRESS_OF))
	{
	  *disambiguate_only = TR_DISAMBIGUATE;
	  return NULL;
	}

      /* Besides valueizing the LHS we can also use access-path based
	 disambiguation on the original non-valueized ref.  */
      if (!ref->ref
	  && lhs_ref_ok
	  && data->orig_ref.ref)
	{
	  /* We want to use the non-valueized LHS for this, but avoid redundant
	     work.  */
	  ao_ref *lref = &lhs_ref;
	  ao_ref lref_alt;
	  if (valueized_anything)
	    {
	      ao_ref_init (&lref_alt, lhs);
	      lref = &lref_alt;
	    }
	  if (!refs_may_alias_p_1 (&data->orig_ref, lref, data->tbaa_p))
	    {
	      *disambiguate_only = (valueized_anything
				    ? TR_VALUEIZE_AND_DISAMBIGUATE
				    : TR_DISAMBIGUATE);
	      return NULL;
	    }
	}

      /* If we reach a clobbering statement try to skip it and see if
	 we find a VN result with exactly the same value as the
	 possible clobber.  In this case we can ignore the clobber
	 and return the found value.  */
      if (is_gimple_reg_type (TREE_TYPE (lhs))
	  && types_compatible_p (TREE_TYPE (lhs), vr->type)
	  && (ref->ref || data->orig_ref.ref)
	  && !data->mask
	  && data->partial_defs.is_empty ()
	  && multiple_p (get_object_alignment
			   (ref->ref ? ref->ref : data->orig_ref.ref),
			 ref->size)
	  && multiple_p (get_object_alignment (lhs), ref->size))
	{
	  tree rhs = gimple_assign_rhs1 (def_stmt);
	  /* ??? We may not compare to ahead values which might be from
	     a different loop iteration but only to loop invariants.  Use
	     CONSTANT_CLASS_P (unvalueized!) as conservative approximation.
	     The one-hop lookup below doesn't have this issue since there's
	     a virtual PHI before we ever reach a backedge to cross.
	     We can skip multiple defs as long as they are from the same
	     value though.  */
	  if (data->same_val
	      && !operand_equal_p (data->same_val, rhs))
	    ;
	  else if (CONSTANT_CLASS_P (rhs))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file,
			   "Skipping possible redundant definition ");
		  print_gimple_stmt (dump_file, def_stmt, 0);
		}
	      /* Delay the actual compare of the values to the end of the walk
		 but do not update last_vuse from here.  */
	      data->last_vuse_ptr = NULL;
	      data->same_val = rhs;
	      return NULL;
	    }
	  else
	    {
	      tree *saved_last_vuse_ptr = data->last_vuse_ptr;
	      /* Do not update last_vuse_ptr in vn_reference_lookup_2.  */
	      data->last_vuse_ptr = NULL;
	      tree saved_vuse = vr->vuse;
	      hashval_t saved_hashcode = vr->hashcode;
	      void *res = vn_reference_lookup_2 (ref, gimple_vuse (def_stmt),
						 data);
	      /* Need to restore vr->vuse and vr->hashcode.  */
	      vr->vuse = saved_vuse;
	      vr->hashcode = saved_hashcode;
	      data->last_vuse_ptr = saved_last_vuse_ptr;
	      if (res && res != (void *)-1)
		{
		  vn_reference_t vnresult = (vn_reference_t) res;
		  if (TREE_CODE (rhs) == SSA_NAME)
		    rhs = SSA_VAL (rhs);
		  if (vnresult->result
		      && operand_equal_p (vnresult->result, rhs, 0))
		    return res;
		}
	    }
	}
    }
  else if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE
	   && gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
	   && gimple_call_num_args (def_stmt) <= 4)
    {
      /* For builtin calls valueize its arguments and call the
	 alias oracle again.  Valueization may improve points-to
	 info of pointers and constify size and position arguments.
	 Originally this was motivated by PR61034 which has
	 conditional calls to free falsely clobbering ref because
	 of imprecise points-to info of the argument.  */
      tree oldargs[4];
      bool valueized_anything = false;
      for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
	{
	  oldargs[i] = gimple_call_arg (def_stmt, i);
	  tree val = vn_valueize (oldargs[i]);
	  if (val != oldargs[i])
	    {
	      gimple_call_set_arg (def_stmt, i, val);
	      valueized_anything = true;
	    }
	}
      if (valueized_anything)
	{
	  bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
					       ref, data->tbaa_p);
	  for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
	    gimple_call_set_arg (def_stmt, i, oldargs[i]);
	  if (!res)
	    {
	      *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
	      return NULL;
	    }
	}
    }
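
  /* E.g. for PR61034-style code a conditional  free (p_2)  whose argument
     valueizes to a pointer known not to alias *REF no longer blocks the
     walk: the oracle query above is redone with the valueized arguments
     and can then disambiguate.  */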
  if (*disambiguate_only > TR_TRANSLATE)
    return (void *)-1;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (!ref->max_size_known_p ())
    return (void *)-1;

  poly_int64 offset = ref->offset;
  poly_int64 maxsize = ref->max_size;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
	  || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET_CHK))
      && (integer_zerop (gimple_call_arg (def_stmt, 1))
	  || ((TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST
	       || (INTEGRAL_TYPE_P (vr->type) && known_eq (ref->size, 8)))
	      && CHAR_BIT == 8
	      && BITS_PER_UNIT == 8
	      && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
	      && offset.is_constant (&offseti)
	      && ref->size.is_constant (&sizei)
	      && (offseti % BITS_PER_UNIT == 0
		  || TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST)))
      && (poly_int_tree_p (gimple_call_arg (def_stmt, 2))
	  || (TREE_CODE (gimple_call_arg (def_stmt, 2)) == SSA_NAME
	      && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt, 2)))))
      && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
	  || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME))
    {
      tree base2;
      poly_int64 offset2, size2, maxsize2;
      bool reverse;
      tree ref2 = gimple_call_arg (def_stmt, 0);
      if (TREE_CODE (ref2) == SSA_NAME)
	{
	  ref2 = SSA_VAL (ref2);
	  if (TREE_CODE (ref2) == SSA_NAME
	      && (TREE_CODE (base) != MEM_REF
		  || TREE_OPERAND (base, 0) != ref2))
	    {
	      gimple *def_stmt = SSA_NAME_DEF_STMT (ref2);
	      if (gimple_assign_single_p (def_stmt)
		  && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
		ref2 = gimple_assign_rhs1 (def_stmt);
	    }
	}
      if (TREE_CODE (ref2) == ADDR_EXPR)
	{
	  ref2 = TREE_OPERAND (ref2, 0);
	  base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
					   &reverse);
	  if (!known_size_p (maxsize2)
	      || !known_eq (maxsize2, size2)
	      || !operand_equal_p (base, base2, OEP_ADDRESS_OF))
	    return (void *)-1;
	}
      else if (TREE_CODE (ref2) == SSA_NAME)
	{
	  HOST_WIDE_INT soff;
	  if (TREE_CODE (base) != MEM_REF
	      || !(mem_ref_offset (base)
		   << LOG2_BITS_PER_UNIT).to_shwi (&soff))
	    return (void *)-1;
	  offset += soff;
	  offset2 = 0;
	  if (TREE_OPERAND (base, 0) != ref2)
	    {
	      gimple *def = SSA_NAME_DEF_STMT (ref2);
	      if (is_gimple_assign (def)
		  && gimple_assign_rhs_code (def) == POINTER_PLUS_EXPR
		  && gimple_assign_rhs1 (def) == TREE_OPERAND (base, 0)
		  && poly_int_tree_p (gimple_assign_rhs2 (def)))
		{
		  tree rhs2 = gimple_assign_rhs2 (def);
		  if (!(poly_offset_int::from (wi::to_poly_wide (rhs2),
					       SIGNED)
			<< LOG2_BITS_PER_UNIT).to_shwi (&offset2))
		    return (void *)-1;
		  ref2 = gimple_assign_rhs1 (def);
		  if (TREE_CODE (ref2) == SSA_NAME)
		    ref2 = SSA_VAL (ref2);
		}
	      else
		return (void *)-1;
	    }
	}
      else
	return (void *)-1;
      tree len = gimple_call_arg (def_stmt, 2);
      HOST_WIDE_INT leni, offset2i;
      if (TREE_CODE (len) == SSA_NAME)
	len = SSA_VAL (len);
      /* Sometimes the above trickery is smarter than alias analysis.  Take
	 advantage of that.  */
      if (!ranges_maybe_overlap_p (offset, maxsize, offset2,
				   (wi::to_poly_offset (len)
				    << LOG2_BITS_PER_UNIT)))
	return NULL;
      if (data->partial_defs.is_empty ()
	  && known_subrange_p (offset, maxsize, offset2,
			       wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
	{
	  tree val;
	  if (integer_zerop (gimple_call_arg (def_stmt, 1)))
	    val = build_zero_cst (vr->type);
	  else if (INTEGRAL_TYPE_P (vr->type)
		   && known_eq (ref->size, 8)
		   && offseti % BITS_PER_UNIT == 0)
	    {
	      gimple_match_op res_op (gimple_match_cond::UNCOND, NOP_EXPR,
				      vr->type, gimple_call_arg (def_stmt, 1));
	      val = vn_nary_build_or_lookup (&res_op);
	      if (!val
		  || (TREE_CODE (val) == SSA_NAME
		      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
		return (void *)-1;
	    }
	  else
	    {
	      unsigned buflen
		= TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr->type)) + 1;
	      if (INTEGRAL_TYPE_P (vr->type)
		  && TYPE_MODE (vr->type) != BLKmode)
		buflen = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (vr->type)) + 1;
	      unsigned char *buf = XALLOCAVEC (unsigned char, buflen);
	      memset (buf, TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 1)),
		      buflen);
	      if (BYTES_BIG_ENDIAN)
		{
		  unsigned int amnt
		    = (((unsigned HOST_WIDE_INT) offseti + sizei)
		       % BITS_PER_UNIT);
		  if (amnt)
		    {
		      shift_bytes_in_array_right (buf, buflen,
						  BITS_PER_UNIT - amnt);
		      buf++;
		      buflen--;
		    }
		}
	      else if (offseti % BITS_PER_UNIT != 0)
		{
		  unsigned int amnt
		    = BITS_PER_UNIT - ((unsigned HOST_WIDE_INT) offseti
				       % BITS_PER_UNIT);
		  shift_bytes_in_array_left (buf, buflen, amnt);
		  buf++;
		  buflen--;
		}
	      val = native_interpret_expr (vr->type, buf, buflen);
	      if (!val)
		return (void *)-1;
	    }
	  return data->finish (0, 0, val);
	}
      /* For now handle clearing memory with partial defs.  */
      else if (known_eq (ref->size, maxsize)
	       && integer_zerop (gimple_call_arg (def_stmt, 1))
	       && tree_fits_poly_int64_p (len)
	       && tree_to_poly_int64 (len).is_constant (&leni)
	       && leni <= INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT
	       && offset.is_constant (&offseti)
	       && offset2.is_constant (&offset2i)
	       && maxsize.is_constant (&maxsizei)
	       && ranges_known_overlap_p (offseti, maxsizei, offset2i,
					  leni << LOG2_BITS_PER_UNIT))
	{
	  pd_data pd;
	  pd.rhs = build_constructor (NULL_TREE, NULL);
	  pd.rhs_off = 0;
	  pd.offset = offset2i;
	  pd.size = leni << LOG2_BITS_PER_UNIT;
	  return data->push_partial_def (pd, 0, 0, offseti, maxsizei);
	}
    }
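
  /* As a concrete example for the memset handling above: after
     memset (&x, 0x21, 8)  a four-byte integer read of x finds each byte
     of BUF set to 0x21 and native_interpret_expr yields the constant
     0x21212121.  */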
  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
	   && gimple_assign_single_p (def_stmt)
	   && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
	   && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      poly_int64 offset2, size2, maxsize2;
      HOST_WIDE_INT offset2i, size2i;
      gcc_assert (lhs_ref_ok);
      base2 = ao_ref_base (&lhs_ref);
      offset2 = lhs_ref.offset;
      size2 = lhs_ref.size;
      maxsize2 = lhs_ref.max_size;
      if (known_size_p (maxsize2)
	  && known_eq (maxsize2, size2)
	  && adjust_offsets_for_equal_base_address (base, &offset,
						    base2, &offset2))
	{
	  if (data->partial_defs.is_empty ()
	      && known_subrange_p (offset, maxsize, offset2, size2))
	    {
	      /* While technically undefined behavior do not optimize
		 a full read from a clobber.  */
	      if (gimple_clobber_p (def_stmt))
		return (void *)-1;
	      tree val = build_zero_cst (vr->type);
	      return data->finish (ao_ref_alias_set (&lhs_ref),
				   ao_ref_base_alias_set (&lhs_ref), val);
	    }
	  else if (known_eq (ref->size, maxsize)
		   && maxsize.is_constant (&maxsizei)
		   && offset.is_constant (&offseti)
		   && offset2.is_constant (&offset2i)
		   && size2.is_constant (&size2i)
		   && ranges_known_overlap_p (offseti, maxsizei,
					      offset2i, size2i))
	    {
	      /* Let clobbers be consumed by the partial-def tracker
		 which can choose to ignore them if they are shadowed
		 by a later def.  */
	      pd_data pd;
	      pd.rhs = gimple_assign_rhs1 (def_stmt);
	      pd.rhs_off = 0;
	      pd.offset = offset2i;
	      pd.size = size2i;
	      return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
					     ao_ref_base_alias_set (&lhs_ref),
					     offseti, maxsizei);
	    }
	}
    }
3086 routines to extract the assigned bits. */
3087 else if (known_eq (ref
->size
, maxsize
)
3088 && is_gimple_reg_type (vr
->type
)
3089 && !reverse_storage_order_for_component_p (vr
->operands
)
3090 && !contains_storage_order_barrier_p (vr
->operands
)
3091 && gimple_assign_single_p (def_stmt
)
3093 && BITS_PER_UNIT
== 8
3094 && BYTES_BIG_ENDIAN
== WORDS_BIG_ENDIAN
3095 /* native_encode and native_decode operate on arrays of bytes
3096 and so fundamentally need a compile-time size and offset. */
3097 && maxsize
.is_constant (&maxsizei
)
3098 && offset
.is_constant (&offseti
)
3099 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt
))
3100 || (TREE_CODE (gimple_assign_rhs1 (def_stmt
)) == SSA_NAME
3101 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt
))))))
3103 tree lhs
= gimple_assign_lhs (def_stmt
);
3105 poly_int64 offset2
, size2
, maxsize2
;
3106 HOST_WIDE_INT offset2i
, size2i
;
3108 gcc_assert (lhs_ref_ok
);
3109 base2
= ao_ref_base (&lhs_ref
);
3110 offset2
= lhs_ref
.offset
;
3111 size2
= lhs_ref
.size
;
3112 maxsize2
= lhs_ref
.max_size
;
3113 reverse
= reverse_storage_order_for_component_p (lhs
);
3116 && !storage_order_barrier_p (lhs
)
3117 && known_eq (maxsize2
, size2
)
3118 && adjust_offsets_for_equal_base_address (base
, &offset
,
3120 && offset
.is_constant (&offseti
)
3121 && offset2
.is_constant (&offset2i
)
3122 && size2
.is_constant (&size2i
))
3124 if (data
->partial_defs
.is_empty ()
3125 && known_subrange_p (offseti
, maxsizei
, offset2
, size2
))
3127 /* We support up to 512-bit values (for V8DFmode). */
3128 unsigned char buffer
[65];
3131 tree rhs
= gimple_assign_rhs1 (def_stmt
);
3132 if (TREE_CODE (rhs
) == SSA_NAME
)
3133 rhs
= SSA_VAL (rhs
);
3134 len
= native_encode_expr (rhs
,
3135 buffer
, sizeof (buffer
) - 1,
3136 (offseti
- offset2i
) / BITS_PER_UNIT
);
3137 if (len
> 0 && len
* BITS_PER_UNIT
>= maxsizei
)
3139 tree type
= vr
->type
;
3140 unsigned char *buf
= buffer
;
3141 unsigned int amnt
= 0;
3142 /* Make sure to interpret in a type that has a range
3143 covering the whole access size. */
3144 if (INTEGRAL_TYPE_P (vr
->type
)
3145 && maxsizei
!= TYPE_PRECISION (vr
->type
))
3146 type
= build_nonstandard_integer_type (maxsizei
,
3147 TYPE_UNSIGNED (type
));
3148 if (BYTES_BIG_ENDIAN
)
3150 /* For big-endian native_encode_expr stored the rhs
3151 such that the LSB of it is the LSB of buffer[len - 1].
3152 That bit is stored into memory at position
3153 offset2 + size2 - 1, i.e. in byte
3154 base + (offset2 + size2 - 1) / BITS_PER_UNIT.
3155 E.g. for offset2 1 and size2 14, rhs -1 and memory
3156 previously cleared that is:
3159 Now, if we want to extract offset 2 and size 12 from
3160 it using native_interpret_expr (which actually works
3161 for integral bitfield types in terms of byte size of
3162 the mode), the native_encode_expr stored the value
3165 and returned len 2 (the X bits are outside of
3167 Let sz be maxsize / BITS_PER_UNIT if not extracting
3168 a bitfield, and GET_MODE_SIZE otherwise.
3169 We need to align the LSB of the value we want to
3170 extract as the LSB of buf[sz - 1].
3171 The LSB from memory we need to read is at position
3172 offset + maxsize - 1. */
3173 HOST_WIDE_INT sz
= maxsizei
/ BITS_PER_UNIT
;
3174 if (INTEGRAL_TYPE_P (type
))
3176 if (TYPE_MODE (type
) != BLKmode
)
3177 sz
= GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type
));
3179 sz
= TREE_INT_CST_LOW (TYPE_SIZE_UNIT (type
));
3181 amnt
= ((unsigned HOST_WIDE_INT
) offset2i
+ size2i
3182 - offseti
- maxsizei
) % BITS_PER_UNIT
;
3184 shift_bytes_in_array_right (buffer
, len
, amnt
);
3185 amnt
= ((unsigned HOST_WIDE_INT
) offset2i
+ size2i
3186 - offseti
- maxsizei
- amnt
) / BITS_PER_UNIT
;
3187 if ((unsigned HOST_WIDE_INT
) sz
+ amnt
> (unsigned) len
)
3191 buf
= buffer
+ len
- sz
- amnt
;
3192 len
-= (buf
- buffer
);
3197 amnt
= ((unsigned HOST_WIDE_INT
) offset2i
3198 - offseti
) % BITS_PER_UNIT
;
3202 shift_bytes_in_array_left (buffer
, len
+ 1, amnt
);
3206 tree val
= native_interpret_expr (type
, buf
, len
);
3207 /* If we chop off bits because the types precision doesn't
3208 match the memory access size this is ok when optimizing
3209 reads but not when called from the DSE code during
3212 && type
!= vr
->type
)
3214 if (! int_fits_type_p (val
, vr
->type
))
3217 val
= fold_convert (vr
->type
, val
);
3221 return data
->finish (ao_ref_alias_set (&lhs_ref
),
3222 ao_ref_base_alias_set (&lhs_ref
), val
);
3225 else if (ranges_known_overlap_p (offseti
, maxsizei
, offset2i
,
3229 tree rhs
= gimple_assign_rhs1 (def_stmt
);
3230 if (TREE_CODE (rhs
) == SSA_NAME
)
3231 rhs
= SSA_VAL (rhs
);
3234 pd
.offset
= offset2i
;
3236 return data
->push_partial_def (pd
, ao_ref_alias_set (&lhs_ref
),
3237 ao_ref_base_alias_set (&lhs_ref
),
  /* 4) Assignment from an SSA name which definition we may be able
     to access pieces from or we can combine to a larger entity.  */
  else if (known_eq (ref->size, maxsize)
	   && is_gimple_reg_type (vr->type)
	   && !reverse_storage_order_for_component_p (vr->operands)
	   && !contains_storage_order_barrier_p (vr->operands)
	   && gimple_assign_single_p (def_stmt)
	   && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
    {
      tree lhs = gimple_assign_lhs (def_stmt);
      tree base2;
      poly_int64 offset2, size2, maxsize2;
      HOST_WIDE_INT offset2i, size2i, offseti;
      bool reverse;
      gcc_assert (lhs_ref_ok);
      base2 = ao_ref_base (&lhs_ref);
      offset2 = lhs_ref.offset;
      size2 = lhs_ref.size;
      maxsize2 = lhs_ref.max_size;
      reverse = reverse_storage_order_for_component_p (lhs);
      tree def_rhs = gimple_assign_rhs1 (def_stmt);
      if (!reverse
	  && !storage_order_barrier_p (lhs)
	  && known_size_p (maxsize2)
	  && known_eq (maxsize2, size2)
	  && adjust_offsets_for_equal_base_address (base, &offset,
						    base2, &offset2))
	{
	  if (data->partial_defs.is_empty ()
	      && known_subrange_p (offset, maxsize, offset2, size2)
	      /* ??? We can't handle bitfield precision extracts without
		 either using an alternate type for the BIT_FIELD_REF and
		 then doing a conversion or possibly adjusting the offset
		 according to endianness.  */
	      && (! INTEGRAL_TYPE_P (vr->type)
		  || known_eq (ref->size, TYPE_PRECISION (vr->type)))
	      && multiple_p (ref->size, BITS_PER_UNIT))
	    {
	      tree val = NULL_TREE;
	      if (! INTEGRAL_TYPE_P (TREE_TYPE (def_rhs))
		  || type_has_mode_precision_p (TREE_TYPE (def_rhs)))
		{
		  gimple_match_op op (gimple_match_cond::UNCOND,
				      BIT_FIELD_REF, vr->type,
				      SSA_VAL (def_rhs),
				      bitsize_int (ref->size),
				      bitsize_int (offset - offset2));
		  val = vn_nary_build_or_lookup (&op);
		}
	      else if (known_eq (ref->size, size2))
		{
		  gimple_match_op op (gimple_match_cond::UNCOND,
				      VIEW_CONVERT_EXPR, vr->type,
				      SSA_VAL (def_rhs));
		  val = vn_nary_build_or_lookup (&op);
		}
	      if (val
		  && (TREE_CODE (val) != SSA_NAME
		      || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
		return data->finish (ao_ref_alias_set (&lhs_ref),
				     ao_ref_base_alias_set (&lhs_ref), val);
	    }
	  else if (maxsize.is_constant (&maxsizei)
		   && offset.is_constant (&offseti)
		   && offset2.is_constant (&offset2i)
		   && size2.is_constant (&size2i)
		   && ranges_known_overlap_p (offset, maxsize, offset2, size2))
	    {
	      pd_data pd;
	      pd.rhs = SSA_VAL (def_rhs);
	      pd.rhs_off = 0;
	      pd.offset = offset2i;
	      pd.size = size2i;
	      return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
					     ao_ref_base_alias_set (&lhs_ref),
					     offseti, maxsizei);
	    }
	}
    }
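
  /* For the SSA-name case above: a store  *p_3 = v_1;  of a 64-bit value
     followed by a 32-bit read at bit offset 32 within it is rephrased as
     BIT_FIELD_REF <v_1, 32, 32>, which vn_nary_build_or_lookup can
     value-number like any other expression.  */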
  /* 4b) Assignment done via one of the vectorizer internal store
     functions where we may be able to access pieces from or we can
     combine to a larger entity.  */
  else if (known_eq (ref->size, maxsize)
	   && is_gimple_reg_type (vr->type)
	   && !reverse_storage_order_for_component_p (vr->operands)
	   && !contains_storage_order_barrier_p (vr->operands)
	   && is_gimple_call (def_stmt)
	   && gimple_call_internal_p (def_stmt)
	   && internal_store_fn_p (gimple_call_internal_fn (def_stmt)))
    {
      gcall *call = as_a <gcall *> (def_stmt);
      internal_fn fn = gimple_call_internal_fn (call);

      tree mask = NULL_TREE, len = NULL_TREE, bias = NULL_TREE;
      switch (fn)
	{
	case IFN_MASK_STORE:
	  mask = gimple_call_arg (call, internal_fn_mask_index (fn));
	  mask = vn_valueize (mask);
	  if (TREE_CODE (mask) != VECTOR_CST)
	    return (void *)-1;
	  break;
	case IFN_LEN_STORE:
	  {
	    int len_index = internal_fn_len_index (fn);
	    len = gimple_call_arg (call, len_index);
	    bias = gimple_call_arg (call, len_index + 1);
	    if (!tree_fits_uhwi_p (len) || !tree_fits_shwi_p (bias))
	      return (void *)-1;
	    break;
	  }
	default:
	  return (void *)-1;
	}
      tree def_rhs = gimple_call_arg (call,
				      internal_fn_stored_value_index (fn));
      def_rhs = vn_valueize (def_rhs);
      if (TREE_CODE (def_rhs) != VECTOR_CST)
	return (void *)-1;

      ao_ref_init_from_ptr_and_size (&lhs_ref,
				     vn_valueize (gimple_call_arg (call, 0)),
				     TYPE_SIZE_UNIT (TREE_TYPE (def_rhs)));
      tree base2;
      poly_int64 offset2, size2, maxsize2;
      HOST_WIDE_INT offset2i, size2i, offseti;
      base2 = ao_ref_base (&lhs_ref);
      offset2 = lhs_ref.offset;
      size2 = lhs_ref.size;
      maxsize2 = lhs_ref.max_size;
      if (known_size_p (maxsize2)
	  && known_eq (maxsize2, size2)
	  && adjust_offsets_for_equal_base_address (base, &offset,
						    base2, &offset2)
	  && maxsize.is_constant (&maxsizei)
	  && offset.is_constant (&offseti)
	  && offset2.is_constant (&offset2i)
	  && size2.is_constant (&size2i))
	{
	  if (!ranges_maybe_overlap_p (offset, maxsize, offset2, size2))
	    /* Poor-mans disambiguation.  */
	    return NULL;
	  else if (ranges_known_overlap_p (offset, maxsize, offset2, size2))
	    {
	      pd_data pd;
	      pd.rhs = def_rhs;
	      tree aa = gimple_call_arg (call, 1);
	      alias_set_type set = get_deref_alias_set (TREE_TYPE (aa));
	      tree vectype = TREE_TYPE (def_rhs);
	      unsigned HOST_WIDE_INT elsz
		= tree_to_uhwi (TYPE_SIZE (TREE_TYPE (vectype)));
	      if (mask)
		{
		  HOST_WIDE_INT start = 0, length = 0;
		  unsigned mask_idx = 0;
		  do
		    {
		      if (integer_zerop (VECTOR_CST_ELT (mask, mask_idx)))
			{
			  if (length != 0)
			    {
			      pd.rhs_off = start;
			      pd.offset = offset2i + start;
			      pd.size = length;
			      if (ranges_known_overlap_p
				    (offset, maxsize, pd.offset, pd.size))
				{
				  void *res = data->push_partial_def
					  (pd, set, set, offseti, maxsizei);
				  if (res != NULL)
				    return res;
				}
			    }
			  start = (mask_idx + 1) * elsz;
			  length = 0;
			}
		      else
			length += elsz;
		      mask_idx++;
		    }
		  while (known_lt (mask_idx, TYPE_VECTOR_SUBPARTS (vectype)));
		  if (length != 0)
		    {
		      pd.rhs_off = start;
		      pd.offset = offset2i + start;
		      pd.size = length;
		      if (ranges_known_overlap_p (offset, maxsize,
						  pd.offset, pd.size))
			return data->push_partial_def (pd, set, set,
						       offseti, maxsizei);
		    }
		}
	      else if (fn == IFN_LEN_STORE)
		{
		  pd.offset = offset2i;
		  pd.size = (tree_to_uhwi (len)
			     + -tree_to_shwi (bias)) * BITS_PER_UNIT;
		  if (BYTES_BIG_ENDIAN)
		    pd.rhs_off = pd.size - tree_to_uhwi (TYPE_SIZE (vectype));
		  else
		    pd.rhs_off = 0;
		  if (ranges_known_overlap_p (offset, maxsize,
					      pd.offset, pd.size))
		    return data->push_partial_def (pd, set, set,
						   offseti, maxsizei);
		}
	      else
		gcc_unreachable ();
	      return NULL;
	    }
	}
    }
  /* 5) For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (data->vn_walk_kind == VN_WALKREWRITE
	   && gimple_assign_single_p (def_stmt)
	   && (DECL_P (gimple_assign_rhs1 (def_stmt))
	       || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
	       || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      int i, j, k;
      auto_vec<vn_reference_op_s> rhs;
      vn_reference_op_t vro;
      ao_ref r;

      gcc_assert (lhs_ref_ok);

      /* See if the assignment kills REF.  */
      base2 = ao_ref_base (&lhs_ref);
      if (!lhs_ref.max_size_known_p ()
	  || (base != base2
	      && (TREE_CODE (base) != MEM_REF
		  || TREE_CODE (base2) != MEM_REF
		  || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
		  || !tree_int_cst_equal (TREE_OPERAND (base, 1),
					  TREE_OPERAND (base2, 1))))
	  || !stmt_kills_ref_p (def_stmt, ref))
	return (void *)-1;

      /* Find the common base of ref and the lhs.  lhs_ops already
	 contains valueized operands for the lhs.  */
      i = vr->operands.length () - 1;
      j = lhs_ops.length () - 1;
      while (j >= 0 && i >= 0
	     && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
	{
	  i--;
	  j--;
	}

      /* ??? The innermost op should always be a MEM_REF and we already
	 checked that the assignment to the lhs kills vr.  Thus for
	 aggregate copies using char[] types the vn_reference_op_eq
	 may fail when comparing types for compatibility.  But we really
	 don't care here - further lookups with the rewritten operands
	 will simply fail if we messed up types too badly.  */
      poly_int64 extra_off = 0;
      if (j == 0 && i >= 0
	  && lhs_ops[0].opcode == MEM_REF
	  && maybe_ne (lhs_ops[0].off, -1))
	{
	  if (known_eq (lhs_ops[0].off, vr->operands[i].off))
	    i--, j--;
	  else if (vr->operands[i].opcode == MEM_REF
		   && maybe_ne (vr->operands[i].off, -1))
	    {
	      extra_off = vr->operands[i].off - lhs_ops[0].off;
	      i--, j--;
	    }
	}

      /* i now points to the first additional op.
	 ??? LHS may not be completely contained in VR, one or more
	 VIEW_CONVERT_EXPRs could be in its way.  We could at least
	 try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
	return (void *)-1;

      /* Punt if the additional ops contain a storage order barrier.  */
      for (k = i; k >= 0; k--)
	{
	  vro = &vr->operands[k];
	  if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
	    return (void *)-1;
	}

      /* Now re-write REF to be based on the rhs of the assignment.  */
      tree rhs1 = gimple_assign_rhs1 (def_stmt);
      copy_reference_ops_from_ref (rhs1, &rhs);

      /* Apply an extra offset to the inner MEM_REF of the RHS.  */
      bool force_no_tbaa = false;
      if (maybe_ne (extra_off, 0))
	{
	  if (rhs.length () < 2)
	    return (void *)-1;
	  int ix = rhs.length () - 2;
	  if (rhs[ix].opcode != MEM_REF
	      || known_eq (rhs[ix].off, -1))
	    return (void *)-1;
	  rhs[ix].off += extra_off;
	  rhs[ix].op0 = int_const_binop (PLUS_EXPR, rhs[ix].op0,
					 build_int_cst (TREE_TYPE (rhs[ix].op0),
							extra_off));
	  /* When we have offsetted the RHS, reading only parts of it,
	     we can no longer use the original TBAA type, force alias-set
	     zero.  */
	  force_no_tbaa = true;
	}

      /* Save the operands since we need to use the original ones for
	 the hash entry we use.  */
      if (!data->saved_operands.exists ())
	data->saved_operands = vr->operands.copy ();

      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      vec<vn_reference_op_s> old = vr->operands;
      if (i + 1 + rhs.length () > vr->operands.length ())
	vr->operands.safe_grow (i + 1 + rhs.length (), true);
      else
	vr->operands.truncate (i + 1 + rhs.length ());
      FOR_EACH_VEC_ELT (rhs, j, vro)
	vr->operands[i + 1 + j] = *vro;
      valueize_refs (&vr->operands);
      if (old == shared_lookup_references)
	shared_lookup_references = vr->operands;
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Try folding the new reference to a constant.  */
      tree val = fully_constant_vn_reference_p (vr);
      if (val)
	{
	  if (data->partial_defs.is_empty ())
	    return data->finish (ao_ref_alias_set (&lhs_ref),
				 ao_ref_base_alias_set (&lhs_ref), val);
	  /* This is the only interesting case for partial-def handling
	     coming from targets that like to gimplify init-ctors as
	     aggregate copies from constant data like aarch64 for
	     PR83518.  */
	  if (maxsize.is_constant (&maxsizei) && known_eq (ref->size, maxsize))
	    {
	      pd_data pd;
	      pd.rhs = val;
	      pd.rhs_off = 0;
	      pd.offset = 0;
	      pd.size = maxsizei;
	      return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
					     ao_ref_base_alias_set (&lhs_ref),
					     0, maxsizei);
	    }
	}

      /* Continuing with partial defs isn't easily possible here, we
	 have to find a full def from further lookups from here.  Probably
	 not worth the special-casing everywhere.  */
      if (!data->partial_defs.is_empty ())
	return (void *)-1;

      /* Adjust *ref from the new operands.  */
      ao_ref rhs1_ref;
      ao_ref_init (&rhs1_ref, rhs1);
      if (!ao_ref_init_from_vn_reference (&r,
					  force_no_tbaa ? 0
					  : ao_ref_alias_set (&rhs1_ref),
					  force_no_tbaa ? 0
					  : ao_ref_base_alias_set (&rhs1_ref),
					  vr->type, vr->operands))
	return (void *)-1;
      /* This can happen with bitfields.  */
      if (maybe_ne (ref->size, r.size))
	{
	  /* If the access lacks some subsetting simply apply that by
	     shortening it.  That in the end can only be successful
	     if we can pun the lookup result which in turn requires
	     exact offsets.  */
	  if (known_eq (r.size, r.max_size)
	      && known_lt (ref->size, r.size))
	    r.size = r.max_size = ref->size;
	  else
	    return (void *)-1;
	}
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      data->last_vuse_ptr = NULL;
      /* Invalidate the original access path since it now contains
	 the wrong base.  */
      data->orig_ref.ref = NULL_TREE;
      /* Use the alias-set of this LHS for recording an eventual result.  */
      if (data->first_set == -2)
	{
	  data->first_set = ao_ref_alias_set (&lhs_ref);
	  data->first_base_set = ao_ref_base_alias_set (&lhs_ref);
	}

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }
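
  /* For instance, after the aggregate copy  a = b;  a lookup of a.f.g is
     re-based to b.f.g by splicing the operands of the RHS in place of
     the common base, and the walk then continues with the translated
     reference.  */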
  /* 6) For memcpy copies translate the reference through them if the copy
     kills ref.  But we cannot (easily) do this translation if the memcpy is
     a storage order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that
     can modify the storage order of objects (see storage_order_barrier_p).  */
  else if (data->vn_walk_kind == VN_WALKREWRITE
	   && is_gimple_reg_type (vr->type)
	   /* ??? Handle BCOPY as well.  */
	   && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY_CHK)
	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY_CHK)
	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE)
	       || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE_CHK))
	   && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
	       || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
	   && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
	       || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
	   && (poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size)
	       || (TREE_CODE (gimple_call_arg (def_stmt, 2)) == SSA_NAME
		   && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt, 2)),
				       &copy_size)))
	   /* Handling this is more complicated, give up for now.  */
	   && data->partial_defs.is_empty ())
    {
      tree lhs, rhs;
      ao_ref r;
      poly_int64 rhs_offset, lhs_offset;
      vn_reference_op_s op;
      poly_uint64 mem_offset;
      poly_int64 at, byte_maxsize;

      /* Only handle non-variable, addressable refs.  */
      if (maybe_ne (ref->size, maxsize)
	  || !multiple_p (offset, BITS_PER_UNIT, &at)
	  || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
	return (void *)-1;

      /* Extract a pointer base and an offset for the destination.  */
      lhs = gimple_call_arg (def_stmt, 0);
      lhs_offset = 0;
      if (TREE_CODE (lhs) == SSA_NAME)
	{
	  lhs = vn_valueize (lhs);
	  if (TREE_CODE (lhs) == SSA_NAME)
	    {
	      gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
	      if (gimple_assign_single_p (def_stmt)
		  && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
		lhs = gimple_assign_rhs1 (def_stmt);
	    }
	}
      if (TREE_CODE (lhs) == ADDR_EXPR)
	{
	  if (AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (lhs)))
	      && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_TYPE (lhs))))
	    return (void *)-1;
	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
						    &lhs_offset);
	  if (!tem)
	    return (void *)-1;
	  if (TREE_CODE (tem) == MEM_REF
	      && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
	    {
	      lhs = TREE_OPERAND (tem, 0);
	      if (TREE_CODE (lhs) == SSA_NAME)
		lhs = vn_valueize (lhs);
	      lhs_offset += mem_offset;
	    }
	  else if (DECL_P (tem))
	    lhs = build_fold_addr_expr (tem);
	  else
	    return (void *)-1;
	}
      if (TREE_CODE (lhs) != SSA_NAME
	  && TREE_CODE (lhs) != ADDR_EXPR)
	return (void *)-1;

      /* Extract a pointer base and an offset for the source.  */
      rhs = gimple_call_arg (def_stmt, 1);
      rhs_offset = 0;
      if (TREE_CODE (rhs) == SSA_NAME)
	rhs = vn_valueize (rhs);
      if (TREE_CODE (rhs) == ADDR_EXPR)
	{
	  if (AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (rhs)))
	      && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_TYPE (rhs))))
	    return (void *)-1;
	  tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
						    &rhs_offset);
	  if (!tem)
	    return (void *)-1;
	  if (TREE_CODE (tem) == MEM_REF
	      && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
	    {
	      rhs = TREE_OPERAND (tem, 0);
	      rhs_offset += mem_offset;
	    }
	  else if (DECL_P (tem)
		   || TREE_CODE (tem) == STRING_CST)
	    rhs = build_fold_addr_expr (tem);
	  else
	    return (void *)-1;
	}
      if (TREE_CODE (rhs) == SSA_NAME)
	rhs = SSA_VAL (rhs);
      else if (TREE_CODE (rhs) != ADDR_EXPR)
	return (void *)-1;

      /* The bases of the destination and the references have to agree.  */
      if (TREE_CODE (base) == MEM_REF)
	{
	  if (TREE_OPERAND (base, 0) != lhs
	      || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
	    return (void *)-1;
	  at += mem_offset;
	}
      else if (!DECL_P (base)
	       || TREE_CODE (lhs) != ADDR_EXPR
	       || TREE_OPERAND (lhs, 0) != base)
	return (void *)-1;

      /* If the access is completely outside of the memcpy destination
	 area there is no aliasing.  */
      if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
	return NULL;
      /* And the access has to be contained within the memcpy destination.  */
      if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
	return (void *)-1;

      /* Save the operands since we need to use the original ones for
	 the hash entry we use.  */
      if (!data->saved_operands.exists ())
	data->saved_operands = vr->operands.copy ();

      /* Make room for 2 operands in the new reference.  */
      if (vr->operands.length () < 2)
	{
	  vec<vn_reference_op_s> old = vr->operands;
	  vr->operands.safe_grow_cleared (2, true);
	  if (old == shared_lookup_references)
	    shared_lookup_references = vr->operands;
	}
      else
	vr->operands.truncate (2);

      /* The looked-through reference is a simple MEM_REF.  */
      memset (&op, 0, sizeof (op));
      op.type = vr->type;
      op.opcode = MEM_REF;
      op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
      op.off = at - lhs_offset + rhs_offset;
      vr->operands[0] = op;
      op.type = TREE_TYPE (rhs);
      op.opcode = TREE_CODE (rhs);
      op.op0 = rhs;
      op.off = -1;
      vr->operands[1] = op;
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Try folding the new reference to a constant.  */
      tree val = fully_constant_vn_reference_p (vr);
      if (val)
	return data->finish (0, 0, val);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, 0, 0, vr->type, vr->operands))
	return (void *)-1;
      /* This can happen with bitfields.  */
      if (maybe_ne (ref->size, r.size))
	return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      data->last_vuse_ptr = NULL;
      /* Invalidate the original access path since it now contains
	 the wrong base.  */
      data->orig_ref.ref = NULL_TREE;
      /* Use the alias-set of this stmt for recording an eventual result.  */
      if (data->first_set == -2)
	{
	  data->first_set = 0;
	  data->first_base_set = 0;
	}

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}
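
/* Similarly for the memcpy translation just above: after
   memcpy (&d, &s, 16)  a lookup of  MEM[&d + 8]  is re-based to
   MEM[&s + 8]  via the two-operand reference built there, provided the
   copy covers the access completely.  */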
/* Return a reference op vector from OP that can be used for
   vn_reference_lookup_pieces.  The caller is responsible for releasing
   the vector.  */

vec<vn_reference_op_s>
vn_reference_operands_for_lookup (tree op)
{
  bool valueized;
  return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
}

/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set,
			    alias_set_type base_set, tree type,
			    vec<vn_reference_op_s> operands,
			    vn_reference_t *vnresult, vn_lookup_kind kind)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;
  tree cst;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse_ssa_val (vuse);
  shared_lookup_references.truncate (0);
  shared_lookup_references.safe_grow (operands.length (), true);
  memcpy (shared_lookup_references.address (),
	  operands.address (),
	  sizeof (vn_reference_op_s)
	  * operands.length ());
  bool valueized_p;
  valueize_refs_1 (&shared_lookup_references, &valueized_p);
  vr1.operands = shared_lookup_references;
  vr1.type = type;
  vr1.set = set;
  vr1.base_set = base_set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  vn_reference_lookup_1 (&vr1, vnresult);
  if (!*vnresult
      && kind != VN_NOWALK
      && vr1.vuse)
    {
      ao_ref r;
      unsigned limit = param_sccvn_max_alias_queries_per_access;
      vn_walk_cb_data data (&vr1, NULL_TREE, NULL, kind, true, NULL_TREE,
			    false);
      vec<vn_reference_op_s> ops_for_ref;
      if (!valueized_p)
	ops_for_ref = vr1.operands;
      else
	{
	  /* For ao_ref_from_mem we have to ensure only available SSA names
	     end up in base and the only convenient way to make this work
	     for PRE is to re-valueize with that in mind.  */
	  ops_for_ref.create (operands.length ());
	  ops_for_ref.quick_grow (operands.length ());
	  memcpy (ops_for_ref.address (),
		  operands.address (),
		  sizeof (vn_reference_op_s)
		  * operands.length ());
	  valueize_refs_1 (&ops_for_ref, &valueized_p, true);
	}
      if (ao_ref_init_from_vn_reference (&r, set, base_set, type,
					 ops_for_ref))
	*vnresult
	  = ((vn_reference_t)
	     walk_non_aliased_vuses (&r, vr1.vuse, true, vn_reference_lookup_2,
				     vn_reference_lookup_3, vuse_valueize,
				     limit, &data));
      if (ops_for_ref != shared_lookup_references)
	ops_for_ref.release ();
      gcc_checking_assert (vr1.operands == shared_lookup_references);
      if (*vnresult
	  && data.same_val
	  && (!(*vnresult)->result
	      || !operand_equal_p ((*vnresult)->result, data.same_val)))
	{
	  *vnresult = NULL;
	  return NULL_TREE;
	}
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  When TBAA_P is false assume
   we are looking up a store and treat it as having alias-set zero.
   *LAST_VUSE_PTR will be updated with the VUSE the value lookup succeeded.
   MASK is either NULL_TREE, or can be an INTEGER_CST if the result of the
   load is bitwise anded with MASK and so we are only interested in a subset
   of the bits and can ignore if the other bits are uninitialized or
   not initialized with constants.  When doing redundant store removal
   the caller has to set REDUNDANT_STORE_REMOVAL_P.  */

tree
vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
		     vn_reference_t *vnresult, bool tbaa_p,
		     tree *last_vuse_ptr, tree mask,
		     bool redundant_store_removal_p)
{
  vec<vn_reference_op_s> operands;
  struct vn_reference_s vr1;
  bool valueized_anything;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse_ssa_val (vuse);
  vr1.operands = operands
    = valueize_shared_reference_ops_from_ref (op, &valueized_anything);

  /* Handle &MEM[ptr + 5].b[1].c as POINTER_PLUS_EXPR.  Avoid doing
     this before the pass folding __builtin_object_size had a chance to run.  */
  if ((cfun->curr_properties & PROP_objsz)
      && operands[0].opcode == ADDR_EXPR
      && operands.last ().opcode == SSA_NAME)
    {
      poly_int64 off = 0;
      vn_reference_op_t vro;
      unsigned i;
      for (i = 1; operands.iterate (i, &vro); ++i)
	{
	  if (vro->opcode == SSA_NAME)
	    break;
	  else if (known_eq (vro->off, -1))
	    break;
	  off += vro->off;
	}
      if (i == operands.length () - 1
	  /* Make sure the offset we accumulated in a 64bit int
	     fits the address computation carried out in target
	     offset precision.  */
	  && (off.coeffs[0]
	      == sext_hwi (off.coeffs[0], TYPE_PRECISION (sizetype))))
	{
	  gcc_assert (operands[i-1].opcode == MEM_REF);
	  tree ops[2];
	  ops[0] = operands[i].op0;
	  ops[1] = wide_int_to_tree (sizetype, off);
	  tree res = vn_nary_op_lookup_pieces (2, POINTER_PLUS_EXPR,
					       TREE_TYPE (op), ops, NULL);
	  if (res)
	    return res;
	  return NULL_TREE;
	}
    }

  vr1.type = TREE_TYPE (op);
  ao_ref op_ref;
  ao_ref_init (&op_ref, op);
  vr1.set = ao_ref_alias_set (&op_ref);
  vr1.base_set = ao_ref_base_alias_set (&op_ref);
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if (mask == NULL_TREE)
    if (tree cst = fully_constant_vn_reference_p (&vr1))
      return cst;

  if (kind != VN_NOWALK && vr1.vuse)
    {
      vn_reference_t wvnresult;
      ao_ref r;
      unsigned limit = param_sccvn_max_alias_queries_per_access;
      auto_vec<vn_reference_op_s> ops_for_ref;
      if (valueized_anything)
	{
	  copy_reference_ops_from_ref (op, &ops_for_ref);
	  bool tem;
	  valueize_refs_1 (&ops_for_ref, &tem, true);
	}
      /* Make sure to use a valueized reference if we valueized anything.
	 Otherwise preserve the full reference for advanced TBAA.  */
      if (!valueized_anything
	  || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.base_set,
					     vr1.type, ops_for_ref))
	ao_ref_init (&r, op);
      vn_walk_cb_data data (&vr1, r.ref ? NULL_TREE : op,
			    last_vuse_ptr, kind, tbaa_p, mask,
			    redundant_store_removal_p);
      wvnresult
	= ((vn_reference_t)
	   walk_non_aliased_vuses (&r, vr1.vuse, tbaa_p, vn_reference_lookup_2,
				   vn_reference_lookup_3, vuse_valueize, limit,
				   &data));
      gcc_checking_assert (vr1.operands == shared_lookup_references);
      if (wvnresult)
	{
	  gcc_assert (mask == NULL_TREE);
	  if (data.same_val
	      && (!wvnresult->result
		  || !operand_equal_p (wvnresult->result, data.same_val)))
	    return NULL_TREE;
	  if (vnresult)
	    *vnresult = wvnresult;
	  return wvnresult->result;
	}
      else if (mask)
	return data.masked_result;

      return NULL_TREE;
    }

  if (last_vuse_ptr)
    *last_vuse_ptr = vr1.vuse;
  if (mask)
    return NULL_TREE;
  return vn_reference_lookup_1 (&vr1, vnresult);
}
/* Lookup CALL in the current hash table and return the entry in
   *VNRESULT if found.  Populates *VR for the hashtable lookup.  */

void
vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
			  vn_reference_t vr)
{
  if (vnresult)
    *vnresult = NULL;

  tree vuse = gimple_vuse (call);

  vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr->operands = valueize_shared_reference_ops_from_call (call);
  tree lhs = gimple_call_lhs (call);
  /* For non-SSA return values the reference ops contain the LHS.  */
  vr->type = ((lhs && TREE_CODE (lhs) == SSA_NAME)
	      ? TREE_TYPE (lhs) : NULL_TREE);
  vr->punned = false;
  vr->set = 0;
  vr->base_set = 0;
  vr->hashcode = vn_reference_compute_hash (vr);
  vn_reference_lookup_1 (vr, vnresult);
}
/* Insert OP into the current hash table with a value number of RESULT.  */

static void
vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
{
  vn_reference_s **slot;
  vn_reference_t vr1;
  bool tem;

  vec<vn_reference_op_s> operands
    = valueize_shared_reference_ops_from_ref (op, &tem);
  /* Handle &MEM[ptr + 5].b[1].c as POINTER_PLUS_EXPR.  Avoid doing this
     before the pass folding __builtin_object_size had a chance to run.  */
  if ((cfun->curr_properties & PROP_objsz)
      && operands[0].opcode == ADDR_EXPR
      && operands.last ().opcode == SSA_NAME)
    {
      poly_int64 off = 0;
      vn_reference_op_t vro;
      unsigned i;
      for (i = 1; operands.iterate (i, &vro); ++i)
	{
	  if (vro->opcode == SSA_NAME)
	    break;
	  else if (known_eq (vro->off, -1))
	    break;
	  off += vro->off;
	}
      if (i == operands.length () - 1
	  /* Make sure the offset we accumulated in a 64bit int
	     fits the address computation carried out in target
	     offset precision.  */
	  && (off.coeffs[0]
	      == sext_hwi (off.coeffs[0], TYPE_PRECISION (sizetype))))
	{
	  gcc_assert (operands[i-1].opcode == MEM_REF);
	  tree ops[2];
	  ops[0] = operands[i].op0;
	  ops[1] = wide_int_to_tree (sizetype, off);
	  vn_nary_op_insert_pieces (2, POINTER_PLUS_EXPR,
				    TREE_TYPE (op), ops, result,
				    VN_INFO (result)->value_id);
	  return;
	}
    }

  vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse_ssa_val (vuse);
  vr1->operands = operands.copy ();
  vr1->type = TREE_TYPE (op);
  vr1->punned = false;
  ao_ref op_ref;
  ao_ref_init (&op_ref, op);
  vr1->set = ao_ref_alias_set (&op_ref);
  vr1->base_set = ao_ref_base_alias_set (&op_ref);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
  vr1->result_vdef = vdef;

  slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
						      INSERT);

  /* Because IL walking on reference lookup can end up visiting
     a def that is only to be visited later in iteration order
     when we are about to make an irreducible region reducible
     the def can be effectively processed and its ref being inserted
     by vn_reference_lookup_3 already.  So we cannot assert (!*slot)
     but save a lookup if we deal with already inserted refs here.  */
  if (*slot)
    {
      /* We cannot assert that we have the same value either because
	 when disentangling an irreducible region we may end up visiting
	 a use before the corresponding def.  That's a missed optimization
	 only though.  See gcc.dg/tree-ssa/pr87126.c for example.  */
      if (dump_file && (dump_flags & TDF_DETAILS)
	  && !operand_equal_p ((*slot)->result, vr1->result, 0))
	{
	  fprintf (dump_file, "Keeping old value ");
	  print_generic_expr (dump_file, (*slot)->result);
	  fprintf (dump_file, " because of collision\n");
	}
      free_reference (vr1);
      obstack_free (&vn_tables_obstack, vr1);
      return;
    }

  *slot = vr1;
  vr1->next = last_inserted_ref;
  last_inserted_ref = vr1;
}
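/* Illustrative note (a sketch, not part of the algorithm proper): with
   PROP_objsz set, inserting the address of a hypothetical reference
   &MEM[p_1 + 5].b[1].c above also records an equivalent
   POINTER_PLUS_EXPR  p_1 p+ C,  where C is the constant byte offset
   accumulated over the handled components, so later lookups of that
   pointer arithmetic CSE to the same value.  The names p_1 and C are
   hypothetical.  */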
/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set,
			    alias_set_type base_set, tree type,
			    vec<vn_reference_op_s> operands,
			    tree result, unsigned int value_id)
{
  vn_reference_s **slot;
  vn_reference_t vr1;

  vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
  vr1->value_id = value_id;
  vr1->vuse = vuse_ssa_val (vuse);
  vr1->operands = operands;
  valueize_refs (&vr1->operands);
  vr1->type = type;
  vr1->punned = false;
  vr1->set = set;
  vr1->base_set = base_set;
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;
  vr1->result_vdef = NULL_TREE;

  slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
						      INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  *slot = vr1;

  vr1->next = last_inserted_ref;
  last_inserted_ref = vr1;
  return vr1;
}
/* Compute and return the hash value for nary operation VNO1.  */

static hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  inchash::hash hstate;
  unsigned i;

  if (((vno1->length == 2
	&& commutative_tree_code (vno1->opcode))
       || (vno1->length == 3
	   && commutative_ternary_tree_code (vno1->opcode)))
      && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
    std::swap (vno1->op[0], vno1->op[1]);
  else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
	   && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
    {
      std::swap (vno1->op[0], vno1->op[1]);
      vno1->opcode = swap_tree_comparison (vno1->opcode);
    }

  hstate.add_int (vno1->opcode);
  for (i = 0; i < vno1->length; ++i)
    inchash::add_expr (vno1->op[i], hstate);

  return hstate.end ();
}
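/* Illustrative note: because operands of commutative codes are
   canonicalized with tree_swap_operands_p before hashing, a
   hypothetical  b_2 + a_1  hashes (and later compares) the same as
   a_1 + b_2, and a comparison such as  5 < x_3  is canonicalized to
   x_3 > 5  via swap_tree_comparison.  */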
/* Compare nary operations VNO1 and VNO2 and return true if they are
   equivalent.  */

bool
vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
{
  unsigned i;

  if (vno1->hashcode != vno2->hashcode)
    return false;

  if (vno1->length != vno2->length)
    return false;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  /* BIT_INSERT_EXPR has an implicit operand as the type precision
     of op1.  Need to check to make sure they are the same.  */
  if (vno1->opcode == BIT_INSERT_EXPR
      && TREE_CODE (vno1->op[1]) == INTEGER_CST
      && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
	 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
    return false;

  return true;
}
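/* Illustrative note: the BIT_INSERT_EXPR special-casing above matters
   because inserting, say, an 8-bit vs. a 16-bit INTEGER_CST at the same
   position are different operations even when the constant operands
   compare equal; the widths here are just an example.  */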
/* Initialize VNO from the pieces provided.  */

static void
init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
			     enum tree_code code, tree type, tree *ops)
{
  vno->opcode = code;
  vno->length = length;
  vno->type = type;
  memcpy (&vno->op[0], ops, sizeof (tree) * length);
}
/* Return the number of operands for a vn_nary ops structure from STMT.  */

unsigned int
vn_nary_length_from_stmt (gimple *stmt)
{
  switch (gimple_assign_rhs_code (stmt))
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      return 1;

    case BIT_FIELD_REF:
      return 3;

    case CONSTRUCTOR:
      return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));

    default:
      return gimple_num_ops (stmt) - 1;
    }
}
/* Initialize VNO from STMT.  */

static void
init_vn_nary_op_from_stmt (vn_nary_op_t vno, gassign *stmt)
{
  unsigned i;

  vno->opcode = gimple_assign_rhs_code (stmt);
  vno->type = TREE_TYPE (gimple_assign_lhs (stmt));
  switch (vno->opcode)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      vno->length = 1;
      vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      break;

    case BIT_FIELD_REF:
      vno->length = 3;
      vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
      vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
      vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
      break;

    case CONSTRUCTOR:
      vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
      for (i = 0; i < vno->length; ++i)
	vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
      break;

    default:
      gcc_checking_assert (!gimple_assign_single_p (stmt));
      vno->length = gimple_num_ops (stmt) - 1;
      for (i = 0; i < vno->length; ++i)
	vno->op[i] = gimple_op (stmt, i + 1);
    }
}
/* Compute the hashcode for VNO and look for it in the hash table;
   return the resulting value number if it exists in the hash table.
   Return NULL_TREE if it does not exist in the hash table or if the
   result field of the operation is NULL.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

static tree
vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
{
  vn_nary_op_s **slot;

  if (vnresult)
    *vnresult = NULL;

  for (unsigned i = 0; i < vno->length; ++i)
    if (TREE_CODE (vno->op[i]) == SSA_NAME)
      vno->op[i] = SSA_VAL (vno->op[i]);

  vno->hashcode = vn_nary_op_compute_hash (vno);
  slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
  if (!slot)
    return NULL_TREE;
  if (vnresult)
    *vnresult = *slot;
  return (*slot)->predicated_values ? NULL_TREE : (*slot)->u.result;
}
/* Lookup an n-ary operation by its pieces and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the operation
   is NULL.  VNRESULT will contain the vn_nary_op_t from the hashtable
   if it exists.  */

tree
vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
			  tree type, tree *ops, vn_nary_op_t *vnresult)
{
  vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
				  sizeof_vn_nary_op (length));
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  return vn_nary_op_lookup_1 (vno1, vnresult);
}
/* Lookup the rhs of STMT in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  VNRESULT will contain the
   vn_nary_op_t from the hashtable if it exists.  */

tree
vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
{
  vn_nary_op_t vno1
    = XALLOCAVAR (struct vn_nary_op_s,
		  sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
  init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (stmt));
  return vn_nary_op_lookup_1 (vno1, vnresult);
}
/* Allocate a vn_nary_op_t with LENGTH operands on STACK.  */

vn_nary_op_t
alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
{
  return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
}
/* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
   obstack.  */

static vn_nary_op_t
alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
{
  vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length, &vn_tables_obstack);

  vno1->value_id = value_id;
  vno1->length = length;
  vno1->predicated_values = 0;
  vno1->u.result = result;

  return vno1;
}
/* Insert VNO into TABLE.  */

static vn_nary_op_t
vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table)
{
  vn_nary_op_s **slot;

  gcc_assert (! vno->predicated_values
	      || (! vno->u.values->next
		  && vno->u.values->n == 1));

  for (unsigned i = 0; i < vno->length; ++i)
    if (TREE_CODE (vno->op[i]) == SSA_NAME)
      vno->op[i] = SSA_VAL (vno->op[i]);

  vno->hashcode = vn_nary_op_compute_hash (vno);
  slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
  vno->unwind_to = *slot;
  if (*slot)
    {
      /* Prefer non-predicated values.
	 ??? Only if those are constant, otherwise, with constant predicated
	 value, turn them into predicated values with entry-block validity
	 (??? but we always find the first valid result currently).  */
      if ((*slot)->predicated_values
	  && ! vno->predicated_values)
	{
	  /* ??? We cannot remove *slot from the unwind stack list.
	     For the moment we deal with this by skipping not found
	     entries but this isn't ideal ...  */
	  *slot = vno;
	  /* ??? Maintain a stack of states we can unwind in
	     vn_nary_op_s?  But how far do we unwind?  In reality
	     we need to push change records somewhere...  Or not
	     unwind vn_nary_op_s and linking them but instead
	     unwind the results "list", linking that, which also
	     doesn't move on hashtable resize.  */
	  /* We can also have a ->unwind_to recording *slot there.
	     That way we can make u.values a fixed size array with
	     recording the number of entries but of course we then
	     have always N copies for each unwind_to-state.  Or we
	     make sure to only ever append and each unwinding will
	     pop off one entry (but how to deal with predicated
	     replaced with non-predicated here?)  */
	  vno->next = last_inserted_nary;
	  last_inserted_nary = vno;
	  return vno;
	}
      else if (vno->predicated_values
	       && ! (*slot)->predicated_values)
	return *slot;
      else if (vno->predicated_values
	       && (*slot)->predicated_values)
	{
	  /* ??? Factor this all into an insert_single_predicated_value
	     routine.  */
	  gcc_assert (!vno->u.values->next && vno->u.values->n == 1);
	  basic_block vno_bb
	    = BASIC_BLOCK_FOR_FN (cfun, vno->u.values->valid_dominated_by_p[0]);
	  vn_pval *nval = vno->u.values;
	  vn_pval **next = &vno->u.values;
	  bool found = false;
	  for (vn_pval *val = (*slot)->u.values; val; val = val->next)
	    {
	      if (expressions_equal_p (val->result, nval->result))
		{
		  found = true;
		  for (unsigned i = 0; i < val->n; ++i)
		    {
		      basic_block val_bb
			= BASIC_BLOCK_FOR_FN (cfun,
					      val->valid_dominated_by_p[i]);
		      if (dominated_by_p (CDI_DOMINATORS, vno_bb, val_bb))
			/* Value registered with more generic predicate.  */
			return *slot;
		      else if (flag_checking)
			/* Shouldn't happen, we insert in RPO order.  */
			gcc_assert (!dominated_by_p (CDI_DOMINATORS,
						     val_bb, vno_bb));
		    }
		  /* Append value.  */
		  *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
						     sizeof (vn_pval)
						     + val->n * sizeof (int));
		  (*next)->next = NULL;
		  (*next)->result = val->result;
		  (*next)->n = val->n + 1;
		  memcpy ((*next)->valid_dominated_by_p,
			  val->valid_dominated_by_p,
			  val->n * sizeof (int));
		  (*next)->valid_dominated_by_p[val->n] = vno_bb->index;
		  next = &(*next)->next;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "Appending predicate to value.\n");
		  continue;
		}
	      /* Copy other predicated values.  */
	      *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
						 sizeof (vn_pval)
						 + (val->n-1) * sizeof (int));
	      memcpy (*next, val, sizeof (vn_pval) + (val->n-1) * sizeof (int));
	      (*next)->next = NULL;
	      next = &(*next)->next;
	    }
	  if (!found)
	    *next = nval;

	  *slot = vno;
	  vno->next = last_inserted_nary;
	  last_inserted_nary = vno;
	  return vno;
	}

      /* While we do not want to insert things twice it's awkward to
	 avoid it in the case where visit_nary_op pattern-matches stuff
	 and ends up simplifying the replacement to itself.  We then
	 get two inserts, one from visit_nary_op and one from
	 vn_nary_build_or_lookup.
	 So allow inserts with the same value number.  */
      if ((*slot)->u.result == vno->u.result)
	return *slot;
    }

  /* ??? There's also optimistic vs. previous committed state merging
     that is problematic for the case of unwinding.  */

  /* ??? We should return NULL if we do not use 'vno' and have the
     caller release it.  */
  gcc_assert (!*slot);

  *slot = vno;
  vno->next = last_inserted_nary;
  last_inserted_nary = vno;
  return vno;
}
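/* Illustrative note: for predicated values the u.values list merged
   above forms a chain of one vn_pval per distinct result, each with the
   indices of the blocks its predicate dominates, e.g. hypothetically

     { result = true_val,  valid_dominated_by_p = { 5, 7 } }
       -> { result = false_val, valid_dominated_by_p = { 6 } }

   where 5, 6 and 7 are example basic-block indices.  */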
/* Insert an n-ary operation into the current hash table using its
   pieces.  Return the vn_nary_op_t structure we created and put in
   the hashtable.  */

vn_nary_op_t
vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
			  tree type, tree *ops,
			  tree result, unsigned int value_id)
{
  vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  return vn_nary_op_insert_into (vno1, valid_info->nary);
}
/* Return whether we can track a predicate valid when PRED_E is executed.  */

static bool
can_track_predicate_on_edge (edge pred_e)
{
  /* ??? As we are currently recording the destination basic-block index in
     vn_pval.valid_dominated_by_p and using dominance for the
     validity check we cannot track predicates on all edges.  */
  if (single_pred_p (pred_e->dest))
    return true;
  /* Never record for backedges.  */
  if (pred_e->flags & EDGE_DFS_BACK)
    return false;
  /* When there's more than one predecessor we cannot track
     predicate validity based on the destination block.  The
     exception is when all other incoming edges sources are
     dominated by the destination block.  */
  edge_iterator ei;
  edge e;
  FOR_EACH_EDGE (e, ei, pred_e->dest->preds)
    if (e != pred_e && ! dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
      return false;
  return true;
}
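/* Illustrative note: in a hypothetical diamond

	  bb1
	 /   \
       bb2   bb3
	 \   /
	  bb4

   a predicate on the edge bb2->bb4 cannot be tracked since bb4 has
   another predecessor, bb3, whose source is not dominated by bb4; for
   the edge bb1->bb2 it can, as bb2 has a single predecessor.  */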
static vn_nary_op_t
vn_nary_op_insert_pieces_predicated (unsigned int length, enum tree_code code,
				     tree type, tree *ops,
				     tree result, unsigned int value_id,
				     edge pred_e)
{
  gcc_assert (can_track_predicate_on_edge (pred_e));

  if (dump_file && (dump_flags & TDF_DETAILS)
      /* ??? Fix dumping, but currently we only get comparisons.  */
      && TREE_CODE_CLASS (code) == tcc_comparison)
    {
      fprintf (dump_file, "Recording on edge %d->%d ", pred_e->src->index,
	       pred_e->dest->index);
      print_generic_expr (dump_file, ops[0], TDF_SLIM);
      fprintf (dump_file, " %s ", get_tree_code_name (code));
      print_generic_expr (dump_file, ops[1], TDF_SLIM);
      fprintf (dump_file, " == %s\n",
	       integer_zerop (result) ? "false" : "true");
    }
  vn_nary_op_t vno1 = alloc_vn_nary_op (length, NULL_TREE, value_id);
  init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
  vno1->predicated_values = 1;
  vno1->u.values = (vn_pval *) obstack_alloc (&vn_tables_obstack,
					      sizeof (vn_pval));
  vno1->u.values->next = NULL;
  vno1->u.values->result = result;
  vno1->u.values->n = 1;
  vno1->u.values->valid_dominated_by_p[0] = pred_e->dest->index;
  return vn_nary_op_insert_into (vno1, valid_info->nary);
}
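/* Illustrative note: for a hypothetical condition  if (x_1 != 0)  the
   caller typically records the n-ary operation  x_1 != 0  twice via this
   function, with value true valid in blocks dominated by the true
   edge's destination and value false for the false edge, so later uses
   of the condition can be folded by the predicated-value lookup.  */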
static bool
dominated_by_p_w_unex (basic_block bb1, basic_block bb2, bool);

static tree
vn_nary_op_get_predicated_value (vn_nary_op_t vno, basic_block bb,
				 edge e = NULL)
{
  if (! vno->predicated_values)
    return vno->u.result;
  for (vn_pval *val = vno->u.values; val; val = val->next)
    for (unsigned i = 0; i < val->n; ++i)
      {
	basic_block cand
	  = BASIC_BLOCK_FOR_FN (cfun, val->valid_dominated_by_p[i]);
	/* Do not handle backedge executability optimistically since
	   when figuring out whether to iterate we do not consider
	   changed predication.
	   When asking for predicated values on an edge avoid looking
	   at edge executability for edges forward in our iteration
	   as well.  */
	if (e
	    && (e->flags & EDGE_DFS_BACK))
	  {
	    if (dominated_by_p (CDI_DOMINATORS, bb, cand))
	      return val->result;
	  }
	else if (dominated_by_p_w_unex (bb, cand, false))
	  return val->result;
      }
  return NULL_TREE;
}

static tree
vn_nary_op_get_predicated_value (vn_nary_op_t vno, edge e)
{
  return vn_nary_op_get_predicated_value (vno, e->src, e);
}
/* Insert the rhs of STMT into the current hash table with a value number of
   RESULT.  */

static vn_nary_op_t
vn_nary_op_insert_stmt (gimple *stmt, tree result)
{
  vn_nary_op_t vno1
    = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
			result, VN_INFO (result)->value_id);
  init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (stmt));
  return vn_nary_op_insert_into (vno1, valid_info->nary);
}
/* Compute a hashcode for PHI operation VP1 and return it.  */

static inline hashval_t
vn_phi_compute_hash (vn_phi_t vp1)
{
  inchash::hash hstate;
  tree phi1op;
  tree type;
  edge e;
  edge_iterator ei;

  hstate.add_int (EDGE_COUNT (vp1->block->preds));
  switch (EDGE_COUNT (vp1->block->preds))
    {
    case 1:
      break;
    case 2:
      /* When this is a PHI node subject to CSE for different blocks
	 avoid hashing the block index.  */
      if (vp1->cclhs)
	break;
      /* Fallthru.  */
    default:
      hstate.add_int (vp1->block->index);
    }

  /* If all PHI arguments are constants we need to distinguish
     the PHI node via its type.  */
  type = vp1->type;
  hstate.merge_hash (vn_hash_type (type));

  FOR_EACH_EDGE (e, ei, vp1->block->preds)
    {
      /* Don't hash backedge values, they need to be handled as VN_TOP
	 for optimistic value-numbering.  */
      if (e->flags & EDGE_DFS_BACK)
	continue;

      phi1op = vp1->phiargs[e->dest_idx];
      if (phi1op == VN_TOP)
	continue;
      inchash::add_expr (phi1op, hstate);
    }

  return hstate.end ();
}
/* Return true if COND1 and COND2 represent the same condition, set
   *INVERTED_P if one needs to be inverted to make it the same as
   the other.  */

static bool
cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
		    gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
{
  enum tree_code code1 = gimple_cond_code (cond1);
  enum tree_code code2 = gimple_cond_code (cond2);

  *inverted_p = false;
  if (code1 == code2)
    ;
  else if (code1 == swap_tree_comparison (code2))
    std::swap (lhs2, rhs2);
  else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
    *inverted_p = true;
  else if (code1 == invert_tree_comparison
	     (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
    {
      std::swap (lhs2, rhs2);
      *inverted_p = true;
    }
  else
    return false;

  return ((expressions_equal_p (lhs1, lhs2)
	   && expressions_equal_p (rhs1, rhs2))
	  || (commutative_tree_code (code1)
	      && expressions_equal_p (lhs1, rhs2)
	      && expressions_equal_p (rhs1, lhs2)));
}
/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
{
  if (vp1->hashcode != vp2->hashcode)
    return false;

  if (vp1->block != vp2->block)
    {
      if (EDGE_COUNT (vp1->block->preds) != EDGE_COUNT (vp2->block->preds))
	return false;

      switch (EDGE_COUNT (vp1->block->preds))
	{
	case 1:
	  /* Single-arg PHIs are just copies.  */
	  break;

	case 2:
	  {
	    /* Make sure both PHIs are classified as CSEable.  */
	    if (! vp1->cclhs || ! vp2->cclhs)
	      return false;

	    /* Rule out backedges into the PHI.  */
	    gcc_checking_assert
	      (vp1->block->loop_father->header != vp1->block
	       && vp2->block->loop_father->header != vp2->block);

	    /* If the PHI nodes do not have compatible types
	       they are not the same.  */
	    if (!types_compatible_p (vp1->type, vp2->type))
	      return false;

	    /* If the immediate dominators end in switch stmts multiple
	       values may end up in the same PHI arg via intermediate
	       CFG merges.  */
	    basic_block idom1
	      = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
	    basic_block idom2
	      = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
	    gcc_checking_assert (EDGE_COUNT (idom1->succs) == 2
				 && EDGE_COUNT (idom2->succs) == 2);

	    /* Verify the controlling stmt is the same.  */
	    gcond *last1 = as_a <gcond *> (*gsi_last_bb (idom1));
	    gcond *last2 = as_a <gcond *> (*gsi_last_bb (idom2));
	    bool inverted_p;
	    if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
				      last2, vp2->cclhs, vp2->ccrhs,
				      &inverted_p))
	      return false;

	    /* Get at true/false controlled edges into the PHI.  */
	    edge te1, te2, fe1, fe2;
	    if (! extract_true_false_controlled_edges (idom1, vp1->block,
						       &te1, &fe1)
		|| ! extract_true_false_controlled_edges (idom2, vp2->block,
							  &te2, &fe2))
	      return false;

	    /* Swap edges if the second condition is the inverse of the
	       first.  */
	    if (inverted_p)
	      std::swap (te2, fe2);

	    /* Since we do not know which edge will be executed we have
	       to be careful when matching VN_TOP.  Be conservative and
	       only match VN_TOP == VN_TOP for now, we could allow
	       VN_TOP on the not prevailing PHI though.  See for example
	       PR81181.  */
	    if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
				       vp2->phiargs[te2->dest_idx], false)
		|| ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
					  vp2->phiargs[fe2->dest_idx], false))
	      return false;

	    return true;
	  }

	default:
	  return false;
	}
    }

  /* If the PHI nodes do not have compatible types
     they are not the same.  */
  if (!types_compatible_p (vp1->type, vp2->type))
    return false;

  /* Any phi in the same block will have its arguments in the
     same edge order, because of how we store phi nodes.  */
  unsigned nargs = EDGE_COUNT (vp1->block->preds);
  for (unsigned i = 0; i < nargs; ++i)
    {
      tree phi1op = vp1->phiargs[i];
      tree phi2op = vp2->phiargs[i];
      if (phi1op == phi2op)
	continue;
      if (!expressions_equal_p (phi1op, phi2op, false))
	return false;
    }

  return true;
}
/* Lookup PHI in the current hash table, and return the resulting
   value number if it exists in the hash table.  Return NULL_TREE if
   it does not exist in the hash table.  */

static tree
vn_phi_lookup (gimple *phi, bool backedges_varying_p)
{
  vn_phi_s **slot;
  struct vn_phi_s *vp1;
  edge e;
  edge_iterator ei;

  vp1 = XALLOCAVAR (struct vn_phi_s,
		    sizeof (struct vn_phi_s)
		    + (gimple_phi_num_args (phi) - 1) * sizeof (tree));

  /* Canonicalize the SSA_NAME's to their value number.  */
  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
    {
      tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      if (TREE_CODE (def) == SSA_NAME
	  && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
	{
	  if (!virtual_operand_p (def)
	      && ssa_undefined_value_p (def, false))
	    def = VN_TOP;
	  else
	    def = SSA_VAL (def);
	}
      vp1->phiargs[e->dest_idx] = def;
    }
  vp1->type = TREE_TYPE (gimple_phi_result (phi));
  vp1->block = gimple_bb (phi);
  /* Extract values of the controlling condition.  */
  vp1->cclhs = NULL_TREE;
  vp1->ccrhs = NULL_TREE;
  if (EDGE_COUNT (vp1->block->preds) == 2
      && vp1->block->loop_father->header != vp1->block)
    {
      basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
      if (EDGE_COUNT (idom1->succs) == 2)
	if (gcond *last1 = safe_dyn_cast <gcond *> (*gsi_last_bb (idom1)))
	  {
	    /* ??? We want to use SSA_VAL here.  But possibly not
	       allow VN_TOP.  */
	    vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
	    vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
	  }
    }
  vp1->hashcode = vn_phi_compute_hash (vp1);
  slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, NO_INSERT);
  if (!slot)
    return NULL_TREE;
  return (*slot)->result;
}
/* Insert PHI into the current hash table with a value number of
   RESULT.  */

static vn_phi_t
vn_phi_insert (gimple *phi, tree result, bool backedges_varying_p)
{
  vn_phi_s **slot;
  vn_phi_t vp1 = (vn_phi_t) obstack_alloc (&vn_tables_obstack,
					   sizeof (vn_phi_s)
					   + ((gimple_phi_num_args (phi) - 1)
					      * sizeof (tree)));
  edge e;
  edge_iterator ei;

  /* Canonicalize the SSA_NAME's to their value number.  */
  FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
    {
      tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
      if (TREE_CODE (def) == SSA_NAME
	  && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
	{
	  if (!virtual_operand_p (def)
	      && ssa_undefined_value_p (def, false))
	    def = VN_TOP;
	  else
	    def = SSA_VAL (def);
	}
      vp1->phiargs[e->dest_idx] = def;
    }
  vp1->value_id = VN_INFO (result)->value_id;
  vp1->type = TREE_TYPE (gimple_phi_result (phi));
  vp1->block = gimple_bb (phi);
  /* Extract values of the controlling condition.  */
  vp1->cclhs = NULL_TREE;
  vp1->ccrhs = NULL_TREE;
  if (EDGE_COUNT (vp1->block->preds) == 2
      && vp1->block->loop_father->header != vp1->block)
    {
      basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
      if (EDGE_COUNT (idom1->succs) == 2)
	if (gcond *last1 = safe_dyn_cast <gcond *> (*gsi_last_bb (idom1)))
	  {
	    /* ??? We want to use SSA_VAL here.  But possibly not
	       allow VN_TOP.  */
	    vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
	    vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
	  }
    }
  vp1->result = result;
  vp1->hashcode = vn_phi_compute_hash (vp1);

  slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
  gcc_assert (!*slot);

  *slot = vp1;
  vp1->next = last_inserted_phi;
  last_inserted_phi = vp1;
  return vp1;
}
/* Return true if BB1 is dominated by BB2 taking into account edges
   that are not executable.  When ALLOW_BACK is false consider not
   executable backedges as executable.  */

static bool
dominated_by_p_w_unex (basic_block bb1, basic_block bb2, bool allow_back)
{
  edge_iterator ei;
  edge e;

  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
    return true;

  /* Before iterating we'd like to know if there exists a
     (executable) path from bb2 to bb1 at all, if not we can
     directly return false.  For now simply iterate once.  */

  /* Iterate to the single executable bb1 predecessor.  */
  if (EDGE_COUNT (bb1->preds) > 1)
    {
      edge prede = NULL;
      FOR_EACH_EDGE (e, ei, bb1->preds)
	if ((e->flags & EDGE_EXECUTABLE)
	    || (!allow_back && (e->flags & EDGE_DFS_BACK)))
	  {
	    if (prede)
	      {
		prede = NULL;
		break;
	      }
	    prede = e;
	  }
      if (prede)
	{
	  bb1 = prede->src;

	  /* Re-do the dominance check with changed bb1.  */
	  if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
	    return true;
	}
    }

  /* Iterate to the single executable bb2 successor.  */
  if (EDGE_COUNT (bb2->succs) > 1)
    {
      edge succe = NULL;
      FOR_EACH_EDGE (e, ei, bb2->succs)
	if ((e->flags & EDGE_EXECUTABLE)
	    || (!allow_back && (e->flags & EDGE_DFS_BACK)))
	  {
	    if (succe)
	      {
		succe = NULL;
		break;
	      }
	    succe = e;
	  }
      if (succe)
	{
	  /* Verify the reached block is only reached through succe.
	     If there is only one edge we can spare us the dominator
	     check and iterate directly.  */
	  if (EDGE_COUNT (succe->dest->preds) > 1)
	    {
	      FOR_EACH_EDGE (e, ei, succe->dest->preds)
		if (e != succe
		    && ((e->flags & EDGE_EXECUTABLE)
			|| (!allow_back && (e->flags & EDGE_DFS_BACK))))
		  {
		    succe = NULL;
		    break;
		  }
	    }
	  if (succe)
	    {
	      bb2 = succe->dest;

	      /* Re-do the dominance check with changed bb2.  */
	      if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
		return true;
	    }
	}
    }

  /* We could now iterate updating bb1 / bb2.  */
  return false;
}
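/* Illustrative note: in a hypothetical CFG where bb1's only other
   incoming edge has been marked !EDGE_EXECUTABLE during iteration, the
   walk above can treat bb1 as dominated by bb2 even though the plain
   dominator tree, which is not recomputed as edges become
   non-executable, says otherwise.  */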
/* Set the value number of FROM to TO, return true if it has changed
   as a result.  */

static inline bool
set_ssa_val_to (tree from, tree to)
{
  vn_ssa_aux_t from_info = VN_INFO (from);
  tree currval = from_info->valnum; // SSA_VAL (from)
  poly_int64 toff, coff;
  bool curr_undefined = false;
  bool curr_invariant = false;

  /* The only thing we allow as value numbers are ssa_names
     and invariants.  So assert that here.  We don't allow VN_TOP
     as visiting a stmt should produce a value-number other than
     that.
     ??? Still VN_TOP can happen for unreachable code, so force
     it to varying in that case.  Not all code is prepared to
     get VN_TOP on valueization.  */
  if (to == VN_TOP)
    {
      /* ??? When iterating and visiting PHI <undef, backedge-value>
	 for the first time we rightfully get VN_TOP and we need to
	 preserve that to optimize for example gcc.dg/tree-ssa/ssa-sccvn-2.c.
	 With SCCVN we were simply lucky we iterated the other PHI
	 cycles first and thus visited the backedge-value DEF.  */
      if (currval == VN_TOP)
	return false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Forcing value number to varying on "
		 "receiving VN_TOP\n");
      to = from;
    }

  gcc_checking_assert (to != NULL_TREE
		       && ((TREE_CODE (to) == SSA_NAME
			    && (to == from || SSA_VAL (to) == to))
			   || is_gimple_min_invariant (to)));

  if (from != to)
    {
      if (currval == from)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Not changing value number of ");
	      print_generic_expr (dump_file, from);
	      fprintf (dump_file, " from VARYING to ");
	      print_generic_expr (dump_file, to);
	      fprintf (dump_file, "\n");
	    }
	  return false;
	}
      curr_invariant = is_gimple_min_invariant (currval);
      curr_undefined = (TREE_CODE (currval) == SSA_NAME
			&& !virtual_operand_p (currval)
			&& ssa_undefined_value_p (currval, false));
      if (currval != VN_TOP
	  && !curr_invariant
	  && !curr_undefined
	  && is_gimple_min_invariant (to))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Forcing VARYING instead of changing "
		       "value number of ");
	      print_generic_expr (dump_file, from);
	      fprintf (dump_file, " from ");
	      print_generic_expr (dump_file, currval);
	      fprintf (dump_file, " (non-constant) to ");
	      print_generic_expr (dump_file, to);
	      fprintf (dump_file, " (constant)\n");
	    }
	  to = from;
	}
      else if (currval != VN_TOP
	       && !curr_undefined
	       && TREE_CODE (to) == SSA_NAME
	       && !virtual_operand_p (to)
	       && ssa_undefined_value_p (to, false))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Forcing VARYING instead of changing "
		       "value number of ");
	      print_generic_expr (dump_file, from);
	      fprintf (dump_file, " from ");
	      print_generic_expr (dump_file, currval);
	      fprintf (dump_file, " (non-undefined) to ");
	      print_generic_expr (dump_file, to);
	      fprintf (dump_file, " (undefined)\n");
	    }
	  to = from;
	}
      else if (TREE_CODE (to) == SSA_NAME
	       && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
	to = from;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Setting value number of ");
      print_generic_expr (dump_file, from);
      fprintf (dump_file, " to ");
      print_generic_expr (dump_file, to);
    }

  if (currval != to
      && !operand_equal_p (currval, to, 0)
      /* Different undefined SSA names are not actually different.  See
	 PR82320 for a testcase where we'd otherwise not terminate iteration.  */
      && !(curr_undefined
	   && TREE_CODE (to) == SSA_NAME
	   && !virtual_operand_p (to)
	   && ssa_undefined_value_p (to, false))
      /* ??? For addresses involving volatile objects or types operand_equal_p
	 does not reliably detect ADDR_EXPRs as equal.  We know we are only
	 getting invariant gimple addresses here, so can use
	 get_addr_base_and_unit_offset to do this comparison.  */
      && !(TREE_CODE (currval) == ADDR_EXPR
	   && TREE_CODE (to) == ADDR_EXPR
	   && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
	       == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
	   && known_eq (coff, toff)))
    {
      if (to != from
	  && currval != VN_TOP
	  && !curr_undefined
	  /* We do not want to allow lattice transitions from one value
	     to another since that may lead to not terminating iteration
	     (see PR95049).  Since there's no convenient way to check
	     for the allowed transition of VAL -> PHI (loop entry value,
	     same on two PHIs, to same PHI result) we restrict the check
	     to invariants.  */
	  && curr_invariant
	  && is_gimple_min_invariant (to))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, " forced VARYING");
	  to = from;
	}
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " (changed)\n");
      from_info->valnum = to;
      return true;
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n");
  return false;
}
/* Set all definitions in STMT to value number themselves.
   Return true if a value number changed.  */

static bool
defs_to_varying (gimple *stmt)
{
  bool changed = false;
  ssa_op_iter iter;
  def_operand_p defp;

  FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
    {
      tree def = DEF_FROM_PTR (defp);
      changed |= set_ssa_val_to (def, def);
    }
  return changed;
}
/* Visit a copy between LHS and RHS, return true if the value number
   changed.  */

static bool
visit_copy (tree lhs, tree rhs)
{
  /* Valueize.  */
  rhs = SSA_VAL (rhs);

  return set_ssa_val_to (lhs, rhs);
}
/* Lookup a value for OP in type WIDE_TYPE where the value in the type of OP
   is the same.  */

static tree
valueized_wider_op (tree wide_type, tree op, bool allow_truncate)
{
  if (TREE_CODE (op) == SSA_NAME)
    op = vn_valueize (op);

  /* Either we have the op widened available.  */
  tree ops[3] = {};
  ops[0] = op;
  tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
				       wide_type, ops, NULL);
  if (tem)
    return tem;

  /* Or the op is truncated from some existing value.  */
  if (allow_truncate && TREE_CODE (op) == SSA_NAME)
    {
      gimple *def = SSA_NAME_DEF_STMT (op);
      if (is_gimple_assign (def)
	  && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
	{
	  tem = gimple_assign_rhs1 (def);
	  if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
	    {
	      if (TREE_CODE (tem) == SSA_NAME)
		tem = vn_valueize (tem);
	      return tem;
	    }
	}
    }

  /* For constants simply extend it.  */
  if (TREE_CODE (op) == INTEGER_CST)
    return wide_int_to_tree (wide_type, wi::to_widest (op));

  return NULL_TREE;
}
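/* Illustrative note: for a hypothetical  short s_1  and WIDE_TYPE int,
   this returns an already value-numbered  (int) s_1  if one exists, the
   wide source of a truncation when ALLOW_TRUNCATE, or simply the
   extended constant when OP is an INTEGER_CST.  */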
/* Visit a nary operator RHS, value number it, and return true if the
   value number of LHS has changed as a result.  */

static bool
visit_nary_op (tree lhs, gassign *stmt)
{
  vn_nary_op_t vnresult;
  tree result = vn_nary_op_lookup_stmt (stmt, &vnresult);
  if (! result && vnresult)
    result = vn_nary_op_get_predicated_value (vnresult, gimple_bb (stmt));
  if (result)
    return set_ssa_val_to (lhs, result);

  /* Do some special pattern matching for redundancies of operations
     in different types.  */
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree type = TREE_TYPE (lhs);
  tree rhs1 = gimple_assign_rhs1 (stmt);
  switch (code)
    {
    CASE_CONVERT:
      /* Match arithmetic done in a different type where we can easily
	 substitute the result from some earlier sign-changed or widened
	 operation.  */
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (rhs1) == SSA_NAME
	  /* We only handle sign-changes, zero-extension -> & mask or
	     sign-extension if we know the inner operation doesn't
	     overflow.  */
	  && (((TYPE_UNSIGNED (TREE_TYPE (rhs1))
		|| (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
		    && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
	       && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
	      || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
	{
	  gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
	  if (def
	      && (gimple_assign_rhs_code (def) == PLUS_EXPR
		  || gimple_assign_rhs_code (def) == MINUS_EXPR
		  || gimple_assign_rhs_code (def) == MULT_EXPR))
	    {
	      tree ops[3] = {};
	      /* When requiring a sign-extension we cannot model a
		 previous truncation with a single op so don't bother.  */
	      bool allow_truncate = TYPE_UNSIGNED (TREE_TYPE (rhs1));
	      /* Either we have the op widened available.  */
	      ops[0] = valueized_wider_op (type, gimple_assign_rhs1 (def),
					   allow_truncate);
	      if (ops[0])
		ops[1] = valueized_wider_op (type, gimple_assign_rhs2 (def),
					     allow_truncate);
	      if (ops[0] && ops[1])
		{
		  ops[0] = vn_nary_op_lookup_pieces
		      (2, gimple_assign_rhs_code (def), type, ops, NULL);
		  /* We have wider operation available.  */
		  if (ops[0]
		      /* If the leader is a wrapping operation we can
			 insert it for code hoisting w/o introducing
			 undefined overflow.  If it is not it has to
			 be available.  See PR86554.  */
		      && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (ops[0]))
			  || (rpo_avail && vn_context_bb
			      && rpo_avail->eliminate_avail (vn_context_bb,
							     ops[0]))))
		    {
		      unsigned lhs_prec = TYPE_PRECISION (type);
		      unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
		      if (lhs_prec == rhs_prec
			  || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
			      && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
			{
			  gimple_match_op match_op (gimple_match_cond::UNCOND,
						    NOP_EXPR, type, ops[0]);
			  result = vn_nary_build_or_lookup (&match_op);
			  if (result)
			    {
			      bool changed = set_ssa_val_to (lhs, result);
			      vn_nary_op_insert_stmt (stmt, result);
			      return changed;
			    }
			}
		      else
			{
			  tree mask = wide_int_to_tree
			    (type, wi::mask (rhs_prec, false, lhs_prec));
			  gimple_match_op match_op (gimple_match_cond::UNCOND,
						    BIT_AND_EXPR,
						    TREE_TYPE (lhs),
						    ops[0], mask);
			  result = vn_nary_build_or_lookup (&match_op);
			  if (result)
			    {
			      bool changed = set_ssa_val_to (lhs, result);
			      vn_nary_op_insert_stmt (stmt, result);
			      return changed;
			    }
			}
		    }
		}
	    }
	}
      break;
    case BIT_AND_EXPR:
      if (INTEGRAL_TYPE_P (type)
	  && TREE_CODE (rhs1) == SSA_NAME
	  && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST
	  && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)
	  && default_vn_walk_kind != VN_NOWALK
	  && CHAR_BIT == 8
	  && BITS_PER_UNIT == 8
	  && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
	  && TYPE_PRECISION (type) <= vn_walk_cb_data::bufsize * BITS_PER_UNIT
	  && !integer_all_onesp (gimple_assign_rhs2 (stmt))
	  && !integer_zerop (gimple_assign_rhs2 (stmt)))
	{
	  gassign *ass = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
	  if (ass
	      && !gimple_has_volatile_ops (ass)
	      && vn_get_stmt_kind (ass) == VN_REFERENCE)
	    {
	      tree last_vuse = gimple_vuse (ass);
	      tree op = gimple_assign_rhs1 (ass);
	      tree result = vn_reference_lookup (op, gimple_vuse (ass),
						 default_vn_walk_kind,
						 NULL, true, &last_vuse,
						 gimple_assign_rhs2 (stmt));
	      if (result
		  && useless_type_conversion_p (TREE_TYPE (result),
						TREE_TYPE (op)))
		return set_ssa_val_to (lhs, result);
	    }
	}
      break;
    case TRUNC_DIV_EXPR:
      if (TYPE_UNSIGNED (type))
	break;
      /* Fallthru.  */
    case RDIV_EXPR:
    case MULT_EXPR:
      /* Match up ([-]a){/,*}([-])b with v=a{/,*}b, replacing it with -v.  */
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (type))
	{
	  tree rhs[2];
	  rhs[0] = rhs1;
	  rhs[1] = gimple_assign_rhs2 (stmt);
	  for (unsigned i = 0; i <= 1; ++i)
	    {
	      unsigned j = i == 0 ? 1 : 0;
	      tree ops[2];
	      gimple_match_op match_op (gimple_match_cond::UNCOND,
					NEGATE_EXPR, type, rhs[i]);
	      ops[i] = vn_nary_build_or_lookup_1 (&match_op, false, true);
	      ops[j] = rhs[j];
	      if (ops[i]
		  && (ops[0] = vn_nary_op_lookup_pieces (2, code,
							 type, ops, NULL)))
		{
		  gimple_match_op match_op (gimple_match_cond::UNCOND,
					    NEGATE_EXPR, type, ops[0]);
		  result = vn_nary_build_or_lookup_1 (&match_op, true, false);
		  if (result)
		    {
		      bool changed = set_ssa_val_to (lhs, result);
		      vn_nary_op_insert_stmt (stmt, result);
		      return changed;
		    }
		}
	    }
	}
      break;
    case LSHIFT_EXPR:
      /* For X << C, use the value number of X * (1 << C).  */
      if (INTEGRAL_TYPE_P (type)
	  && TYPE_OVERFLOW_WRAPS (type)
	  && !TYPE_SATURATING (type))
	{
	  tree rhs2 = gimple_assign_rhs2 (stmt);
	  if (TREE_CODE (rhs2) == INTEGER_CST
	      && tree_fits_uhwi_p (rhs2)
	      && tree_to_uhwi (rhs2) < TYPE_PRECISION (type))
	    {
	      wide_int w = wi::set_bit_in_zero (tree_to_uhwi (rhs2),
						TYPE_PRECISION (type));
	      gimple_match_op match_op (gimple_match_cond::UNCOND,
					MULT_EXPR, type, rhs1,
					wide_int_to_tree (type, w));
	      result = vn_nary_build_or_lookup (&match_op);
	      if (result)
		{
		  bool changed = set_ssa_val_to (lhs, result);
		  if (TREE_CODE (result) == SSA_NAME)
		    vn_nary_op_insert_stmt (stmt, result);
		  return changed;
		}
	    }
	}
      break;
    default:
      break;
    }

  bool changed = set_ssa_val_to (lhs, lhs);
  vn_nary_op_insert_stmt (stmt, lhs);
  return changed;
}
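/* Illustrative note: the LSHIFT_EXPR case above means a hypothetical
   x_1 << 3  receives the value number of  x_1 * 8  when the
   multiplication was value-numbered first, so both forms CSE to one
   value.  */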
/* Visit a call STMT storing into LHS.  Return true if the value number
   of the LHS has changed as a result.  */

static bool
visit_reference_op_call (tree lhs, gcall *stmt)
{
  bool changed = false;
  struct vn_reference_s vr1;
  vn_reference_t vnresult = NULL;
  tree vdef = gimple_vdef (stmt);
  modref_summary *summary;

  /* Non-ssa lhs is handled in copy_reference_ops_from_call.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    lhs = NULL_TREE;

  vn_reference_lookup_call (stmt, &vnresult, &vr1);

  /* If the lookup did not succeed for pure functions try to use
     modref info to find a candidate to CSE to.  */
  const unsigned accesses_limit = 8;
  if (!vnresult
      && !vdef
      && lhs
      && gimple_vuse (stmt)
      && (((summary = get_modref_function_summary (stmt, NULL))
	   && !summary->global_memory_read
	   && summary->load_accesses < accesses_limit)
	  || gimple_call_flags (stmt) & ECF_CONST))
    {
      /* First search if we can do something useful and build a
	 vector of all loads we have to check.  */
      bool unknown_memory_access = false;
      auto_vec<ao_ref, accesses_limit> accesses;
      unsigned load_accesses = summary ? summary->load_accesses : 0;
      if (!unknown_memory_access)
	/* Add loads done as part of setting up the call arguments.
	   That's also necessary for CONST functions which will
	   not have a modref summary.  */
	for (unsigned i = 0; i < gimple_call_num_args (stmt); ++i)
	  {
	    tree arg = gimple_call_arg (stmt, i);
	    if (TREE_CODE (arg) != SSA_NAME
		&& !is_gimple_min_invariant (arg))
	      {
		if (accesses.length () >= accesses_limit - load_accesses)
		  {
		    unknown_memory_access = true;
		    break;
		  }
		accesses.quick_grow (accesses.length () + 1);
		ao_ref_init (&accesses.last (), arg);
	      }
	  }
      if (summary && !unknown_memory_access)
	{
	  /* Add loads as analyzed by IPA modref.  */
	  for (auto base_node : summary->loads->bases)
	    if (unknown_memory_access)
	      break;
	    else for (auto ref_node : base_node->refs)
	      if (unknown_memory_access)
		break;
	      else for (auto access_node : ref_node->accesses)
		{
		  accesses.quick_grow (accesses.length () + 1);
		  ao_ref *r = &accesses.last ();
		  if (!access_node.get_ao_ref (stmt, r))
		    {
		      /* Initialize a ref based on the argument and
			 unknown offset if possible.  */
		      tree arg = access_node.get_call_arg (stmt);
		      if (arg && TREE_CODE (arg) == SSA_NAME)
			arg = SSA_VAL (arg);
		      if (arg
			  && TREE_CODE (arg) == ADDR_EXPR
			  && (arg = get_base_address (arg))
			  && DECL_P (arg))
			{
			  ao_ref_init (r, arg);
			  r->ref = NULL_TREE;
			  r->base = arg;
			}
		      else
			{
			  unknown_memory_access = true;
			  break;
			}
		    }
		  r->base_alias_set = base_node->base;
		  r->ref_alias_set = ref_node->ref;
		}
	}

      /* Walk the VUSE->VDEF chain optimistically trying to find an entry
	 for the call in the hashtable.  */
      unsigned limit = (unknown_memory_access
			? 0
			: (param_sccvn_max_alias_queries_per_access
			   / (accesses.length () + 1)));
      tree saved_vuse = vr1.vuse;
      hashval_t saved_hashcode = vr1.hashcode;
      while (limit > 0 && !vnresult && !SSA_NAME_IS_DEFAULT_DEF (vr1.vuse))
	{
	  vr1.hashcode = vr1.hashcode - SSA_NAME_VERSION (vr1.vuse);
	  gimple *def = SSA_NAME_DEF_STMT (vr1.vuse);
	  /* ??? We could use fancy stuff like in walk_non_aliased_vuses, but
	     do not bother for now.  */
	  if (is_a <gphi *> (def))
	    break;
	  vr1.vuse = vuse_ssa_val (gimple_vuse (def));
	  vr1.hashcode = vr1.hashcode + SSA_NAME_VERSION (vr1.vuse);
	  vn_reference_lookup_1 (&vr1, &vnresult);
	  limit--;
	}

      /* If we found a candidate to CSE to, verify it is valid.  */
      if (vnresult && !accesses.is_empty ())
	{
	  tree vuse = vuse_ssa_val (gimple_vuse (stmt));
	  while (vnresult && vuse != vr1.vuse)
	    {
	      gimple *def = SSA_NAME_DEF_STMT (vuse);
	      for (auto &ref : accesses)
		{
		  /* ??? stmt_may_clobber_ref_p_1 does per stmt constant
		     analysis overhead that we might be able to cache.  */
		  if (stmt_may_clobber_ref_p_1 (def, &ref, true))
		    {
		      vnresult = NULL;
		      break;
		    }
		}
	      vuse = vuse_ssa_val (gimple_vuse (def));
	    }
	}
      vr1.vuse = saved_vuse;
      vr1.hashcode = saved_hashcode;
    }

  if (vnresult)
    {
      if (vdef)
	{
	  if (vnresult->result_vdef)
	    changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
	  else if (!lhs && gimple_call_lhs (stmt))
	    /* If stmt has non-SSA_NAME lhs, value number the vdef to itself,
	       as the call still acts as a lhs store.  */
	    changed |= set_ssa_val_to (vdef, vdef);
	  else
	    /* If the call was discovered to be pure or const reflect
	       that as far as possible.  */
	    changed |= set_ssa_val_to (vdef,
				       vuse_ssa_val (gimple_vuse (stmt)));
	}

      if (!vnresult->result && lhs)
	vnresult->result = lhs;

      if (vnresult->result && lhs)
	changed |= set_ssa_val_to (lhs, vnresult->result);
    }
  else
    {
      vn_reference_t vr2;
      vn_reference_s **slot;
      tree vdef_val = vdef;
      if (vdef)
	{
	  /* If we value numbered an indirect function to
	     one not clobbering memory value number its VDEF to its
	     VUSE.  */
	  tree fn = gimple_call_fn (stmt);
	  if (fn && TREE_CODE (fn) == SSA_NAME)
	    {
	      fn = SSA_VAL (fn);
	      if (TREE_CODE (fn) == ADDR_EXPR
		  && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
		  && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
		      & (ECF_CONST | ECF_PURE))
		  /* If stmt has non-SSA_NAME lhs, value number the
		     vdef to itself, as the call still acts as a lhs
		     store.  */
		  && (lhs || gimple_call_lhs (stmt) == NULL_TREE))
		vdef_val = vuse_ssa_val (gimple_vuse (stmt));
	    }
	  changed |= set_ssa_val_to (vdef, vdef_val);
	}
      if (lhs)
	changed |= set_ssa_val_to (lhs, lhs);
      vr2 = XOBNEW (&vn_tables_obstack, vn_reference_s);
      vr2->vuse = vr1.vuse;
      /* As we are not walking the virtual operand chain we know the
	 shared_lookup_references are still original so we can re-use
	 them here.  */
      vr2->operands = vr1.operands.copy ();
      vr2->type = vr1.type;
      vr2->punned = vr1.punned;
      vr2->set = vr1.set;
      vr2->base_set = vr1.base_set;
      vr2->hashcode = vr1.hashcode;
      vr2->result = lhs;
      vr2->result_vdef = vdef_val;
      vr2->value_id = 0;
      slot = valid_info->references->find_slot_with_hash (vr2, vr2->hashcode,
							  INSERT);
      gcc_assert (!*slot);
      *slot = vr2;
      vr2->next = last_inserted_ref;
      last_inserted_ref = vr2;
    }

  return changed;
}
/* Visit a load from a reference operator RHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_load (tree lhs, tree op, gimple *stmt)
{
  bool changed = false;
  tree result;
  vn_reference_t res;

  tree vuse = gimple_vuse (stmt);
  tree last_vuse = vuse;
  result = vn_reference_lookup (op, vuse, default_vn_walk_kind, &res, true,
				&last_vuse);

  /* We handle type-punning through unions by value-numbering based
     on offset and size of the access.  Be prepared to handle a
     type-mismatch here via creating a VIEW_CONVERT_EXPR.  */
  if (result
      && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
    {
      /* Avoid the type punning in case the result mode has padding where
	 the op we lookup has not.  */
      if (TYPE_MODE (TREE_TYPE (result)) != BLKmode
	  && maybe_lt (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (result))),
		       GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (op)))))
	result = NULL_TREE;
      else if (CONSTANT_CLASS_P (result))
	result = const_unop (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
      else
	{
	  /* We will be setting the value number of lhs to the value number
	     of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
	     So first simplify and lookup this expression to see if it
	     is already available.  */
	  gimple_match_op res_op (gimple_match_cond::UNCOND,
				  VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
	  result = vn_nary_build_or_lookup (&res_op);
	  if (result
	      && TREE_CODE (result) == SSA_NAME
	      && VN_INFO (result)->needs_insertion)
	    /* Track whether this is the canonical expression for different
	       typed loads.  We use that as a stopgap measure for code
	       hoisting when dealing with floating point loads.  */
	    res->punned = true;
	}

      /* When building the conversion fails avoid inserting the reference
	 again.  */
      if (!result)
	return set_ssa_val_to (lhs, lhs);
    }

  if (result)
    changed = set_ssa_val_to (lhs, result);
  else
    {
      changed = set_ssa_val_to (lhs, lhs);
      vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
      if (vuse && SSA_VAL (last_vuse) != SSA_VAL (vuse))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Using extra use virtual operand ");
	      print_generic_expr (dump_file, last_vuse);
	      fprintf (dump_file, "\n");
	    }
	  vn_reference_insert (op, lhs, vuse, NULL_TREE);
	}
    }

  return changed;
}
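/* Illustrative note: for union type-punning, a float load from memory
   last stored as a same-sized int looks up to the int value and is
   wrapped above as  VIEW_CONVERT_EXPR<float>(i_1);  the SSA name i_1
   is hypothetical.  */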
/* Visit a store to a reference operator LHS, part of STMT, value number it,
   and return true if the value number of the LHS has changed as a result.  */

static bool
visit_reference_op_store (tree lhs, tree op, gimple *stmt)
{
  bool changed = false;
  vn_reference_t vnresult = NULL;
  tree assign;
  bool resultsame = false;
  tree vuse = gimple_vuse (stmt);
  tree vdef = gimple_vdef (stmt);

  if (TREE_CODE (op) == SSA_NAME)
    op = SSA_VAL (op);

  /* First we want to lookup using the *vuses* from the store and see
     if there the last store to this location with the same address
     had the same value.

     The vuses represent the memory state before the store.  If the
     memory state, address, and value of the store is the same as the
     last store to this location, then this store will produce the
     same memory state as that store.

     In this case the vdef versions for this store are value numbered to those
     vuse versions, since they represent the same memory state after
     this store.

     Otherwise, the vdefs for the store are used when inserting into
     the table, since the store generates a new memory state.  */

  vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
  if (vnresult
      && vnresult->result)
    {
      tree result = vnresult->result;
      gcc_checking_assert (TREE_CODE (result) != SSA_NAME
			   || result == SSA_VAL (result));
      resultsame = expressions_equal_p (result, op);
      if (resultsame)
	{
	  /* If the TBAA state isn't compatible for downstream reads
	     we cannot value-number the VDEFs the same.  */
	  ao_ref lhs_ref;
	  ao_ref_init (&lhs_ref, lhs);
	  alias_set_type set = ao_ref_alias_set (&lhs_ref);
	  alias_set_type base_set = ao_ref_base_alias_set (&lhs_ref);
	  if ((vnresult->set != set
	       && ! alias_set_subset_of (set, vnresult->set))
	      || (vnresult->base_set != base_set
		  && ! alias_set_subset_of (base_set, vnresult->base_set)))
	    resultsame = false;
	}
    }

  if (!resultsame)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "No store match\n");
	  fprintf (dump_file, "Value numbering store ");
	  print_generic_expr (dump_file, lhs);
	  fprintf (dump_file, " to ");
	  print_generic_expr (dump_file, op);
	  fprintf (dump_file, "\n");
	}
      /* Have to set value numbers before insert, since insert is
	 going to valueize the references in-place.  */
      if (vdef)
	changed |= set_ssa_val_to (vdef, vdef);

      /* Do not insert structure copies into the tables.  */
      if (is_gimple_min_invariant (op)
	  || is_gimple_reg (op))
	vn_reference_insert (lhs, op, vdef, NULL);

      /* Only perform the following when being called from PRE
	 which embeds tail merging.  */
      if (default_vn_walk_kind == VN_WALK)
	{
	  assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
	  vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
	  if (!vnresult)
	    vn_reference_insert (assign, lhs, vuse, vdef);
	}
    }
  else
    {
      /* We had a match, so value number the vdef to have the value
	 number of the vuse it came from.  */

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "Store matched earlier value, "
		 "value numbering store vdefs to matching vuses.\n");

      changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
    }

  return changed;
}
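/* Illustrative note: for a hypothetical sequence

     *p_1 = x_2;
     ...			(no intervening clobber of *p_1)
     *p_1 = x_2;

   the second store finds the first via the VN_NOWALK lookup above and
   its VDEF is value-numbered to its VUSE, exposing the store as
   redundant to the elimination phase.  */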
/* Visit and value number PHI, return true if the value number
   changed.  When BACKEDGES_VARYING_P is true then assume all
   backedge values are varying.  When INSERTED is not NULL then
   this is just an ahead query for a possible iteration, set INSERTED
   to true if we'd insert into the hashtable.  */

static bool
visit_phi (gimple *phi, bool *inserted, bool backedges_varying_p)
{
  tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
  bool seen_undef_visited = false;
  tree backedge_val = NULL_TREE;
  bool seen_non_backedge = false;
  tree sameval_base = NULL_TREE;
  poly_int64 soff, doff;
  unsigned n_executable = 0;
  edge_iterator ei;
  edge e, sameval_e = NULL;

  /* TODO: We could check for this in initialization, and replace this
     with a gcc_assert.  */
  if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
    return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));

  /* We track whether a PHI was CSEd to avoid excessive iterations
     that would be necessary only because the PHI changed arguments
     but not value.  */
  if (!inserted)
    gimple_set_plf (phi, GF_PLF_1, false);

  /* See if all non-TOP arguments have the same value.  TOP is
     equivalent to everything, so we can ignore it.  */
  basic_block bb = gimple_bb (phi);
  FOR_EACH_EDGE (e, ei, bb->preds)
    if (e->flags & EDGE_EXECUTABLE)
      {
	tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);

	if (def == PHI_RESULT (phi))
	  continue;
	++n_executable;
	bool visited = true;
	if (TREE_CODE (def) == SSA_NAME)
	  {
	    tree val = SSA_VAL (def, &visited);
	    if (SSA_NAME_IS_DEFAULT_DEF (def))
	      visited = true;
	    if (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK))
	      def = val;
	    if (e->flags & EDGE_DFS_BACK)
	      backedge_val = def;
	  }
	if (!(e->flags & EDGE_DFS_BACK))
	  seen_non_backedge = true;
	if (def == VN_TOP)
	  ;
	/* Ignore undefined defs for sameval but record one.  */
	else if (TREE_CODE (def) == SSA_NAME
		 && ! virtual_operand_p (def)
		 && ssa_undefined_value_p (def, false))
	  {
	    if (!seen_undef
		/* Avoid having not visited undefined defs if we also have
		   a visited one.  */
		|| (!seen_undef_visited && visited))
	      {
		seen_undef = def;
		seen_undef_visited = visited;
	      }
	  }
	else if (sameval == VN_TOP)
	  {
	    sameval = def;
	    sameval_e = e;
	  }
	else if (expressions_equal_p (def, sameval))
	  sameval_e = NULL;
	else if (virtual_operand_p (def))
	  {
	    sameval = NULL_TREE;
	    break;
	  }
	else
	  {
	    /* We know we're arriving only with invariant addresses here,
	       try harder comparing them.  We can do some caching here
	       which we cannot do in expressions_equal_p.  */
	    if (TREE_CODE (def) == ADDR_EXPR
		&& TREE_CODE (sameval) == ADDR_EXPR
		&& sameval_base != (void *)-1)
	      {
		if (!sameval_base)
		  sameval_base = get_addr_base_and_unit_offset
				   (TREE_OPERAND (sameval, 0), &soff);
		if (!sameval_base)
		  sameval_base = (tree)(void *)-1;
		else if ((get_addr_base_and_unit_offset
			    (TREE_OPERAND (def, 0), &doff) == sameval_base)
			 && known_eq (soff, doff))
		  continue;
	      }
	    /* There's also the possibility to use equivalences.  */
	    if (!FLOAT_TYPE_P (TREE_TYPE (def))
		/* But only do this if we didn't force any of sameval or
		   val to VARYING because of backedge processing rules.  */
		&& (TREE_CODE (sameval) != SSA_NAME
		    || SSA_VAL (sameval) == sameval)
		&& (TREE_CODE (def) != SSA_NAME || SSA_VAL (def) == def))
	      {
		vn_nary_op_t vnresult;
		tree ops[2];
		ops[0] = def;
		ops[1] = sameval;
		tree val = vn_nary_op_lookup_pieces (2, EQ_EXPR,
						     boolean_type_node,
						     ops, &vnresult);
		if (! val && vnresult && vnresult->predicated_values)
		  {
		    val = vn_nary_op_get_predicated_value (vnresult, e);
		    if (val && integer_truep (val)
			&& !(sameval_e && (sameval_e->flags & EDGE_DFS_BACK)))
		      {
			if (dump_file && (dump_flags & TDF_DETAILS))
			  {
			    fprintf (dump_file, "Predication says ");
			    print_generic_expr (dump_file, def, TDF_NONE);
			    fprintf (dump_file, " and ");
			    print_generic_expr (dump_file, sameval, TDF_NONE);
			    fprintf (dump_file, " are equal on edge %d -> %d\n",
				     e->src->index, e->dest->index);
			  }
			continue;
		      }
		    /* If on all previous edges the value was equal to def
		       we can change sameval to def.  */
		    if (EDGE_COUNT (bb->preds) == 2
			&& (val = vn_nary_op_get_predicated_value
				    (vnresult, EDGE_PRED (bb, 0)))
			&& integer_truep (val)
			&& !(e->flags & EDGE_DFS_BACK))
		      {
			if (dump_file && (dump_flags & TDF_DETAILS))
			  {
			    fprintf (dump_file, "Predication says ");
			    print_generic_expr (dump_file, def, TDF_NONE);
			    fprintf (dump_file, " and ");
			    print_generic_expr (dump_file, sameval, TDF_NONE);
			    fprintf (dump_file, " are equal on edge %d -> %d\n",
				     EDGE_PRED (bb, 0)->src->index,
				     EDGE_PRED (bb, 0)->dest->index);
			  }
			sameval = def;
			continue;
		      }
		  }
	      }
	    sameval = NULL_TREE;
	    break;
	  }
      }

  /* If the value we want to use is flowing over the backedge and we
     should take it as VARYING but it has a non-VARYING value drop to
     VARYING.
     If we value-number a virtual operand never value-number to the
     value from the backedge as that confuses the alias-walking code.
     See gcc.dg/torture/pr87176.c.  If the value is the same on a
     non-backedge everything is OK though.  */
  bool visited_p;
  if ((backedge_val
       && !seen_non_backedge
       && TREE_CODE (backedge_val) == SSA_NAME
       && sameval == backedge_val
       && (SSA_NAME_IS_VIRTUAL_OPERAND (backedge_val)
	   || SSA_VAL (backedge_val) != backedge_val))
      /* Do not value-number a virtual operand to sth not visited though
	 given that allows us to escape a region in alias walking.  */
      || (sameval
	  && TREE_CODE (sameval) == SSA_NAME
	  && !SSA_NAME_IS_DEFAULT_DEF (sameval)
	  && SSA_NAME_IS_VIRTUAL_OPERAND (sameval)
	  && (SSA_VAL (sameval, &visited_p), !visited_p)))
    /* Note this just drops to VARYING without inserting the PHI into
       the hashes.  */
    result = PHI_RESULT (phi);
  /* If none of the edges was executable keep the value-number at VN_TOP,
     if only a single edge is executable use its value.  */
  else if (n_executable <= 1)
    result = seen_undef ? seen_undef : sameval;
  /* If we saw only undefined values and VN_TOP use one of the
     undefined values.  */
  else if (sameval == VN_TOP)
    result = (seen_undef && seen_undef_visited) ? seen_undef : sameval;
  /* First see if it is equivalent to a phi node in this block.  We prefer
     this as it allows IV elimination - see PRs 66502 and 67167.  */
  else if ((result = vn_phi_lookup (phi, backedges_varying_p)))
    {
      if (!inserted
	  && TREE_CODE (result) == SSA_NAME
	  && gimple_code (SSA_NAME_DEF_STMT (result)) == GIMPLE_PHI)
	{
	  gimple_set_plf (SSA_NAME_DEF_STMT (result), GF_PLF_1, true);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Marking CSEd to PHI node ");
	      print_gimple_expr (dump_file, SSA_NAME_DEF_STMT (result),
				 0, TDF_SLIM);
	      fprintf (dump_file, "\n");
	    }
	}
    }
  /* If all values are the same use that, unless we've seen undefined
     values as well and the value isn't constant.
     CCP/copyprop have the same restriction to not remove uninit warnings.  */
  else if (sameval
	   && (! seen_undef || is_gimple_min_invariant (sameval)))
    result = sameval;
  else
    {
      result = PHI_RESULT (phi);
      /* Only insert PHIs that are varying, for constant value numbers
	 we mess up equivalences otherwise as we are only comparing
	 the immediate controlling predicates.  */
      vn_phi_insert (phi, result, backedges_varying_p);
      if (inserted)
	*inserted = true;
    }

  return set_ssa_val_to (PHI_RESULT (phi), result);
}
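/* Illustrative note: a hypothetical  x_3 = PHI <a_1(bb2), a_1(bb3)>
   simply receives the value number of a_1 (all executable args equal),
   while two structurally different PHIs in different blocks can still
   CSE via vn_phi_lookup when their controlling GIMPLE_CONDs match per
   cond_stmts_equal_p.  */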
/* Try to simplify RHS using equivalences and constant folding.  */

static tree
try_to_simplify (gassign *stmt)
{
  enum tree_code code = gimple_assign_rhs_code (stmt);
  tree tem;

  /* For stores we can end up simplifying a SSA_NAME rhs.  Just return
     in this case, there is no point in doing extra work.  */
  if (code == SSA_NAME)
    return NULL_TREE;

  /* First try constant folding based on our current lattice.  */
  mprts_hook = vn_lookup_simplify_result;
  tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
  mprts_hook = NULL;
  if (tem
      && (TREE_CODE (tem) == SSA_NAME
	  || is_gimple_min_invariant (tem)))
    return tem;

  return NULL_TREE;
}
/* Visit and value number STMT, return true if the value number
   of the LHS has changed as a result.  */

static bool
visit_stmt (gimple *stmt, bool backedges_varying_p = false)
{
  bool changed = false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbering stmt = ");
      print_gimple_stmt (dump_file, stmt, 0);
    }

  if (gimple_code (stmt) == GIMPLE_PHI)
    changed = visit_phi (stmt, NULL, backedges_varying_p);
  else if (gimple_has_volatile_ops (stmt))
    changed = defs_to_varying (stmt);
  else if (gassign *ass = dyn_cast <gassign *> (stmt))
    {
      enum tree_code code = gimple_assign_rhs_code (ass);
      tree lhs = gimple_assign_lhs (ass);
      tree rhs1 = gimple_assign_rhs1 (ass);
      tree simplified;

      /* Shortcut for copies.  Simplifying copies is pointless,
	 since we copy the expression and value they represent.  */
      if (code == SSA_NAME
	  && TREE_CODE (lhs) == SSA_NAME)
	{
	  changed = visit_copy (lhs, rhs1);
	  goto done;
	}
      simplified = try_to_simplify (ass);
      if (simplified)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "RHS ");
	      print_gimple_expr (dump_file, ass, 0);
	      fprintf (dump_file, " simplified to ");
	      print_generic_expr (dump_file, simplified);
	      fprintf (dump_file, "\n");
	    }
	}
      /* Setting value numbers to constants will occasionally
	 screw up phi congruence because constants are not
	 uniquely associated with a single ssa name that can be
	 looked up.  */
      if (simplified
	  && is_gimple_min_invariant (simplified)
	  && TREE_CODE (lhs) == SSA_NAME)
	{
	  changed = set_ssa_val_to (lhs, simplified);
	  goto done;
	}
      else if (simplified
	       && TREE_CODE (simplified) == SSA_NAME
	       && TREE_CODE (lhs) == SSA_NAME)
	{
	  changed = visit_copy (lhs, simplified);
	  goto done;
	}

      if ((TREE_CODE (lhs) == SSA_NAME
	   /* We can substitute SSA_NAMEs that are live over
	      abnormal edges with their constant value.  */
	   && !(gimple_assign_copy_p (ass)
		&& is_gimple_min_invariant (rhs1))
	   && !(simplified
		&& is_gimple_min_invariant (simplified))
	   && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	  /* Stores or copies from SSA_NAMEs that are live over
	     abnormal edges are a problem.  */
	  || (code == SSA_NAME
	      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
	changed = defs_to_varying (ass);
      else if (REFERENCE_CLASS_P (lhs)
	       || DECL_P (lhs))
	changed = visit_reference_op_store (lhs, rhs1, ass);
      else if (TREE_CODE (lhs) == SSA_NAME)
	{
	  if ((gimple_assign_copy_p (ass)
	       && is_gimple_min_invariant (rhs1))
	      || (simplified
		  && is_gimple_min_invariant (simplified)))
	    {
	      if (simplified)
		changed = set_ssa_val_to (lhs, simplified);
	      else
		changed = set_ssa_val_to (lhs, rhs1);
	    }
	  else
	    {
	      /* Visit the original statement.  */
	      switch (vn_get_stmt_kind (ass))
		{
		case VN_NARY:
		  changed = visit_nary_op (lhs, ass);
		  break;
		case VN_REFERENCE:
		  changed = visit_reference_op_load (lhs, rhs1, ass);
		  break;
		default:
		  changed = defs_to_varying (ass);
		  break;
		}
	    }
	}
      else
	changed = defs_to_varying (ass);
    }
  else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      tree lhs = gimple_call_lhs (call_stmt);
      if (lhs && TREE_CODE (lhs) == SSA_NAME)
	{
	  /* Try constant folding based on our current lattice.  */
	  tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
							    vn_valueize);
	  if (simplified)
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "call ");
		  print_gimple_expr (dump_file, call_stmt, 0);
		  fprintf (dump_file, " simplified to ");
		  print_generic_expr (dump_file, simplified);
		  fprintf (dump_file, "\n");
		}
	    }
	  /* Setting value numbers to constants will occasionally
	     screw up phi congruence because constants are not
	     uniquely associated with a single ssa name that can be
	     looked up.  */
	  if (simplified
	      && is_gimple_min_invariant (simplified))
	    {
	      changed = set_ssa_val_to (lhs, simplified);
	      if (gimple_vdef (call_stmt))
		changed |= set_ssa_val_to (gimple_vdef (call_stmt),
					   SSA_VAL (gimple_vuse (call_stmt)));
	      goto done;
	    }
	  else if (simplified
		   && TREE_CODE (simplified) == SSA_NAME)
	    {
	      changed = visit_copy (lhs, simplified);
	      if (gimple_vdef (call_stmt))
		changed |= set_ssa_val_to (gimple_vdef (call_stmt),
					   SSA_VAL (gimple_vuse (call_stmt)));
	      goto done;
	    }
	  else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
	    {
	      changed = defs_to_varying (call_stmt);
	      goto done;
	    }
	}

      /* Pick up flags from a devirtualization target.  */
      tree fn = gimple_call_fn (stmt);
      int extra_fnflags = 0;
      if (fn && TREE_CODE (fn) == SSA_NAME)
	{
	  fn = SSA_VAL (fn);
	  if (TREE_CODE (fn) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
	    extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
	}
      if ((/* Calls to the same function with the same vuse
	      and the same operands do not necessarily return the same
	      value, unless they're pure or const.  */
	   ((gimple_call_flags (call_stmt) | extra_fnflags)
	    & (ECF_PURE | ECF_CONST))
	   /* If calls have a vdef, subsequent calls won't have
	      the same incoming vuse.  So, if 2 calls with vdef have the
	      same vuse, we know they're not subsequent.
	      We can value number 2 calls to the same function with the
	      same vuse and the same operands which are not subsequent
	      the same, because there is no code in the program that can
	      compare the 2 values...  */
	   || (gimple_vdef (call_stmt)
	       /* ... unless the call returns a pointer which does
		  not alias with anything else.  In which case the
		  information that the values are distinct is encoded
		  in the IL.  */
	       && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
	       /* Only perform the following when being called from PRE
		  which embeds tail merging.  */
	       && default_vn_walk_kind == VN_WALK))
	  /* Do not process .DEFERRED_INIT since that confuses uninit
	     analysis.  */
	  && !gimple_call_internal_p (call_stmt, IFN_DEFERRED_INIT))
	changed = visit_reference_op_call (lhs, call_stmt);
      else
	changed = defs_to_varying (call_stmt);
    }
  else
    changed = defs_to_varying (stmt);
done:
  return changed;
}
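
/* An illustrative case for the call handling above (a sketch):  two
   calls  x_1 = f (a_2);  ...  x_3 = f (a_2);  with f pure and no
   intervening store changing the vuse receive the same value number,
   so the second call becomes redundant.  A call to a non-pure,
   non-const f is instead sent to defs_to_varying.  */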
/* Allocate a value number table.  */

static void
allocate_vn_table (vn_tables_t table, unsigned size)
{
  table->phis = new vn_phi_table_type (size);
  table->nary = new vn_nary_op_table_type (size);
  table->references = new vn_reference_table_type (size);
}
/* Free a value number table.  */

static void
free_vn_table (vn_tables_t table)
{
  /* Walk over elements and release vectors.  */
  vn_reference_iterator_type hir;
  vn_reference_t vr;
  FOR_EACH_HASH_TABLE_ELEMENT (*table->references, vr, vn_reference_t, hir)
    vr->operands.release ();
  delete table->phis;
  table->phis = NULL;
  delete table->nary;
  table->nary = NULL;
  delete table->references;
  table->references = NULL;
}
/* Set *ID according to RESULT.  */

static void
set_value_id_for_result (tree result, unsigned int *id)
{
  if (result && TREE_CODE (result) == SSA_NAME)
    *id = VN_INFO (result)->value_id;
  else if (result && is_gimple_min_invariant (result))
    *id = get_or_alloc_constant_value_id (result);
  else
    *id = get_next_value_id ();
}
/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  vn_nary_op_iterator_type hin;
  vn_phi_iterator_type hip;
  vn_reference_iterator_type hir;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     tables.  */

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
    if (! vno->predicated_values)
      set_value_id_for_result (vno->u.result, &vno->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
    set_value_id_for_result (vp->result, &vp->value_id);

  FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
			       hir)
    set_value_id_for_result (vr->result, &vr->value_id);
}
/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the maximum constant value id we have ever seen.  */

unsigned int
get_max_constant_value_id (void)
{
  return -next_constant_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  gcc_checking_assert ((int)next_value_id > 0);
  return next_value_id++;
}

/* Return the next unique value id for constants.  */

unsigned int
get_next_constant_value_id (void)
{
  gcc_checking_assert (next_constant_value_id < 0);
  return next_constant_value_id--;
}
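
/* A sketch of the invariant implied by the asserts above (assuming
   the counters start at 1 and -1 respectively):  SSA-based value ids
   count upwards through positive numbers while constant value ids
   count downwards through negative numbers, so the two id spaces can
   never collide and get_max_constant_value_id () can recover the
   count by negating the counter.  */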
/* Compare two expressions E1 and E2 and return true if they are equal.
   If match_vn_top_optimistically is true then VN_TOP is equal to anything,
   otherwise VN_TOP only matches VN_TOP.  */

bool
expressions_equal_p (tree e1, tree e2, bool match_vn_top_optimistically)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If either one is VN_TOP consider them equal.  */
  if (match_vn_top_optimistically
      && (e1 == VN_TOP || e2 == VN_TOP))
    return true;

  /* If only one of them is null, they cannot be equal.  While in general
     this should not happen for operations like TARGET_MEM_REF some
     operands are optional and an identity value we could substitute
     has differing semantics.  */
  if (!e1 || !e2)
    return false;

  /* SSA_NAMEs compare pointer equal.  */
  if (TREE_CODE (e1) == SSA_NAME || TREE_CODE (e2) == SSA_NAME)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}
/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
	{
	  honor_nans = flag_trapping_math && !flag_finite_math_only;
	  honor_snans = flag_signaling_nans != 0;
	}
      else if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_TRAPS (type))
	honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
				       honor_trapv, honor_nans, honor_snans,
				       rhs2, &handled);
  if (handled && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}
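
/* For example (illustrative):  an integer division  x / y  may trap
   when y is not a known non-zero constant, which the helper reports
   via the rhs2 operand passed above; a floating-point comparison is
   only considered trapping when signaling NaNs must be honored.  */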
/* Return true if the reference operation REF may trap.  */

bool
vn_reference_may_trap (vn_reference_t ref)
{
  switch (ref->operands[0].opcode)
    {
    case MODIFY_EXPR:
    case CALL_EXPR:
      /* We do not handle calls.  */
      return true;
    case ADDR_EXPR:
      /* And toplevel address computations never trap.  */
      return false;
    default:;
    }

  vn_reference_op_t op;
  unsigned i;
  FOR_EACH_VEC_ELT (ref->operands, i, op)
    {
      switch (op->opcode)
	{
	case WITH_SIZE_EXPR:
	case TARGET_MEM_REF:
	  /* Always variable.  */
	  return true;
	case COMPONENT_REF:
	  if (op->op1 && TREE_CODE (op->op1) == SSA_NAME)
	    return true;
	  break;
	case ARRAY_RANGE_REF:
	  if (TREE_CODE (op->op0) == SSA_NAME)
	    return true;
	  break;
	case ARRAY_REF:
	  {
	    if (TREE_CODE (op->op0) != INTEGER_CST)
	      return true;

	    /* !in_array_bounds   */
	    tree domain_type = TYPE_DOMAIN (ref->operands[i+1].type);
	    if (!domain_type)
	      return true;
	    tree min = TYPE_MIN_VALUE (domain_type);
	    tree max = TYPE_MAX_VALUE (domain_type);
	    if (!min
		|| !max
		|| TREE_CODE (min) != INTEGER_CST
		|| TREE_CODE (max) != INTEGER_CST)
	      return true;

	    if (tree_int_cst_lt (op->op0, min)
		|| tree_int_cst_lt (max, op->op0))
	      return true;

	    break;
	  }
	case MEM_REF:
	  /* Nothing interesting in itself, the base is separate.  */
	  break;
	/* The following are the address bases.  */
	case SSA_NAME:
	  return false;
	case ADDR_EXPR:
	  if (op->op0)
	    return tree_could_trap_p (TREE_OPERAND (op->op0, 0));
	  return false;
	default:;
	}
    }
  return false;
}
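
/* As an example (a sketch):  for  a[i_1]  the ARRAY_REF case above
   reports a possible trap because the index is not an INTEGER_CST,
   while  a[2]  with a domain of [0, 9] is proven in-bounds and does
   not trap.  */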
eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
					    bitmap inserted_exprs_)
  : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
    el_todo (0), eliminations (0), insertions (0),
    inserted_exprs (inserted_exprs_)
{
  need_eh_cleanup = BITMAP_ALLOC (NULL);
  need_ab_cleanup = BITMAP_ALLOC (NULL);
}

eliminate_dom_walker::~eliminate_dom_walker ()
{
  BITMAP_FREE (need_eh_cleanup);
  BITMAP_FREE (need_ab_cleanup);
}
/* Return a leader for OP that is available at the current point of the
   eliminate domwalk.  */

tree
eliminate_dom_walker::eliminate_avail (basic_block, tree op)
{
  tree valnum = VN_INFO (op)->valnum;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (valnum))
	return valnum;
      if (avail.length () > SSA_NAME_VERSION (valnum))
	{
	  tree av = avail[SSA_NAME_VERSION (valnum)];
	  /* When PRE discovers a new redundancy there's no way to unite
	     the value classes so it instead inserts a copy old-val = new-val.
	     Look through such copies here, providing one more level of
	     simplification at elimination time.  */
	  gassign *ass;
	  if (av && (ass = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (av))))
	    if (gimple_assign_rhs_class (ass) == GIMPLE_SINGLE_RHS)
	      {
		tree rhs1 = gimple_assign_rhs1 (ass);
		if (CONSTANT_CLASS_P (rhs1)
		    || (TREE_CODE (rhs1) == SSA_NAME
			&& !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
		  av = rhs1;
	      }
	  return av;
	}
    }
  else if (is_gimple_min_invariant (valnum))
    return valnum;
  return NULL_TREE;
}
/* At the current point of the eliminate domwalk make OP available.  */

void
eliminate_dom_walker::eliminate_push_avail (basic_block, tree op)
{
  tree valnum = VN_INFO (op)->valnum;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (avail.length () <= SSA_NAME_VERSION (valnum))
	avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1, true);
      tree pushop = op;
      if (avail[SSA_NAME_VERSION (valnum)])
	pushop = avail[SSA_NAME_VERSION (valnum)];
      avail_stack.safe_push (pushop);
      avail[SSA_NAME_VERSION (valnum)] = op;
    }
}
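
/* The avail/avail_stack pair implements scoped availability for the
   dominator walk (a sketch of the protocol):  pushing OP for an
   already-occupied value slot saves the previous leader on
   avail_stack, and after_dom_children later pops the saved entries
   back when leaving the dominated subtree, restoring the leaders
   visible in the parent scope.  */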
/* Insert the expression recorded by SCCVN for VAL at *GSI.  Returns
   the leader for the expression if insertion was successful.  */

tree
eliminate_dom_walker::eliminate_insert (basic_block bb,
					gimple_stmt_iterator *gsi, tree val)
{
  /* We can insert a sequence with a single assignment only.  */
  gimple_seq stmts = VN_INFO (val)->expr;
  if (!gimple_seq_singleton_p (stmts))
    return NULL_TREE;
  gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
  if (!stmt
      || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
	  && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
	  && gimple_assign_rhs_code (stmt) != NEGATE_EXPR
	  && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
	  && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
	      || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
    return NULL_TREE;

  tree op = gimple_assign_rhs1 (stmt);
  if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
      || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
    op = TREE_OPERAND (op, 0);
  tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (bb, op) : op;
  if (!leader)
    return NULL_TREE;

  tree res;
  stmts = NULL;
  if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
    res = gimple_build (&stmts, BIT_FIELD_REF,
			TREE_TYPE (val), leader,
			TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
			TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
  else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
    res = gimple_build (&stmts, BIT_AND_EXPR,
			TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
  else
    res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
			TREE_TYPE (val), leader);
  if (TREE_CODE (res) != SSA_NAME
      || SSA_NAME_IS_DEFAULT_DEF (res)
      || gimple_bb (SSA_NAME_DEF_STMT (res)))
    {
      gimple_seq_discard (stmts);

      /* During propagation we have to treat SSA info conservatively
	 and thus we can end up simplifying the inserted expression
	 at elimination time to sth not defined in stmts.  */
      /* But then this is a redundancy we failed to detect.  Which means
	 res now has two values.  That doesn't play well with how
	 we track availability here, so give up.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  if (TREE_CODE (res) == SSA_NAME)
	    res = eliminate_avail (bb, res);
	  if (res)
	    {
	      fprintf (dump_file, "Failed to insert expression for value ");
	      print_generic_expr (dump_file, val);
	      fprintf (dump_file, " which is really fully redundant to ");
	      print_generic_expr (dump_file, res);
	      fprintf (dump_file, "\n");
	    }
	}

      return NULL_TREE;
    }

  gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
  vn_ssa_aux_t vn_info = VN_INFO (res);
  vn_info->valnum = val;
  vn_info->visited = true;

  insertions++;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Inserted ");
      print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
    }

  return res;
}
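
/* An example of such an insertion (a sketch):  if SCCVN recorded that
   value val_5 can be computed as  (int) x_2  and x_2 has an available
   leader, a new statement  _6 = (int) x_2;  is built and inserted
   before *GSI, and _6 becomes the leader for val_5.  */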
void
eliminate_dom_walker::eliminate_stmt (basic_block b, gimple_stmt_iterator *gsi)
{
  tree sprime = NULL_TREE;
  gimple *stmt = gsi_stmt (*gsi);
  tree lhs = gimple_get_lhs (stmt);
  if (lhs && TREE_CODE (lhs) == SSA_NAME
      && !gimple_has_volatile_ops (stmt)
      /* See PR43491.  Do not replace a global register variable when
	 it is the RHS of an assignment.  Do replace local register
	 variables since gcc does not guarantee a local variable will
	 be allocated in register.
	 ???  The fix isn't effective here.  This should instead
	 be ensured by not value-numbering them the same but treating
	 them like volatiles?  */
      && !(gimple_assign_single_p (stmt)
	   && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
	       && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
	       && is_global_var (gimple_assign_rhs1 (stmt)))))
    {
      sprime = eliminate_avail (b, lhs);
      if (!sprime)
	{
	  /* If there is no existing usable leader but SCCVN thinks
	     it has an expression it wants to use as replacement,
	     insert that.  */
	  tree val = VN_INFO (lhs)->valnum;
	  vn_ssa_aux_t vn_info;
	  if (val != VN_TOP
	      && TREE_CODE (val) == SSA_NAME
	      && (vn_info = VN_INFO (val), true)
	      && vn_info->needs_insertion
	      && vn_info->expr != NULL
	      && (sprime = eliminate_insert (b, gsi, val)) != NULL_TREE)
	    eliminate_push_avail (b, sprime);
	}

      /* If this now constitutes a copy duplicate points-to
	 and range info appropriately.  This is especially
	 important for inserted code.  See tree-ssa-copy.cc
	 for similar code.  */
      if (sprime
	  && TREE_CODE (sprime) == SSA_NAME)
	{
	  basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
	  if (POINTER_TYPE_P (TREE_TYPE (lhs))
	      && SSA_NAME_PTR_INFO (lhs)
	      && ! SSA_NAME_PTR_INFO (sprime))
	    {
	      duplicate_ssa_name_ptr_info (sprime,
					   SSA_NAME_PTR_INFO (lhs));
	      if (b != sprime_b)
		reset_flow_sensitive_info (sprime);
	    }
	  else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
		   && SSA_NAME_RANGE_INFO (lhs)
		   && ! SSA_NAME_RANGE_INFO (sprime)
		   && b == sprime_b)
	    duplicate_ssa_name_range_info (sprime, lhs);
	}

      /* Inhibit the use of an inserted PHI on a loop header when
	 the address of the memory reference is a simple induction
	 variable.  In other cases the vectorizer won't do anything
	 anyway (either it's loop invariant or a complicated
	 expression).  */
      if (sprime
	  && TREE_CODE (sprime) == SSA_NAME
	  && do_pre
	  && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
	  && loop_outer (b->loop_father)
	  && has_zero_uses (sprime)
	  && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
	  && gimple_assign_load_p (stmt))
	{
	  gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
	  basic_block def_bb = gimple_bb (def_stmt);
	  if (gimple_code (def_stmt) == GIMPLE_PHI
	      && def_bb->loop_father->header == def_bb)
	    {
	      loop_p loop = def_bb->loop_father;
	      ssa_op_iter iter;
	      tree op;
	      bool found = false;
	      FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
		{
		  affine_iv iv;
		  def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
		  if (def_bb
		      && flow_bb_inside_loop_p (loop, def_bb)
		      && simple_iv (loop, loop, op, &iv, true))
		    {
		      found = true;
		      break;
		    }
		}
	      if (found)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "Not replacing ");
		      print_gimple_expr (dump_file, stmt, 0);
		      fprintf (dump_file, " with ");
		      print_generic_expr (dump_file, sprime);
		      fprintf (dump_file, " which would add a loop"
			       " carried dependence to loop %d\n",
			       loop->num);
		    }
		  /* Don't keep sprime available.  */
		  sprime = NULL_TREE;
		}
	    }
	}

      if (sprime)
	{
	  /* If we can propagate the value computed for LHS into
	     all uses don't bother doing anything with this stmt.  */
	  if (may_propagate_copy (lhs, sprime))
	    {
	      /* Mark it for removal.  */
	      to_remove.safe_push (stmt);

	      /* ???  Don't count copy/constant propagations.  */
	      if (gimple_assign_single_p (stmt)
		  && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
		      || gimple_assign_rhs1 (stmt) == sprime))
		return;

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "Replaced ");
		  print_gimple_expr (dump_file, stmt, 0);
		  fprintf (dump_file, " with ");
		  print_generic_expr (dump_file, sprime);
		  fprintf (dump_file, " in all uses of ");
		  print_gimple_stmt (dump_file, stmt, 0);
		}

	      eliminations++;
	      return;
	    }

	  /* If this is an assignment from our leader (which
	     happens in the case the value-number is a constant)
	     then there is nothing to do.  Likewise if we run into
	     inserted code that needed a conversion because of
	     our type-agnostic value-numbering of loads.  */
	  if ((gimple_assign_single_p (stmt)
	       || (is_gimple_assign (stmt)
		   && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
		       || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)))
	      && sprime == gimple_assign_rhs1 (stmt))
	    return;

	  /* Else replace its RHS.  */
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Replaced ");
	      print_gimple_expr (dump_file, stmt, 0);
	      fprintf (dump_file, " with ");
	      print_generic_expr (dump_file, sprime);
	      fprintf (dump_file, " in ");
	      print_gimple_stmt (dump_file, stmt, 0);
	    }
	  eliminations++;

	  bool can_make_abnormal_goto = (is_gimple_call (stmt)
					 && stmt_can_make_abnormal_goto (stmt));
	  gimple *orig_stmt = stmt;
	  if (!useless_type_conversion_p (TREE_TYPE (lhs),
					  TREE_TYPE (sprime)))
	    {
	      /* We preserve conversions to but not from function or method
		 types.  This asymmetry makes it necessary to re-instantiate
		 conversions here.  */
	      if (POINTER_TYPE_P (TREE_TYPE (lhs))
		  && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (lhs))))
		sprime = fold_convert (TREE_TYPE (lhs), sprime);
	      else
		gcc_unreachable ();
	    }
	  tree vdef = gimple_vdef (stmt);
	  tree vuse = gimple_vuse (stmt);
	  propagate_tree_value_into_stmt (gsi, sprime);
	  stmt = gsi_stmt (*gsi);
	  update_stmt (stmt);
	  /* In case the VDEF on the original stmt was released, value-number
	     it to the VUSE.  This is to make vuse_ssa_val able to skip
	     released virtual operands.  */
	  if (vdef != gimple_vdef (stmt))
	    {
	      gcc_assert (SSA_NAME_IN_FREE_LIST (vdef));
	      VN_INFO (vdef)->valnum = vuse;
	    }

	  /* If we removed EH side-effects from the statement, clean
	     its EH information.  */
	  if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
	    {
	      bitmap_set_bit (need_eh_cleanup,
			      gimple_bb (stmt)->index);
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "  Removed EH side-effects.\n");
	    }

	  /* Likewise for AB side-effects.  */
	  if (can_make_abnormal_goto
	      && !stmt_can_make_abnormal_goto (stmt))
	    {
	      bitmap_set_bit (need_ab_cleanup,
			      gimple_bb (stmt)->index);
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file, "  Removed AB side-effects.\n");
	    }

	  return;
	}
    }

  /* If the statement is a scalar store, see if the expression
     has the same value number as its rhs.  If so, the store is
     dead.  */
  if (gimple_assign_single_p (stmt)
      && !gimple_has_volatile_ops (stmt)
      && !is_gimple_reg (gimple_assign_lhs (stmt))
      && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
	  || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
    {
      tree rhs = gimple_assign_rhs1 (stmt);
      vn_reference_t vnresult;
      /* ???  gcc.dg/torture/pr91445.c shows that we lookup a boolean
	 typed load of a byte known to be 0x11 as 1 so a store of
	 a boolean 1 is detected as redundant.  Because of this we
	 have to make sure to lookup with a ref where its size
	 matches the precision.  */
      tree lookup_lhs = lhs;
      if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
	  && (TREE_CODE (lhs) != COMPONENT_REF
	      || !DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
	  && !type_has_mode_precision_p (TREE_TYPE (lhs)))
	{
	  if (TREE_CODE (TREE_TYPE (lhs)) == BITINT_TYPE
	      && TYPE_PRECISION (TREE_TYPE (lhs)) > MAX_FIXED_MODE_SIZE)
	    lookup_lhs = NULL_TREE;
	  else if (TREE_CODE (lhs) == COMPONENT_REF
		   || TREE_CODE (lhs) == MEM_REF)
	    {
	      tree ltype = build_nonstandard_integer_type
			     (TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (lhs))),
			      TYPE_UNSIGNED (TREE_TYPE (lhs)));
	      if (TREE_CODE (lhs) == COMPONENT_REF)
		{
		  tree foff = component_ref_field_offset (lhs);
		  tree f = TREE_OPERAND (lhs, 1);
		  if (!poly_int_tree_p (foff))
		    lookup_lhs = NULL_TREE;
		  else
		    lookup_lhs = build3 (BIT_FIELD_REF, ltype,
					 TREE_OPERAND (lhs, 0),
					 TYPE_SIZE (TREE_TYPE (lhs)),
					 bit_from_pos
					   (foff, DECL_FIELD_BIT_OFFSET (f)));
		}
	      else
		lookup_lhs = build2 (MEM_REF, ltype,
				     TREE_OPERAND (lhs, 0),
				     TREE_OPERAND (lhs, 1));
	    }
	  else
	    lookup_lhs = NULL_TREE;
	}
      tree val = NULL_TREE;
      if (lookup_lhs)
	val = vn_reference_lookup (lookup_lhs, gimple_vuse (stmt),
				   VN_WALKREWRITE, &vnresult, false,
				   NULL, NULL_TREE, true);
      if (TREE_CODE (rhs) == SSA_NAME)
	rhs = VN_INFO (rhs)->valnum;
      if (val
	  && (operand_equal_p (val, rhs, 0)
	      /* Due to the bitfield lookups above we can get bit
		 interpretations of the same RHS as values here.  Those
		 are redundant as well.  */
	      || (TREE_CODE (val) == SSA_NAME
		  && gimple_assign_single_p (SSA_NAME_DEF_STMT (val))
		  && (val = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (val)))
		  && TREE_CODE (val) == VIEW_CONVERT_EXPR
		  && TREE_OPERAND (val, 0) == rhs)))
	{
	  /* We can only remove the later store if the former aliases
	     at least all accesses the later one does or if the store
	     was to readonly memory storing the same value.  */
	  ao_ref lhs_ref;
	  ao_ref_init (&lhs_ref, lhs);
	  alias_set_type set = ao_ref_alias_set (&lhs_ref);
	  alias_set_type base_set = ao_ref_base_alias_set (&lhs_ref);
	  if (! vnresult
	      || ((vnresult->set == set
		   || alias_set_subset_of (set, vnresult->set))
		  && (vnresult->base_set == base_set
		      || alias_set_subset_of (base_set, vnresult->base_set))))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "Deleted redundant store ");
		  print_gimple_stmt (dump_file, stmt, 0);
		}

	      /* Queue stmt for removal.  */
	      to_remove.safe_push (stmt);
	      return;
	    }
	}
    }

  /* If this is a control statement value numbering left edges
     unexecuted on force the condition in a way consistent with
     that.  */
  if (gcond *cond = dyn_cast <gcond *> (stmt))
    {
      if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
	  ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Removing unexecutable edge from ");
	      print_gimple_stmt (dump_file, stmt, 0);
	    }
	  if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
	      == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
	    gimple_cond_make_true (cond);
	  else
	    gimple_cond_make_false (cond);
	  update_stmt (cond);
	  el_todo |= TODO_cleanup_cfg;
	  return;
	}
    }

  bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
  bool was_noreturn = (is_gimple_call (stmt)
		       && gimple_call_noreturn_p (stmt));
  tree vdef = gimple_vdef (stmt);
  tree vuse = gimple_vuse (stmt);

  /* If we didn't replace the whole stmt (or propagate the result
     into all uses), replace all uses on this stmt with their
     leaders.  */
  bool modified = false;
  use_operand_p use_p;
  ssa_op_iter iter;
  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    {
      tree use = USE_FROM_PTR (use_p);
      /* ???  The call code above leaves stmt operands un-updated.  */
      if (TREE_CODE (use) != SSA_NAME)
	continue;
      tree sprime;
      if (SSA_NAME_IS_DEFAULT_DEF (use))
	/* ???  For default defs BB shouldn't matter, but we have to
	   solve the inconsistency between rpo eliminate and
	   dom eliminate avail valueization first.  */
	sprime = eliminate_avail (b, use);
      else
	/* Look for sth available at the definition block of the argument.
	   This avoids inconsistencies between availability there which
	   decides if the stmt can be removed and availability at the
	   use site.  The SSA property ensures that things available
	   at the definition are also available at uses.  */
	sprime = eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (use)), use);
      if (sprime && sprime != use
	  && may_propagate_copy (use, sprime, true)
	  /* We substitute into debug stmts to avoid excessive
	     debug temporaries created by removed stmts, but we need
	     to avoid doing so for inserted sprimes as we never want
	     to create debug temporaries for them.  */
	  && (!inserted_exprs
	      || TREE_CODE (sprime) != SSA_NAME
	      || !is_gimple_debug (stmt)
	      || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
	{
	  propagate_value (use_p, sprime);
	  modified = true;
	}
    }

  /* Fold the stmt if modified, this canonicalizes MEM_REFs we propagated
     into which is a requirement for the IPA devirt machinery.  */
  gimple *old_stmt = stmt;
  if (modified)
    {
      /* If a formerly non-invariant ADDR_EXPR is turned into an
	 invariant one it was on a separate stmt.  */
      if (gimple_assign_single_p (stmt)
	  && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
	recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
      gimple_stmt_iterator prev = *gsi;
      gsi_prev (&prev);
      if (fold_stmt (gsi, follow_all_ssa_edges))
	{
	  /* fold_stmt may have created new stmts inbetween
	     the previous stmt and the folded stmt.  Mark
	     all defs created there as varying to not confuse
	     the SCCVN machinery as we're using that even during
	     elimination.  */
	  if (gsi_end_p (prev))
	    prev = gsi_start_bb (b);
	  else
	    gsi_next (&prev);
	  if (gsi_stmt (prev) != gsi_stmt (*gsi))
	    do
	      {
		tree def;
		ssa_op_iter dit;
		FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
					   dit, SSA_OP_ALL_DEFS)
		  /* As existing DEFs may move between stmts
		     only process new ones.  */
		  if (! has_VN_INFO (def))
		    {
		      vn_ssa_aux_t vn_info = VN_INFO (def);
		      vn_info->valnum = def;
		      vn_info->visited = true;
		    }
		if (gsi_stmt (prev) == gsi_stmt (*gsi))
		  break;
		gsi_next (&prev);
	      }
	    while (1);
	}
      stmt = gsi_stmt (*gsi);
      /* In case we folded the stmt away schedule the NOP for removal.  */
      if (gimple_nop_p (stmt))
	to_remove.safe_push (stmt);
    }

  /* Visit indirect calls and turn them into direct calls if
     possible using the devirtualization machinery.  Do this before
     checking for required EH/abnormal/noreturn cleanup as devirt
     may expose more of those.  */
  if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      tree fn = gimple_call_fn (call_stmt);
      if (fn
	  && flag_devirtualize
	  && virtual_method_call_p (fn))
	{
	  tree otr_type = obj_type_ref_class (fn);
	  unsigned HOST_WIDE_INT otr_tok
	    = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
	  tree instance;
	  ipa_polymorphic_call_context context (current_function_decl,
						fn, stmt, &instance);
	  context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
				    otr_type, stmt, NULL);
	  bool final;
	  vec <cgraph_node *> targets
	    = possible_polymorphic_call_targets (obj_type_ref_class (fn),
						 otr_tok, context, &final);
	  if (dump_file)
	    dump_possible_polymorphic_call_targets (dump_file,
						    obj_type_ref_class (fn),
						    otr_tok, context);
	  if (final && targets.length () <= 1 && dbg_cnt (devirt))
	    {
	      tree fn;
	      if (targets.length () == 1)
		fn = targets[0]->decl;
	      else
		fn = builtin_decl_unreachable ();
	      if (dump_enabled_p ())
		{
		  dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
				   "converting indirect call to "
				   "function %s\n",
				   lang_hooks.decl_printable_name (fn, 2));
		}
	      gimple_call_set_fndecl (call_stmt, fn);
	      /* If changing the call to __builtin_unreachable
		 or similar noreturn function, adjust gimple_call_fntype
		 too.  */
	      if (gimple_call_noreturn_p (call_stmt)
		  && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
		  && TYPE_ARG_TYPES (TREE_TYPE (fn))
		  && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
		      == void_type_node))
		gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
	      maybe_remove_unused_call_args (cfun, call_stmt);
	      modified = true;
	    }
	}
    }

  if (modified)
    {
      /* When changing a call into a noreturn call, cfg cleanup
	 is needed to fix up the noreturn call.  */
      if (!was_noreturn
	  && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
	to_fixup.safe_push (stmt);
      /* When changing a condition or switch into one we know what
	 edge will be executed, schedule a cfg cleanup.  */
      if ((gimple_code (stmt) == GIMPLE_COND
	   && (gimple_cond_true_p (as_a <gcond *> (stmt))
	       || gimple_cond_false_p (as_a <gcond *> (stmt))))
	  || (gimple_code (stmt) == GIMPLE_SWITCH
	      && TREE_CODE (gimple_switch_index
			      (as_a <gswitch *> (stmt))) == INTEGER_CST))
	el_todo |= TODO_cleanup_cfg;
      /* If we removed EH side-effects from the statement, clean
	 its EH information.  */
      if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
	{
	  bitmap_set_bit (need_eh_cleanup,
			  gimple_bb (stmt)->index);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "  Removed EH side-effects.\n");
	}
      /* Likewise for AB side-effects.  */
      if (can_make_abnormal_goto
	  && !stmt_can_make_abnormal_goto (stmt))
	{
	  bitmap_set_bit (need_ab_cleanup,
			  gimple_bb (stmt)->index);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "  Removed AB side-effects.\n");
	}
      update_stmt (stmt);
      /* In case the VDEF on the original stmt was released, value-number
	 it to the VUSE.  This is to make vuse_ssa_val able to skip
	 released virtual operands.  */
      if (vdef && SSA_NAME_IN_FREE_LIST (vdef))
	VN_INFO (vdef)->valnum = vuse;
    }

  /* Make new values available - for fully redundant LHS we
     continue with the next stmt above and skip this.
     But avoid picking up dead defs.  */
  tree def;
  FOR_EACH_SSA_TREE_OPERAND (def, stmt, iter, SSA_OP_DEF)
    if (! has_zero_uses (def)
	|| (inserted_exprs
	    && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (def))))
      eliminate_push_avail (b, def);
}
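
/* A small end-to-end example for eliminate_stmt (illustrative):

       x_1 = a_2 + b_3;
       ...
       y_4 = a_2 + b_3;

   y_4's value has available leader x_1, may_propagate_copy succeeds,
   so the second statement is queued in to_remove and all uses of y_4
   are rewritten to x_1.  A store  *p_5 = x_1;  whose looked-up value
   already equals x_1 is likewise deleted as a redundant store.  */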
/* Perform elimination for the basic-block B during the domwalk.  */

edge
eliminate_dom_walker::before_dom_children (basic_block b)
{
  /* Mark new bb.  */
  avail_stack.safe_push (NULL_TREE);

  /* Skip unreachable blocks marked unreachable during the SCCVN domwalk.  */
  if (!(b->flags & BB_EXECUTABLE))
    return NULL;

  vn_context_bb = b;

  for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
    {
      gphi *phi = gsi.phi ();
      tree res = PHI_RESULT (phi);

      if (virtual_operand_p (res))
	{
	  gsi_next (&gsi);
	  continue;
	}

      tree sprime = eliminate_avail (b, res);
      if (sprime
	  && sprime != res)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Replaced redundant PHI node defining ");
	      print_generic_expr (dump_file, res);
	      fprintf (dump_file, " with ");
	      print_generic_expr (dump_file, sprime);
	      fprintf (dump_file, "\n");
	    }

	  /* If we inserted this PHI node ourself, it's not an elimination.  */
	  if (! inserted_exprs
	      || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
	    eliminations++;

	  /* If we will propagate into all uses don't bother to do
	     anything.  */
	  if (may_propagate_copy (res, sprime))
	    {
	      /* Mark the PHI for removal.  */
	      to_remove.safe_push (phi);
	      gsi_next (&gsi);
	      continue;
	    }

	  remove_phi_node (&gsi, false);

	  if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
	    sprime = fold_convert (TREE_TYPE (res), sprime);
	  gimple *stmt = gimple_build_assign (res, sprime);
	  gimple_stmt_iterator gsi2 = gsi_after_labels (b);
	  gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
	  continue;
	}

      eliminate_push_avail (b, res);
      gsi_next (&gsi);
    }

  for (gimple_stmt_iterator gsi = gsi_start_bb (b);
       !gsi_end_p (gsi); gsi_next (&gsi))
    eliminate_stmt (b, &gsi);

  /* Replace destination PHI arguments.  */
  edge e;
  edge_iterator ei;
  FOR_EACH_EDGE (e, ei, b->succs)
    if (e->flags & EDGE_EXECUTABLE)
      for (gphi_iterator gsi = gsi_start_phis (e->dest);
	   !gsi_end_p (gsi);
	   gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
	  tree arg = USE_FROM_PTR (use_p);
	  if (TREE_CODE (arg) != SSA_NAME
	      || virtual_operand_p (arg))
	    continue;
	  tree sprime = eliminate_avail (b, arg);
	  if (sprime && may_propagate_copy (arg, sprime,
					    !(e->flags & EDGE_ABNORMAL)))
	    propagate_value (use_p, sprime);
	}

  vn_context_bb = NULL;

  return NULL;
}
/* Make no longer available leaders no longer available.  */

void
eliminate_dom_walker::after_dom_children (basic_block)
{
  tree entry;
  while ((entry = avail_stack.pop ()) != NULL_TREE)
    {
      tree valnum = VN_INFO (entry)->valnum;
      tree old = avail[SSA_NAME_VERSION (valnum)];
      if (old == entry)
	avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
      else
	avail[SSA_NAME_VERSION (valnum)] = entry;
    }
}
/* Remove queued stmts and perform delayed cleanups.  */

unsigned
eliminate_dom_walker::eliminate_cleanup (bool region_p)
{
  statistics_counter_event (cfun, "Eliminated", eliminations);
  statistics_counter_event (cfun, "Insertions", insertions);

  /* We cannot remove stmts during BB walk, especially not release SSA
     names there as this confuses the VN machinery.  The stmts ending
     up in to_remove are either stores or simple copies.
     Remove stmts in reverse order to make debug stmt creation possible.  */
  while (!to_remove.is_empty ())
    {
      bool do_release_defs = true;
      gimple *stmt = to_remove.pop ();

      /* When we are value-numbering a region we do not require exit PHIs to
	 be present so we have to make sure to deal with uses outside of the
	 region of stmts that we thought are eliminated.
	 ??? Note we may be confused by uses in dead regions we didn't run
	 elimination on.  Rather than checking individual uses we accept
	 dead copies to be generated here (gcc.c-torture/execute/20060905-1.c
	 contains such example).  */
      if (region_p)
	{
	  if (gphi *phi = dyn_cast <gphi *> (stmt))
	    {
	      tree lhs = gimple_phi_result (phi);
	      if (!has_zero_uses (lhs))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "Keeping eliminated stmt live "
			     "as copy because of out-of-region uses\n");
		  tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
		  gimple *copy = gimple_build_assign (lhs, sprime);
		  gimple_stmt_iterator gsi
		    = gsi_after_labels (gimple_bb (stmt));
		  gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
		  do_release_defs = false;
		}
	    }
	  else if (tree lhs = gimple_get_lhs (stmt))
	    if (TREE_CODE (lhs) == SSA_NAME
		&& !has_zero_uses (lhs))
	      {
		if (dump_file && (dump_flags & TDF_DETAILS))
		  fprintf (dump_file, "Keeping eliminated stmt live "
			   "as copy because of out-of-region uses\n");
		tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
		gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
		if (is_gimple_assign (stmt))
		  {
		    gimple_assign_set_rhs_from_tree (&gsi, sprime);
		    stmt = gsi_stmt (gsi);
		    update_stmt (stmt);
		    if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
		      bitmap_set_bit (need_eh_cleanup,
				      gimple_bb (stmt)->index);
		    continue;
		  }
		else
		  {
		    gimple *copy = gimple_build_assign (lhs, sprime);
		    gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
		    do_release_defs = false;
		  }
	      }
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Removing dead stmt ");
	  print_gimple_stmt (dump_file, stmt, 0, TDF_NONE);
	}

      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      if (gimple_code (stmt) == GIMPLE_PHI)
	remove_phi_node (&gsi, do_release_defs);
      else
	{
	  basic_block bb = gimple_bb (stmt);
	  unlink_stmt_vdef (stmt);
	  if (gsi_remove (&gsi, true))
	    bitmap_set_bit (need_eh_cleanup, bb->index);
	  if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
	    bitmap_set_bit (need_ab_cleanup, bb->index);
	  if (do_release_defs)
	    release_defs (stmt);
	}

      /* Removing a stmt may expose a forwarder block.  */
      el_todo |= TODO_cleanup_cfg;
    }

  /* Fixup stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the dominator walk.  Do this
     in reverse order so we don't inadvertently remove a stmt we want to
     fixup by visiting a dominating now noreturn call first.  */
  while (!to_fixup.is_empty ())
    {
      gimple *stmt = to_fixup.pop ();

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Fixing up noreturn call ");
	  print_gimple_stmt (dump_file, stmt, 0);
	}

      if (fixup_noreturn_call (stmt))
	el_todo |= TODO_cleanup_cfg;
    }

  bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
  bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);

  if (do_eh_cleanup)
    gimple_purge_all_dead_eh_edges (need_eh_cleanup);

  if (do_ab_cleanup)
    gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);

  if (do_eh_cleanup || do_ab_cleanup)
    el_todo |= TODO_cleanup_cfg;

  return el_todo;
}
/* Eliminate fully redundant computations.  */

unsigned
eliminate_with_rpo_vn (bitmap inserted_exprs)
{
  eliminate_dom_walker walker (CDI_DOMINATORS, inserted_exprs);

  eliminate_dom_walker *saved_rpo_avail = rpo_avail;
  rpo_avail = &walker;
  walker.walk (cfun->cfg->x_entry_block_ptr);
  rpo_avail = saved_rpo_avail;

  return walker.eliminate_cleanup ();
}
static unsigned
do_rpo_vn_1 (function *fn, edge entry, bitmap exit_bbs,
	     bool iterate, bool eliminate, bool skip_entry_phis,
	     vn_lookup_kind kind);

void
run_rpo_vn (vn_lookup_kind kind)
{
  do_rpo_vn_1 (cfun, NULL, NULL, true, false, false, kind);

  /* ???  Prune requirement of these.  */
  constant_to_value_id = new hash_table<vn_constant_hasher> (23);

  /* Initialize the value ids and prune out remaining VN_TOPs
     from dead code.  */
  tree name;
  unsigned i;
  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      vn_ssa_aux_t info = VN_INFO (name);
      if (!info->visited
	  || info->valnum == VN_TOP)
	info->valnum = name;
      if (info->valnum == name)
	info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
	info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }

  /* Propagate.  */
  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      vn_ssa_aux_t info = VN_INFO (name);
      if (TREE_CODE (info->valnum) == SSA_NAME
	  && info->valnum != name
	  && info->value_id != VN_INFO (info->valnum)->value_id)
	info->value_id = VN_INFO (info->valnum)->value_id;
    }

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      FOR_EACH_SSA_NAME (i, name, cfun)
	{
	  if (VN_INFO (name)->visited
	      && SSA_VAL (name) != name)
	    {
	      print_generic_expr (dump_file, name);
	      fprintf (dump_file, " = ");
	      print_generic_expr (dump_file, SSA_VAL (name));
	      fprintf (dump_file, " (%04d)\n", VN_INFO (name)->value_id);
	    }
	}
    }
}
/* Free VN associated data structures.  */

void
free_rpo_vn (void)
{
  free_vn_table (valid_info);
  XDELETE (valid_info);
  obstack_free (&vn_tables_obstack, NULL);
  obstack_free (&vn_tables_insert_obstack, NULL);

  vn_ssa_aux_iterator_type it;
  vn_ssa_aux_t info;
  FOR_EACH_HASH_TABLE_ELEMENT (*vn_ssa_aux_hash, info, vn_ssa_aux_t, it)
    if (info->needs_insertion)
      release_ssa_name (info->name);
  obstack_free (&vn_ssa_aux_obstack, NULL);
  delete vn_ssa_aux_hash;

  delete constant_to_value_id;
  constant_to_value_id = NULL;
}
/* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables.  */

static tree
vn_lookup_simplify_result (gimple_match_op *res_op)
{
  if (!res_op->code.is_tree_code ())
    return NULL_TREE;
  tree *ops = res_op->ops;
  unsigned int length = res_op->num_ops;
  if (res_op->code == CONSTRUCTOR
      /* ???  We're arriving here with SCCVNs view, decomposed CONSTRUCTOR
	 and GIMPLEs / match-and-simplifies, CONSTRUCTOR as GENERIC tree.  */
      && TREE_CODE (res_op->ops[0]) == CONSTRUCTOR)
    {
      length = CONSTRUCTOR_NELTS (res_op->ops[0]);
      ops = XALLOCAVEC (tree, length);
      for (unsigned i = 0; i < length; ++i)
	ops[i] = CONSTRUCTOR_ELT (res_op->ops[0], i)->value;
    }
  vn_nary_op_t vnresult = NULL;
  tree res = vn_nary_op_lookup_pieces (length, (tree_code) res_op->code,
				       res_op->type, ops, &vnresult);
  /* If this is used from expression simplification make sure to
     return an available expression.  */
  if (res && TREE_CODE (res) == SSA_NAME && mprts_hook && rpo_avail)
    res = rpo_avail->eliminate_avail (vn_context_bb, res);
  return res;
}
/* Return a leader for OPs value that is valid at BB.  */

tree
rpo_elim::eliminate_avail (basic_block bb, tree op)
{
  bool visited;
  tree valnum = SSA_VAL (op, &visited);
  /* If we didn't visit OP then it must be defined outside of the
     region we process and also dominate it.  So it is available.  */
  if (!visited)
    return op;
  if (TREE_CODE (valnum) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (valnum))
	return valnum;
      vn_ssa_aux_t valnum_info = VN_INFO (valnum);
      /* See above.  */
      if (!valnum_info->visited)
	return valnum;
      vn_avail *av = valnum_info->avail;
      if (!av)
	return NULL_TREE;
      if (av->location == bb->index)
	/* On tramp3d 90% of the cases are here.  */
	return ssa_name (av->leader);
      do
	{
	  basic_block abb = BASIC_BLOCK_FOR_FN (cfun, av->location);
	  /* ???  During elimination we have to use availability at the
	     definition site of a use we try to replace.  This
	     is required to not run into inconsistencies because
	     of dominated_by_p_w_unex behavior and removing a definition
	     while not replacing all uses.
	     ???  We could try to consistently walk dominators
	     ignoring non-executable regions.  The nearest common
	     dominator of bb and abb is where we can stop walking.  We
	     may also be able to "pre-compute" (bits of) the next immediate
	     (non-)dominator during the RPO walk when marking edges as
	     executable.  */
	  if (dominated_by_p_w_unex (bb, abb, true))
	    {
	      tree leader = ssa_name (av->leader);
	      /* Prevent eliminations that break loop-closed SSA.  */
	      if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
		  && ! SSA_NAME_IS_DEFAULT_DEF (leader)
		  && ! flow_bb_inside_loop_p (gimple_bb (SSA_NAME_DEF_STMT
							   (leader))->loop_father,
					      bb))
		return NULL_TREE;
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  print_generic_expr (dump_file, leader);
		  fprintf (dump_file, " is available for ");
		  print_generic_expr (dump_file, valnum);
		  fprintf (dump_file, "\n");
		}
	      /* On tramp3d 99% of the _remaining_ cases succeed at
		 the first entry.  */
	      return leader;
	    }
	  /* ???  Can we somehow skip to the immediate dominator
	     RPO index (bb_to_rpo)?  Again, maybe not worth, on
	     tramp3d the worst number of elements in the vector is 9.  */
	  av = av->next;
	}
      while (av);
    }
  else if (valnum != VN_TOP)
    /* valnum is is_gimple_min_invariant.  */
    return valnum;
  return NULL_TREE;
}
/* Make LEADER a leader for its value at BB.  */

void
rpo_elim::eliminate_push_avail (basic_block bb, tree leader)
{
  tree valnum = VN_INFO (leader)->valnum;
  if (valnum == VN_TOP
      || is_gimple_min_invariant (valnum))
    return;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Making available beyond BB%d ", bb->index);
      print_generic_expr (dump_file, leader);
      fprintf (dump_file, " for value ");
      print_generic_expr (dump_file, valnum);
      fprintf (dump_file, "\n");
    }
  vn_ssa_aux_t value = VN_INFO (valnum);
  vn_avail *av;
  if (m_avail_freelist)
    {
      av = m_avail_freelist;
      m_avail_freelist = m_avail_freelist->next;
    }
  else
    av = XOBNEW (&vn_ssa_aux_obstack, vn_avail);
  av->location = bb->index;
  av->leader = SSA_NAME_VERSION (leader);
  av->next = value->avail;
  av->next_undo = last_pushed_avail;
  last_pushed_avail = value;
  value->avail = av;
}
/* Valueization hook for RPO VN plus required state.  */

tree
rpo_vn_valueize (tree name)
{
  if (TREE_CODE (name) == SSA_NAME)
    {
      vn_ssa_aux_t val = VN_INFO (name);
      if (val)
	{
	  tree tem = val->valnum;
	  if (tem != VN_TOP && tem != name)
	    {
	      if (TREE_CODE (tem) != SSA_NAME)
		return tem;
	      /* For all values we only valueize to an available leader
		 which means we can use SSA name info without restriction.  */
	      tem = rpo_avail->eliminate_avail (vn_context_bb, tem);
	      if (tem)
		return tem;
	    }
	}
    }
  return name;
}
/* Insert on PRED_E predicates derived from CODE OPS being true besides the
   inverted condition.  */

static void
insert_related_predicates_on_edge (enum tree_code code, tree *ops, edge pred_e)
{
  switch (code)
    {
    case LT_EXPR:
      /* a < b -> a {!,<}= b */
      vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
					   ops, boolean_true_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, LE_EXPR, boolean_type_node,
					   ops, boolean_true_node, 0, pred_e);
      /* a < b -> ! a {>,=} b */
      vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
					   ops, boolean_false_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
					   ops, boolean_false_node, 0, pred_e);
      break;
    case GT_EXPR:
      /* a > b -> a {!,>}= b */
      vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
					   ops, boolean_true_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, GE_EXPR, boolean_type_node,
					   ops, boolean_true_node, 0, pred_e);
      /* a > b -> ! a {<,=} b */
      vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
					   ops, boolean_false_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
					   ops, boolean_false_node, 0, pred_e);
      break;
    case EQ_EXPR:
      /* a == b -> ! a {<,>} b */
      vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
					   ops, boolean_false_node, 0, pred_e);
      vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
					   ops, boolean_false_node, 0, pred_e);
      break;
    case NE_EXPR:
    case LE_EXPR:
    case GE_EXPR:
      /* Nothing besides inverted condition.  */
      break;
    default:;
    }
}
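
/* For example (a sketch):  for  if (a_1 < b_2)  the true edge gets
   a_1 != b_2 and a_1 <= b_2 recorded as true and a_1 > b_2 and
   a_1 == b_2 recorded as false, so a later  if (a_1 == b_2)  in the
   region dominated by that edge simplifies to false without needing
   a separate range analysis.  */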
/* Main stmt worker for RPO VN, process BB.  */

static unsigned
process_bb (rpo_elim &avail, basic_block bb,
	    bool bb_visited, bool iterate_phis, bool iterate, bool eliminate,
	    bool do_region, bitmap exit_bbs, bool skip_phis)
{
  unsigned todo = 0;
  edge_iterator ei;
  edge e;

  vn_context_bb = bb;

  /* If we are in loop-closed SSA preserve this state.  This is
     relevant when called on regions from outside of FRE/PRE.  */
  bool lc_phi_nodes = false;
  if (!skip_phis
      && loops_state_satisfies_p (LOOP_CLOSED_SSA))
    FOR_EACH_EDGE (e, ei, bb->preds)
      if (e->src->loop_father != e->dest->loop_father
	  && flow_loop_nested_p (e->dest->loop_father,
				 e->src->loop_father))
	{
	  lc_phi_nodes = true;
	  break;
	}

  /* When we visit a loop header substitute into loop info.  */
  if (!iterate && eliminate && bb->loop_father->header == bb)
    {
      /* Keep fields in sync with substitute_in_loop_info.  */
      if (bb->loop_father->nb_iterations)
	bb->loop_father->nb_iterations
	  = simplify_replace_tree (bb->loop_father->nb_iterations,
				   NULL_TREE, NULL_TREE, &vn_valueize_for_srt);
    }

  /* Value-number all defs in the basic-block.  */
  if (!skip_phis)
    for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
	 gsi_next (&gsi))
      {
	gphi *phi = gsi.phi ();
	tree res = PHI_RESULT (phi);
	vn_ssa_aux_t res_info = VN_INFO (res);
	if (!bb_visited)
	  {
	    gcc_assert (!res_info->visited);
	    res_info->valnum = VN_TOP;
	    res_info->visited = true;
	  }

	/* When not iterating force backedge values to varying.  */
	visit_stmt (phi, !iterate_phis);
	if (virtual_operand_p (res))
	  continue;

	/* Eliminate */
	/* The interesting case is gcc.dg/tree-ssa/pr22230.c for correctness
	   how we handle backedges and availability.
	   And gcc.dg/tree-ssa/ssa-sccvn-2.c for optimization.  */
	tree val = res_info->valnum;
	if (res != val && !iterate && eliminate)
	  {
	    if (tree leader = avail.eliminate_avail (bb, res))
	      {
		if (leader != res
		    /* Preserve loop-closed SSA form.  */
		    && (! lc_phi_nodes
			|| is_gimple_min_invariant (leader)))
		  {
		    if (dump_file && (dump_flags & TDF_DETAILS))
		      {
			fprintf (dump_file, "Replaced redundant PHI node "
				 "defining ");
			print_generic_expr (dump_file, res);
			fprintf (dump_file, " with ");
			print_generic_expr (dump_file, leader);
			fprintf (dump_file, "\n");
		      }
		    avail.eliminations++;

		    if (may_propagate_copy (res, leader))
		      {
			/* Schedule for removal.  */
			avail.to_remove.safe_push (phi);
			continue;
		      }
		    /* ???  Else generate a copy stmt.  */
		  }
	      }
	  }
	/* Only make defs available that not already are.  But make
	   sure loop-closed SSA PHI node defs are picked up for
	   downstream uses.  */
	if (lc_phi_nodes
	    || res == val
	    || ! avail.eliminate_avail (bb, res))
	  avail.eliminate_push_avail (bb, res);
      }

  /* For empty BBs mark outgoing edges executable.  For non-empty BBs
     we do this when processing the last stmt as we have to do this
     before elimination which otherwise forces GIMPLE_CONDs to
     if (1 != 0) style when seeing non-executable edges.  */
  if (gsi_end_p (gsi_start_bb (bb)))
    {
      FOR_EACH_EDGE (e, ei, bb->succs)
	{
	  if (!(e->flags & EDGE_EXECUTABLE))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file,
			 "marking outgoing edge %d -> %d executable\n",
			 e->src->index, e->dest->index);
	      e->flags |= EDGE_EXECUTABLE;
	      e->dest->flags |= BB_EXECUTABLE;
	    }
	  else if (!(e->dest->flags & BB_EXECUTABLE))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file,
			 "marking destination block %d reachable\n",
			 e->dest->index);
	      e->dest->flags |= BB_EXECUTABLE;
	    }
	}
    }
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      ssa_op_iter i;
      tree op;
      if (!bb_visited)
	{
	  FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
	    {
	      vn_ssa_aux_t op_info = VN_INFO (op);
	      gcc_assert (!op_info->visited);
	      op_info->valnum = VN_TOP;
	      op_info->visited = true;
	    }

	  /* We somehow have to deal with uses that are not defined
	     in the processed region.  Forcing unvisited uses to
	     varying here doesn't play well with def-use following during
	     expression simplification, so we deal with this by checking
	     the visited flag in SSA_VAL.  */
	}

      visit_stmt (gsi_stmt (gsi));

      gimple *last = gsi_stmt (gsi);
      e = NULL;
      switch (gimple_code (last))
	{
	case GIMPLE_SWITCH:
	  e = find_taken_edge (bb, vn_valueize (gimple_switch_index
						  (as_a <gswitch *> (last))));
	  break;
	case GIMPLE_COND:
	  {
	    tree lhs = vn_valueize (gimple_cond_lhs (last));
	    tree rhs = vn_valueize (gimple_cond_rhs (last));
	    tree val = gimple_simplify (gimple_cond_code (last),
					boolean_type_node, lhs, rhs,
					NULL, vn_valueize);
	    /* If the condition didn't simplify see if we have recorded
	       an expression from edges taken so far.  */
	    if (! val || TREE_CODE (val) != INTEGER_CST)
	      {
		vn_nary_op_t vnresult;
		tree ops[2];
		ops[0] = lhs;
		ops[1] = rhs;
		val = vn_nary_op_lookup_pieces (2, gimple_cond_code (last),
						boolean_type_node, ops,
						&vnresult);
		/* Did we get a predicated value?  */
		if (! val && vnresult && vnresult->predicated_values)
		  {
		    val = vn_nary_op_get_predicated_value (vnresult, bb);
		    if (val && dump_file && (dump_flags & TDF_DETAILS))
		      {
			fprintf (dump_file, "Got predicated value ");
			print_generic_expr (dump_file, val, TDF_NONE);
			fprintf (dump_file, " for ");
			print_gimple_stmt (dump_file, last, TDF_SLIM);
		      }
		  }
	      }
	    if (val)
	      e = find_taken_edge (bb, val);
	    if (! e)
	      {
		/* If we didn't manage to compute the taken edge then
		   push predicated expressions for the condition itself
		   and related conditions to the hashtables.  This allows
		   simplification of redundant conditions which is
		   important as early cleanup.  */
		edge true_e, false_e;
		extract_true_false_edges_from_block (bb, &true_e, &false_e);
		enum tree_code code = gimple_cond_code (last);
		enum tree_code icode
		  = invert_tree_comparison (code, HONOR_NANS (lhs));
		tree ops[2];
		ops[0] = lhs;
		ops[1] = rhs;
		if ((do_region && bitmap_bit_p (exit_bbs, true_e->dest->index))
		    || !can_track_predicate_on_edge (true_e))
		  true_e = NULL;
		if ((do_region && bitmap_bit_p (exit_bbs, false_e->dest->index))
		    || !can_track_predicate_on_edge (false_e))
		  false_e = NULL;
		if (true_e)
		  vn_nary_op_insert_pieces_predicated
		    (2, code, boolean_type_node, ops,
		     boolean_true_node, 0, true_e);
		if (false_e)
		  vn_nary_op_insert_pieces_predicated
		    (2, code, boolean_type_node, ops,
		     boolean_false_node, 0, false_e);
		if (icode != ERROR_MARK)
		  {
		    if (true_e)
		      vn_nary_op_insert_pieces_predicated
			(2, icode, boolean_type_node, ops,
			 boolean_false_node, 0, true_e);
		    if (false_e)
		      vn_nary_op_insert_pieces_predicated
			(2, icode, boolean_type_node, ops,
			 boolean_true_node, 0, false_e);
		  }
		/* Relax for non-integers, inverted condition handled
		   above.  */
		if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
		  {
		    if (true_e)
		      insert_related_predicates_on_edge (code, ops, true_e);
		    if (false_e)
		      insert_related_predicates_on_edge (icode, ops, false_e);
		  }
	      }
	    break;
	  }
	case GIMPLE_GOTO:
	  e = find_taken_edge (bb, vn_valueize (gimple_goto_dest (last)));
	  break;
	default:
	  e = NULL;
	}
      if (e)
	{
	  todo = TODO_cleanup_cfg;
	  if (!(e->flags & EDGE_EXECUTABLE))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file,
			 "marking known outgoing %sedge %d -> %d executable\n",
			 e->flags & EDGE_DFS_BACK ? "back-" : "",
			 e->src->index, e->dest->index);
	      e->flags |= EDGE_EXECUTABLE;
	      e->dest->flags |= BB_EXECUTABLE;
	    }
	  else if (!(e->dest->flags & BB_EXECUTABLE))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		fprintf (dump_file,
			 "marking destination block %d reachable\n",
			 e->dest->index);
	      e->dest->flags |= BB_EXECUTABLE;
	    }
	}
      else if (gsi_one_before_end_p (gsi))
	{
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    {
	      if (!(e->flags & EDGE_EXECUTABLE))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file,
			     "marking outgoing edge %d -> %d executable\n",
			     e->src->index, e->dest->index);
		  e->flags |= EDGE_EXECUTABLE;
		  e->dest->flags |= BB_EXECUTABLE;
		}
	      else if (!(e->dest->flags & BB_EXECUTABLE))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file,
			     "marking destination block %d reachable\n",
			     e->dest->index);
		  e->dest->flags |= BB_EXECUTABLE;
		}
	    }
	}

      /* Eliminate.  That also pushes to avail.  */
      if (eliminate && ! iterate)
	avail.eliminate_stmt (bb, &gsi);
      else
	/* If not eliminating, make all not already available defs
	   available.  But avoid picking up dead defs.  */
	FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_DEF)
	  if (! has_zero_uses (op)
	      && ! avail.eliminate_avail (bb, op))
	    avail.eliminate_push_avail (bb, op);
    }

  /* Eliminate in destination PHI arguments.  Always substitute in dest
     PHIs, even for non-executable edges.  This handles region
     crossing edges.  */
  if (!iterate && eliminate)
    FOR_EACH_EDGE (e, ei, bb->succs)
      for (gphi_iterator gsi = gsi_start_phis (e->dest);
	   !gsi_end_p (gsi); gsi_next (&gsi))
	{
	  gphi *phi = gsi.phi ();
	  use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
	  tree arg = USE_FROM_PTR (use_p);
	  if (TREE_CODE (arg) != SSA_NAME
	      || virtual_operand_p (arg))
	    continue;
	  tree sprime;
	  if (SSA_NAME_IS_DEFAULT_DEF (arg))
	    {
	      sprime = SSA_VAL (arg);
	      gcc_assert (TREE_CODE (sprime) != SSA_NAME
			  || SSA_NAME_IS_DEFAULT_DEF (sprime));
	    }
	  else
	    /* Look for sth available at the definition block of the argument.
	       This avoids inconsistencies between availability there which
	       decides if the stmt can be removed and availability at the
	       use site.  The SSA property ensures that things available
	       at the definition are also available at uses.  */
	    sprime
	      = avail.eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (arg)),
				       arg);
	  if (sprime
	      && may_propagate_copy (arg, sprime, !(e->flags & EDGE_ABNORMAL)))
	    propagate_value (use_p, sprime);
	}

  vn_context_bb = NULL;
  return todo;
}
/* Unwind state per basic-block.  */

struct unwind_state
{
  /* Times this block has been visited.  */
  unsigned visited;
  /* Whether to handle this as iteration point or whether to treat
     incoming backedge PHI values as varying.  */
  bool iterate;
  /* Maximum RPO index this block is reachable from.  */
  int max_rpo;
  /* Unwind state.  */
  void *ob_top;
  vn_reference_t ref_top;
  vn_phi_t phi_top;
  vn_nary_op_t nary_top;
  vn_avail *avail_top;
};
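/* The *_top fields above are snapshot markers: they record the heads of the
   last_inserted_* undo chains and the vn_tables_obstack watermark at the
   time a block with incoming backedges was first reached, so do_unwind
   below can pop hashtable insertions back to exactly that point when
   iteration restarts there.  */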
/* Unwind the RPO VN state for iteration.  */

static void
do_unwind (unwind_state *to, rpo_elim &avail)
{
  gcc_assert (to->iterate);
  for (; last_inserted_nary != to->nary_top;
       last_inserted_nary = last_inserted_nary->next)
    {
      vn_nary_op_t *slot;
      slot = valid_info->nary->find_slot_with_hash
        (last_inserted_nary, last_inserted_nary->hashcode, NO_INSERT);
      /* Predication causes the need to restore previous state.  */
      if ((*slot)->unwind_to)
        *slot = (*slot)->unwind_to;
      else
        valid_info->nary->clear_slot (slot);
    }
  for (; last_inserted_phi != to->phi_top;
       last_inserted_phi = last_inserted_phi->next)
    {
      vn_phi_t *slot;
      slot = valid_info->phis->find_slot_with_hash
        (last_inserted_phi, last_inserted_phi->hashcode, NO_INSERT);
      valid_info->phis->clear_slot (slot);
    }
  for (; last_inserted_ref != to->ref_top;
       last_inserted_ref = last_inserted_ref->next)
    {
      vn_reference_t *slot;
      slot = valid_info->references->find_slot_with_hash
        (last_inserted_ref, last_inserted_ref->hashcode, NO_INSERT);
      (*slot)->operands.release ();
      valid_info->references->clear_slot (slot);
    }
  obstack_free (&vn_tables_obstack, to->ob_top);

  /* Prune [rpo_idx, ] from avail.  */
  for (; last_pushed_avail && last_pushed_avail->avail != to->avail_top;)
    {
      vn_ssa_aux_t val = last_pushed_avail;
      vn_avail *av = val->avail;
      val->avail = av->next;
      last_pushed_avail = av->next_undo;
      av->next = avail.m_avail_freelist;
      avail.m_avail_freelist = av;
    }
}
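/* After do_unwind the hashtables are as they were when the iteration target
   was first entered; re-walking the blocks from there re-populates them.
   Note the avail undo list is threaded through vn_avail::next_undo, which
   is why pruning can stop exactly at the recorded avail_top.  */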
/* Do VN on a SEME region specified by ENTRY and EXIT_BBS in FN.
   If ITERATE is true then treat backedges optimistically as not
   executed and iterate.  If ELIMINATE is true then perform
   elimination, otherwise leave that to the caller.  If SKIP_ENTRY_PHIS
   is true then force PHI nodes in ENTRY->dest to VARYING.  */

static unsigned
do_rpo_vn_1 (function *fn, edge entry, bitmap exit_bbs,
             bool iterate, bool eliminate, bool skip_entry_phis,
             vn_lookup_kind kind)
{
  unsigned todo = 0;
  default_vn_walk_kind = kind;

  /* We currently do not support region-based iteration when
     elimination is requested.  */
  gcc_assert (!entry || !iterate || !eliminate);
  /* When iterating we need loop info up-to-date.  */
  gcc_assert (!iterate || !loops_state_satisfies_p (LOOPS_NEED_FIXUP));

  bool do_region = entry != NULL;
  if (!do_region)
    {
      entry = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fn));
      exit_bbs = BITMAP_ALLOC (NULL);
      bitmap_set_bit (exit_bbs, EXIT_BLOCK);
    }
  /* Clear EDGE_DFS_BACK on "all" entry edges, RPO order compute will
     re-mark those that are contained in the region.  */
  edge_iterator ei;
  edge e;
  FOR_EACH_EDGE (e, ei, entry->dest->preds)
    e->flags &= ~EDGE_DFS_BACK;

  int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS);
  auto_vec<std::pair<int, int> > toplevel_scc_extents;
  int n = rev_post_order_and_mark_dfs_back_seme
    (fn, entry, exit_bbs, true, rpo,
     !iterate ? &toplevel_scc_extents : NULL);

  if (!do_region)
    BITMAP_FREE (exit_bbs);
  /* If there are any non-DFS_BACK edges into entry->dest skip
     processing PHI nodes for that block.  This supports
     value-numbering loop bodies w/o the actual loop.  */
  FOR_EACH_EDGE (e, ei, entry->dest->preds)
    if (e != entry
        && !(e->flags & EDGE_DFS_BACK))
      break;
  if (e != NULL && dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Region does not contain all edges into "
             "the entry block, skipping its PHIs.\n");
  skip_entry_phis |= e != NULL;
  int *bb_to_rpo = XNEWVEC (int, last_basic_block_for_fn (fn));
  for (int i = 0; i < n; ++i)
    bb_to_rpo[rpo[i]] = i;
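  /* bb_to_rpo is the inverse of rpo[]: it maps a basic-block index to its
     position in the RPO order, so edge sources and destinations can be
     translated to rpo_state[] slots in constant time.  */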
  unwind_state *rpo_state = XNEWVEC (unwind_state, n);

  rpo_elim avail (entry->dest);
  /* Verify we have no extra entries into the region.  */
  if (flag_checking && do_region)
    {
      auto_bb_flag bb_in_region (fn);
      for (int i = 0; i < n; ++i)
        {
          basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
          bb->flags |= bb_in_region;
        }
      /* We can't merge the first two loops because we cannot rely
         on EDGE_DFS_BACK for edges not within the region.  But if
         we decide to always have the bb_in_region flag we can
         do the checking during the RPO walk itself (but then it's
         also easy to handle MEME conservatively).  */
      for (int i = 0; i < n; ++i)
        {
          basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
          edge e;
          edge_iterator ei;
          FOR_EACH_EDGE (e, ei, bb->preds)
            gcc_assert (e == entry
                        || (skip_entry_phis && bb == entry->dest)
                        || (e->src->flags & bb_in_region));
        }
      for (int i = 0; i < n; ++i)
        {
          basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
          bb->flags &= ~bb_in_region;
        }
    }
  /* Create the VN state.  For the initial size of the various hashtables
     use a heuristic based on region size and number of SSA names.  */
  unsigned region_size = (((unsigned HOST_WIDE_INT) n * num_ssa_names)
                          / (n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS));
  VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
  next_value_id = 1;
  next_constant_value_id = -1;

  vn_ssa_aux_hash = new hash_table <vn_ssa_aux_hasher> (region_size * 2);
  gcc_obstack_init (&vn_ssa_aux_obstack);

  gcc_obstack_init (&vn_tables_obstack);
  gcc_obstack_init (&vn_tables_insert_obstack);
  valid_info = XCNEW (struct vn_tables_s);
  allocate_vn_table (valid_info, region_size);
  last_inserted_ref = NULL;
  last_inserted_phi = NULL;
  last_inserted_nary = NULL;
  last_pushed_avail = NULL;

  vn_valueize = rpo_vn_valueize;
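  /* rpo_vn_valueize is the valueization hook consulted by the
     match-and-simplify machinery while statements are visited; roughly, it
     maps an SSA name to its current value number when one is known and to
     the name itself otherwise.  */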
  /* Initialize the unwind state and edge/BB executable state.  */
  unsigned curr_scc = 0;
  for (int i = 0; i < n; ++i)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
      rpo_state[i].visited = 0;
      rpo_state[i].max_rpo = i;
      if (!iterate && curr_scc < toplevel_scc_extents.length ())
        {
          if (i >= toplevel_scc_extents[curr_scc].first
              && i <= toplevel_scc_extents[curr_scc].second)
            rpo_state[i].max_rpo = toplevel_scc_extents[curr_scc].second;
          if (i == toplevel_scc_extents[curr_scc].second)
            curr_scc++;
        }
      bb->flags &= ~BB_EXECUTABLE;
      bool has_backedges = false;
      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          if (e->flags & EDGE_DFS_BACK)
            has_backedges = true;
          e->flags &= ~EDGE_EXECUTABLE;
          if (iterate || e == entry
              || (skip_entry_phis && bb == entry->dest))
            continue;
        }
      rpo_state[i].iterate = iterate && has_backedges;
    }
  entry->flags |= EDGE_EXECUTABLE;
  entry->dest->flags |= BB_EXECUTABLE;
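  /* At this point only the region entry edge and its destination are
     executable; everything else is discovered during the walk, which is
     what makes the iterating mode optimistic about backedges.  */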
  /* As heuristic to improve compile-time we handle only the N innermost
     loops and the outermost one optimistically.  */
  if (iterate)
    {
      unsigned max_depth = param_rpo_vn_max_loop_depth;
      for (auto loop : loops_list (cfun, LI_ONLY_INNERMOST))
        if (loop_depth (loop) > max_depth)
          for (unsigned i = 2;
               i < loop_depth (loop) - max_depth; ++i)
            {
              basic_block header = superloop_at_depth (loop, i)->header;
              bool non_latch_backedge = false;
              edge e;
              edge_iterator ei;
              FOR_EACH_EDGE (e, ei, header->preds)
                if (e->flags & EDGE_DFS_BACK)
                  {
                    /* There can be a non-latch backedge into the header
                       which is part of an outer irreducible region.  We
                       cannot avoid iterating this block then.  */
                    if (!dominated_by_p (CDI_DOMINATORS,
                                         e->src, e->dest))
                      {
                        if (dump_file && (dump_flags & TDF_DETAILS))
                          fprintf (dump_file, "non-latch backedge %d -> %d "
                                   "forces iteration of loop %d\n",
                                   e->src->index, e->dest->index,
                                   loop->num);
                        non_latch_backedge = true;
                      }
                    else
                      e->flags |= EDGE_EXECUTABLE;
                  }
              rpo_state[bb_to_rpo[header->index]].iterate
                = non_latch_backedge;
            }
    }
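  /* The cutoff above is the rpo-vn-max-loop-depth --param; loop headers
     between the outermost loop and the innermost max_depth levels get
     their latch backedges pre-marked executable and are not used as
     iteration points, so PHI values flowing over those backedges are
     treated pessimistically rather than iterated.  */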
  uint64_t nblk = 0;
  int idx = 0;
  if (iterate)
    /* Go and process all blocks, iterating as necessary.  */
    do
      {
        basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);

        /* If the block has incoming backedges remember unwind state.  This
           is required even for non-executable blocks since in irreducible
           regions we might reach them via the backedge and re-start
           iterating from there.
           Note we can individually mark blocks with incoming backedges to
           not iterate where we then handle PHIs conservatively.  We do that
           heuristically to reduce compile-time for degenerate cases.  */
        if (rpo_state[idx].iterate)
          {
            rpo_state[idx].ob_top = obstack_alloc (&vn_tables_obstack, 0);
            rpo_state[idx].ref_top = last_inserted_ref;
            rpo_state[idx].phi_top = last_inserted_phi;
            rpo_state[idx].nary_top = last_inserted_nary;
            rpo_state[idx].avail_top
              = last_pushed_avail ? last_pushed_avail->avail : NULL;
          }

        if (!(bb->flags & BB_EXECUTABLE))
          {
            if (dump_file && (dump_flags & TDF_DETAILS))
              fprintf (dump_file, "Block %d: BB%d found not executable\n",
                       idx, bb->index);
            idx++;
            continue;
          }

        if (dump_file && (dump_flags & TDF_DETAILS))
          fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
        nblk++;
        todo |= process_bb (avail, bb,
                            rpo_state[idx].visited != 0,
                            rpo_state[idx].iterate,
                            iterate, eliminate, do_region, exit_bbs, false);
        rpo_state[idx].visited++;

        /* Verify if changed values flow over executable outgoing backedges
           and those change destination PHI values (that's the thing we
           can easily verify).  Reduce over all such edges to the farthest
           away PHI.  */
        int iterate_to = -1;
        edge_iterator ei;
        edge e;
        FOR_EACH_EDGE (e, ei, bb->succs)
          if ((e->flags & (EDGE_DFS_BACK|EDGE_EXECUTABLE))
              == (EDGE_DFS_BACK|EDGE_EXECUTABLE)
              && rpo_state[bb_to_rpo[e->dest->index]].iterate)
            {
              int destidx = bb_to_rpo[e->dest->index];
              if (!rpo_state[destidx].visited)
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "Unvisited destination %d\n",
                             e->dest->index);
                  if (iterate_to == -1 || destidx < iterate_to)
                    iterate_to = destidx;
                  continue;
                }
              if (dump_file && (dump_flags & TDF_DETAILS))
                fprintf (dump_file, "Looking for changed values of backedge"
                         " %d->%d destination PHIs\n",
                         e->src->index, e->dest->index);
              vn_context_bb = e->dest;
              gphi_iterator gsi;
              for (gsi = gsi_start_phis (e->dest);
                   !gsi_end_p (gsi); gsi_next (&gsi))
                {
                  bool inserted = false;
                  /* While we'd ideally just iterate on value changes
                     we CSE PHIs and do that even across basic-block
                     boundaries.  So even hashtable state changes can
                     be important (which is roughly equivalent to
                     PHI argument value changes).  To not excessively
                     iterate because of that we track whether a PHI
                     was CSEd to with GF_PLF_1.  */
                  bool phival_changed;
                  if ((phival_changed = visit_phi (gsi.phi (),
                                                   &inserted, false))
                      || (inserted && gimple_plf (gsi.phi (), GF_PLF_1)))
                    {
                      if (!phival_changed
                          && dump_file && (dump_flags & TDF_DETAILS))
                        fprintf (dump_file, "PHI was CSEd and hashtable "
                                 "state (changed)\n");
                      if (iterate_to == -1 || destidx < iterate_to)
                        iterate_to = destidx;
                      break;
                    }
                }
              vn_context_bb = NULL;
            }
        if (iterate_to != -1)
          {
            do_unwind (&rpo_state[iterate_to], avail);
            idx = iterate_to;
            if (dump_file && (dump_flags & TDF_DETAILS))
              fprintf (dump_file, "Iterating to %d BB%d\n",
                       iterate_to, rpo[iterate_to]);
            continue;
          }

        idx++;
      }
    while (idx < n);
  else /* !iterate */
    {
      /* Process all blocks greedily with a worklist that enforces RPO
         processing of reachable blocks.  */
      auto_bitmap worklist;
      bitmap_set_bit (worklist, 0);
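      /* RPO index 0 is ENTRY->dest, so the walk is seeded with the region
         entry block; successor blocks are only queued once reached over an
         executable edge.  */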
      while (!bitmap_empty_p (worklist))
        {
          int idx = bitmap_clear_first_set_bit (worklist);
          basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
          gcc_assert ((bb->flags & BB_EXECUTABLE)
                      && !rpo_state[idx].visited);

          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "Processing block %d: BB%d\n",
                     idx, bb->index);

          /* When we run into predecessor edges where we cannot trust its
             executable state mark them executable so PHI processing will
             be conservative.
             ???  Do we need to force arguments flowing over that edge
             to be varying or will they even always be?  */
          edge_iterator ei;
          edge e;
          FOR_EACH_EDGE (e, ei, bb->preds)
            if (!(e->flags & EDGE_EXECUTABLE)
                && (bb == entry->dest
                    || (!rpo_state[bb_to_rpo[e->src->index]].visited
                        && (rpo_state[bb_to_rpo[e->src->index]].max_rpo
                            >= (int) idx))))
              {
                if (dump_file && (dump_flags & TDF_DETAILS))
                  fprintf (dump_file, "Cannot trust state of predecessor "
                           "edge %d -> %d, marking executable\n",
                           e->src->index, e->dest->index);
                e->flags |= EDGE_EXECUTABLE;
              }

          nblk++;
          todo |= process_bb (avail, bb, false, false, false, eliminate,
                              do_region, exit_bbs,
                              skip_entry_phis && bb == entry->dest);
          rpo_state[idx].visited++;

          FOR_EACH_EDGE (e, ei, bb->succs)
            if ((e->flags & EDGE_EXECUTABLE)
                && e->dest->index != EXIT_BLOCK
                && (!do_region || !bitmap_bit_p (exit_bbs, e->dest->index))
                && !rpo_state[bb_to_rpo[e->dest->index]].visited)
              bitmap_set_bit (worklist, bb_to_rpo[e->dest->index]);
        }
    }
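  /* In the non-iterating mode every reachable block is thus visited exactly
     once, as the assertion on rpo_state[idx].visited above checks; values
     flowing over backedges are handled pessimistically instead.  */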
  /* If statistics or dump file active.  */
  int nex = 0;
  unsigned max_visited = 1;
  for (int i = 0; i < n; ++i)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
      if (bb->flags & BB_EXECUTABLE)
        nex++;
      statistics_histogram_event (cfun, "RPO block visited times",
                                  rpo_state[i].visited);
      if (rpo_state[i].visited > max_visited)
        max_visited = rpo_state[i].visited;
    }
  unsigned nvalues = 0, navail = 0;
  for (hash_table <vn_ssa_aux_hasher>::iterator i
         = vn_ssa_aux_hash->begin ();
       i != vn_ssa_aux_hash->end (); ++i)
    {
      nvalues++;
      vn_avail *av = (*i)->avail;
      while (av)
        {
          navail++;
          av = av->next;
        }
    }
  statistics_counter_event (cfun, "RPO blocks", n);
  statistics_counter_event (cfun, "RPO blocks visited", nblk);
  statistics_counter_event (cfun, "RPO blocks executable", nex);
  statistics_histogram_event (cfun, "RPO iterations", 10*nblk / nex);
  statistics_histogram_event (cfun, "RPO num values", nvalues);
  statistics_histogram_event (cfun, "RPO num avail", navail);
  statistics_histogram_event (cfun, "RPO num lattice",
                              vn_ssa_aux_hash->elements ());
  if (dump_file && (dump_flags & (TDF_DETAILS|TDF_STATS)))
    {
      fprintf (dump_file, "RPO iteration over %d blocks visited %" PRIu64
               " blocks in total discovering %d executable blocks iterating "
               "%d.%d times, a block was visited max. %u times\n",
               n, nblk, nex,
               (int)((10*nblk / nex)/10), (int)((10*nblk / nex)%10),
               max_visited);
      fprintf (dump_file, "RPO tracked %d values available at %d locations "
               "and %" PRIu64 " lattice elements\n",
               nvalues, navail, (uint64_t) vn_ssa_aux_hash->elements ());
    }
  if (eliminate)
    {
      /* When !iterate we already performed elimination during the RPO
         walk.  */
      if (iterate)
        {
          /* Elimination for region-based VN needs to be done within the
             RPO walk.  */
          gcc_assert (! do_region);
          /* Note we can't use avail.walk here because that gets confused
             by the existing availability and it will be less efficient
             as well.  */
          todo |= eliminate_with_rpo_vn (NULL);
        }
      else
        todo |= avail.eliminate_cleanup (do_region);
    }

  XDELETEVEC (bb_to_rpo);
  XDELETEVEC (rpo);
  XDELETEVEC (rpo_state);

  return todo;
}
/* Region-based entry for RPO VN.  Performs value-numbering and elimination
   on the SEME region specified by ENTRY and EXIT_BBS.  If ENTRY is not
   the only edge into the region at ENTRY->dest PHI nodes in ENTRY->dest
   are skipped.
   If ITERATE is true then treat backedges optimistically as not
   executed and iterate.  If ELIMINATE is true then perform
   elimination, otherwise leave that to the caller.
   If SKIP_ENTRY_PHIS is true then force PHI nodes in ENTRY->dest to VARYING.
   KIND specifies the amount of work done for handling memory operations.  */

unsigned
do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
           bool iterate, bool eliminate, bool skip_entry_phis,
           vn_lookup_kind kind)
{
  auto_timevar tv (TV_TREE_RPO_VN);
  unsigned todo = do_rpo_vn_1 (fn, entry, exit_bbs, iterate, eliminate,
                               skip_entry_phis, kind);
  free_rpo_vn ();
  return todo;
}

namespace {
const pass_data pass_data_fre =
{
  GIMPLE_PASS, /* type */
  "fre", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_FRE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_fre : public gimple_opt_pass
{
public:
  pass_fre (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_fre, ctxt), may_iterate (true)
  {}

  /* opt_pass methods: */
  opt_pass * clone () final override { return new pass_fre (m_ctxt); }
  void set_pass_param (unsigned int n, bool param) final override
    {
      gcc_assert (n == 0);
      may_iterate = param;
    }
  bool gate (function *) final override
    {
      return flag_tree_fre != 0 && (may_iterate || optimize > 1);
    }
  unsigned int execute (function *) final override;

private:
  bool may_iterate;
}; // class pass_fre
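/* The FRE pass is instantiated multiple times in the pass pipeline; the
   single bool parameter set via set_pass_param above selects between the
   iterating instances and the cheaper non-iterating late instances (see
   passes.def for the actual registrations).  */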
unsigned int
pass_fre::execute (function *fun)
{
  unsigned todo = 0;

  /* At -O[1g] use the cheap non-iterating mode.  */
  bool iterate_p = may_iterate && (optimize > 1);
  calculate_dominance_info (CDI_DOMINATORS);
  if (iterate_p)
    loop_optimizer_init (AVOID_CFG_MODIFICATIONS);

  todo = do_rpo_vn_1 (fun, NULL, NULL, iterate_p, true, false,
                      VN_WALKREWRITE);
  free_rpo_vn ();

  if (iterate_p)
    loop_optimizer_finalize ();

  if (scev_initialized_p ())
    scev_reset_htab ();

  /* For late FRE after IVOPTs and unrolling, see if we can
     remove some TREE_ADDRESSABLE and rewrite stuff into SSA.  */
  if (!may_iterate)
    todo |= TODO_update_address_taken;

  return todo;
}

} // anon namespace

gimple_opt_pass *
make_pass_fre (gcc::context *ctxt)
{
  return new pass_fre (ctxt);
}
#undef BB_EXECUTABLE