/* Header file for SSA dominator optimizations.
   Copyright (C) 2013-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "function.h"
#include "basic-block.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "tree-pretty-print.h"
#include "tree-ssa-scopedtables.h"
#include "tree-ssa-threadedge.h"
#include "stor-layout.h"
#include "fold-const.h"
#include "tree-eh.h"
#include "internal-fn.h"
#include "tree-dfa.h"
#include "options.h"
#include "params.h"
static bool hashable_expr_equal_p (const struct hashable_expr *,
				   const struct hashable_expr *);
/* Initialize local stacks for this optimizer and record equivalences
   upon entry to BB.  Equivalences can come from the edge traversed to
   reach BB or they may come from PHI nodes at the start of BB.  */
/* Pop items off the unwinding stack, removing each from the hash table
   until a marker is encountered.  */
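/* Entries are recorded as <new, old> pairs (see record_expr below): OLD
   is non-NULL when NEW replaced an existing table entry that must be
   restored on unwind, and NULL when NEW was a fresh insertion that is
   simply removed.  A pair with a NULL first element is the per-block
   marker.  */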
void
avail_exprs_stack::pop_to_marker ()
{
  /* Remove all the expressions made available in this block.  */
  while (m_stack.length () > 0)
    {
      std::pair<expr_hash_elt_t, expr_hash_elt_t> victim = m_stack.pop ();
      expr_hash_elt **slot;

      if (victim.first == NULL)
	break;

      /* This must precede the actual removal from the hash table,
	 as ELEMENT and the table entry may share a call argument
	 vector which will be freed during removal.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "<<<< ");
	  victim.first->print (dump_file);
	}

      slot = m_avail_exprs->find_slot (victim.first, NO_INSERT);
      gcc_assert (slot && *slot == victim.first);
      if (victim.second != NULL)
	{
	  delete *slot;
	  *slot = victim.second;
	}
      else
	m_avail_exprs->clear_slot (slot);
    }
}
/* Add <ELT1,ELT2> to the unwinding stack so they can be later removed
   from the hash table.  */

void
avail_exprs_stack::record_expr (class expr_hash_elt *elt1,
				class expr_hash_elt *elt2,
				char type)
{
  if (elt1 && dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "%c>>> ", type);
      elt1->print (dump_file);
    }

  m_stack.safe_push (std::pair<expr_hash_elt_t, expr_hash_elt_t> (elt1, elt2));
}
/* Helper for walk_non_aliased_vuses.  Determine if we arrived at
   the desired memory state.  */

static void *
vuse_eq (ao_ref *, tree vuse1, void *data)
{
  tree vuse2 = (tree) data;
  if (vuse1 == vuse2)
    return data;

  return NULL;
}
/* We looked for STMT in the hash table, but did not find it.

   If STMT is an assignment from a binary operator, we may know something
   about the operands relationship to each other which would allow
   us to derive a constant value for the RHS of STMT.  */
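/* For example, if an earlier test established "a_1 == b_2", then

       x_3 = a_1 - b_2;          can be simplified to 0
       x_4 = a_1 / b_2;          can be simplified to 1 (integer types)
       x_5 = MIN <a_1, b_2>;     can be simplified to a_1

   subject to the floating-point and fixed-point restrictions handled
   below, even though the runtime values of A_1 and B_2 are unknown.  */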
tree
avail_exprs_stack::simplify_binary_operation (gimple *stmt,
					      class expr_hash_elt element)
{
  if (is_gimple_assign (stmt))
    {
      struct hashable_expr *expr = element.expr ();
      if (expr->kind == EXPR_BINARY)
	{
	  enum tree_code code = expr->ops.binary.op;

	  switch (code)
	    {
	    /* For these cases, if we know the operands
	       are equal, then we know the result.  */
	    case MIN_EXPR:
	    case MAX_EXPR:
	    case BIT_IOR_EXPR:
	    case BIT_AND_EXPR:
	    case BIT_XOR_EXPR:
	    case MINUS_EXPR:
	    case TRUNC_DIV_EXPR:
	    case CEIL_DIV_EXPR:
	    case FLOOR_DIV_EXPR:
	    case ROUND_DIV_EXPR:
	    case EXACT_DIV_EXPR:
	    case TRUNC_MOD_EXPR:
	    case CEIL_MOD_EXPR:
	    case FLOOR_MOD_EXPR:
	    case ROUND_MOD_EXPR:
	      {
		/* Build a simple equality expr and query the hash table
		   for it.  */
		struct hashable_expr expr;
		expr.type = boolean_type_node;
		expr.kind = EXPR_BINARY;
		expr.ops.binary.op = EQ_EXPR;
		expr.ops.binary.opnd0 = gimple_assign_rhs1 (stmt);
		expr.ops.binary.opnd1 = gimple_assign_rhs2 (stmt);
		class expr_hash_elt element2 (&expr, NULL_TREE);
		expr_hash_elt **slot
		  = m_avail_exprs->find_slot (&element2, NO_INSERT);
		tree result_type = TREE_TYPE (gimple_assign_lhs (stmt));

		/* If the query was successful and returned a nonzero
		   result, then we know that the operands of the binary
		   expression are the same.  In many cases this allows
		   us to compute a constant result of the expression
		   at compile time, even if we do not know the exact
		   values of the operands.  */
		if (slot && *slot && integer_onep ((*slot)->lhs ()))
		  {
		    switch (code)
		      {
		      case MIN_EXPR:
		      case MAX_EXPR:
		      case BIT_IOR_EXPR:
		      case BIT_AND_EXPR:
			return gimple_assign_rhs1 (stmt);

		      case MINUS_EXPR:
			/* This is unsafe for certain floats even in non-IEEE
			   formats.  In IEEE, it is unsafe because it does
			   wrong for NaNs.  */
			if (FLOAT_TYPE_P (result_type)
			    && HONOR_NANS (result_type))
			  break;
			/* FALLTHRU */
		      case BIT_XOR_EXPR:
		      case TRUNC_MOD_EXPR:
		      case CEIL_MOD_EXPR:
		      case FLOOR_MOD_EXPR:
		      case ROUND_MOD_EXPR:
			return build_zero_cst (result_type);

		      case TRUNC_DIV_EXPR:
		      case CEIL_DIV_EXPR:
		      case FLOOR_DIV_EXPR:
		      case ROUND_DIV_EXPR:
		      case EXACT_DIV_EXPR:
			/* Avoid _Fract types where we can't build 1.  */
			if (ALL_FRACT_MODE_P (TYPE_MODE (result_type)))
			  break;
			return build_one_cst (result_type);

		      default:
			gcc_unreachable ();
		      }
		  }
		break;
	      }

	    default:
	      break;
	    }
	}
    }

  return NULL_TREE;
}
/* Search for an existing instance of STMT in the AVAIL_EXPRS_STACK table.
   If found, return its LHS.  Otherwise insert STMT in the table and
   return NULL_TREE.

   Also, when an expression is first inserted in the table, it is
   added to AVAIL_EXPRS_STACK, so that it can be removed when
   we finish processing this block and its children.  */
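/* For example, given the dominated sequence

       x_1 = *p_2;
       ...
       x_3 = *p_2;

   the lookup for the second load finds the entry recorded for the first
   and returns X_1, which the caller can use to replace X_3.  If the two
   loads have different VUSEs, the alias walk below decides whether the
   recorded value is still valid.  */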
tree
avail_exprs_stack::lookup_avail_expr (gimple *stmt, bool insert, bool tbaa_p)
{
  expr_hash_elt **slot;
  tree lhs;

  /* Get LHS of phi, assignment, or call; else NULL_TREE.  */
  if (gimple_code (stmt) == GIMPLE_PHI)
    lhs = gimple_phi_result (stmt);
  else
    lhs = gimple_get_lhs (stmt);

  class expr_hash_elt element (stmt, lhs);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "LKUP ");
      element.print (dump_file);
    }

  /* Don't bother remembering constant assignments and copy operations.
     Constants and copy operations are handled by the constant/copy propagator
     and do not need to be tracked here.  */
  if (element.expr()->kind == EXPR_SINGLE
      && (TREE_CODE (element.expr()->ops.single.rhs) == SSA_NAME
	  || is_gimple_min_invariant (element.expr()->ops.single.rhs)))
    return NULL_TREE;

  /* Finally try to find the expression in the main expression hash table.  */
  slot = m_avail_exprs->find_slot (&element, (insert ? INSERT : NO_INSERT));
  if (slot == NULL)
    return NULL_TREE;
  else if (*slot == NULL)
    {
      /* If we did not find the expression in the hash table, we may still
	 be able to produce a result for some expressions.  */
      tree retval = avail_exprs_stack::simplify_binary_operation (stmt,
								  element);

      /* We have, in effect, allocated *SLOT for ELEMENT at this point.
	 We must initialize *SLOT to a real entry, even if we found a
	 way to prove ELEMENT was a constant after not finding ELEMENT
	 in the hash table.

	 An uninitialized or empty slot is an indication no prior objects
	 entered into the hash table had a hash collision with ELEMENT.

	 If we fail to do so and had such entries in the table, they
	 would become unreachable.  */
      class expr_hash_elt *element2 = new expr_hash_elt (element);
      *slot = element2;

      record_expr (element2, NULL, '2');
      return retval;
    }

  /* If we found a redundant memory operation do an alias walk to
     check if we can re-use it.  */
  if (gimple_vuse (stmt) != (*slot)->vop ())
    {
      tree vuse1 = (*slot)->vop ();
      tree vuse2 = gimple_vuse (stmt);
      /* If we have a load of a register and a candidate in the
	 hash with vuse1 then try to reach its stmt by walking
	 up the virtual use-def chain using walk_non_aliased_vuses.
	 But don't do this when removing expressions from the hash.  */
      ao_ref ref;
      unsigned limit = PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS);
      if (!(vuse1 && vuse2
	    && gimple_assign_single_p (stmt)
	    && TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME
	    && (ao_ref_init (&ref, gimple_assign_rhs1 (stmt)),
		ref.base_alias_set = ref.ref_alias_set = tbaa_p ? -1 : 0, true)
	    && walk_non_aliased_vuses (&ref, vuse2, vuse_eq, NULL, NULL,
				       limit, vuse1) != NULL))
	{
	  if (insert)
	    {
	      class expr_hash_elt *element2 = new expr_hash_elt (element);

	      /* Insert the expr into the hash by replacing the current
		 entry and recording the value to restore in the
		 avail_exprs_stack.  */
	      record_expr (element2, *slot, '2');
	      *slot = element2;
	    }
	  return NULL_TREE;
	}
    }

  /* Extract the LHS of the assignment so that it can be used as the current
     definition of another variable.  */
  lhs = (*slot)->lhs ();

  /* Valueize the result.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      tree tem = SSA_NAME_VALUE (lhs);
      if (tem)
	lhs = tem;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "FIND: ");
      print_generic_expr (dump_file, lhs);
      fprintf (dump_file, "\n");
    }

  return lhs;
}
/* Enter condition equivalence P into the hash table.

   This indicates that a conditional expression has a known
   boolean value.  */

void
avail_exprs_stack::record_cond (cond_equivalence *p)
{
  class expr_hash_elt *element = new expr_hash_elt (&p->cond, p->value);
  expr_hash_elt **slot;

  slot = m_avail_exprs->find_slot_with_hash (element, element->hash (),
					     INSERT);
  if (*slot == NULL)
    {
      *slot = element;
      record_expr (element, NULL, '1');
    }
  else
    delete element;
}
/* Generate a hash value for a pair of expressions.  This can be used
   iteratively by passing a previous result in HSTATE.

   The same hash value is always returned for a given pair of expressions,
   regardless of the order in which they are presented.  This is useful in
   hashing the operands of commutative functions.  */
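/* For example, "a_1 + b_2" and "b_2 + a_1" receive the same hash:
   each operand is hashed into its own accumulator and the two partial
   hashes are then combined symmetrically by add_commutative.  */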
namespace inchash
{

static void
add_expr_commutative (const_tree t1, const_tree t2, hash &hstate)
{
  hash one, two;

  inchash::add_expr (t1, one);
  inchash::add_expr (t2, two);
  hstate.add_commutative (one, two);
}
/* Compute a hash value for a hashable_expr value EXPR and a
   previously accumulated hash value VAL.  If two hashable_expr
   values compare equal with hashable_expr_equal_p, they must
   hash to the same value, given an identical value of VAL.
   The logic is intended to follow inchash::add_expr in tree.c.  */
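/* Note the pairing with hashable_expr_equal_p below: since that
   predicate treats "a + b" and "b + a" as equal, the binary and
   ternary cases here must use the commutative variant above.  */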
static void
add_hashable_expr (const struct hashable_expr *expr, hash &hstate)
{
  switch (expr->kind)
    {
    case EXPR_SINGLE:
      inchash::add_expr (expr->ops.single.rhs, hstate);
      break;

    case EXPR_UNARY:
      hstate.add_object (expr->ops.unary.op);

      /* Make sure to include signedness in the hash computation.
	 Don't hash the type, that can lead to having nodes which
	 compare equal according to operand_equal_p, but which
	 have different hash codes.  */
      if (CONVERT_EXPR_CODE_P (expr->ops.unary.op)
	  || expr->ops.unary.op == NON_LVALUE_EXPR)
	hstate.add_int (TYPE_UNSIGNED (expr->type));

      inchash::add_expr (expr->ops.unary.opnd, hstate);
      break;

    case EXPR_BINARY:
      hstate.add_object (expr->ops.binary.op);
      if (commutative_tree_code (expr->ops.binary.op))
	inchash::add_expr_commutative (expr->ops.binary.opnd0,
				       expr->ops.binary.opnd1, hstate);
      else
	{
	  inchash::add_expr (expr->ops.binary.opnd0, hstate);
	  inchash::add_expr (expr->ops.binary.opnd1, hstate);
	}
      break;

    case EXPR_TERNARY:
      hstate.add_object (expr->ops.ternary.op);
      if (commutative_ternary_tree_code (expr->ops.ternary.op))
	inchash::add_expr_commutative (expr->ops.ternary.opnd0,
				       expr->ops.ternary.opnd1, hstate);
      else
	{
	  inchash::add_expr (expr->ops.ternary.opnd0, hstate);
	  inchash::add_expr (expr->ops.ternary.opnd1, hstate);
	}
      inchash::add_expr (expr->ops.ternary.opnd2, hstate);
      break;

    case EXPR_CALL:
      {
	size_t i;
	enum tree_code code = CALL_EXPR;
	gcall *fn_from;

	hstate.add_object (code);
	fn_from = expr->ops.call.fn_from;
	if (gimple_call_internal_p (fn_from))
	  hstate.merge_hash ((hashval_t) gimple_call_internal_fn (fn_from));
	else
	  inchash::add_expr (gimple_call_fn (fn_from), hstate);
	for (i = 0; i < expr->ops.call.nargs; i++)
	  inchash::add_expr (expr->ops.call.args[i], hstate);
      }
      break;

    case EXPR_PHI:
      {
	size_t i;

	for (i = 0; i < expr->ops.phi.nargs; i++)
	  inchash::add_expr (expr->ops.phi.args[i], hstate);
      }
      break;

    default:
      gcc_unreachable ();
    }
}

}
/* Hashing and equality functions.  We compute a value number for expressions
   using the code of the expression and the SSA numbers of its operands.  */
static hashval_t
avail_expr_hash (class expr_hash_elt *p)
{
  const struct hashable_expr *expr = p->expr ();
  inchash::hash hstate;

  if (expr->kind == EXPR_SINGLE)
    {
      /* T could potentially be a switch index or a goto dest.  */
      tree t = expr->ops.single.rhs;
      if (TREE_CODE (t) == MEM_REF || handled_component_p (t))
	{
	  /* Make equivalent statements of both these kinds hash together.
	     Dealing with both MEM_REF and ARRAY_REF allows us not to care
	     about equivalence with other statements not considered here.  */
	  bool reverse;
	  poly_int64 offset, size, max_size;
	  tree base = get_ref_base_and_extent (t, &offset, &size, &max_size,
					       &reverse);
	  /* Strictly, we could try to normalize variable-sized accesses too,
	     but here we just deal with the common case.  */
	  if (known_size_p (max_size)
	      && known_eq (size, max_size))
	    {
	      enum tree_code code = MEM_REF;
	      hstate.add_object (code);
	      inchash::add_expr (base, hstate,
				 TREE_CODE (base) == MEM_REF
				 ? OEP_ADDRESS_OF : 0);
	      hstate.add_object (offset);
	      hstate.add_object (size);
	      return hstate.end ();
	    }
	}
    }

  inchash::add_hashable_expr (expr, hstate);

  return hstate.end ();
}
/* Compares trees T0 and T1 to see if they are MEM_REF or ARRAY_REFs equivalent
   to each other.  (That is, they return the value of the same bit of memory.)

   Return TRUE if the two are so equivalent; FALSE if not (which could still
   mean the two are equivalent by other means).  */
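/* For example, given "int a[4];", the references a[1] and
   MEM[(int *)&a + 4B] read the same four bytes: both decompose to
   base A, offset 32 bits and size 32 bits, so they are considered
   equivalent here.  */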
static bool
equal_mem_array_ref_p (tree t0, tree t1)
{
  if (TREE_CODE (t0) != MEM_REF && ! handled_component_p (t0))
    return false;
  if (TREE_CODE (t1) != MEM_REF && ! handled_component_p (t1))
    return false;

  if (!types_compatible_p (TREE_TYPE (t0), TREE_TYPE (t1)))
    return false;

  bool rev0;
  poly_int64 off0, sz0, max0;
  tree base0 = get_ref_base_and_extent (t0, &off0, &sz0, &max0, &rev0);
  if (!known_size_p (max0)
      || maybe_ne (sz0, max0))
    return false;

  bool rev1;
  poly_int64 off1, sz1, max1;
  tree base1 = get_ref_base_and_extent (t1, &off1, &sz1, &max1, &rev1);
  if (!known_size_p (max1)
      || maybe_ne (sz1, max1))
    return false;

  if (rev0 != rev1 || maybe_ne (sz0, sz1) || maybe_ne (off0, off1))
    return false;

  return operand_equal_p (base0, base1,
			  (TREE_CODE (base0) == MEM_REF
			   || TREE_CODE (base0) == TARGET_MEM_REF)
			  && (TREE_CODE (base1) == MEM_REF
			      || TREE_CODE (base1) == TARGET_MEM_REF)
			  ? OEP_ADDRESS_OF : 0);
}
/* Compare two hashable_expr structures for equivalence.  They are
   considered equivalent when the expressions they denote must
   necessarily be equal.  The logic is intended to follow that of
   operand_equal_p in fold-const.c.  */
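/* For example, "(int) x_1" and "(unsigned int) x_1" are kept distinct
   by the signedness check in the unary case, while "a_1 + b_2" and
   "b_2 + a_1" are treated as equal by the commutative binary case.  */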
static bool
hashable_expr_equal_p (const struct hashable_expr *expr0,
		       const struct hashable_expr *expr1)
{
  tree type0 = expr0->type;
  tree type1 = expr1->type;

  /* If either type is NULL, there is nothing to check.  */
  if ((type0 == NULL_TREE) ^ (type1 == NULL_TREE))
    return false;

  /* If both types don't have the same signedness, precision, and mode,
     then we can't consider them equal.  */
  if (type0 != type1
      && (TREE_CODE (type0) == ERROR_MARK
	  || TREE_CODE (type1) == ERROR_MARK
	  || TYPE_UNSIGNED (type0) != TYPE_UNSIGNED (type1)
	  || TYPE_PRECISION (type0) != TYPE_PRECISION (type1)
	  || TYPE_MODE (type0) != TYPE_MODE (type1)))
    return false;

  if (expr0->kind != expr1->kind)
    return false;

  switch (expr0->kind)
    {
    case EXPR_SINGLE:
      return equal_mem_array_ref_p (expr0->ops.single.rhs,
				    expr1->ops.single.rhs)
	     || operand_equal_p (expr0->ops.single.rhs,
				 expr1->ops.single.rhs, 0);

    case EXPR_UNARY:
      if (expr0->ops.unary.op != expr1->ops.unary.op)
	return false;

      if ((CONVERT_EXPR_CODE_P (expr0->ops.unary.op)
	   || expr0->ops.unary.op == NON_LVALUE_EXPR)
	  && TYPE_UNSIGNED (expr0->type) != TYPE_UNSIGNED (expr1->type))
	return false;

      return operand_equal_p (expr0->ops.unary.opnd,
			      expr1->ops.unary.opnd, 0);

    case EXPR_BINARY:
      if (expr0->ops.binary.op != expr1->ops.binary.op)
	return false;

      if (operand_equal_p (expr0->ops.binary.opnd0,
			   expr1->ops.binary.opnd0, 0)
	  && operand_equal_p (expr0->ops.binary.opnd1,
			      expr1->ops.binary.opnd1, 0))
	return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (expr0->ops.binary.op)
	      && operand_equal_p (expr0->ops.binary.opnd0,
				  expr1->ops.binary.opnd1, 0)
	      && operand_equal_p (expr0->ops.binary.opnd1,
				  expr1->ops.binary.opnd0, 0));

    case EXPR_TERNARY:
      if (expr0->ops.ternary.op != expr1->ops.ternary.op
	  || !operand_equal_p (expr0->ops.ternary.opnd2,
			       expr1->ops.ternary.opnd2, 0))
	return false;

      /* BIT_INSERT_EXPR has an implicit operand as the type precision
	 of op1.  Need to check to make sure they are the same.  */
      if (expr0->ops.ternary.op == BIT_INSERT_EXPR
	  && TREE_CODE (expr0->ops.ternary.opnd1) == INTEGER_CST
	  && TREE_CODE (expr1->ops.ternary.opnd1) == INTEGER_CST
	  && TYPE_PRECISION (TREE_TYPE (expr0->ops.ternary.opnd1))
	     != TYPE_PRECISION (TREE_TYPE (expr1->ops.ternary.opnd1)))
	return false;

      if (operand_equal_p (expr0->ops.ternary.opnd0,
			   expr1->ops.ternary.opnd0, 0)
	  && operand_equal_p (expr0->ops.ternary.opnd1,
			      expr1->ops.ternary.opnd1, 0))
	return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_ternary_tree_code (expr0->ops.ternary.op)
	      && operand_equal_p (expr0->ops.ternary.opnd0,
				  expr1->ops.ternary.opnd1, 0)
	      && operand_equal_p (expr0->ops.ternary.opnd1,
				  expr1->ops.ternary.opnd0, 0));

    case EXPR_CALL:
      {
	size_t i;

	/* If the calls are to different functions, then they
	   clearly cannot be equal.  */
	if (!gimple_call_same_target_p (expr0->ops.call.fn_from,
					expr1->ops.call.fn_from))
	  return false;

	if (! expr0->ops.call.pure)
	  return false;

	if (expr0->ops.call.nargs != expr1->ops.call.nargs)
	  return false;

	for (i = 0; i < expr0->ops.call.nargs; i++)
	  if (! operand_equal_p (expr0->ops.call.args[i],
				 expr1->ops.call.args[i], 0))
	    return false;

	if (stmt_could_throw_p (cfun, expr0->ops.call.fn_from))
	  {
	    int lp0 = lookup_stmt_eh_lp (expr0->ops.call.fn_from);
	    int lp1 = lookup_stmt_eh_lp (expr1->ops.call.fn_from);
	    if ((lp0 > 0 || lp1 > 0) && lp0 != lp1)
	      return false;
	  }

	return true;
      }

    case EXPR_PHI:
      {
	size_t i;

	if (expr0->ops.phi.nargs != expr1->ops.phi.nargs)
	  return false;

	for (i = 0; i < expr0->ops.phi.nargs; i++)
	  if (! operand_equal_p (expr0->ops.phi.args[i],
				 expr1->ops.phi.args[i], 0))
	    return false;

	return true;
      }

    default:
      gcc_unreachable ();
    }
}
/* Given a statement STMT, construct a hash table element.  */

expr_hash_elt::expr_hash_elt (gimple *stmt, tree orig_lhs)
{
  enum gimple_code code = gimple_code (stmt);
  struct hashable_expr *expr = this->expr ();

  if (code == GIMPLE_ASSIGN)
    {
      enum tree_code subcode = gimple_assign_rhs_code (stmt);

      switch (get_gimple_rhs_class (subcode))
	{
	case GIMPLE_SINGLE_RHS:
	  expr->kind = EXPR_SINGLE;
	  expr->type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	  expr->ops.single.rhs = gimple_assign_rhs1 (stmt);
	  break;
	case GIMPLE_UNARY_RHS:
	  expr->kind = EXPR_UNARY;
	  expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
	  if (CONVERT_EXPR_CODE_P (subcode))
	    subcode = NOP_EXPR;
	  expr->ops.unary.op = subcode;
	  expr->ops.unary.opnd = gimple_assign_rhs1 (stmt);
	  break;
	case GIMPLE_BINARY_RHS:
	  expr->kind = EXPR_BINARY;
	  expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
	  expr->ops.binary.op = subcode;
	  expr->ops.binary.opnd0 = gimple_assign_rhs1 (stmt);
	  expr->ops.binary.opnd1 = gimple_assign_rhs2 (stmt);
	  break;
	case GIMPLE_TERNARY_RHS:
	  expr->kind = EXPR_TERNARY;
	  expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
	  expr->ops.ternary.op = subcode;
	  expr->ops.ternary.opnd0 = gimple_assign_rhs1 (stmt);
	  expr->ops.ternary.opnd1 = gimple_assign_rhs2 (stmt);
	  expr->ops.ternary.opnd2 = gimple_assign_rhs3 (stmt);
	  break;
	default:
	  gcc_unreachable ();
	}
    }
  else if (code == GIMPLE_COND)
    {
      expr->type = boolean_type_node;
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = gimple_cond_code (stmt);
      expr->ops.binary.opnd0 = gimple_cond_lhs (stmt);
      expr->ops.binary.opnd1 = gimple_cond_rhs (stmt);
    }
  else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      size_t nargs = gimple_call_num_args (call_stmt);
      size_t i;

      gcc_assert (gimple_call_lhs (call_stmt));

      expr->type = TREE_TYPE (gimple_call_lhs (call_stmt));
      expr->kind = EXPR_CALL;
      expr->ops.call.fn_from = call_stmt;

      if (gimple_call_flags (call_stmt) & (ECF_CONST | ECF_PURE))
	expr->ops.call.pure = true;
      else
	expr->ops.call.pure = false;

      expr->ops.call.nargs = nargs;
      expr->ops.call.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
	expr->ops.call.args[i] = gimple_call_arg (call_stmt, i);
    }
  else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
    {
      expr->type = TREE_TYPE (gimple_switch_index (swtch_stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_switch_index (swtch_stmt);
    }
  else if (code == GIMPLE_GOTO)
    {
      expr->type = TREE_TYPE (gimple_goto_dest (stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_goto_dest (stmt);
    }
  else if (code == GIMPLE_PHI)
    {
      size_t nargs = gimple_phi_num_args (stmt);
      size_t i;

      expr->type = TREE_TYPE (gimple_phi_result (stmt));
      expr->kind = EXPR_PHI;
      expr->ops.phi.nargs = nargs;
      expr->ops.phi.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
	expr->ops.phi.args[i] = gimple_phi_arg_def (stmt, i);
    }
  else
    gcc_unreachable ();

  m_lhs = orig_lhs;
  m_vop = gimple_vuse (stmt);
  m_hash = avail_expr_hash (this);
  m_stamp = this;
}
/* Given a hashable_expr expression ORIG and an ORIG_LHS,
   construct a hash table element.  */

expr_hash_elt::expr_hash_elt (struct hashable_expr *orig, tree orig_lhs)
{
  m_expr = *orig;
  m_lhs = orig_lhs;
  m_vop = NULL_TREE;
  m_hash = avail_expr_hash (this);
  m_stamp = this;
}
/* Copy constructor for a hash table element.  */

expr_hash_elt::expr_hash_elt (class expr_hash_elt &old_elt)
{
  m_expr = old_elt.m_expr;
  m_lhs = old_elt.m_lhs;
  m_vop = old_elt.m_vop;
  m_hash = old_elt.m_hash;
  m_stamp = this;

  /* Now deep copy the malloc'd space for CALL and PHI args.  */
  if (old_elt.m_expr.kind == EXPR_CALL)
    {
      size_t nargs = old_elt.m_expr.ops.call.nargs;
      size_t i;

      m_expr.ops.call.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
	m_expr.ops.call.args[i] = old_elt.m_expr.ops.call.args[i];
    }
  else if (old_elt.m_expr.kind == EXPR_PHI)
    {
      size_t nargs = old_elt.m_expr.ops.phi.nargs;
      size_t i;

      m_expr.ops.phi.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
	m_expr.ops.phi.args[i] = old_elt.m_expr.ops.phi.args[i];
    }
}
/* Calls and PHIs have a variable number of arguments that are allocated
   on the heap.  Thus we have to have a special dtor to release them.  */

expr_hash_elt::~expr_hash_elt ()
{
  if (m_expr.kind == EXPR_CALL)
    free (m_expr.ops.call.args);
  else if (m_expr.kind == EXPR_PHI)
    free (m_expr.ops.phi.args);
}
/* Print a diagnostic dump of an expression hash table entry.  */

void
expr_hash_elt::print (FILE *stream)
{
  fprintf (stream, "STMT ");

  if (m_lhs)
    {
      print_generic_expr (stream, m_lhs);
      fprintf (stream, " = ");
    }

  switch (m_expr.kind)
    {
    case EXPR_SINGLE:
      print_generic_expr (stream, m_expr.ops.single.rhs);
      break;

    case EXPR_UNARY:
      fprintf (stream, "%s ", get_tree_code_name (m_expr.ops.unary.op));
      print_generic_expr (stream, m_expr.ops.unary.opnd);
      break;

    case EXPR_BINARY:
      print_generic_expr (stream, m_expr.ops.binary.opnd0);
      fprintf (stream, " %s ", get_tree_code_name (m_expr.ops.binary.op));
      print_generic_expr (stream, m_expr.ops.binary.opnd1);
      break;

    case EXPR_TERNARY:
      fprintf (stream, " %s <", get_tree_code_name (m_expr.ops.ternary.op));
      print_generic_expr (stream, m_expr.ops.ternary.opnd0);
      fputs (", ", stream);
      print_generic_expr (stream, m_expr.ops.ternary.opnd1);
      fputs (", ", stream);
      print_generic_expr (stream, m_expr.ops.ternary.opnd2);
      fputs (">", stream);
      break;

    case EXPR_CALL:
      {
	size_t i;
	size_t nargs = m_expr.ops.call.nargs;
	gcall *fn_from;

	fn_from = m_expr.ops.call.fn_from;
	if (gimple_call_internal_p (fn_from))
	  fprintf (stream, ".%s",
		   internal_fn_name (gimple_call_internal_fn (fn_from)));
	else
	  print_generic_expr (stream, gimple_call_fn (fn_from));
	fprintf (stream, " (");
	for (i = 0; i < nargs; i++)
	  {
	    print_generic_expr (stream, m_expr.ops.call.args[i]);
	    if (i + 1 < nargs)
	      fprintf (stream, ", ");
	  }
	fprintf (stream, ")");
      }
      break;

    case EXPR_PHI:
      {
	size_t i;
	size_t nargs = m_expr.ops.phi.nargs;

	fprintf (stream, "PHI <");
	for (i = 0; i < nargs; i++)
	  {
	    print_generic_expr (stream, m_expr.ops.phi.args[i]);
	    if (i + 1 < nargs)
	      fprintf (stream, ", ");
	  }
	fprintf (stream, ">");
      }
      break;
    }

  if (m_vop)
    {
      fprintf (stream, " with ");
      print_generic_expr (stream, m_vop);
    }

  fprintf (stream, "\n");
}
/* Pop entries off the stack until we hit the NULL marker.
   For each entry popped, use the SRC/DEST pair to restore
   SRC to its prior value.  */
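/* The stack holds pairs pushed by record_const_or_copy_raw below,
   PREV_VALUE first and DEST second, with a NULL pushed as the scope
   marker.  Popping DEST and then PREV_VALUE therefore restores each
   SSA name to the value it had when the scope was opened.  */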
void
const_and_copies::pop_to_marker (void)
{
  while (m_stack.length () > 0)
    {
      tree prev_value, dest;

      dest = m_stack.pop ();

      /* A NULL value indicates we should stop unwinding, otherwise
	 pop off the next entry as they're recorded in pairs.  */
      if (dest == NULL)
	break;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "<<<< COPY ");
	  print_generic_expr (dump_file, dest);
	  fprintf (dump_file, " = ");
	  print_generic_expr (dump_file, SSA_NAME_VALUE (dest));
	  fprintf (dump_file, "\n");
	}

      prev_value = m_stack.pop ();
      set_ssa_name_value (dest, prev_value);
    }
}
/* Record that X has the value Y and that X's previous value is PREV_X.

   This variant does not follow the value chain for Y.  */

void
const_and_copies::record_const_or_copy_raw (tree x, tree y, tree prev_x)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "0>>> COPY ");
      print_generic_expr (dump_file, x);
      fprintf (dump_file, " = ");
      print_generic_expr (dump_file, y);
      fprintf (dump_file, "\n");
    }

  set_ssa_name_value (x, y);
  m_stack.reserve (2);
  m_stack.quick_push (prev_x);
  m_stack.quick_push (x);
}
/* Record that X has the value Y.  */

void
const_and_copies::record_const_or_copy (tree x, tree y)
{
  record_const_or_copy (x, y, SSA_NAME_VALUE (x));
}
/* Record that X has the value Y and that X's previous value is PREV_X.

   This variant follows Y's value chain.  */

void
const_and_copies::record_const_or_copy (tree x, tree y, tree prev_x)
{
  /* Y may be NULL if we are invalidating entries in the table.  */
  if (y && TREE_CODE (y) == SSA_NAME)
    {
      tree tmp = SSA_NAME_VALUE (y);
      if (tmp)
	y = tmp;
    }

  record_const_or_copy_raw (x, y, prev_x);
}
bool
expr_elt_hasher::equal (const value_type &p1, const compare_type &p2)
{
  const struct hashable_expr *expr1 = p1->expr ();
  const struct expr_hash_elt *stamp1 = p1->stamp ();
  const struct hashable_expr *expr2 = p2->expr ();
  const struct expr_hash_elt *stamp2 = p2->stamp ();

  /* This case should apply only when removing entries from the table.  */
  if (stamp1 == stamp2)
    return true;

  if (p1->hash () != p2->hash ())
    return false;

  /* In case of a collision, both RHS have to be identical and have the
     same VUSE operands.  */
  if (hashable_expr_equal_p (expr1, expr2)
      && types_compatible_p (expr1->type, expr2->type))
    return true;

  return false;
}
/* Given a conditional expression COND as a tree, initialize
   a hashable_expr expression EXPR.  The conditional must be a
   comparison or logical negation.  A constant or a variable is
   not permitted.  */

static void
initialize_expr_from_cond (tree cond, struct hashable_expr *expr)
{
  expr->type = boolean_type_node;

  if (COMPARISON_CLASS_P (cond))
    {
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = TREE_CODE (cond);
      expr->ops.binary.opnd0 = TREE_OPERAND (cond, 0);
      expr->ops.binary.opnd1 = TREE_OPERAND (cond, 1);
    }
  else if (TREE_CODE (cond) == TRUTH_NOT_EXPR)
    {
      expr->kind = EXPR_UNARY;
      expr->ops.unary.op = TRUTH_NOT_EXPR;
      expr->ops.unary.opnd = TREE_OPERAND (cond, 0);
    }
  else
    gcc_unreachable ();
}
/* Build a cond_equivalence record indicating that the comparison
   CODE holds between operands OP0 and OP1 and push it to **P.  */

static void
build_and_record_new_cond (enum tree_code code,
			   tree op0, tree op1,
			   vec<cond_equivalence> *p,
			   bool val = true)
{
  cond_equivalence c;
  struct hashable_expr *cond = &c.cond;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  cond->type = boolean_type_node;
  cond->kind = EXPR_BINARY;
  cond->ops.binary.op = code;
  cond->ops.binary.opnd0 = op0;
  cond->ops.binary.opnd1 = op1;

  c.value = val ? boolean_true_node : boolean_false_node;
  p->safe_push (c);
}
/* Record that COND is true and INVERTED is false into the edge information
   structure.  Also record that any conditions dominated by COND are true
   as well.

   For example, if a < b is true, then a <= b must also be true.  */

void
record_conditions (vec<cond_equivalence> *p, tree cond, tree inverted)
{
  tree op0, op1;
  cond_equivalence c;

  if (!COMPARISON_CLASS_P (cond))
    return;

  op0 = TREE_OPERAND (cond, 0);
  op1 = TREE_OPERAND (cond, 1);

  switch (TREE_CODE (cond))
    {
    case LT_EXPR:
    case GT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
	{
	  build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
	  build_and_record_new_cond (LTGT_EXPR, op0, op1, p);
	}

      build_and_record_new_cond ((TREE_CODE (cond) == LT_EXPR
				  ? LE_EXPR : GE_EXPR),
				 op0, op1, p);
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      build_and_record_new_cond (EQ_EXPR, op0, op1, p, false);
      break;

    case GE_EXPR:
    case LE_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
	{
	  build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
	}
      break;

    case EQ_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
	{
	  build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
	}
      build_and_record_new_cond (LE_EXPR, op0, op1, p);
      build_and_record_new_cond (GE_EXPR, op0, op1, p);
      break;

    case UNORDERED_EXPR:
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNLE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNGE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNEQ_EXPR, op0, op1, p);
      build_and_record_new_cond (UNLT_EXPR, op0, op1, p);
      build_and_record_new_cond (UNGT_EXPR, op0, op1, p);
      break;

    case UNLT_EXPR:
    case UNGT_EXPR:
      build_and_record_new_cond ((TREE_CODE (cond) == UNLT_EXPR
				  ? UNLE_EXPR : UNGE_EXPR),
				 op0, op1, p);
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      break;

    case UNEQ_EXPR:
      build_and_record_new_cond (UNLE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNGE_EXPR, op0, op1, p);
      break;

    case LTGT_EXPR:
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
      break;

    default:
      break;
    }

  /* Now store the original true and false conditions into the first
     two slots.  */
  initialize_expr_from_cond (cond, &c.cond);
  c.value = boolean_true_node;
  p->safe_push (c);

  /* It is possible for INVERTED to be the negation of a comparison,
     and not a valid RHS or GIMPLE_COND condition.  This happens because
     invert_truthvalue may return such an expression when asked to invert
     a floating-point comparison.  These comparisons are not assumed to
     obey the trichotomy law.  */
  initialize_expr_from_cond (inverted, &c.cond);
  c.value = boolean_false_node;
  p->safe_push (c);
}