/* Scoped hash tables and related support for SSA dominator optimizations.
   Copyright (C) 2013-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
22 #include "coretypes.h"
24 #include "basic-block.h"
27 #include "tree-pass.h"
28 #include "tree-pretty-print.h"
29 #include "tree-ssa-scopedtables.h"
30 #include "tree-ssa-threadedge.h"
31 #include "stor-layout.h"
32 #include "fold-const.h"
34 #include "internal-fn.h"
/* Forward declaration: structural equality on hashable_expr values,
   defined later in this file and used by the hash table callbacks.  */
static bool hashable_expr_equal_p (const struct hashable_expr *,
				   const struct hashable_expr *);
42 /* Initialize local stacks for this optimizer and record equivalences
43 upon entry to BB. Equivalences can come from the edge traversed to
44 reach BB or they may come from PHI nodes at the start of BB. */
46 /* Pop items off the unwinding stack, removing each from the hash table
47 until a marker is encountered. */
50 avail_exprs_stack::pop_to_marker ()
52 /* Remove all the expressions made available in this block. */
53 while (m_stack
.length () > 0)
55 std::pair
<expr_hash_elt_t
, expr_hash_elt_t
> victim
= m_stack
.pop ();
58 if (victim
.first
== NULL
)
61 /* This must precede the actual removal from the hash table,
62 as ELEMENT and the table entry may share a call argument
63 vector which will be freed during removal. */
64 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
66 fprintf (dump_file
, "<<<< ");
67 victim
.first
->print (dump_file
);
70 slot
= m_avail_exprs
->find_slot (victim
.first
, NO_INSERT
);
71 gcc_assert (slot
&& *slot
== victim
.first
);
72 if (victim
.second
!= NULL
)
75 *slot
= victim
.second
;
78 m_avail_exprs
->clear_slot (slot
);
82 /* Add <ELT1,ELT2> to the unwinding stack so they can be later removed
83 from the hash table. */
86 avail_exprs_stack::record_expr (class expr_hash_elt
*elt1
,
87 class expr_hash_elt
*elt2
,
90 if (elt1
&& dump_file
&& (dump_flags
& TDF_DETAILS
))
92 fprintf (dump_file
, "%c>>> ", type
);
93 elt1
->print (dump_file
);
96 m_stack
.safe_push (std::pair
<expr_hash_elt_t
, expr_hash_elt_t
> (elt1
, elt2
));
99 /* Helper for walk_non_aliased_vuses. Determine if we arrived at
100 the desired memory state. */
103 vuse_eq (ao_ref
*, tree vuse1
, unsigned int cnt
, void *data
)
105 tree vuse2
= (tree
) data
;
109 /* This bounds the stmt walks we perform on reference lookups
110 to O(1) instead of O(N) where N is the number of dominating
111 stores leading to a candidate. We re-use the SCCVN param
112 for this as it is basically the same complexity. */
113 if (cnt
> (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS
))
119 /* We looked for STMT in the hash table, but did not find it.
121 If STMT is an assignment from a binary operator, we may know something
122 about the operands relationship to each other which would allow
123 us to derive a constant value for the RHS of STMT. */
126 avail_exprs_stack::simplify_binary_operation (gimple
*stmt
,
127 class expr_hash_elt element
)
129 if (is_gimple_assign (stmt
))
131 struct hashable_expr
*expr
= element
.expr ();
132 if (expr
->kind
== EXPR_BINARY
)
134 enum tree_code code
= expr
->ops
.binary
.op
;
138 /* For these cases, if we know the operands
139 are equal, then we know the result. */
156 /* Build a simple equality expr and query the hash table
158 struct hashable_expr expr
;
159 expr
.type
= boolean_type_node
;
160 expr
.kind
= EXPR_BINARY
;
161 expr
.ops
.binary
.op
= EQ_EXPR
;
162 expr
.ops
.binary
.opnd0
= gimple_assign_rhs1 (stmt
);
163 expr
.ops
.binary
.opnd1
= gimple_assign_rhs2 (stmt
);
164 class expr_hash_elt
element2 (&expr
, NULL_TREE
);
166 = m_avail_exprs
->find_slot (&element2
, NO_INSERT
);
167 tree result_type
= TREE_TYPE (gimple_assign_lhs (stmt
));
169 /* If the query was successful and returned a nonzero
170 result, then we know that the operands of the binary
171 expression are the same. In many cases this allows
172 us to compute a constant result of the expression
173 at compile time, even if we do not know the exact
174 values of the operands. */
175 if (slot
&& *slot
&& integer_onep ((*slot
)->lhs ()))
183 return gimple_assign_rhs1 (stmt
);
191 return build_zero_cst (result_type
);
198 return build_one_cst (result_type
);
215 /* Search for an existing instance of STMT in the AVAIL_EXPRS_STACK table.
216 If found, return its LHS. Otherwise insert STMT in the table and
219 Also, when an expression is first inserted in the table, it is also
220 is also added to AVAIL_EXPRS_STACK, so that it can be removed when
221 we finish processing this block and its children. */
224 avail_exprs_stack::lookup_avail_expr (gimple
*stmt
, bool insert
, bool tbaa_p
)
226 expr_hash_elt
**slot
;
229 /* Get LHS of phi, assignment, or call; else NULL_TREE. */
230 if (gimple_code (stmt
) == GIMPLE_PHI
)
231 lhs
= gimple_phi_result (stmt
);
233 lhs
= gimple_get_lhs (stmt
);
235 class expr_hash_elt
element (stmt
, lhs
);
237 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
239 fprintf (dump_file
, "LKUP ");
240 element
.print (dump_file
);
243 /* Don't bother remembering constant assignments and copy operations.
244 Constants and copy operations are handled by the constant/copy propagator
246 if (element
.expr()->kind
== EXPR_SINGLE
247 && (TREE_CODE (element
.expr()->ops
.single
.rhs
) == SSA_NAME
248 || is_gimple_min_invariant (element
.expr()->ops
.single
.rhs
)))
251 /* Finally try to find the expression in the main expression hash table. */
252 slot
= m_avail_exprs
->find_slot (&element
, (insert
? INSERT
: NO_INSERT
));
257 else if (*slot
== NULL
)
259 /* If we did not find the expression in the hash table, we may still
260 be able to produce a result for some expressions. */
261 tree alt
= avail_exprs_stack::simplify_binary_operation (stmt
, element
);
265 class expr_hash_elt
*element2
= new expr_hash_elt (element
);
268 record_expr (element2
, NULL
, '2');
272 /* If we found a redundant memory operation do an alias walk to
273 check if we can re-use it. */
274 if (gimple_vuse (stmt
) != (*slot
)->vop ())
276 tree vuse1
= (*slot
)->vop ();
277 tree vuse2
= gimple_vuse (stmt
);
278 /* If we have a load of a register and a candidate in the
279 hash with vuse1 then try to reach its stmt by walking
280 up the virtual use-def chain using walk_non_aliased_vuses.
281 But don't do this when removing expressions from the hash. */
284 && gimple_assign_single_p (stmt
)
285 && TREE_CODE (gimple_assign_lhs (stmt
)) == SSA_NAME
286 && (ao_ref_init (&ref
, gimple_assign_rhs1 (stmt
)),
287 ref
.base_alias_set
= ref
.ref_alias_set
= tbaa_p
? -1 : 0, true)
288 && walk_non_aliased_vuses (&ref
, vuse2
,
289 vuse_eq
, NULL
, NULL
, vuse1
) != NULL
))
293 class expr_hash_elt
*element2
= new expr_hash_elt (element
);
295 /* Insert the expr into the hash by replacing the current
296 entry and recording the value to restore in the
297 avail_exprs_stack. */
298 record_expr (element2
, *slot
, '2');
305 /* Extract the LHS of the assignment so that it can be used as the current
306 definition of another variable. */
307 lhs
= (*slot
)->lhs ();
309 /* Valueize the result. */
310 if (TREE_CODE (lhs
) == SSA_NAME
)
312 tree tem
= SSA_NAME_VALUE (lhs
);
317 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
319 fprintf (dump_file
, "FIND: ");
320 print_generic_expr (dump_file
, lhs
);
321 fprintf (dump_file
, "\n");
327 /* Enter condition equivalence P into the hash table.
329 This indicates that a conditional expression has a known
333 avail_exprs_stack::record_cond (cond_equivalence
*p
)
335 class expr_hash_elt
*element
= new expr_hash_elt (&p
->cond
, p
->value
);
336 expr_hash_elt
**slot
;
338 slot
= m_avail_exprs
->find_slot_with_hash (element
, element
->hash (), INSERT
);
342 record_expr (element
, NULL
, '1');
348 /* Generate a hash value for a pair of expressions. This can be used
349 iteratively by passing a previous result in HSTATE.
351 The same hash value is always returned for a given pair of expressions,
352 regardless of the order in which they are presented. This is useful in
353 hashing the operands of commutative functions. */
359 add_expr_commutative (const_tree t1
, const_tree t2
, hash
&hstate
)
363 inchash::add_expr (t1
, one
);
364 inchash::add_expr (t2
, two
);
365 hstate
.add_commutative (one
, two
);
368 /* Compute a hash value for a hashable_expr value EXPR and a
369 previously accumulated hash value VAL. If two hashable_expr
370 values compare equal with hashable_expr_equal_p, they must
371 hash to the same value, given an identical value of VAL.
372 The logic is intended to follow inchash::add_expr in tree.c. */
375 add_hashable_expr (const struct hashable_expr
*expr
, hash
&hstate
)
380 inchash::add_expr (expr
->ops
.single
.rhs
, hstate
);
384 hstate
.add_object (expr
->ops
.unary
.op
);
386 /* Make sure to include signedness in the hash computation.
387 Don't hash the type, that can lead to having nodes which
388 compare equal according to operand_equal_p, but which
389 have different hash codes. */
390 if (CONVERT_EXPR_CODE_P (expr
->ops
.unary
.op
)
391 || expr
->ops
.unary
.op
== NON_LVALUE_EXPR
)
392 hstate
.add_int (TYPE_UNSIGNED (expr
->type
));
394 inchash::add_expr (expr
->ops
.unary
.opnd
, hstate
);
398 hstate
.add_object (expr
->ops
.binary
.op
);
399 if (commutative_tree_code (expr
->ops
.binary
.op
))
400 inchash::add_expr_commutative (expr
->ops
.binary
.opnd0
,
401 expr
->ops
.binary
.opnd1
, hstate
);
404 inchash::add_expr (expr
->ops
.binary
.opnd0
, hstate
);
405 inchash::add_expr (expr
->ops
.binary
.opnd1
, hstate
);
410 hstate
.add_object (expr
->ops
.ternary
.op
);
411 if (commutative_ternary_tree_code (expr
->ops
.ternary
.op
))
412 inchash::add_expr_commutative (expr
->ops
.ternary
.opnd0
,
413 expr
->ops
.ternary
.opnd1
, hstate
);
416 inchash::add_expr (expr
->ops
.ternary
.opnd0
, hstate
);
417 inchash::add_expr (expr
->ops
.ternary
.opnd1
, hstate
);
419 inchash::add_expr (expr
->ops
.ternary
.opnd2
, hstate
);
425 enum tree_code code
= CALL_EXPR
;
428 hstate
.add_object (code
);
429 fn_from
= expr
->ops
.call
.fn_from
;
430 if (gimple_call_internal_p (fn_from
))
431 hstate
.merge_hash ((hashval_t
) gimple_call_internal_fn (fn_from
));
433 inchash::add_expr (gimple_call_fn (fn_from
), hstate
);
434 for (i
= 0; i
< expr
->ops
.call
.nargs
; i
++)
435 inchash::add_expr (expr
->ops
.call
.args
[i
], hstate
);
443 for (i
= 0; i
< expr
->ops
.phi
.nargs
; i
++)
444 inchash::add_expr (expr
->ops
.phi
.args
[i
], hstate
);
455 /* Hashing and equality functions. We compute a value number for expressions
456 using the code of the expression and the SSA numbers of its operands. */
459 avail_expr_hash (class expr_hash_elt
*p
)
461 const struct hashable_expr
*expr
= p
->expr ();
462 inchash::hash hstate
;
464 if (expr
->kind
== EXPR_SINGLE
)
466 /* T could potentially be a switch index or a goto dest. */
467 tree t
= expr
->ops
.single
.rhs
;
468 if (TREE_CODE (t
) == MEM_REF
|| handled_component_p (t
))
470 /* Make equivalent statements of both these kinds hash together.
471 Dealing with both MEM_REF and ARRAY_REF allows us not to care
472 about equivalence with other statements not considered here. */
474 HOST_WIDE_INT offset
, size
, max_size
;
475 tree base
= get_ref_base_and_extent (t
, &offset
, &size
, &max_size
,
477 /* Strictly, we could try to normalize variable-sized accesses too,
478 but here we just deal with the common case. */
482 enum tree_code code
= MEM_REF
;
483 hstate
.add_object (code
);
484 inchash::add_expr (base
, hstate
);
485 hstate
.add_object (offset
);
486 hstate
.add_object (size
);
487 return hstate
.end ();
492 inchash::add_hashable_expr (expr
, hstate
);
494 return hstate
.end ();
497 /* Compares trees T0 and T1 to see if they are MEM_REF or ARRAY_REFs equivalent
498 to each other. (That is, they return the value of the same bit of memory.)
500 Return TRUE if the two are so equivalent; FALSE if not (which could still
501 mean the two are equivalent by other means). */
504 equal_mem_array_ref_p (tree t0
, tree t1
)
506 if (TREE_CODE (t0
) != MEM_REF
&& ! handled_component_p (t0
))
508 if (TREE_CODE (t1
) != MEM_REF
&& ! handled_component_p (t1
))
511 if (!types_compatible_p (TREE_TYPE (t0
), TREE_TYPE (t1
)))
514 HOST_WIDE_INT off0
, sz0
, max0
;
515 tree base0
= get_ref_base_and_extent (t0
, &off0
, &sz0
, &max0
, &rev0
);
521 HOST_WIDE_INT off1
, sz1
, max1
;
522 tree base1
= get_ref_base_and_extent (t1
, &off1
, &sz1
, &max1
, &rev1
);
530 /* Types were compatible, so this is a sanity check. */
531 gcc_assert (sz0
== sz1
);
533 return (off0
== off1
) && operand_equal_p (base0
, base1
, 0);
536 /* Compare two hashable_expr structures for equivalence. They are
537 considered equivalent when the expressions they denote must
538 necessarily be equal. The logic is intended to follow that of
539 operand_equal_p in fold-const.c */
542 hashable_expr_equal_p (const struct hashable_expr
*expr0
,
543 const struct hashable_expr
*expr1
)
545 tree type0
= expr0
->type
;
546 tree type1
= expr1
->type
;
548 /* If either type is NULL, there is nothing to check. */
549 if ((type0
== NULL_TREE
) ^ (type1
== NULL_TREE
))
552 /* If both types don't have the same signedness, precision, and mode,
553 then we can't consider them equal. */
555 && (TREE_CODE (type0
) == ERROR_MARK
556 || TREE_CODE (type1
) == ERROR_MARK
557 || TYPE_UNSIGNED (type0
) != TYPE_UNSIGNED (type1
)
558 || TYPE_PRECISION (type0
) != TYPE_PRECISION (type1
)
559 || TYPE_MODE (type0
) != TYPE_MODE (type1
)))
562 if (expr0
->kind
!= expr1
->kind
)
568 return equal_mem_array_ref_p (expr0
->ops
.single
.rhs
,
569 expr1
->ops
.single
.rhs
)
570 || operand_equal_p (expr0
->ops
.single
.rhs
,
571 expr1
->ops
.single
.rhs
, 0);
573 if (expr0
->ops
.unary
.op
!= expr1
->ops
.unary
.op
)
576 if ((CONVERT_EXPR_CODE_P (expr0
->ops
.unary
.op
)
577 || expr0
->ops
.unary
.op
== NON_LVALUE_EXPR
)
578 && TYPE_UNSIGNED (expr0
->type
) != TYPE_UNSIGNED (expr1
->type
))
581 return operand_equal_p (expr0
->ops
.unary
.opnd
,
582 expr1
->ops
.unary
.opnd
, 0);
585 if (expr0
->ops
.binary
.op
!= expr1
->ops
.binary
.op
)
588 if (operand_equal_p (expr0
->ops
.binary
.opnd0
,
589 expr1
->ops
.binary
.opnd0
, 0)
590 && operand_equal_p (expr0
->ops
.binary
.opnd1
,
591 expr1
->ops
.binary
.opnd1
, 0))
594 /* For commutative ops, allow the other order. */
595 return (commutative_tree_code (expr0
->ops
.binary
.op
)
596 && operand_equal_p (expr0
->ops
.binary
.opnd0
,
597 expr1
->ops
.binary
.opnd1
, 0)
598 && operand_equal_p (expr0
->ops
.binary
.opnd1
,
599 expr1
->ops
.binary
.opnd0
, 0));
602 if (expr0
->ops
.ternary
.op
!= expr1
->ops
.ternary
.op
603 || !operand_equal_p (expr0
->ops
.ternary
.opnd2
,
604 expr1
->ops
.ternary
.opnd2
, 0))
607 /* BIT_INSERT_EXPR has an implict operand as the type precision
608 of op1. Need to check to make sure they are the same. */
609 if (expr0
->ops
.ternary
.op
== BIT_INSERT_EXPR
610 && TREE_CODE (expr0
->ops
.ternary
.opnd1
) == INTEGER_CST
611 && TREE_CODE (expr1
->ops
.ternary
.opnd1
) == INTEGER_CST
612 && TYPE_PRECISION (TREE_TYPE (expr0
->ops
.ternary
.opnd1
))
613 != TYPE_PRECISION (TREE_TYPE (expr1
->ops
.ternary
.opnd1
)))
616 if (operand_equal_p (expr0
->ops
.ternary
.opnd0
,
617 expr1
->ops
.ternary
.opnd0
, 0)
618 && operand_equal_p (expr0
->ops
.ternary
.opnd1
,
619 expr1
->ops
.ternary
.opnd1
, 0))
622 /* For commutative ops, allow the other order. */
623 return (commutative_ternary_tree_code (expr0
->ops
.ternary
.op
)
624 && operand_equal_p (expr0
->ops
.ternary
.opnd0
,
625 expr1
->ops
.ternary
.opnd1
, 0)
626 && operand_equal_p (expr0
->ops
.ternary
.opnd1
,
627 expr1
->ops
.ternary
.opnd0
, 0));
633 /* If the calls are to different functions, then they
634 clearly cannot be equal. */
635 if (!gimple_call_same_target_p (expr0
->ops
.call
.fn_from
,
636 expr1
->ops
.call
.fn_from
))
639 if (! expr0
->ops
.call
.pure
)
642 if (expr0
->ops
.call
.nargs
!= expr1
->ops
.call
.nargs
)
645 for (i
= 0; i
< expr0
->ops
.call
.nargs
; i
++)
646 if (! operand_equal_p (expr0
->ops
.call
.args
[i
],
647 expr1
->ops
.call
.args
[i
], 0))
650 if (stmt_could_throw_p (expr0
->ops
.call
.fn_from
))
652 int lp0
= lookup_stmt_eh_lp (expr0
->ops
.call
.fn_from
);
653 int lp1
= lookup_stmt_eh_lp (expr1
->ops
.call
.fn_from
);
654 if ((lp0
> 0 || lp1
> 0) && lp0
!= lp1
)
665 if (expr0
->ops
.phi
.nargs
!= expr1
->ops
.phi
.nargs
)
668 for (i
= 0; i
< expr0
->ops
.phi
.nargs
; i
++)
669 if (! operand_equal_p (expr0
->ops
.phi
.args
[i
],
670 expr1
->ops
.phi
.args
[i
], 0))
681 /* Given a statement STMT, construct a hash table element. */
683 expr_hash_elt::expr_hash_elt (gimple
*stmt
, tree orig_lhs
)
685 enum gimple_code code
= gimple_code (stmt
);
686 struct hashable_expr
*expr
= this->expr ();
688 if (code
== GIMPLE_ASSIGN
)
690 enum tree_code subcode
= gimple_assign_rhs_code (stmt
);
692 switch (get_gimple_rhs_class (subcode
))
694 case GIMPLE_SINGLE_RHS
:
695 expr
->kind
= EXPR_SINGLE
;
696 expr
->type
= TREE_TYPE (gimple_assign_rhs1 (stmt
));
697 expr
->ops
.single
.rhs
= gimple_assign_rhs1 (stmt
);
699 case GIMPLE_UNARY_RHS
:
700 expr
->kind
= EXPR_UNARY
;
701 expr
->type
= TREE_TYPE (gimple_assign_lhs (stmt
));
702 if (CONVERT_EXPR_CODE_P (subcode
))
704 expr
->ops
.unary
.op
= subcode
;
705 expr
->ops
.unary
.opnd
= gimple_assign_rhs1 (stmt
);
707 case GIMPLE_BINARY_RHS
:
708 expr
->kind
= EXPR_BINARY
;
709 expr
->type
= TREE_TYPE (gimple_assign_lhs (stmt
));
710 expr
->ops
.binary
.op
= subcode
;
711 expr
->ops
.binary
.opnd0
= gimple_assign_rhs1 (stmt
);
712 expr
->ops
.binary
.opnd1
= gimple_assign_rhs2 (stmt
);
714 case GIMPLE_TERNARY_RHS
:
715 expr
->kind
= EXPR_TERNARY
;
716 expr
->type
= TREE_TYPE (gimple_assign_lhs (stmt
));
717 expr
->ops
.ternary
.op
= subcode
;
718 expr
->ops
.ternary
.opnd0
= gimple_assign_rhs1 (stmt
);
719 expr
->ops
.ternary
.opnd1
= gimple_assign_rhs2 (stmt
);
720 expr
->ops
.ternary
.opnd2
= gimple_assign_rhs3 (stmt
);
726 else if (code
== GIMPLE_COND
)
728 expr
->type
= boolean_type_node
;
729 expr
->kind
= EXPR_BINARY
;
730 expr
->ops
.binary
.op
= gimple_cond_code (stmt
);
731 expr
->ops
.binary
.opnd0
= gimple_cond_lhs (stmt
);
732 expr
->ops
.binary
.opnd1
= gimple_cond_rhs (stmt
);
734 else if (gcall
*call_stmt
= dyn_cast
<gcall
*> (stmt
))
736 size_t nargs
= gimple_call_num_args (call_stmt
);
739 gcc_assert (gimple_call_lhs (call_stmt
));
741 expr
->type
= TREE_TYPE (gimple_call_lhs (call_stmt
));
742 expr
->kind
= EXPR_CALL
;
743 expr
->ops
.call
.fn_from
= call_stmt
;
745 if (gimple_call_flags (call_stmt
) & (ECF_CONST
| ECF_PURE
))
746 expr
->ops
.call
.pure
= true;
748 expr
->ops
.call
.pure
= false;
750 expr
->ops
.call
.nargs
= nargs
;
751 expr
->ops
.call
.args
= XCNEWVEC (tree
, nargs
);
752 for (i
= 0; i
< nargs
; i
++)
753 expr
->ops
.call
.args
[i
] = gimple_call_arg (call_stmt
, i
);
755 else if (gswitch
*swtch_stmt
= dyn_cast
<gswitch
*> (stmt
))
757 expr
->type
= TREE_TYPE (gimple_switch_index (swtch_stmt
));
758 expr
->kind
= EXPR_SINGLE
;
759 expr
->ops
.single
.rhs
= gimple_switch_index (swtch_stmt
);
761 else if (code
== GIMPLE_GOTO
)
763 expr
->type
= TREE_TYPE (gimple_goto_dest (stmt
));
764 expr
->kind
= EXPR_SINGLE
;
765 expr
->ops
.single
.rhs
= gimple_goto_dest (stmt
);
767 else if (code
== GIMPLE_PHI
)
769 size_t nargs
= gimple_phi_num_args (stmt
);
772 expr
->type
= TREE_TYPE (gimple_phi_result (stmt
));
773 expr
->kind
= EXPR_PHI
;
774 expr
->ops
.phi
.nargs
= nargs
;
775 expr
->ops
.phi
.args
= XCNEWVEC (tree
, nargs
);
776 for (i
= 0; i
< nargs
; i
++)
777 expr
->ops
.phi
.args
[i
] = gimple_phi_arg_def (stmt
, i
);
783 m_vop
= gimple_vuse (stmt
);
784 m_hash
= avail_expr_hash (this);
788 /* Given a hashable_expr expression ORIG and an ORIG_LHS,
789 construct a hash table element. */
791 expr_hash_elt::expr_hash_elt (struct hashable_expr
*orig
, tree orig_lhs
)
796 m_hash
= avail_expr_hash (this);
800 /* Copy constructor for a hash table element. */
802 expr_hash_elt::expr_hash_elt (class expr_hash_elt
&old_elt
)
804 m_expr
= old_elt
.m_expr
;
805 m_lhs
= old_elt
.m_lhs
;
806 m_vop
= old_elt
.m_vop
;
807 m_hash
= old_elt
.m_hash
;
810 /* Now deep copy the malloc'd space for CALL and PHI args. */
811 if (old_elt
.m_expr
.kind
== EXPR_CALL
)
813 size_t nargs
= old_elt
.m_expr
.ops
.call
.nargs
;
816 m_expr
.ops
.call
.args
= XCNEWVEC (tree
, nargs
);
817 for (i
= 0; i
< nargs
; i
++)
818 m_expr
.ops
.call
.args
[i
] = old_elt
.m_expr
.ops
.call
.args
[i
];
820 else if (old_elt
.m_expr
.kind
== EXPR_PHI
)
822 size_t nargs
= old_elt
.m_expr
.ops
.phi
.nargs
;
825 m_expr
.ops
.phi
.args
= XCNEWVEC (tree
, nargs
);
826 for (i
= 0; i
< nargs
; i
++)
827 m_expr
.ops
.phi
.args
[i
] = old_elt
.m_expr
.ops
.phi
.args
[i
];
831 /* Calls and PHIs have a variable number of arguments that are allocated
832 on the heap. Thus we have to have a special dtor to release them. */
834 expr_hash_elt::~expr_hash_elt ()
836 if (m_expr
.kind
== EXPR_CALL
)
837 free (m_expr
.ops
.call
.args
);
838 else if (m_expr
.kind
== EXPR_PHI
)
839 free (m_expr
.ops
.phi
.args
);
842 /* Print a diagnostic dump of an expression hash table entry. */
845 expr_hash_elt::print (FILE *stream
)
847 fprintf (stream
, "STMT ");
851 print_generic_expr (stream
, m_lhs
);
852 fprintf (stream
, " = ");
858 print_generic_expr (stream
, m_expr
.ops
.single
.rhs
);
862 fprintf (stream
, "%s ", get_tree_code_name (m_expr
.ops
.unary
.op
));
863 print_generic_expr (stream
, m_expr
.ops
.unary
.opnd
);
867 print_generic_expr (stream
, m_expr
.ops
.binary
.opnd0
);
868 fprintf (stream
, " %s ", get_tree_code_name (m_expr
.ops
.binary
.op
));
869 print_generic_expr (stream
, m_expr
.ops
.binary
.opnd1
);
873 fprintf (stream
, " %s <", get_tree_code_name (m_expr
.ops
.ternary
.op
));
874 print_generic_expr (stream
, m_expr
.ops
.ternary
.opnd0
);
875 fputs (", ", stream
);
876 print_generic_expr (stream
, m_expr
.ops
.ternary
.opnd1
);
877 fputs (", ", stream
);
878 print_generic_expr (stream
, m_expr
.ops
.ternary
.opnd2
);
885 size_t nargs
= m_expr
.ops
.call
.nargs
;
888 fn_from
= m_expr
.ops
.call
.fn_from
;
889 if (gimple_call_internal_p (fn_from
))
890 fputs (internal_fn_name (gimple_call_internal_fn (fn_from
)),
893 print_generic_expr (stream
, gimple_call_fn (fn_from
));
894 fprintf (stream
, " (");
895 for (i
= 0; i
< nargs
; i
++)
897 print_generic_expr (stream
, m_expr
.ops
.call
.args
[i
]);
899 fprintf (stream
, ", ");
901 fprintf (stream
, ")");
908 size_t nargs
= m_expr
.ops
.phi
.nargs
;
910 fprintf (stream
, "PHI <");
911 for (i
= 0; i
< nargs
; i
++)
913 print_generic_expr (stream
, m_expr
.ops
.phi
.args
[i
]);
915 fprintf (stream
, ", ");
917 fprintf (stream
, ">");
924 fprintf (stream
, " with ");
925 print_generic_expr (stream
, m_vop
);
928 fprintf (stream
, "\n");
931 /* Pop entries off the stack until we hit the NULL marker.
932 For each entry popped, use the SRC/DEST pair to restore
933 SRC to its prior value. */
936 const_and_copies::pop_to_marker (void)
938 while (m_stack
.length () > 0)
940 tree prev_value
, dest
;
942 dest
= m_stack
.pop ();
944 /* A NULL value indicates we should stop unwinding, otherwise
945 pop off the next entry as they're recorded in pairs. */
949 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
951 fprintf (dump_file
, "<<<< COPY ");
952 print_generic_expr (dump_file
, dest
);
953 fprintf (dump_file
, " = ");
954 print_generic_expr (dump_file
, SSA_NAME_VALUE (dest
));
955 fprintf (dump_file
, "\n");
958 prev_value
= m_stack
.pop ();
959 set_ssa_name_value (dest
, prev_value
);
963 /* Record that X has the value Y and that X's previous value is PREV_X.
965 This variant does not follow the value chain for Y. */
968 const_and_copies::record_const_or_copy_raw (tree x
, tree y
, tree prev_x
)
970 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
972 fprintf (dump_file
, "0>>> COPY ");
973 print_generic_expr (dump_file
, x
);
974 fprintf (dump_file
, " = ");
975 print_generic_expr (dump_file
, y
);
976 fprintf (dump_file
, "\n");
979 set_ssa_name_value (x
, y
);
981 m_stack
.quick_push (prev_x
);
982 m_stack
.quick_push (x
);
985 /* Record that X has the value Y. */
988 const_and_copies::record_const_or_copy (tree x
, tree y
)
990 record_const_or_copy (x
, y
, SSA_NAME_VALUE (x
));
993 /* Record that X has the value Y and that X's previous value is PREV_X.
995 This variant follow's Y value chain. */
998 const_and_copies::record_const_or_copy (tree x
, tree y
, tree prev_x
)
1000 /* Y may be NULL if we are invalidating entries in the table. */
1001 if (y
&& TREE_CODE (y
) == SSA_NAME
)
1003 tree tmp
= SSA_NAME_VALUE (y
);
1007 record_const_or_copy_raw (x
, y
, prev_x
);
1011 expr_elt_hasher::equal (const value_type
&p1
, const compare_type
&p2
)
1013 const struct hashable_expr
*expr1
= p1
->expr ();
1014 const struct expr_hash_elt
*stamp1
= p1
->stamp ();
1015 const struct hashable_expr
*expr2
= p2
->expr ();
1016 const struct expr_hash_elt
*stamp2
= p2
->stamp ();
1018 /* This case should apply only when removing entries from the table. */
1019 if (stamp1
== stamp2
)
1022 if (p1
->hash () != p2
->hash ())
1025 /* In case of a collision, both RHS have to be identical and have the
1026 same VUSE operands. */
1027 if (hashable_expr_equal_p (expr1
, expr2
)
1028 && types_compatible_p (expr1
->type
, expr2
->type
))
1034 /* Given a conditional expression COND as a tree, initialize
1035 a hashable_expr expression EXPR. The conditional must be a
1036 comparison or logical negation. A constant or a variable is
1040 initialize_expr_from_cond (tree cond
, struct hashable_expr
*expr
)
1042 expr
->type
= boolean_type_node
;
1044 if (COMPARISON_CLASS_P (cond
))
1046 expr
->kind
= EXPR_BINARY
;
1047 expr
->ops
.binary
.op
= TREE_CODE (cond
);
1048 expr
->ops
.binary
.opnd0
= TREE_OPERAND (cond
, 0);
1049 expr
->ops
.binary
.opnd1
= TREE_OPERAND (cond
, 1);
1051 else if (TREE_CODE (cond
) == TRUTH_NOT_EXPR
)
1053 expr
->kind
= EXPR_UNARY
;
1054 expr
->ops
.unary
.op
= TRUTH_NOT_EXPR
;
1055 expr
->ops
.unary
.opnd
= TREE_OPERAND (cond
, 0);
1061 /* Build a cond_equivalence record indicating that the comparison
1062 CODE holds between operands OP0 and OP1 and push it to **P. */
1065 build_and_record_new_cond (enum tree_code code
,
1067 vec
<cond_equivalence
> *p
,
1071 struct hashable_expr
*cond
= &c
.cond
;
1073 gcc_assert (TREE_CODE_CLASS (code
) == tcc_comparison
);
1075 cond
->type
= boolean_type_node
;
1076 cond
->kind
= EXPR_BINARY
;
1077 cond
->ops
.binary
.op
= code
;
1078 cond
->ops
.binary
.opnd0
= op0
;
1079 cond
->ops
.binary
.opnd1
= op1
;
1081 c
.value
= val
? boolean_true_node
: boolean_false_node
;
1085 /* Record that COND is true and INVERTED is false into the edge information
1086 structure. Also record that any conditions dominated by COND are true
1089 For example, if a < b is true, then a <= b must also be true. */
1092 record_conditions (vec
<cond_equivalence
> *p
, tree cond
, tree inverted
)
1097 if (!COMPARISON_CLASS_P (cond
))
1100 op0
= TREE_OPERAND (cond
, 0);
1101 op1
= TREE_OPERAND (cond
, 1);
1103 switch (TREE_CODE (cond
))
1107 if (FLOAT_TYPE_P (TREE_TYPE (op0
)))
1109 build_and_record_new_cond (ORDERED_EXPR
, op0
, op1
, p
);
1110 build_and_record_new_cond (LTGT_EXPR
, op0
, op1
, p
);
1113 build_and_record_new_cond ((TREE_CODE (cond
) == LT_EXPR
1114 ? LE_EXPR
: GE_EXPR
),
1116 build_and_record_new_cond (NE_EXPR
, op0
, op1
, p
);
1117 build_and_record_new_cond (EQ_EXPR
, op0
, op1
, p
, false);
1122 if (FLOAT_TYPE_P (TREE_TYPE (op0
)))
1124 build_and_record_new_cond (ORDERED_EXPR
, op0
, op1
, p
);
1129 if (FLOAT_TYPE_P (TREE_TYPE (op0
)))
1131 build_and_record_new_cond (ORDERED_EXPR
, op0
, op1
, p
);
1133 build_and_record_new_cond (LE_EXPR
, op0
, op1
, p
);
1134 build_and_record_new_cond (GE_EXPR
, op0
, op1
, p
);
1137 case UNORDERED_EXPR
:
1138 build_and_record_new_cond (NE_EXPR
, op0
, op1
, p
);
1139 build_and_record_new_cond (UNLE_EXPR
, op0
, op1
, p
);
1140 build_and_record_new_cond (UNGE_EXPR
, op0
, op1
, p
);
1141 build_and_record_new_cond (UNEQ_EXPR
, op0
, op1
, p
);
1142 build_and_record_new_cond (UNLT_EXPR
, op0
, op1
, p
);
1143 build_and_record_new_cond (UNGT_EXPR
, op0
, op1
, p
);
1148 build_and_record_new_cond ((TREE_CODE (cond
) == UNLT_EXPR
1149 ? UNLE_EXPR
: UNGE_EXPR
),
1151 build_and_record_new_cond (NE_EXPR
, op0
, op1
, p
);
1155 build_and_record_new_cond (UNLE_EXPR
, op0
, op1
, p
);
1156 build_and_record_new_cond (UNGE_EXPR
, op0
, op1
, p
);
1160 build_and_record_new_cond (NE_EXPR
, op0
, op1
, p
);
1161 build_and_record_new_cond (ORDERED_EXPR
, op0
, op1
, p
);
1168 /* Now store the original true and false conditions into the first
1170 initialize_expr_from_cond (cond
, &c
.cond
);
1171 c
.value
= boolean_true_node
;
1174 /* It is possible for INVERTED to be the negation of a comparison,
1175 and not a valid RHS or GIMPLE_COND condition. This happens because
1176 invert_truthvalue may return such an expression when asked to invert
1177 a floating-point comparison. These comparisons are not assumed to
1178 obey the trichotomy law. */
1179 initialize_expr_from_cond (inverted
, &c
.cond
);
1180 c
.value
= boolean_false_node
;