/* Header file for SSA dominator optimizations.
   Copyright (C) 2013-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "alias.h"
#include "tree.h"
#include "tree-pretty-print.h"
#include "tree-pass.h"
#include "tree-ssa-scopedtables.h"
#include "tree-ssa-threadedge.h"
#include "tree-ssa-dom.h"
#include "function.h"
#include "stor-layout.h"
#include "fold-const.h"
#include "basic-block.h"
#include "tree-eh.h"
#include "internal-fn.h"
#include "gimple.h"
#include "dumpfile.h"

static bool hashable_expr_equal_p (const struct hashable_expr *,
                                   const struct hashable_expr *);
/* Initialize local stacks for this optimizer and record equivalences
   upon entry to BB.  Equivalences can come from the edge traversed to
   reach BB or they may come from PHI nodes at the start of BB.  */

/* Pop items off the unwinding stack, removing each from the hash table
   until a marker is encountered.  */
void
avail_exprs_stack::pop_to_marker ()
{
  /* Remove all the expressions made available in this block.  */
  while (m_stack.length () > 0)
    {
      std::pair<expr_hash_elt_t, expr_hash_elt_t> victim = m_stack.pop ();
      expr_hash_elt **slot;

      if (victim.first == NULL)
        break;

      /* This must precede the actual removal from the hash table,
         as ELEMENT and the table entry may share a call argument
         vector which will be freed during removal.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "<<<< ");
          victim.first->print (dump_file);
        }

      slot = m_avail_exprs->find_slot (victim.first, NO_INSERT);
      gcc_assert (slot && *slot == victim.first);
      if (victim.second != NULL)
        {
          delete *slot;
          *slot = victim.second;
        }
      else
        m_avail_exprs->clear_slot (slot);
    }
}
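/* Each stack entry is a <current, previous> pair: CURRENT is the element
   that was entered into the hash table in this block and PREVIOUS (possibly
   NULL) is the element it displaced.  A pair whose first member is NULL acts
   as the unwind marker, so pop_to_marker either restores the displaced entry
   or clears the slot for everything recorded since the last marker.  */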
/* Add <ELT1,ELT2> to the unwinding stack so they can be later removed
   from the hash table.  */

void
avail_exprs_stack::record_expr (class expr_hash_elt *elt1,
                                class expr_hash_elt *elt2,
                                char type)
{
  if (elt1 && dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "%c>>> ", type);
      elt1->print (dump_file);
    }

  m_stack.safe_push (std::pair<expr_hash_elt_t, expr_hash_elt_t> (elt1, elt2));
}
/* Generate a hash value for a pair of expressions.  This can be used
   iteratively by passing a previous result in HSTATE.

   The same hash value is always returned for a given pair of expressions,
   regardless of the order in which they are presented.  This is useful in
   hashing the operands of commutative functions.  */

namespace inchash
{

static void
add_expr_commutative (const_tree t1, const_tree t2, hash &hstate)
{
  hash one, two;

  inchash::add_expr (t1, one);
  inchash::add_expr (t2, two);
  hstate.add_commutative (one, two);
}
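/* For example, hashing the operand pairs (a, b) and (b, a) of a PLUS_EXPR
   yields the same value: each operand is hashed independently and the two
   sub-hashes are combined order-insensitively by add_commutative, so
   "a + b" and "b + a" land in the same bucket.  */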
/* Compute a hash value for a hashable_expr value EXPR and a
   previously accumulated hash value VAL.  If two hashable_expr
   values compare equal with hashable_expr_equal_p, they must
   hash to the same value, given an identical value of VAL.
   The logic is intended to follow inchash::add_expr in tree.c.  */

static void
add_hashable_expr (const struct hashable_expr *expr, hash &hstate)
{
  switch (expr->kind)
    {
    case EXPR_SINGLE:
      inchash::add_expr (expr->ops.single.rhs, hstate);
      break;

    case EXPR_UNARY:
      hstate.add_object (expr->ops.unary.op);

      /* Make sure to include signedness in the hash computation.
         Don't hash the type, that can lead to having nodes which
         compare equal according to operand_equal_p, but which
         have different hash codes.  */
      if (CONVERT_EXPR_CODE_P (expr->ops.unary.op)
          || expr->ops.unary.op == NON_LVALUE_EXPR)
        hstate.add_int (TYPE_UNSIGNED (expr->type));

      inchash::add_expr (expr->ops.unary.opnd, hstate);
      break;

    case EXPR_BINARY:
      hstate.add_object (expr->ops.binary.op);
      if (commutative_tree_code (expr->ops.binary.op))
        inchash::add_expr_commutative (expr->ops.binary.opnd0,
                                       expr->ops.binary.opnd1, hstate);
      else
        {
          inchash::add_expr (expr->ops.binary.opnd0, hstate);
          inchash::add_expr (expr->ops.binary.opnd1, hstate);
        }
      break;

    case EXPR_TERNARY:
      hstate.add_object (expr->ops.ternary.op);
      if (commutative_ternary_tree_code (expr->ops.ternary.op))
        inchash::add_expr_commutative (expr->ops.ternary.opnd0,
                                       expr->ops.ternary.opnd1, hstate);
      else
        {
          inchash::add_expr (expr->ops.ternary.opnd0, hstate);
          inchash::add_expr (expr->ops.ternary.opnd1, hstate);
        }
      inchash::add_expr (expr->ops.ternary.opnd2, hstate);
      break;

    case EXPR_CALL:
      {
        size_t i;
        enum tree_code code = CALL_EXPR;
        gcall *fn_from;

        hstate.add_object (code);
        fn_from = expr->ops.call.fn_from;
        if (gimple_call_internal_p (fn_from))
          hstate.merge_hash ((hashval_t) gimple_call_internal_fn (fn_from));
        else
          inchash::add_expr (gimple_call_fn (fn_from), hstate);
        for (i = 0; i < expr->ops.call.nargs; i++)
          inchash::add_expr (expr->ops.call.args[i], hstate);
      }
      break;

    case EXPR_PHI:
      {
        size_t i;

        for (i = 0; i < expr->ops.phi.nargs; i++)
          inchash::add_expr (expr->ops.phi.args[i], hstate);
      }
      break;

    default:
      gcc_unreachable ();
    }
}

}
/* Hashing and equality functions.  We compute a value number for expressions
   using the code of the expression and the SSA numbers of its operands.  */

static hashval_t
avail_expr_hash (class expr_hash_elt *p)
{
  const struct hashable_expr *expr = p->expr ();
  inchash::hash hstate;

  inchash::add_hashable_expr (expr, hstate);

  return hstate.end ();
}
/* Compare two hashable_expr structures for equivalence.  They are
   considered equivalent when the expressions they denote must
   necessarily be equal.  The logic is intended to follow that of
   operand_equal_p in fold-const.c.  */

static bool
hashable_expr_equal_p (const struct hashable_expr *expr0,
                       const struct hashable_expr *expr1)
{
  tree type0 = expr0->type;
  tree type1 = expr1->type;

  /* If either type is NULL, there is nothing to check.  */
  if ((type0 == NULL_TREE) ^ (type1 == NULL_TREE))
    return false;

  /* If both types don't have the same signedness, precision, and mode,
     then we can't consider them equal.  */
  if (type0 != type1
      && (TREE_CODE (type0) == ERROR_MARK
          || TREE_CODE (type1) == ERROR_MARK
          || TYPE_UNSIGNED (type0) != TYPE_UNSIGNED (type1)
          || TYPE_PRECISION (type0) != TYPE_PRECISION (type1)
          || TYPE_MODE (type0) != TYPE_MODE (type1)))
    return false;

  if (expr0->kind != expr1->kind)
    return false;

  switch (expr0->kind)
    {
    case EXPR_SINGLE:
      return operand_equal_p (expr0->ops.single.rhs,
                              expr1->ops.single.rhs, 0);

    case EXPR_UNARY:
      if (expr0->ops.unary.op != expr1->ops.unary.op)
        return false;

      if ((CONVERT_EXPR_CODE_P (expr0->ops.unary.op)
           || expr0->ops.unary.op == NON_LVALUE_EXPR)
          && TYPE_UNSIGNED (expr0->type) != TYPE_UNSIGNED (expr1->type))
        return false;

      return operand_equal_p (expr0->ops.unary.opnd,
                              expr1->ops.unary.opnd, 0);

    case EXPR_BINARY:
      if (expr0->ops.binary.op != expr1->ops.binary.op)
        return false;

      if (operand_equal_p (expr0->ops.binary.opnd0,
                           expr1->ops.binary.opnd0, 0)
          && operand_equal_p (expr0->ops.binary.opnd1,
                              expr1->ops.binary.opnd1, 0))
        return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (expr0->ops.binary.op)
              && operand_equal_p (expr0->ops.binary.opnd0,
                                  expr1->ops.binary.opnd1, 0)
              && operand_equal_p (expr0->ops.binary.opnd1,
                                  expr1->ops.binary.opnd0, 0));

    case EXPR_TERNARY:
      if (expr0->ops.ternary.op != expr1->ops.ternary.op
          || !operand_equal_p (expr0->ops.ternary.opnd2,
                               expr1->ops.ternary.opnd2, 0))
        return false;

      if (operand_equal_p (expr0->ops.ternary.opnd0,
                           expr1->ops.ternary.opnd0, 0)
          && operand_equal_p (expr0->ops.ternary.opnd1,
                              expr1->ops.ternary.opnd1, 0))
        return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_ternary_tree_code (expr0->ops.ternary.op)
              && operand_equal_p (expr0->ops.ternary.opnd0,
                                  expr1->ops.ternary.opnd1, 0)
              && operand_equal_p (expr0->ops.ternary.opnd1,
                                  expr1->ops.ternary.opnd0, 0));

    case EXPR_CALL:
      {
        size_t i;

        /* If the calls are to different functions, then they
           clearly cannot be equal.  */
        if (!gimple_call_same_target_p (expr0->ops.call.fn_from,
                                        expr1->ops.call.fn_from))
          return false;

        if (! expr0->ops.call.pure)
          return false;

        if (expr0->ops.call.nargs != expr1->ops.call.nargs)
          return false;

        for (i = 0; i < expr0->ops.call.nargs; i++)
          if (! operand_equal_p (expr0->ops.call.args[i],
                                 expr1->ops.call.args[i], 0))
            return false;

        if (stmt_could_throw_p (expr0->ops.call.fn_from))
          {
            int lp0 = lookup_stmt_eh_lp (expr0->ops.call.fn_from);
            int lp1 = lookup_stmt_eh_lp (expr1->ops.call.fn_from);
            if ((lp0 > 0 || lp1 > 0) && lp0 != lp1)
              return false;
          }

        return true;
      }

    case EXPR_PHI:
      {
        size_t i;

        if (expr0->ops.phi.nargs != expr1->ops.phi.nargs)
          return false;

        for (i = 0; i < expr0->ops.phi.nargs; i++)
          if (! operand_equal_p (expr0->ops.phi.args[i],
                                 expr1->ops.phi.args[i], 0))
            return false;

        return true;
      }

    default:
      gcc_unreachable ();
    }
}
/* Given a statement STMT, construct a hash table element.  */

expr_hash_elt::expr_hash_elt (gimple *stmt, tree orig_lhs)
{
  enum gimple_code code = gimple_code (stmt);
  struct hashable_expr *expr = this->expr ();

  if (code == GIMPLE_ASSIGN)
    {
      enum tree_code subcode = gimple_assign_rhs_code (stmt);

      switch (get_gimple_rhs_class (subcode))
        {
        case GIMPLE_SINGLE_RHS:
          expr->kind = EXPR_SINGLE;
          expr->type = TREE_TYPE (gimple_assign_rhs1 (stmt));
          expr->ops.single.rhs = gimple_assign_rhs1 (stmt);
          break;
        case GIMPLE_UNARY_RHS:
          expr->kind = EXPR_UNARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          if (CONVERT_EXPR_CODE_P (subcode))
            subcode = NOP_EXPR;
          expr->ops.unary.op = subcode;
          expr->ops.unary.opnd = gimple_assign_rhs1 (stmt);
          break;
        case GIMPLE_BINARY_RHS:
          expr->kind = EXPR_BINARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.binary.op = subcode;
          expr->ops.binary.opnd0 = gimple_assign_rhs1 (stmt);
          expr->ops.binary.opnd1 = gimple_assign_rhs2 (stmt);
          break;
        case GIMPLE_TERNARY_RHS:
          expr->kind = EXPR_TERNARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.ternary.op = subcode;
          expr->ops.ternary.opnd0 = gimple_assign_rhs1 (stmt);
          expr->ops.ternary.opnd1 = gimple_assign_rhs2 (stmt);
          expr->ops.ternary.opnd2 = gimple_assign_rhs3 (stmt);
          break;
        default:
          gcc_unreachable ();
        }
    }
  else if (code == GIMPLE_COND)
    {
      expr->type = boolean_type_node;
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = gimple_cond_code (stmt);
      expr->ops.binary.opnd0 = gimple_cond_lhs (stmt);
      expr->ops.binary.opnd1 = gimple_cond_rhs (stmt);
    }
  else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      size_t nargs = gimple_call_num_args (call_stmt);
      size_t i;

      gcc_assert (gimple_call_lhs (call_stmt));

      expr->type = TREE_TYPE (gimple_call_lhs (call_stmt));
      expr->kind = EXPR_CALL;
      expr->ops.call.fn_from = call_stmt;

      if (gimple_call_flags (call_stmt) & (ECF_CONST | ECF_PURE))
        expr->ops.call.pure = true;
      else
        expr->ops.call.pure = false;

      expr->ops.call.nargs = nargs;
      expr->ops.call.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        expr->ops.call.args[i] = gimple_call_arg (call_stmt, i);
    }
  else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
    {
      expr->type = TREE_TYPE (gimple_switch_index (swtch_stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_switch_index (swtch_stmt);
    }
  else if (code == GIMPLE_GOTO)
    {
      expr->type = TREE_TYPE (gimple_goto_dest (stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_goto_dest (stmt);
    }
  else if (code == GIMPLE_PHI)
    {
      size_t nargs = gimple_phi_num_args (stmt);
      size_t i;

      expr->type = TREE_TYPE (gimple_phi_result (stmt));
      expr->kind = EXPR_PHI;
      expr->ops.phi.nargs = nargs;
      expr->ops.phi.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        expr->ops.phi.args[i] = gimple_phi_arg_def (stmt, i);
    }
  else
    gcc_unreachable ();

  m_lhs = orig_lhs;
  m_vop = gimple_vuse (stmt);
  m_hash = avail_expr_hash (this);
  m_stamp = this;
}
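/* For example, given the GIMPLE assignment "x_1 = a_2 + b_3" this builds an
   EXPR_BINARY element with op PLUS_EXPR and operands a_2 and b_3, records
   x_1 as M_LHS, and caches the hash so a later lookup of an equivalent
   "a_2 + b_3" can find this entry and reuse x_1.  */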
/* Given a hashable_expr expression ORIG and an ORIG_LHS,
   construct a hash table element.  */

expr_hash_elt::expr_hash_elt (struct hashable_expr *orig, tree orig_lhs)
{
  m_expr = *orig;
  m_lhs = orig_lhs;
  m_vop = NULL_TREE;
  m_hash = avail_expr_hash (this);
  m_stamp = this;
}
/* Copy constructor for a hash table element.  */

expr_hash_elt::expr_hash_elt (class expr_hash_elt &old_elt)
{
  m_expr = old_elt.m_expr;
  m_lhs = old_elt.m_lhs;
  m_vop = old_elt.m_vop;
  m_hash = old_elt.m_hash;
  m_stamp = this;

  /* Now deep copy the malloc'd space for CALL and PHI args.  */
  if (old_elt.m_expr.kind == EXPR_CALL)
    {
      size_t nargs = old_elt.m_expr.ops.call.nargs;
      size_t i;

      m_expr.ops.call.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        m_expr.ops.call.args[i] = old_elt.m_expr.ops.call.args[i];
    }
  else if (old_elt.m_expr.kind == EXPR_PHI)
    {
      size_t nargs = old_elt.m_expr.ops.phi.nargs;
      size_t i;

      m_expr.ops.phi.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        m_expr.ops.phi.args[i] = old_elt.m_expr.ops.phi.args[i];
    }
}
/* Calls and PHIs have a variable number of arguments that are allocated
   on the heap.  Thus we have to have a special dtor to release them.  */

expr_hash_elt::~expr_hash_elt ()
{
  if (m_expr.kind == EXPR_CALL)
    free (m_expr.ops.call.args);
  else if (m_expr.kind == EXPR_PHI)
    free (m_expr.ops.phi.args);
}
/* Print a diagnostic dump of an expression hash table entry.  */

void
expr_hash_elt::print (FILE *stream)
{
  fprintf (stream, "STMT ");

  if (m_lhs)
    {
      print_generic_expr (stream, m_lhs, 0);
      fprintf (stream, " = ");
    }

  switch (m_expr.kind)
    {
    case EXPR_SINGLE:
      print_generic_expr (stream, m_expr.ops.single.rhs, 0);
      break;

    case EXPR_UNARY:
      fprintf (stream, "%s ", get_tree_code_name (m_expr.ops.unary.op));
      print_generic_expr (stream, m_expr.ops.unary.opnd, 0);
      break;

    case EXPR_BINARY:
      print_generic_expr (stream, m_expr.ops.binary.opnd0, 0);
      fprintf (stream, " %s ", get_tree_code_name (m_expr.ops.binary.op));
      print_generic_expr (stream, m_expr.ops.binary.opnd1, 0);
      break;

    case EXPR_TERNARY:
      fprintf (stream, " %s <", get_tree_code_name (m_expr.ops.ternary.op));
      print_generic_expr (stream, m_expr.ops.ternary.opnd0, 0);
      fputs (", ", stream);
      print_generic_expr (stream, m_expr.ops.ternary.opnd1, 0);
      fputs (", ", stream);
      print_generic_expr (stream, m_expr.ops.ternary.opnd2, 0);
      fputs (">", stream);
      break;

    case EXPR_CALL:
      {
        size_t i;
        size_t nargs = m_expr.ops.call.nargs;
        gcall *fn_from;

        fn_from = m_expr.ops.call.fn_from;
        if (gimple_call_internal_p (fn_from))
          fputs (internal_fn_name (gimple_call_internal_fn (fn_from)),
                 stream);
        else
          print_generic_expr (stream, gimple_call_fn (fn_from), 0);
        fprintf (stream, " (");
        for (i = 0; i < nargs; i++)
          {
            print_generic_expr (stream, m_expr.ops.call.args[i], 0);
            if (i + 1 < nargs)
              fprintf (stream, ", ");
          }
        fprintf (stream, ")");
      }
      break;

    case EXPR_PHI:
      {
        size_t i;
        size_t nargs = m_expr.ops.phi.nargs;

        fprintf (stream, "PHI <");
        for (i = 0; i < nargs; i++)
          {
            print_generic_expr (stream, m_expr.ops.phi.args[i], 0);
            if (i + 1 < nargs)
              fprintf (stream, ", ");
          }
        fprintf (stream, ">");
      }
      break;
    }

  if (m_vop)
    {
      fprintf (stream, " with ");
      print_generic_expr (stream, m_vop, 0);
    }

  fprintf (stream, "\n");
}
/* Pop entries off the stack until we hit the NULL marker.
   For each entry popped, use the SRC/DEST pair to restore
   SRC to its prior value.  */

void
const_and_copies::pop_to_marker (void)
{
  while (m_stack.length () > 0)
    {
      tree prev_value, dest;

      dest = m_stack.pop ();

      /* A NULL value indicates we should stop unwinding, otherwise
         pop off the next entry as they're recorded in pairs.  */
      if (dest == NULL)
        break;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "<<<< COPY ");
          print_generic_expr (dump_file, dest, 0);
          fprintf (dump_file, " = ");
          print_generic_expr (dump_file, SSA_NAME_VALUE (dest), 0);
          fprintf (dump_file, "\n");
        }

      prev_value = m_stack.pop ();
      set_ssa_name_value (dest, prev_value);
    }
}
/* Record that X has the value Y.  */

void
const_and_copies::record_const_or_copy (tree x, tree y)
{
  record_const_or_copy (x, y, SSA_NAME_VALUE (x));
}

/* Record that X has the value Y and that X's previous value is PREV_X.  */

void
const_and_copies::record_const_or_copy (tree x, tree y, tree prev_x)
{
  /* Y may be NULL if we are invalidating entries in the table.  */
  if (y && TREE_CODE (y) == SSA_NAME)
    {
      tree tmp = SSA_NAME_VALUE (y);
      y = tmp ? tmp : y;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "0>>> COPY ");
      print_generic_expr (dump_file, x, 0);
      fprintf (dump_file, " = ");
      print_generic_expr (dump_file, y, 0);
      fprintf (dump_file, "\n");
    }

  set_ssa_name_value (x, y);
  m_stack.reserve (2);
  m_stack.quick_push (prev_x);
  m_stack.quick_push (x);
}
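/* The unwinding stack therefore holds PREV_X immediately below X for every
   recorded equivalence: pop_to_marker pops X first, then PREV_X, and restores
   X's SSA_NAME_VALUE to PREV_X.  For example, recording "x_1 = 5" inside a
   block pushes <old value of x_1, x_1>, and leaving the block re-establishes
   the old value (often NULL) for x_1.  */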
/* A new value has been assigned to LHS.  If necessary, invalidate any
   equivalences that are no longer valid.  This includes invalidating
   LHS and any objects that are currently equivalent to LHS.

   Finding the objects that are currently marked as equivalent to LHS
   is a bit tricky.  We could walk the ssa names and see if any have
   SSA_NAME_VALUE that is the same as LHS.  That's expensive.

   However, it's far more efficient to look at the unwinding stack as
   that will have all context sensitive equivalences which are the only
   ones that we really have to worry about here.  */
void
const_and_copies::invalidate (tree lhs)
{

  /* The stack is an unwinding stack.  If the current element is NULL
     then it's a "stop unwinding" marker.  Else the current marker is
     the SSA_NAME with an equivalence and the prior entry in the stack
     is what the current element is equivalent to.  */
  for (int i = m_stack.length() - 1; i >= 0; i--)
    {
      /* Ignore the stop unwinding markers.  */
      if ((m_stack)[i] == NULL)
        continue;

      /* We want to check the current value of stack[i] to see if
         it matches LHS.  If so, then invalidate.  */
      if (SSA_NAME_VALUE ((m_stack)[i]) == lhs)
        record_const_or_copy ((m_stack)[i], NULL_TREE);

      /* Remember, we're dealing with two elements in this case.  */
      i--;
    }

  /* And invalidate any known value for LHS itself.  */
  if (SSA_NAME_VALUE (lhs))
    record_const_or_copy (lhs, NULL_TREE);
}
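/* For example, after recording the copy "y_2 = x_1", the stack holds
   <..., prev value of y_2, y_2>.  If x_1 is later given a new value,
   invalidate (x_1) finds y_2 on the stack with SSA_NAME_VALUE (y_2) == x_1
   and records y_2 = NULL_TREE, so the stale equivalence is not used.  */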
bool
expr_elt_hasher::equal (const value_type &p1, const compare_type &p2)
{
  const struct hashable_expr *expr1 = p1->expr ();
  const struct expr_hash_elt *stamp1 = p1->stamp ();
  const struct hashable_expr *expr2 = p2->expr ();
  const struct expr_hash_elt *stamp2 = p2->stamp ();

  /* This case should apply only when removing entries from the table.  */
  if (stamp1 == stamp2)
    return true;

  if (p1->hash () != p2->hash ())
    return false;

  /* In case of a collision, both RHS have to be identical and have the
     same VUSE operands.  */
  if (hashable_expr_equal_p (expr1, expr2)
      && types_compatible_p (expr1->type, expr2->type))
    return true;

  return false;
}
/* Given a conditional expression COND as a tree, initialize
   a hashable_expr expression EXPR.  The conditional must be a
   comparison or logical negation.  A constant or a variable is
   not permitted.  */

void
initialize_expr_from_cond (tree cond, struct hashable_expr *expr)
{
  expr->type = boolean_type_node;

  if (COMPARISON_CLASS_P (cond))
    {
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = TREE_CODE (cond);
      expr->ops.binary.opnd0 = TREE_OPERAND (cond, 0);
      expr->ops.binary.opnd1 = TREE_OPERAND (cond, 1);
    }
  else if (TREE_CODE (cond) == TRUTH_NOT_EXPR)
    {
      expr->kind = EXPR_UNARY;
      expr->ops.unary.op = TRUTH_NOT_EXPR;
      expr->ops.unary.opnd = TREE_OPERAND (cond, 0);
    }
  else
    gcc_unreachable ();
}
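/* For example, the condition "a_1 < b_2" yields an EXPR_BINARY element with
   op LT_EXPR and operands a_1 and b_2, while "!flag_3" yields an EXPR_UNARY
   element with op TRUTH_NOT_EXPR; both use boolean_type_node as the type.  */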