PR c++/83720 - ICE with lambda and LTO.
[official-gcc.git] / gcc / tree-ssa-scopedtables.c
blob 47cca782595ff9faa9bcd8e56a948538715551f2
1 /* Header file for SSA dominator optimizations.
2 Copyright (C) 2013-2018 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "function.h"
24 #include "basic-block.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "tree-pass.h"
28 #include "tree-pretty-print.h"
29 #include "tree-ssa-scopedtables.h"
30 #include "tree-ssa-threadedge.h"
31 #include "stor-layout.h"
32 #include "fold-const.h"
33 #include "tree-eh.h"
34 #include "internal-fn.h"
35 #include "tree-dfa.h"
36 #include "options.h"
37 #include "params.h"
39 static bool hashable_expr_equal_p (const struct hashable_expr *,
40 const struct hashable_expr *);
42 /* Initialize local stacks for this optimizer and record equivalences
43 upon entry to BB. Equivalences can come from the edge traversed to
44 reach BB or they may come from PHI nodes at the start of BB. */
46 /* Pop items off the unwinding stack, removing each from the hash table
47 until a marker is encountered. */
49 void
50 avail_exprs_stack::pop_to_marker ()
52 /* Remove all the expressions made available in this block. */
53 while (m_stack.length () > 0)
55 std::pair<expr_hash_elt_t, expr_hash_elt_t> victim = m_stack.pop ();
56 expr_hash_elt **slot;
58 if (victim.first == NULL)
59 break;
61 /* This must precede the actual removal from the hash table,
62 as ELEMENT and the table entry may share a call argument
63 vector which will be freed during removal. */
64 if (dump_file && (dump_flags & TDF_DETAILS))
66 fprintf (dump_file, "<<<< ");
67 victim.first->print (dump_file);
70 slot = m_avail_exprs->find_slot (victim.first, NO_INSERT);
71 gcc_assert (slot && *slot == victim.first);
72 if (victim.second != NULL)
74 delete *slot;
75 *slot = victim.second;
77 else
78 m_avail_exprs->clear_slot (slot);
82 /* Add <ELT1,ELT2> to the unwinding stack so they can be later removed
83 from the hash table. */
85 void
86 avail_exprs_stack::record_expr (class expr_hash_elt *elt1,
87 class expr_hash_elt *elt2,
88 char type)
90 if (elt1 && dump_file && (dump_flags & TDF_DETAILS))
92 fprintf (dump_file, "%c>>> ", type);
93 elt1->print (dump_file);
96 m_stack.safe_push (std::pair<expr_hash_elt_t, expr_hash_elt_t> (elt1, elt2));
99 /* Helper for walk_non_aliased_vuses. Determine if we arrived at
100 the desired memory state. */
102 static void *
103 vuse_eq (ao_ref *, tree vuse1, unsigned int cnt, void *data)
105 tree vuse2 = (tree) data;
106 if (vuse1 == vuse2)
107 return data;
109 /* This bounds the stmt walks we perform on reference lookups
110 to O(1) instead of O(N) where N is the number of dominating
111 stores leading to a candidate. We re-use the SCCVN param
112 for this as it is basically the same complexity. */
113 if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
114 return (void *)-1;
116 return NULL;
119 /* We looked for STMT in the hash table, but did not find it.
121 If STMT is an assignment from a binary operator, we may know something
122 about the operands relationship to each other which would allow
123 us to derive a constant value for the RHS of STMT. */
125 tree
126 avail_exprs_stack::simplify_binary_operation (gimple *stmt,
127 class expr_hash_elt element)
129 if (is_gimple_assign (stmt))
131 struct hashable_expr *expr = element.expr ();
132 if (expr->kind == EXPR_BINARY)
134 enum tree_code code = expr->ops.binary.op;
136 switch (code)
138 /* For these cases, if we know the operands
139 are equal, then we know the result. */
140 case MIN_EXPR:
141 case MAX_EXPR:
142 case BIT_IOR_EXPR:
143 case BIT_AND_EXPR:
144 case BIT_XOR_EXPR:
145 case MINUS_EXPR:
146 case TRUNC_DIV_EXPR:
147 case CEIL_DIV_EXPR:
148 case FLOOR_DIV_EXPR:
149 case ROUND_DIV_EXPR:
150 case EXACT_DIV_EXPR:
151 case TRUNC_MOD_EXPR:
152 case CEIL_MOD_EXPR:
153 case FLOOR_MOD_EXPR:
154 case ROUND_MOD_EXPR:
156 /* Build a simple equality expr and query the hash table
157 for it. */
158 struct hashable_expr expr;
159 expr.type = boolean_type_node;
160 expr.kind = EXPR_BINARY;
161 expr.ops.binary.op = EQ_EXPR;
162 expr.ops.binary.opnd0 = gimple_assign_rhs1 (stmt);
163 expr.ops.binary.opnd1 = gimple_assign_rhs2 (stmt);
164 class expr_hash_elt element2 (&expr, NULL_TREE);
165 expr_hash_elt **slot
166 = m_avail_exprs->find_slot (&element2, NO_INSERT);
167 tree result_type = TREE_TYPE (gimple_assign_lhs (stmt));
169 /* If the query was successful and returned a nonzero
170 result, then we know that the operands of the binary
171 expression are the same. In many cases this allows
172 us to compute a constant result of the expression
173 at compile time, even if we do not know the exact
174 values of the operands. */
175 if (slot && *slot && integer_onep ((*slot)->lhs ()))
177 switch (code)
179 case MIN_EXPR:
180 case MAX_EXPR:
181 case BIT_IOR_EXPR:
182 case BIT_AND_EXPR:
183 return gimple_assign_rhs1 (stmt);
185 case BIT_XOR_EXPR:
186 case MINUS_EXPR:
187 case TRUNC_MOD_EXPR:
188 case CEIL_MOD_EXPR:
189 case FLOOR_MOD_EXPR:
190 case ROUND_MOD_EXPR:
191 return build_zero_cst (result_type);
193 case TRUNC_DIV_EXPR:
194 case CEIL_DIV_EXPR:
195 case FLOOR_DIV_EXPR:
196 case ROUND_DIV_EXPR:
197 case EXACT_DIV_EXPR:
198 return build_one_cst (result_type);
200 default:
201 gcc_unreachable ();
204 break;
207 default:
208 break;
212 return NULL_TREE;
215 /* Search for an existing instance of STMT in the AVAIL_EXPRS_STACK table.
216 If found, return its LHS. Otherwise insert STMT in the table and
217 return NULL_TREE.
219 Also, when an expression is first inserted in the table, it is
220 also added to AVAIL_EXPRS_STACK, so that it can be removed when
221 we finish processing this block and its children. */
223 tree
224 avail_exprs_stack::lookup_avail_expr (gimple *stmt, bool insert, bool tbaa_p)
226 expr_hash_elt **slot;
227 tree lhs;
229 /* Get LHS of phi, assignment, or call; else NULL_TREE. */
230 if (gimple_code (stmt) == GIMPLE_PHI)
231 lhs = gimple_phi_result (stmt);
232 else
233 lhs = gimple_get_lhs (stmt);
235 class expr_hash_elt element (stmt, lhs);
237 if (dump_file && (dump_flags & TDF_DETAILS))
239 fprintf (dump_file, "LKUP ");
240 element.print (dump_file);
243 /* Don't bother remembering constant assignments and copy operations.
244 Constants and copy operations are handled by the constant/copy propagator
245 in optimize_stmt. */
246 if (element.expr()->kind == EXPR_SINGLE
247 && (TREE_CODE (element.expr()->ops.single.rhs) == SSA_NAME
248 || is_gimple_min_invariant (element.expr()->ops.single.rhs)))
249 return NULL_TREE;
251 /* Finally try to find the expression in the main expression hash table. */
252 slot = m_avail_exprs->find_slot (&element, (insert ? INSERT : NO_INSERT));
253 if (slot == NULL)
255 return NULL_TREE;
257 else if (*slot == NULL)
259 /* If we did not find the expression in the hash table, we may still
260 be able to produce a result for some expressions. */
261 tree retval = avail_exprs_stack::simplify_binary_operation (stmt,
262 element);
264 /* We have, in effect, allocated *SLOT for ELEMENT at this point.
265 We must initialize *SLOT to a real entry, even if we found a
266 way to prove ELEMENT was a constant after not finding ELEMENT
267 in the hash table.
269 An uninitialized or empty slot is an indication no prior objects
270 entered into the hash table had a hash collection with ELEMENT.
272 If we fail to do so and had such entries in the table, they
273 would become unreachable. */
274 class expr_hash_elt *element2 = new expr_hash_elt (element);
275 *slot = element2;
277 record_expr (element2, NULL, '2');
278 return retval;
281 /* If we found a redundant memory operation do an alias walk to
282 check if we can re-use it. */
283 if (gimple_vuse (stmt) != (*slot)->vop ())
285 tree vuse1 = (*slot)->vop ();
286 tree vuse2 = gimple_vuse (stmt);
287 /* If we have a load of a register and a candidate in the
288 hash with vuse1 then try to reach its stmt by walking
289 up the virtual use-def chain using walk_non_aliased_vuses.
290 But don't do this when removing expressions from the hash. */
291 ao_ref ref;
292 if (!(vuse1 && vuse2
293 && gimple_assign_single_p (stmt)
294 && TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME
295 && (ao_ref_init (&ref, gimple_assign_rhs1 (stmt)),
296 ref.base_alias_set = ref.ref_alias_set = tbaa_p ? -1 : 0, true)
297 && walk_non_aliased_vuses (&ref, vuse2,
298 vuse_eq, NULL, NULL, vuse1) != NULL))
300 if (insert)
302 class expr_hash_elt *element2 = new expr_hash_elt (element);
304 /* Insert the expr into the hash by replacing the current
305 entry and recording the value to restore in the
306 avail_exprs_stack. */
307 record_expr (element2, *slot, '2');
308 *slot = element2;
310 return NULL_TREE;
314 /* Extract the LHS of the assignment so that it can be used as the current
315 definition of another variable. */
316 lhs = (*slot)->lhs ();
318 /* Valueize the result. */
319 if (TREE_CODE (lhs) == SSA_NAME)
321 tree tem = SSA_NAME_VALUE (lhs);
322 if (tem)
323 lhs = tem;
326 if (dump_file && (dump_flags & TDF_DETAILS))
328 fprintf (dump_file, "FIND: ");
329 print_generic_expr (dump_file, lhs);
330 fprintf (dump_file, "\n");
333 return lhs;
336 /* Enter condition equivalence P into the hash table.
338 This indicates that a conditional expression has a known
339 boolean value. */
341 void
342 avail_exprs_stack::record_cond (cond_equivalence *p)
344 class expr_hash_elt *element = new expr_hash_elt (&p->cond, p->value);
345 expr_hash_elt **slot;
347 slot = m_avail_exprs->find_slot_with_hash (element, element->hash (), INSERT);
348 if (*slot == NULL)
350 *slot = element;
351 record_expr (element, NULL, '1');
353 else
354 delete element;
357 /* Generate a hash value for a pair of expressions. This can be used
358 iteratively by passing a previous result in HSTATE.
360 The same hash value is always returned for a given pair of expressions,
361 regardless of the order in which they are presented. This is useful in
362 hashing the operands of commutative functions. */
364 namespace inchash
367 static void
368 add_expr_commutative (const_tree t1, const_tree t2, hash &hstate)
370 hash one, two;
372 inchash::add_expr (t1, one);
373 inchash::add_expr (t2, two);
374 hstate.add_commutative (one, two);
377 /* Compute a hash value for a hashable_expr value EXPR and a
378 previously accumulated hash value VAL. If two hashable_expr
379 values compare equal with hashable_expr_equal_p, they must
380 hash to the same value, given an identical value of VAL.
381 The logic is intended to follow inchash::add_expr in tree.c. */
383 static void
384 add_hashable_expr (const struct hashable_expr *expr, hash &hstate)
386 switch (expr->kind)
388 case EXPR_SINGLE:
389 inchash::add_expr (expr->ops.single.rhs, hstate);
390 break;
392 case EXPR_UNARY:
393 hstate.add_object (expr->ops.unary.op);
395 /* Make sure to include signedness in the hash computation.
396 Don't hash the type, that can lead to having nodes which
397 compare equal according to operand_equal_p, but which
398 have different hash codes. */
399 if (CONVERT_EXPR_CODE_P (expr->ops.unary.op)
400 || expr->ops.unary.op == NON_LVALUE_EXPR)
401 hstate.add_int (TYPE_UNSIGNED (expr->type));
403 inchash::add_expr (expr->ops.unary.opnd, hstate);
404 break;
406 case EXPR_BINARY:
407 hstate.add_object (expr->ops.binary.op);
408 if (commutative_tree_code (expr->ops.binary.op))
409 inchash::add_expr_commutative (expr->ops.binary.opnd0,
410 expr->ops.binary.opnd1, hstate);
411 else
413 inchash::add_expr (expr->ops.binary.opnd0, hstate);
414 inchash::add_expr (expr->ops.binary.opnd1, hstate);
416 break;
418 case EXPR_TERNARY:
419 hstate.add_object (expr->ops.ternary.op);
420 if (commutative_ternary_tree_code (expr->ops.ternary.op))
421 inchash::add_expr_commutative (expr->ops.ternary.opnd0,
422 expr->ops.ternary.opnd1, hstate);
423 else
425 inchash::add_expr (expr->ops.ternary.opnd0, hstate);
426 inchash::add_expr (expr->ops.ternary.opnd1, hstate);
428 inchash::add_expr (expr->ops.ternary.opnd2, hstate);
429 break;
431 case EXPR_CALL:
433 size_t i;
434 enum tree_code code = CALL_EXPR;
435 gcall *fn_from;
437 hstate.add_object (code);
438 fn_from = expr->ops.call.fn_from;
439 if (gimple_call_internal_p (fn_from))
440 hstate.merge_hash ((hashval_t) gimple_call_internal_fn (fn_from));
441 else
442 inchash::add_expr (gimple_call_fn (fn_from), hstate);
443 for (i = 0; i < expr->ops.call.nargs; i++)
444 inchash::add_expr (expr->ops.call.args[i], hstate);
446 break;
448 case EXPR_PHI:
450 size_t i;
452 for (i = 0; i < expr->ops.phi.nargs; i++)
453 inchash::add_expr (expr->ops.phi.args[i], hstate);
455 break;
457 default:
458 gcc_unreachable ();
464 /* Hashing and equality functions. We compute a value number for expressions
465 using the code of the expression and the SSA numbers of its operands. */
467 static hashval_t
468 avail_expr_hash (class expr_hash_elt *p)
470 const struct hashable_expr *expr = p->expr ();
471 inchash::hash hstate;
473 if (expr->kind == EXPR_SINGLE)
475 /* T could potentially be a switch index or a goto dest. */
476 tree t = expr->ops.single.rhs;
477 if (TREE_CODE (t) == MEM_REF || handled_component_p (t))
479 /* Make equivalent statements of both these kinds hash together.
480 Dealing with both MEM_REF and ARRAY_REF allows us not to care
481 about equivalence with other statements not considered here. */
482 bool reverse;
483 poly_int64 offset, size, max_size;
484 tree base = get_ref_base_and_extent (t, &offset, &size, &max_size,
485 &reverse);
486 /* Strictly, we could try to normalize variable-sized accesses too,
487 but here we just deal with the common case. */
488 if (known_size_p (max_size)
489 && known_eq (size, max_size))
491 enum tree_code code = MEM_REF;
492 hstate.add_object (code);
493 inchash::add_expr (base, hstate);
494 hstate.add_object (offset);
495 hstate.add_object (size);
496 return hstate.end ();
501 inchash::add_hashable_expr (expr, hstate);
503 return hstate.end ();
506 /* Compares trees T0 and T1 to see if they are MEM_REF or ARRAY_REFs equivalent
507 to each other. (That is, they return the value of the same bit of memory.)
509 Return TRUE if the two are so equivalent; FALSE if not (which could still
510 mean the two are equivalent by other means). */
512 static bool
513 equal_mem_array_ref_p (tree t0, tree t1)
515 if (TREE_CODE (t0) != MEM_REF && ! handled_component_p (t0))
516 return false;
517 if (TREE_CODE (t1) != MEM_REF && ! handled_component_p (t1))
518 return false;
520 if (!types_compatible_p (TREE_TYPE (t0), TREE_TYPE (t1)))
521 return false;
522 bool rev0;
523 poly_int64 off0, sz0, max0;
524 tree base0 = get_ref_base_and_extent (t0, &off0, &sz0, &max0, &rev0);
525 if (!known_size_p (max0)
526 || maybe_ne (sz0, max0))
527 return false;
529 bool rev1;
530 poly_int64 off1, sz1, max1;
531 tree base1 = get_ref_base_and_extent (t1, &off1, &sz1, &max1, &rev1);
532 if (!known_size_p (max1)
533 || maybe_ne (sz1, max1))
534 return false;
536 if (rev0 != rev1)
537 return false;
539 /* Types were compatible, so this is a sanity check. */
540 gcc_assert (known_eq (sz0, sz1));
542 return known_eq (off0, off1) && operand_equal_p (base0, base1, 0);
545 /* Compare two hashable_expr structures for equivalence. They are
546 considered equivalent when the expressions they denote must
547 necessarily be equal. The logic is intended to follow that of
548 operand_equal_p in fold-const.c */
550 static bool
551 hashable_expr_equal_p (const struct hashable_expr *expr0,
552 const struct hashable_expr *expr1)
554 tree type0 = expr0->type;
555 tree type1 = expr1->type;
557 /* If either type is NULL, there is nothing to check. */
558 if ((type0 == NULL_TREE) ^ (type1 == NULL_TREE))
559 return false;
561 /* If both types don't have the same signedness, precision, and mode,
562 then we can't consider them equal. */
563 if (type0 != type1
564 && (TREE_CODE (type0) == ERROR_MARK
565 || TREE_CODE (type1) == ERROR_MARK
566 || TYPE_UNSIGNED (type0) != TYPE_UNSIGNED (type1)
567 || TYPE_PRECISION (type0) != TYPE_PRECISION (type1)
568 || TYPE_MODE (type0) != TYPE_MODE (type1)))
569 return false;
571 if (expr0->kind != expr1->kind)
572 return false;
574 switch (expr0->kind)
576 case EXPR_SINGLE:
577 return equal_mem_array_ref_p (expr0->ops.single.rhs,
578 expr1->ops.single.rhs)
579 || operand_equal_p (expr0->ops.single.rhs,
580 expr1->ops.single.rhs, 0);
581 case EXPR_UNARY:
582 if (expr0->ops.unary.op != expr1->ops.unary.op)
583 return false;
585 if ((CONVERT_EXPR_CODE_P (expr0->ops.unary.op)
586 || expr0->ops.unary.op == NON_LVALUE_EXPR)
587 && TYPE_UNSIGNED (expr0->type) != TYPE_UNSIGNED (expr1->type))
588 return false;
590 return operand_equal_p (expr0->ops.unary.opnd,
591 expr1->ops.unary.opnd, 0);
593 case EXPR_BINARY:
594 if (expr0->ops.binary.op != expr1->ops.binary.op)
595 return false;
597 if (operand_equal_p (expr0->ops.binary.opnd0,
598 expr1->ops.binary.opnd0, 0)
599 && operand_equal_p (expr0->ops.binary.opnd1,
600 expr1->ops.binary.opnd1, 0))
601 return true;
603 /* For commutative ops, allow the other order. */
604 return (commutative_tree_code (expr0->ops.binary.op)
605 && operand_equal_p (expr0->ops.binary.opnd0,
606 expr1->ops.binary.opnd1, 0)
607 && operand_equal_p (expr0->ops.binary.opnd1,
608 expr1->ops.binary.opnd0, 0));
610 case EXPR_TERNARY:
611 if (expr0->ops.ternary.op != expr1->ops.ternary.op
612 || !operand_equal_p (expr0->ops.ternary.opnd2,
613 expr1->ops.ternary.opnd2, 0))
614 return false;
616 /* BIT_INSERT_EXPR has an implict operand as the type precision
617 of op1. Need to check to make sure they are the same. */
618 if (expr0->ops.ternary.op == BIT_INSERT_EXPR
619 && TREE_CODE (expr0->ops.ternary.opnd1) == INTEGER_CST
620 && TREE_CODE (expr1->ops.ternary.opnd1) == INTEGER_CST
621 && TYPE_PRECISION (TREE_TYPE (expr0->ops.ternary.opnd1))
622 != TYPE_PRECISION (TREE_TYPE (expr1->ops.ternary.opnd1)))
623 return false;
625 if (operand_equal_p (expr0->ops.ternary.opnd0,
626 expr1->ops.ternary.opnd0, 0)
627 && operand_equal_p (expr0->ops.ternary.opnd1,
628 expr1->ops.ternary.opnd1, 0))
629 return true;
631 /* For commutative ops, allow the other order. */
632 return (commutative_ternary_tree_code (expr0->ops.ternary.op)
633 && operand_equal_p (expr0->ops.ternary.opnd0,
634 expr1->ops.ternary.opnd1, 0)
635 && operand_equal_p (expr0->ops.ternary.opnd1,
636 expr1->ops.ternary.opnd0, 0));
638 case EXPR_CALL:
640 size_t i;
642 /* If the calls are to different functions, then they
643 clearly cannot be equal. */
644 if (!gimple_call_same_target_p (expr0->ops.call.fn_from,
645 expr1->ops.call.fn_from))
646 return false;
648 if (! expr0->ops.call.pure)
649 return false;
651 if (expr0->ops.call.nargs != expr1->ops.call.nargs)
652 return false;
654 for (i = 0; i < expr0->ops.call.nargs; i++)
655 if (! operand_equal_p (expr0->ops.call.args[i],
656 expr1->ops.call.args[i], 0))
657 return false;
659 if (stmt_could_throw_p (expr0->ops.call.fn_from))
661 int lp0 = lookup_stmt_eh_lp (expr0->ops.call.fn_from);
662 int lp1 = lookup_stmt_eh_lp (expr1->ops.call.fn_from);
663 if ((lp0 > 0 || lp1 > 0) && lp0 != lp1)
664 return false;
667 return true;
670 case EXPR_PHI:
672 size_t i;
674 if (expr0->ops.phi.nargs != expr1->ops.phi.nargs)
675 return false;
677 for (i = 0; i < expr0->ops.phi.nargs; i++)
678 if (! operand_equal_p (expr0->ops.phi.args[i],
679 expr1->ops.phi.args[i], 0))
680 return false;
682 return true;
685 default:
686 gcc_unreachable ();
690 /* Given a statement STMT, construct a hash table element. */
692 expr_hash_elt::expr_hash_elt (gimple *stmt, tree orig_lhs)
694 enum gimple_code code = gimple_code (stmt);
695 struct hashable_expr *expr = this->expr ();
697 if (code == GIMPLE_ASSIGN)
699 enum tree_code subcode = gimple_assign_rhs_code (stmt);
701 switch (get_gimple_rhs_class (subcode))
703 case GIMPLE_SINGLE_RHS:
704 expr->kind = EXPR_SINGLE;
705 expr->type = TREE_TYPE (gimple_assign_rhs1 (stmt));
706 expr->ops.single.rhs = gimple_assign_rhs1 (stmt);
707 break;
708 case GIMPLE_UNARY_RHS:
709 expr->kind = EXPR_UNARY;
710 expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
711 if (CONVERT_EXPR_CODE_P (subcode))
712 subcode = NOP_EXPR;
713 expr->ops.unary.op = subcode;
714 expr->ops.unary.opnd = gimple_assign_rhs1 (stmt);
715 break;
716 case GIMPLE_BINARY_RHS:
717 expr->kind = EXPR_BINARY;
718 expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
719 expr->ops.binary.op = subcode;
720 expr->ops.binary.opnd0 = gimple_assign_rhs1 (stmt);
721 expr->ops.binary.opnd1 = gimple_assign_rhs2 (stmt);
722 break;
723 case GIMPLE_TERNARY_RHS:
724 expr->kind = EXPR_TERNARY;
725 expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
726 expr->ops.ternary.op = subcode;
727 expr->ops.ternary.opnd0 = gimple_assign_rhs1 (stmt);
728 expr->ops.ternary.opnd1 = gimple_assign_rhs2 (stmt);
729 expr->ops.ternary.opnd2 = gimple_assign_rhs3 (stmt);
730 break;
731 default:
732 gcc_unreachable ();
735 else if (code == GIMPLE_COND)
737 expr->type = boolean_type_node;
738 expr->kind = EXPR_BINARY;
739 expr->ops.binary.op = gimple_cond_code (stmt);
740 expr->ops.binary.opnd0 = gimple_cond_lhs (stmt);
741 expr->ops.binary.opnd1 = gimple_cond_rhs (stmt);
743 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
745 size_t nargs = gimple_call_num_args (call_stmt);
746 size_t i;
748 gcc_assert (gimple_call_lhs (call_stmt));
750 expr->type = TREE_TYPE (gimple_call_lhs (call_stmt));
751 expr->kind = EXPR_CALL;
752 expr->ops.call.fn_from = call_stmt;
754 if (gimple_call_flags (call_stmt) & (ECF_CONST | ECF_PURE))
755 expr->ops.call.pure = true;
756 else
757 expr->ops.call.pure = false;
759 expr->ops.call.nargs = nargs;
760 expr->ops.call.args = XCNEWVEC (tree, nargs);
761 for (i = 0; i < nargs; i++)
762 expr->ops.call.args[i] = gimple_call_arg (call_stmt, i);
764 else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
766 expr->type = TREE_TYPE (gimple_switch_index (swtch_stmt));
767 expr->kind = EXPR_SINGLE;
768 expr->ops.single.rhs = gimple_switch_index (swtch_stmt);
770 else if (code == GIMPLE_GOTO)
772 expr->type = TREE_TYPE (gimple_goto_dest (stmt));
773 expr->kind = EXPR_SINGLE;
774 expr->ops.single.rhs = gimple_goto_dest (stmt);
776 else if (code == GIMPLE_PHI)
778 size_t nargs = gimple_phi_num_args (stmt);
779 size_t i;
781 expr->type = TREE_TYPE (gimple_phi_result (stmt));
782 expr->kind = EXPR_PHI;
783 expr->ops.phi.nargs = nargs;
784 expr->ops.phi.args = XCNEWVEC (tree, nargs);
785 for (i = 0; i < nargs; i++)
786 expr->ops.phi.args[i] = gimple_phi_arg_def (stmt, i);
788 else
789 gcc_unreachable ();
791 m_lhs = orig_lhs;
792 m_vop = gimple_vuse (stmt);
793 m_hash = avail_expr_hash (this);
794 m_stamp = this;
797 /* Given a hashable_expr expression ORIG and an ORIG_LHS,
798 construct a hash table element. */
800 expr_hash_elt::expr_hash_elt (struct hashable_expr *orig, tree orig_lhs)
802 m_expr = *orig;
803 m_lhs = orig_lhs;
804 m_vop = NULL_TREE;
805 m_hash = avail_expr_hash (this);
806 m_stamp = this;
809 /* Copy constructor for a hash table element. */
811 expr_hash_elt::expr_hash_elt (class expr_hash_elt &old_elt)
813 m_expr = old_elt.m_expr;
814 m_lhs = old_elt.m_lhs;
815 m_vop = old_elt.m_vop;
816 m_hash = old_elt.m_hash;
817 m_stamp = this;
819 /* Now deep copy the malloc'd space for CALL and PHI args. */
820 if (old_elt.m_expr.kind == EXPR_CALL)
822 size_t nargs = old_elt.m_expr.ops.call.nargs;
823 size_t i;
825 m_expr.ops.call.args = XCNEWVEC (tree, nargs);
826 for (i = 0; i < nargs; i++)
827 m_expr.ops.call.args[i] = old_elt.m_expr.ops.call.args[i];
829 else if (old_elt.m_expr.kind == EXPR_PHI)
831 size_t nargs = old_elt.m_expr.ops.phi.nargs;
832 size_t i;
834 m_expr.ops.phi.args = XCNEWVEC (tree, nargs);
835 for (i = 0; i < nargs; i++)
836 m_expr.ops.phi.args[i] = old_elt.m_expr.ops.phi.args[i];
840 /* Calls and PHIs have a variable number of arguments that are allocated
841 on the heap. Thus we have to have a special dtor to release them. */
843 expr_hash_elt::~expr_hash_elt ()
845 if (m_expr.kind == EXPR_CALL)
846 free (m_expr.ops.call.args);
847 else if (m_expr.kind == EXPR_PHI)
848 free (m_expr.ops.phi.args);
851 /* Print a diagnostic dump of an expression hash table entry. */
853 void
854 expr_hash_elt::print (FILE *stream)
856 fprintf (stream, "STMT ");
858 if (m_lhs)
860 print_generic_expr (stream, m_lhs);
861 fprintf (stream, " = ");
864 switch (m_expr.kind)
866 case EXPR_SINGLE:
867 print_generic_expr (stream, m_expr.ops.single.rhs);
868 break;
870 case EXPR_UNARY:
871 fprintf (stream, "%s ", get_tree_code_name (m_expr.ops.unary.op));
872 print_generic_expr (stream, m_expr.ops.unary.opnd);
873 break;
875 case EXPR_BINARY:
876 print_generic_expr (stream, m_expr.ops.binary.opnd0);
877 fprintf (stream, " %s ", get_tree_code_name (m_expr.ops.binary.op));
878 print_generic_expr (stream, m_expr.ops.binary.opnd1);
879 break;
881 case EXPR_TERNARY:
882 fprintf (stream, " %s <", get_tree_code_name (m_expr.ops.ternary.op));
883 print_generic_expr (stream, m_expr.ops.ternary.opnd0);
884 fputs (", ", stream);
885 print_generic_expr (stream, m_expr.ops.ternary.opnd1);
886 fputs (", ", stream);
887 print_generic_expr (stream, m_expr.ops.ternary.opnd2);
888 fputs (">", stream);
889 break;
891 case EXPR_CALL:
893 size_t i;
894 size_t nargs = m_expr.ops.call.nargs;
895 gcall *fn_from;
897 fn_from = m_expr.ops.call.fn_from;
898 if (gimple_call_internal_p (fn_from))
899 fputs (internal_fn_name (gimple_call_internal_fn (fn_from)),
900 stream);
901 else
902 print_generic_expr (stream, gimple_call_fn (fn_from));
903 fprintf (stream, " (");
904 for (i = 0; i < nargs; i++)
906 print_generic_expr (stream, m_expr.ops.call.args[i]);
907 if (i + 1 < nargs)
908 fprintf (stream, ", ");
910 fprintf (stream, ")");
912 break;
914 case EXPR_PHI:
916 size_t i;
917 size_t nargs = m_expr.ops.phi.nargs;
919 fprintf (stream, "PHI <");
920 for (i = 0; i < nargs; i++)
922 print_generic_expr (stream, m_expr.ops.phi.args[i]);
923 if (i + 1 < nargs)
924 fprintf (stream, ", ");
926 fprintf (stream, ">");
928 break;
931 if (m_vop)
933 fprintf (stream, " with ");
934 print_generic_expr (stream, m_vop);
937 fprintf (stream, "\n");
940 /* Pop entries off the stack until we hit the NULL marker.
941 For each entry popped, use the SRC/DEST pair to restore
942 SRC to its prior value. */
944 void
945 const_and_copies::pop_to_marker (void)
947 while (m_stack.length () > 0)
949 tree prev_value, dest;
951 dest = m_stack.pop ();
953 /* A NULL value indicates we should stop unwinding, otherwise
954 pop off the next entry as they're recorded in pairs. */
955 if (dest == NULL)
956 break;
958 if (dump_file && (dump_flags & TDF_DETAILS))
960 fprintf (dump_file, "<<<< COPY ");
961 print_generic_expr (dump_file, dest);
962 fprintf (dump_file, " = ");
963 print_generic_expr (dump_file, SSA_NAME_VALUE (dest));
964 fprintf (dump_file, "\n");
967 prev_value = m_stack.pop ();
968 set_ssa_name_value (dest, prev_value);
972 /* Record that X has the value Y and that X's previous value is PREV_X.
974 This variant does not follow the value chain for Y. */
976 void
977 const_and_copies::record_const_or_copy_raw (tree x, tree y, tree prev_x)
979 if (dump_file && (dump_flags & TDF_DETAILS))
981 fprintf (dump_file, "0>>> COPY ");
982 print_generic_expr (dump_file, x);
983 fprintf (dump_file, " = ");
984 print_generic_expr (dump_file, y);
985 fprintf (dump_file, "\n");
988 set_ssa_name_value (x, y);
989 m_stack.reserve (2);
990 m_stack.quick_push (prev_x);
991 m_stack.quick_push (x);
994 /* Record that X has the value Y. */
996 void
997 const_and_copies::record_const_or_copy (tree x, tree y)
999 record_const_or_copy (x, y, SSA_NAME_VALUE (x));
1002 /* Record that X has the value Y and that X's previous value is PREV_X.
1004 This variant follow's Y value chain. */
1006 void
1007 const_and_copies::record_const_or_copy (tree x, tree y, tree prev_x)
1009 /* Y may be NULL if we are invalidating entries in the table. */
1010 if (y && TREE_CODE (y) == SSA_NAME)
1012 tree tmp = SSA_NAME_VALUE (y);
1013 y = tmp ? tmp : y;
1016 record_const_or_copy_raw (x, y, prev_x);
1019 bool
1020 expr_elt_hasher::equal (const value_type &p1, const compare_type &p2)
1022 const struct hashable_expr *expr1 = p1->expr ();
1023 const struct expr_hash_elt *stamp1 = p1->stamp ();
1024 const struct hashable_expr *expr2 = p2->expr ();
1025 const struct expr_hash_elt *stamp2 = p2->stamp ();
1027 /* This case should apply only when removing entries from the table. */
1028 if (stamp1 == stamp2)
1029 return true;
1031 if (p1->hash () != p2->hash ())
1032 return false;
1034 /* In case of a collision, both RHS have to be identical and have the
1035 same VUSE operands. */
1036 if (hashable_expr_equal_p (expr1, expr2)
1037 && types_compatible_p (expr1->type, expr2->type))
1038 return true;
1040 return false;
1043 /* Given a conditional expression COND as a tree, initialize
1044 a hashable_expr expression EXPR. The conditional must be a
1045 comparison or logical negation. A constant or a variable is
1046 not permitted. */
1048 void
1049 initialize_expr_from_cond (tree cond, struct hashable_expr *expr)
1051 expr->type = boolean_type_node;
1053 if (COMPARISON_CLASS_P (cond))
1055 expr->kind = EXPR_BINARY;
1056 expr->ops.binary.op = TREE_CODE (cond);
1057 expr->ops.binary.opnd0 = TREE_OPERAND (cond, 0);
1058 expr->ops.binary.opnd1 = TREE_OPERAND (cond, 1);
1060 else if (TREE_CODE (cond) == TRUTH_NOT_EXPR)
1062 expr->kind = EXPR_UNARY;
1063 expr->ops.unary.op = TRUTH_NOT_EXPR;
1064 expr->ops.unary.opnd = TREE_OPERAND (cond, 0);
1066 else
1067 gcc_unreachable ();
1070 /* Build a cond_equivalence record indicating that the comparison
1071 CODE holds between operands OP0 and OP1 and push it to **P. */
1073 static void
1074 build_and_record_new_cond (enum tree_code code,
1075 tree op0, tree op1,
1076 vec<cond_equivalence> *p,
1077 bool val = true)
1079 cond_equivalence c;
1080 struct hashable_expr *cond = &c.cond;
1082 gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);
1084 cond->type = boolean_type_node;
1085 cond->kind = EXPR_BINARY;
1086 cond->ops.binary.op = code;
1087 cond->ops.binary.opnd0 = op0;
1088 cond->ops.binary.opnd1 = op1;
1090 c.value = val ? boolean_true_node : boolean_false_node;
1091 p->safe_push (c);
1094 /* Record that COND is true and INVERTED is false into the edge information
1095 structure. Also record that any conditions dominated by COND are true
1096 as well.
1098 For example, if a < b is true, then a <= b must also be true. */
1100 void
1101 record_conditions (vec<cond_equivalence> *p, tree cond, tree inverted)
1103 tree op0, op1;
1104 cond_equivalence c;
1106 if (!COMPARISON_CLASS_P (cond))
1107 return;
1109 op0 = TREE_OPERAND (cond, 0);
1110 op1 = TREE_OPERAND (cond, 1);
1112 switch (TREE_CODE (cond))
1114 case LT_EXPR:
1115 case GT_EXPR:
1116 if (FLOAT_TYPE_P (TREE_TYPE (op0)))
1118 build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
1119 build_and_record_new_cond (LTGT_EXPR, op0, op1, p);
1122 build_and_record_new_cond ((TREE_CODE (cond) == LT_EXPR
1123 ? LE_EXPR : GE_EXPR),
1124 op0, op1, p);
1125 build_and_record_new_cond (NE_EXPR, op0, op1, p);
1126 build_and_record_new_cond (EQ_EXPR, op0, op1, p, false);
1127 break;
1129 case GE_EXPR:
1130 case LE_EXPR:
1131 if (FLOAT_TYPE_P (TREE_TYPE (op0)))
1133 build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
1135 break;
1137 case EQ_EXPR:
1138 if (FLOAT_TYPE_P (TREE_TYPE (op0)))
1140 build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
1142 build_and_record_new_cond (LE_EXPR, op0, op1, p);
1143 build_and_record_new_cond (GE_EXPR, op0, op1, p);
1144 break;
1146 case UNORDERED_EXPR:
1147 build_and_record_new_cond (NE_EXPR, op0, op1, p);
1148 build_and_record_new_cond (UNLE_EXPR, op0, op1, p);
1149 build_and_record_new_cond (UNGE_EXPR, op0, op1, p);
1150 build_and_record_new_cond (UNEQ_EXPR, op0, op1, p);
1151 build_and_record_new_cond (UNLT_EXPR, op0, op1, p);
1152 build_and_record_new_cond (UNGT_EXPR, op0, op1, p);
1153 break;
1155 case UNLT_EXPR:
1156 case UNGT_EXPR:
1157 build_and_record_new_cond ((TREE_CODE (cond) == UNLT_EXPR
1158 ? UNLE_EXPR : UNGE_EXPR),
1159 op0, op1, p);
1160 build_and_record_new_cond (NE_EXPR, op0, op1, p);
1161 break;
1163 case UNEQ_EXPR:
1164 build_and_record_new_cond (UNLE_EXPR, op0, op1, p);
1165 build_and_record_new_cond (UNGE_EXPR, op0, op1, p);
1166 break;
1168 case LTGT_EXPR:
1169 build_and_record_new_cond (NE_EXPR, op0, op1, p);
1170 build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
1171 break;
1173 default:
1174 break;
1177 /* Now store the original true and false conditions into the first
1178 two slots. */
1179 initialize_expr_from_cond (cond, &c.cond);
1180 c.value = boolean_true_node;
1181 p->safe_push (c);
1183 /* It is possible for INVERTED to be the negation of a comparison,
1184 and not a valid RHS or GIMPLE_COND condition. This happens because
1185 invert_truthvalue may return such an expression when asked to invert
1186 a floating-point comparison. These comparisons are not assumed to
1187 obey the trichotomy law. */
1188 initialize_expr_from_cond (inverted, &c.cond);
1189 c.value = boolean_false_node;
1190 p->safe_push (c);