gcc/tree-ssa-scopedtables.cc

/* Scoped hash tables for the SSA dominator optimizations.
   Copyright (C) 2013-2024 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "function.h"
#include "basic-block.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "tree-pretty-print.h"
#include "tree-ssa-scopedtables.h"
#include "tree-ssa-threadedge.h"
#include "stor-layout.h"
#include "fold-const.h"
#include "tree-eh.h"
#include "internal-fn.h"
#include "tree-dfa.h"
#include "options.h"

static bool hashable_expr_equal_p (const struct hashable_expr *,
                                   const struct hashable_expr *);

/* Pop items off the unwinding stack, removing each from the hash table
   until a marker is encountered.  */

void
avail_exprs_stack::pop_to_marker ()
{
  /* Remove all the expressions made available in this block.  */
  while (m_stack.length () > 0)
    {
      std::pair<expr_hash_elt_t, expr_hash_elt_t> victim = m_stack.pop ();
      expr_hash_elt **slot;

      if (victim.first == NULL)
        break;

      /* This must precede the actual removal from the hash table,
         as ELEMENT and the table entry may share a call argument
         vector which will be freed during removal.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "<<<< ");
          victim.first->print (dump_file);
        }

      slot = m_avail_exprs->find_slot (victim.first, NO_INSERT);
      gcc_assert (slot && *slot == victim.first);
      if (victim.second != NULL)
        {
          delete *slot;
          *slot = victim.second;
        }
      else
        m_avail_exprs->clear_slot (slot);
    }
}
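
/* For example, the dominator optimizer pushes a marker (a NULL pair)
   when it enters a basic block, records expressions while processing
   the block's statements, and calls pop_to_marker on the way out, so
   the table again reflects only the equivalences that dominate the
   next block visited.  */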

/* Add <ELT1,ELT2> to the unwinding stack so they can be later removed
   from the hash table.  */

void
avail_exprs_stack::record_expr (class expr_hash_elt *elt1,
                                class expr_hash_elt *elt2,
                                char type)
{
  if (elt1 && dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "%c>>> ", type);
      elt1->print (dump_file);
    }

  m_stack.safe_push (std::pair<expr_hash_elt_t, expr_hash_elt_t> (elt1, elt2));
}

/* Helper for walk_non_aliased_vuses.  Determine if we arrived at
   the desired memory state.  */

static void *
vuse_eq (ao_ref *, tree vuse1, void *data)
{
  tree vuse2 = (tree) data;
  if (vuse1 == vuse2)
    return data;

  return NULL;
}
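
/* walk_non_aliased_vuses calls this on each virtual operand it visits,
   with DATA being the VUSE we hope to reach; returning DATA (non-NULL)
   terminates the walk successfully, showing that the two memory states
   coincide along a path of non-clobbering definitions.  */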

/* We looked for STMT in the hash table, but did not find it.

   If STMT is an assignment from a binary operator, we may know something
   about the operands' relationship to each other which would allow
   us to derive a constant value for the RHS of STMT.  */

tree
avail_exprs_stack::simplify_binary_operation (gimple *stmt,
                                              class expr_hash_elt element)
{
  if (is_gimple_assign (stmt))
    {
      struct hashable_expr *expr = element.expr ();
      if (expr->kind == EXPR_BINARY)
        {
          enum tree_code code = expr->ops.binary.op;

          switch (code)
            {
              /* For these cases, if we know some relationships
                 between the operands, then we can simplify.  */
            case MIN_EXPR:
            case MAX_EXPR:
              {
                /* Build a simple equality expr and query the hash table
                   for it.  */
                struct hashable_expr expr;
                expr.type = boolean_type_node;
                expr.kind = EXPR_BINARY;
                expr.ops.binary.op = LE_EXPR;
                tree rhs1 = gimple_assign_rhs1 (stmt);
                tree rhs2 = gimple_assign_rhs2 (stmt);
                if (tree_swap_operands_p (rhs1, rhs2))
                  std::swap (rhs1, rhs2);
                expr.ops.binary.opnd0 = rhs1;
                expr.ops.binary.opnd1 = rhs2;
                class expr_hash_elt element2 (&expr, NULL_TREE);
                expr_hash_elt **slot
                  = m_avail_exprs->find_slot (&element2, NO_INSERT);

                /* If the query was successful and returned a nonzero
                   result, then we know the result of the MIN/MAX, even
                   though it is not a constant value.  */
                if (slot && *slot && integer_onep ((*slot)->lhs ()))
                  return code == MIN_EXPR ? rhs1 : rhs2;

                /* Try again, this time with GE_EXPR.  */
                expr.ops.binary.op = GE_EXPR;
                class expr_hash_elt element3 (&expr, NULL_TREE);
                slot = m_avail_exprs->find_slot (&element3, NO_INSERT);

                /* If the query was successful and returned a nonzero
                   result, then we know the result of the MIN/MAX, even
                   though it is not a constant value.  */
                if (slot && *slot && integer_onep ((*slot)->lhs ()))
                  return code == MIN_EXPR ? rhs2 : rhs1;
              }
              break;

              /* For these cases, if we know the operands
                 are equal, then we know the result.  */
            case BIT_IOR_EXPR:
            case BIT_AND_EXPR:
            case BIT_XOR_EXPR:
            case MINUS_EXPR:
            case TRUNC_DIV_EXPR:
            case CEIL_DIV_EXPR:
            case FLOOR_DIV_EXPR:
            case ROUND_DIV_EXPR:
            case EXACT_DIV_EXPR:
            case TRUNC_MOD_EXPR:
            case CEIL_MOD_EXPR:
            case FLOOR_MOD_EXPR:
            case ROUND_MOD_EXPR:
              {
                /* Build a simple equality expr and query the hash table
                   for it.  */
                struct hashable_expr expr;
                expr.type = boolean_type_node;
                expr.kind = EXPR_BINARY;
                expr.ops.binary.op = EQ_EXPR;
                tree rhs1 = gimple_assign_rhs1 (stmt);
                tree rhs2 = gimple_assign_rhs2 (stmt);
                if (tree_swap_operands_p (rhs1, rhs2))
                  std::swap (rhs1, rhs2);
                expr.ops.binary.opnd0 = rhs1;
                expr.ops.binary.opnd1 = rhs2;
                class expr_hash_elt element2 (&expr, NULL_TREE);
                expr_hash_elt **slot
                  = m_avail_exprs->find_slot (&element2, NO_INSERT);
                tree result_type = TREE_TYPE (gimple_assign_lhs (stmt));

                /* If the query was successful and returned a nonzero
                   result, then we know that the operands of the binary
                   expression are the same.  In many cases this allows
                   us to compute a constant result of the expression
                   at compile time, even if we do not know the exact
                   values of the operands.  */
                if (slot && *slot && integer_onep ((*slot)->lhs ()))
                  {
                    switch (code)
                      {
                      case BIT_IOR_EXPR:
                      case BIT_AND_EXPR:
                        return gimple_assign_rhs1 (stmt);

                      case MINUS_EXPR:
                        /* This is unsafe for certain floats even in non-IEEE
                           formats.  In IEEE, it is unsafe because it gives
                           the wrong result for NaNs.  */
                        if (FLOAT_TYPE_P (result_type)
                            && HONOR_NANS (result_type))
                          break;
                        /* FALLTHRU */
                      case BIT_XOR_EXPR:
                      case TRUNC_MOD_EXPR:
                      case CEIL_MOD_EXPR:
                      case FLOOR_MOD_EXPR:
                      case ROUND_MOD_EXPR:
                        return build_zero_cst (result_type);

                      case TRUNC_DIV_EXPR:
                      case CEIL_DIV_EXPR:
                      case FLOOR_DIV_EXPR:
                      case ROUND_DIV_EXPR:
                      case EXACT_DIV_EXPR:
                        /* Avoid _Fract types where we can't build 1.  */
                        if (ALL_FRACT_MODE_P (TYPE_MODE (result_type)))
                          break;
                        return build_one_cst (result_type);

                      default:
                        gcc_unreachable ();
                      }
                  }
              }
              break;

            default:
              break;
            }
        }
    }

  return NULL_TREE;
}
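
/* For example, if the table records x_1 <= y_2 as true, then
   MIN_EXPR <x_1, y_2> simplifies to x_1 and MAX_EXPR <x_1, y_2> to y_2.
   Similarly, if x_1 == y_2 is recorded as true, then x_1 - y_2 folds
   to 0 and x_1 / y_2 to 1, subject to the NaN and _Fract checks
   above.  */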

/* Search for an existing instance of STMT in the AVAIL_EXPRS_STACK table.
   If found, return its LHS.  Otherwise insert STMT in the table and
   return NULL_TREE.

   Also, when an expression is first inserted in the table, it is also
   added to AVAIL_EXPRS_STACK, so that it can be removed when
   we finish processing this block and its children.  */

tree
avail_exprs_stack::lookup_avail_expr (gimple *stmt, bool insert, bool tbaa_p,
                                      expr_hash_elt **elt)
{
  expr_hash_elt **slot;
  tree lhs;

  /* Get LHS of phi, assignment, or call; else NULL_TREE.  */
  if (gimple_code (stmt) == GIMPLE_PHI)
    lhs = gimple_phi_result (stmt);
  else
    lhs = gimple_get_lhs (stmt);

  class expr_hash_elt element (stmt, lhs);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "LKUP ");
      element.print (dump_file);
    }

  /* Don't bother remembering constant assignments and copy operations.
     Constants and copy operations are handled by the constant/copy propagator
     in optimize_stmt.  */
  if (element.expr ()->kind == EXPR_SINGLE
      && (TREE_CODE (element.expr ()->ops.single.rhs) == SSA_NAME
          || is_gimple_min_invariant (element.expr ()->ops.single.rhs)))
    return NULL_TREE;

  /* Finally try to find the expression in the main expression hash table.  */
  slot = m_avail_exprs->find_slot (&element, (insert ? INSERT : NO_INSERT));
  if (slot == NULL)
    {
      return NULL_TREE;
    }
  else if (*slot == NULL)
    {
      /* We have, in effect, allocated *SLOT for ELEMENT at this point.
         We must initialize *SLOT to a real entry, even if we found a
         way to prove ELEMENT was a constant after not finding ELEMENT
         in the hash table.

         An uninitialized or empty slot is an indication no prior objects
         entered into the hash table had a hash collision with ELEMENT.

         If we fail to do so and had such entries in the table, they
         would become unreachable.  */
      class expr_hash_elt *element2 = new expr_hash_elt (element);
      *slot = element2;

      /* If we did not find the expression in the hash table, we may still
         be able to produce a result for some expressions.  */
      tree retval = avail_exprs_stack::simplify_binary_operation (stmt,
                                                                  element);

      record_expr (element2, NULL, '2');
      return retval;
    }

  /* If we found a redundant memory operation do an alias walk to
     check if we can re-use it.  */
  if (gimple_vuse (stmt) != (*slot)->vop ())
    {
      tree vuse1 = (*slot)->vop ();
      tree vuse2 = gimple_vuse (stmt);
      /* If we have a load of a register and a candidate in the
         hash with vuse1 then try to reach its stmt by walking
         up the virtual use-def chain using walk_non_aliased_vuses.
         But don't do this when removing expressions from the hash.  */
      ao_ref ref;
      unsigned limit = param_sccvn_max_alias_queries_per_access;
      if (!(vuse1 && vuse2
            && gimple_assign_single_p (stmt)
            && TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME
            && (ao_ref_init (&ref, gimple_assign_rhs1 (stmt)),
                ref.base_alias_set = ref.ref_alias_set = tbaa_p ? -1 : 0, true)
            && walk_non_aliased_vuses (&ref, vuse2, true, vuse_eq, NULL, NULL,
                                       limit, vuse1) != NULL))
        {
          if (insert)
            {
              class expr_hash_elt *element2 = new expr_hash_elt (element);

              /* Insert the expr into the hash by replacing the current
                 entry and recording the value to restore in the
                 avail_exprs_stack.  */
              record_expr (element2, *slot, '2');
              *slot = element2;
            }
          return NULL_TREE;
        }
    }

  /* Extract the LHS of the assignment so that it can be used as the current
     definition of another variable.  */
  lhs = (*slot)->lhs ();
  if (elt)
    *elt = *slot;

  /* Valueize the result.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      tree tem = SSA_NAME_VALUE (lhs);
      if (tem)
        lhs = tem;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "FIND: ");
      print_generic_expr (dump_file, lhs);
      fprintf (dump_file, "\n");
    }

  return lhs;
}
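
/* For example, given two loads of the same location with no
   intervening clobber:

       tmp_1 = *p_2;
       ...
       tmp_3 = *p_2;

   the first call (with INSERT) records *p_2 in the table and returns
   NULL_TREE; the second lookup returns tmp_1, letting the caller replace
   the redundant load.  When the VUSEs differ, the alias walk above
   verifies that the statements in between cannot modify *p_2.  */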

/* Enter condition equivalence P into the hash table.

   This indicates that a conditional expression has a known
   boolean value.  */

void
avail_exprs_stack::record_cond (cond_equivalence *p)
{
  class expr_hash_elt *element = new expr_hash_elt (&p->cond, p->value);
  expr_hash_elt **slot;

  slot = m_avail_exprs->find_slot_with_hash (element, element->hash (), INSERT);
  if (*slot == NULL)
    {
      *slot = element;
      record_expr (element, NULL, '1');
    }
  else
    delete element;
}

/* Generate a hash value for a pair of expressions.  This can be used
   iteratively by passing a previous result in HSTATE.

   The same hash value is always returned for a given pair of expressions,
   regardless of the order in which they are presented.  This is useful in
   hashing the operands of commutative functions.  */

namespace inchash
{

static void
add_expr_commutative (const_tree t1, const_tree t2, hash &hstate)
{
  hash one, two;

  inchash::add_expr (t1, one);
  inchash::add_expr (t2, two);
  hstate.add_commutative (one, two);
}
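
/* Thus the pair (a_1, b_2) hashes the same as (b_2, a_1), so a_1 + b_2
   and b_2 + a_1 land in the same hash bucket and can satisfy each
   other's lookups.  */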

/* Compute a hash value for a hashable_expr value EXPR given a
   previously accumulated hash state in HSTATE.  If two hashable_expr
   values compare equal with hashable_expr_equal_p, they must
   hash to the same value, given an identical initial hash state.
   The logic is intended to follow inchash::add_expr in tree.cc.  */

static void
add_hashable_expr (const struct hashable_expr *expr, hash &hstate)
{
  switch (expr->kind)
    {
    case EXPR_SINGLE:
      inchash::add_expr (expr->ops.single.rhs, hstate);
      break;

    case EXPR_UNARY:
      hstate.add_object (expr->ops.unary.op);

      /* Make sure to include signedness in the hash computation.
         Don't hash the type, that can lead to having nodes which
         compare equal according to operand_equal_p, but which
         have different hash codes.  */
      if (CONVERT_EXPR_CODE_P (expr->ops.unary.op)
          || expr->ops.unary.op == NON_LVALUE_EXPR)
        hstate.add_int (TYPE_UNSIGNED (expr->type));

      inchash::add_expr (expr->ops.unary.opnd, hstate);
      break;

    case EXPR_BINARY:
      hstate.add_object (expr->ops.binary.op);
      if (commutative_tree_code (expr->ops.binary.op))
        inchash::add_expr_commutative (expr->ops.binary.opnd0,
                                       expr->ops.binary.opnd1, hstate);
      else
        {
          inchash::add_expr (expr->ops.binary.opnd0, hstate);
          inchash::add_expr (expr->ops.binary.opnd1, hstate);
        }
      break;

    case EXPR_TERNARY:
      hstate.add_object (expr->ops.ternary.op);
      if (commutative_ternary_tree_code (expr->ops.ternary.op))
        inchash::add_expr_commutative (expr->ops.ternary.opnd0,
                                       expr->ops.ternary.opnd1, hstate);
      else
        {
          inchash::add_expr (expr->ops.ternary.opnd0, hstate);
          inchash::add_expr (expr->ops.ternary.opnd1, hstate);
        }
      inchash::add_expr (expr->ops.ternary.opnd2, hstate);
      break;

    case EXPR_CALL:
      {
        size_t i;
        enum tree_code code = CALL_EXPR;
        gcall *fn_from;

        hstate.add_object (code);
        fn_from = expr->ops.call.fn_from;
        if (gimple_call_internal_p (fn_from))
          hstate.merge_hash ((hashval_t) gimple_call_internal_fn (fn_from));
        else
          inchash::add_expr (gimple_call_fn (fn_from), hstate);
        for (i = 0; i < expr->ops.call.nargs; i++)
          inchash::add_expr (expr->ops.call.args[i], hstate);
      }
      break;

    case EXPR_PHI:
      {
        size_t i;

        for (i = 0; i < expr->ops.phi.nargs; i++)
          inchash::add_expr (expr->ops.phi.args[i], hstate);
      }
      break;

    default:
      gcc_unreachable ();
    }
}

}

/* Hashing and equality functions.  We compute a value number for expressions
   using the code of the expression and the SSA numbers of its operands.  */

static hashval_t
avail_expr_hash (class expr_hash_elt *p)
{
  const struct hashable_expr *expr = p->expr ();
  inchash::hash hstate;

  if (expr->kind == EXPR_SINGLE)
    {
      /* T could potentially be a switch index or a goto dest.  */
      tree t = expr->ops.single.rhs;
      if (TREE_CODE (t) == MEM_REF || handled_component_p (t))
        {
          /* Make equivalent statements of both these kinds hash together.
             Dealing with both MEM_REF and ARRAY_REF allows us not to care
             about equivalence with other statements not considered here.  */
          bool reverse;
          poly_int64 offset, size, max_size;
          tree base = get_ref_base_and_extent (t, &offset, &size, &max_size,
                                               &reverse);
          /* Strictly, we could try to normalize variable-sized accesses too,
             but here we just deal with the common case.  */
          if (known_size_p (max_size)
              && known_eq (size, max_size))
            {
              enum tree_code code = MEM_REF;
              hstate.add_object (code);
              inchash::add_expr (base, hstate,
                                 TREE_CODE (base) == MEM_REF
                                 ? OEP_ADDRESS_OF : 0);
              hstate.add_object (offset);
              hstate.add_object (size);
              return hstate.end ();
            }
        }
    }

  inchash::add_hashable_expr (expr, hstate);

  return hstate.end ();
}
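
/* For example, with int a[4], the references a[1] and MEM[&a + 4] both
   resolve to base A, bit offset 32 and size 32, so they hash (and, via
   equal_mem_array_ref_p below, compare) as the same access even though
   their tree forms differ.  */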

/* Compare trees T0 and T1 to see if they are MEM_REFs or ARRAY_REFs
   equivalent to each other.  (That is, they return the value of the same
   bit of memory.)

   Return TRUE if the two are so equivalent; FALSE if not (which could still
   mean the two are equivalent by other means).  */

static bool
equal_mem_array_ref_p (tree t0, tree t1)
{
  if (TREE_CODE (t0) != MEM_REF && ! handled_component_p (t0))
    return false;
  if (TREE_CODE (t1) != MEM_REF && ! handled_component_p (t1))
    return false;

  if (!types_compatible_p (TREE_TYPE (t0), TREE_TYPE (t1)))
    return false;

  bool rev0;
  poly_int64 off0, sz0, max0;
  tree base0 = get_ref_base_and_extent (t0, &off0, &sz0, &max0, &rev0);
  if (!known_size_p (max0)
      || maybe_ne (sz0, max0))
    return false;

  bool rev1;
  poly_int64 off1, sz1, max1;
  tree base1 = get_ref_base_and_extent (t1, &off1, &sz1, &max1, &rev1);
  if (!known_size_p (max1)
      || maybe_ne (sz1, max1))
    return false;

  if (rev0 != rev1 || maybe_ne (sz0, sz1) || maybe_ne (off0, off1))
    return false;

  return operand_equal_p (base0, base1,
                          (TREE_CODE (base0) == MEM_REF
                           || TREE_CODE (base0) == TARGET_MEM_REF)
                          && (TREE_CODE (base1) == MEM_REF
                              || TREE_CODE (base1) == TARGET_MEM_REF)
                          ? OEP_ADDRESS_OF : 0);
}

/* Compare two hashable_expr structures for equivalence.  They are
   considered equivalent when the expressions they denote must
   necessarily be equal.  The logic is intended to follow that of
   operand_equal_p in fold-const.cc.  */

static bool
hashable_expr_equal_p (const struct hashable_expr *expr0,
                       const struct hashable_expr *expr1)
{
  tree type0 = expr0->type;
  tree type1 = expr1->type;

  /* If either type is NULL, there is nothing to check.  */
  if ((type0 == NULL_TREE) ^ (type1 == NULL_TREE))
    return false;

  /* If both types don't have the same signedness, precision, and mode,
     then we can't consider them equal.  */
  if (type0 != type1
      && (TREE_CODE (type0) == ERROR_MARK
          || TREE_CODE (type1) == ERROR_MARK
          || TYPE_UNSIGNED (type0) != TYPE_UNSIGNED (type1)
          || element_precision (type0) != element_precision (type1)
          || TYPE_MODE (type0) != TYPE_MODE (type1)))
    return false;

  if (expr0->kind != expr1->kind)
    return false;

  switch (expr0->kind)
    {
    case EXPR_SINGLE:
      return equal_mem_array_ref_p (expr0->ops.single.rhs,
                                    expr1->ops.single.rhs)
             || operand_equal_p (expr0->ops.single.rhs,
                                 expr1->ops.single.rhs, 0);

    case EXPR_UNARY:
      if (expr0->ops.unary.op != expr1->ops.unary.op)
        return false;

      if ((CONVERT_EXPR_CODE_P (expr0->ops.unary.op)
           || expr0->ops.unary.op == NON_LVALUE_EXPR)
          && TYPE_UNSIGNED (expr0->type) != TYPE_UNSIGNED (expr1->type))
        return false;

      return operand_equal_p (expr0->ops.unary.opnd,
                              expr1->ops.unary.opnd, 0);

    case EXPR_BINARY:
      if (expr0->ops.binary.op != expr1->ops.binary.op)
        return false;

      if (operand_equal_p (expr0->ops.binary.opnd0,
                           expr1->ops.binary.opnd0, 0)
          && operand_equal_p (expr0->ops.binary.opnd1,
                              expr1->ops.binary.opnd1, 0))
        return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (expr0->ops.binary.op)
              && operand_equal_p (expr0->ops.binary.opnd0,
                                  expr1->ops.binary.opnd1, 0)
              && operand_equal_p (expr0->ops.binary.opnd1,
                                  expr1->ops.binary.opnd0, 0));

    case EXPR_TERNARY:
      if (expr0->ops.ternary.op != expr1->ops.ternary.op
          || !operand_equal_p (expr0->ops.ternary.opnd2,
                               expr1->ops.ternary.opnd2, 0))
        return false;

      /* BIT_INSERT_EXPR has an implicit operand as the type precision
         of op1.  Need to check to make sure they are the same.  */
      if (expr0->ops.ternary.op == BIT_INSERT_EXPR
          && TREE_CODE (expr0->ops.ternary.opnd1) == INTEGER_CST
          && TREE_CODE (expr1->ops.ternary.opnd1) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (expr0->ops.ternary.opnd1))
             != TYPE_PRECISION (TREE_TYPE (expr1->ops.ternary.opnd1)))
        return false;

      if (operand_equal_p (expr0->ops.ternary.opnd0,
                           expr1->ops.ternary.opnd0, 0)
          && operand_equal_p (expr0->ops.ternary.opnd1,
                              expr1->ops.ternary.opnd1, 0))
        return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_ternary_tree_code (expr0->ops.ternary.op)
              && operand_equal_p (expr0->ops.ternary.opnd0,
                                  expr1->ops.ternary.opnd1, 0)
              && operand_equal_p (expr0->ops.ternary.opnd1,
                                  expr1->ops.ternary.opnd0, 0));

    case EXPR_CALL:
      {
        size_t i;

        /* If the calls are to different functions, then they
           clearly cannot be equal.  */
        if (!gimple_call_same_target_p (expr0->ops.call.fn_from,
                                        expr1->ops.call.fn_from))
          return false;

        if (! expr0->ops.call.pure)
          return false;

        if (expr0->ops.call.nargs != expr1->ops.call.nargs)
          return false;

        for (i = 0; i < expr0->ops.call.nargs; i++)
          if (! operand_equal_p (expr0->ops.call.args[i],
                                 expr1->ops.call.args[i], 0))
            return false;

        if (stmt_could_throw_p (cfun, expr0->ops.call.fn_from))
          {
            int lp0 = lookup_stmt_eh_lp (expr0->ops.call.fn_from);
            int lp1 = lookup_stmt_eh_lp (expr1->ops.call.fn_from);
            if ((lp0 > 0 || lp1 > 0) && lp0 != lp1)
              return false;
          }

        return true;
      }

    case EXPR_PHI:
      {
        size_t i;

        if (expr0->ops.phi.nargs != expr1->ops.phi.nargs)
          return false;

        for (i = 0; i < expr0->ops.phi.nargs; i++)
          if (! operand_equal_p (expr0->ops.phi.args[i],
                                 expr1->ops.phi.args[i], 0))
            return false;

        return true;
      }

    default:
      gcc_unreachable ();
    }
}
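
/* For example, two calls t_1 = f (x_2) and t_3 = f (x_2) compare equal
   only if F is const or pure (the PURE flag above), the argument lists
   match, and, when the calls may throw, they share the same EH landing
   pad.  */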

/* Given a statement STMT, construct a hash table element.  */

expr_hash_elt::expr_hash_elt (gimple *stmt, tree orig_lhs)
{
  enum gimple_code code = gimple_code (stmt);
  struct hashable_expr *expr = this->expr ();

  if (code == GIMPLE_ASSIGN)
    {
      enum tree_code subcode = gimple_assign_rhs_code (stmt);

      switch (get_gimple_rhs_class (subcode))
        {
        case GIMPLE_SINGLE_RHS:
          expr->kind = EXPR_SINGLE;
          expr->type = TREE_TYPE (gimple_assign_rhs1 (stmt));
          expr->ops.single.rhs = gimple_assign_rhs1 (stmt);
          break;
        case GIMPLE_UNARY_RHS:
          expr->kind = EXPR_UNARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          if (CONVERT_EXPR_CODE_P (subcode))
            subcode = NOP_EXPR;
          expr->ops.unary.op = subcode;
          expr->ops.unary.opnd = gimple_assign_rhs1 (stmt);
          break;
        case GIMPLE_BINARY_RHS:
          expr->kind = EXPR_BINARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.binary.op = subcode;
          expr->ops.binary.opnd0 = gimple_assign_rhs1 (stmt);
          expr->ops.binary.opnd1 = gimple_assign_rhs2 (stmt);
          break;
        case GIMPLE_TERNARY_RHS:
          expr->kind = EXPR_TERNARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.ternary.op = subcode;
          expr->ops.ternary.opnd0 = gimple_assign_rhs1 (stmt);
          expr->ops.ternary.opnd1 = gimple_assign_rhs2 (stmt);
          expr->ops.ternary.opnd2 = gimple_assign_rhs3 (stmt);
          break;
        default:
          gcc_unreachable ();
        }
    }
  else if (code == GIMPLE_COND)
    {
      expr->type = boolean_type_node;
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = gimple_cond_code (stmt);
      expr->ops.binary.opnd0 = gimple_cond_lhs (stmt);
      expr->ops.binary.opnd1 = gimple_cond_rhs (stmt);
    }
  else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      size_t nargs = gimple_call_num_args (call_stmt);
      size_t i;

      gcc_assert (gimple_call_lhs (call_stmt));

      expr->type = TREE_TYPE (gimple_call_lhs (call_stmt));
      expr->kind = EXPR_CALL;
      expr->ops.call.fn_from = call_stmt;

      if (gimple_call_flags (call_stmt) & (ECF_CONST | ECF_PURE))
        expr->ops.call.pure = true;
      else
        expr->ops.call.pure = false;

      expr->ops.call.nargs = nargs;
      expr->ops.call.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        expr->ops.call.args[i] = gimple_call_arg (call_stmt, i);
    }
  else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
    {
      expr->type = TREE_TYPE (gimple_switch_index (swtch_stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_switch_index (swtch_stmt);
    }
  else if (code == GIMPLE_GOTO)
    {
      expr->type = TREE_TYPE (gimple_goto_dest (stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_goto_dest (stmt);
    }
  else if (code == GIMPLE_PHI)
    {
      size_t nargs = gimple_phi_num_args (stmt);
      size_t i;

      expr->type = TREE_TYPE (gimple_phi_result (stmt));
      expr->kind = EXPR_PHI;
      expr->ops.phi.nargs = nargs;
      expr->ops.phi.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        expr->ops.phi.args[i] = gimple_phi_arg_def (stmt, i);
    }
  else
    gcc_unreachable ();

  m_lhs = orig_lhs;
  m_vop = gimple_vuse (stmt);
  m_hash = avail_expr_hash (this);
  m_stamp = this;
}
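
/* For example, the condition of if (a_1 < b_2) becomes an EXPR_BINARY
   element of boolean type, the same shape as the RHS of t_3 = a_1 < b_2,
   so a dominating condition can satisfy a later lookup of the equivalent
   assignment form.  */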

/* Given a hashable_expr expression ORIG and an ORIG_LHS,
   construct a hash table element.  */

expr_hash_elt::expr_hash_elt (struct hashable_expr *orig, tree orig_lhs)
{
  m_expr = *orig;
  m_lhs = orig_lhs;
  m_vop = NULL_TREE;
  m_hash = avail_expr_hash (this);
  m_stamp = this;
}

/* Copy constructor for a hash table element.  */

expr_hash_elt::expr_hash_elt (class expr_hash_elt &old_elt)
{
  m_expr = old_elt.m_expr;
  m_lhs = old_elt.m_lhs;
  m_vop = old_elt.m_vop;
  m_hash = old_elt.m_hash;
  m_stamp = this;

  /* Now deep copy the malloc'd space for CALL and PHI args.  */
  if (old_elt.m_expr.kind == EXPR_CALL)
    {
      size_t nargs = old_elt.m_expr.ops.call.nargs;
      size_t i;

      m_expr.ops.call.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        m_expr.ops.call.args[i] = old_elt.m_expr.ops.call.args[i];
    }
  else if (old_elt.m_expr.kind == EXPR_PHI)
    {
      size_t nargs = old_elt.m_expr.ops.phi.nargs;
      size_t i;

      m_expr.ops.phi.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        m_expr.ops.phi.args[i] = old_elt.m_expr.ops.phi.args[i];
    }
}

/* Calls and PHIs have a variable number of arguments that are allocated
   on the heap.  Thus we have to have a special dtor to release them.  */

expr_hash_elt::~expr_hash_elt ()
{
  if (m_expr.kind == EXPR_CALL)
    free (m_expr.ops.call.args);
  else if (m_expr.kind == EXPR_PHI)
    free (m_expr.ops.phi.args);
}

/* Print a diagnostic dump of an expression hash table entry.  */

void
expr_hash_elt::print (FILE *stream)
{
  fprintf (stream, "STMT ");

  if (m_lhs)
    {
      print_generic_expr (stream, m_lhs);
      fprintf (stream, " = ");
    }

  switch (m_expr.kind)
    {
    case EXPR_SINGLE:
      print_generic_expr (stream, m_expr.ops.single.rhs);
      break;

    case EXPR_UNARY:
      fprintf (stream, "%s ", get_tree_code_name (m_expr.ops.unary.op));
      print_generic_expr (stream, m_expr.ops.unary.opnd);
      break;

    case EXPR_BINARY:
      print_generic_expr (stream, m_expr.ops.binary.opnd0);
      fprintf (stream, " %s ", get_tree_code_name (m_expr.ops.binary.op));
      print_generic_expr (stream, m_expr.ops.binary.opnd1);
      break;

    case EXPR_TERNARY:
      fprintf (stream, " %s <", get_tree_code_name (m_expr.ops.ternary.op));
      print_generic_expr (stream, m_expr.ops.ternary.opnd0);
      fputs (", ", stream);
      print_generic_expr (stream, m_expr.ops.ternary.opnd1);
      fputs (", ", stream);
      print_generic_expr (stream, m_expr.ops.ternary.opnd2);
      fputs (">", stream);
      break;

    case EXPR_CALL:
      {
        size_t i;
        size_t nargs = m_expr.ops.call.nargs;
        gcall *fn_from;

        fn_from = m_expr.ops.call.fn_from;
        if (gimple_call_internal_p (fn_from))
          fprintf (stream, ".%s",
                   internal_fn_name (gimple_call_internal_fn (fn_from)));
        else
          print_generic_expr (stream, gimple_call_fn (fn_from));
        fprintf (stream, " (");
        for (i = 0; i < nargs; i++)
          {
            print_generic_expr (stream, m_expr.ops.call.args[i]);
            if (i + 1 < nargs)
              fprintf (stream, ", ");
          }
        fprintf (stream, ")");
      }
      break;

    case EXPR_PHI:
      {
        size_t i;
        size_t nargs = m_expr.ops.phi.nargs;

        fprintf (stream, "PHI <");
        for (i = 0; i < nargs; i++)
          {
            print_generic_expr (stream, m_expr.ops.phi.args[i]);
            if (i + 1 < nargs)
              fprintf (stream, ", ");
          }
        fprintf (stream, ">");
      }
      break;
    }

  if (m_vop)
    {
      fprintf (stream, " with ");
      print_generic_expr (stream, m_vop);
    }

  fprintf (stream, "\n");
}

/* Pop entries off the stack until we hit the NULL marker.
   For each entry popped, use the SRC/DEST pair to restore
   SRC to its prior value.  */

void
const_and_copies::pop_to_marker (void)
{
  while (m_stack.length () > 0)
    {
      tree prev_value, dest;

      dest = m_stack.pop ();

      /* A NULL value indicates we should stop unwinding, otherwise
         pop off the next entry as they're recorded in pairs.  */
      if (dest == NULL)
        break;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "<<<< COPY ");
          print_generic_expr (dump_file, dest);
          fprintf (dump_file, " = ");
          print_generic_expr (dump_file, SSA_NAME_VALUE (dest));
          fprintf (dump_file, "\n");
        }

      prev_value = m_stack.pop ();
      set_ssa_name_value (dest, prev_value);
    }
}

/* Record that X has the value Y and that X's previous value is PREV_X.

   This variant does not follow the value chain for Y.  */

void
const_and_copies::record_const_or_copy_raw (tree x, tree y, tree prev_x)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "0>>> COPY ");
      print_generic_expr (dump_file, x);
      fprintf (dump_file, " = ");
      print_generic_expr (dump_file, y);
      fprintf (dump_file, "\n");
    }

  set_ssa_name_value (x, y);
  m_stack.reserve (2);
  m_stack.quick_push (prev_x);
  m_stack.quick_push (x);
}

/* Record that X has the value Y.  */

void
const_and_copies::record_const_or_copy (tree x, tree y)
{
  record_const_or_copy (x, y, SSA_NAME_VALUE (x));
}

/* Record that X has the value Y and that X's previous value is PREV_X.

   This variant follows Y's value chain.  */

void
const_and_copies::record_const_or_copy (tree x, tree y, tree prev_x)
{
  /* Y may be NULL if we are invalidating entries in the table.  */
  if (y && TREE_CODE (y) == SSA_NAME)
    {
      tree tmp = SSA_NAME_VALUE (y);
      y = tmp ? tmp : y;
    }

  record_const_or_copy_raw (x, y, prev_x);
}
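
/* For example, if y_2 was previously recorded as having the value 5,
   then record_const_or_copy (x_1, y_2) stores 5 (not y_2) as the value
   of x_1, so later uses of x_1 propagate the constant directly.  */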

bool
expr_elt_hasher::equal (const value_type &p1, const compare_type &p2)
{
  const struct hashable_expr *expr1 = p1->expr ();
  const class expr_hash_elt *stamp1 = p1->stamp ();
  const struct hashable_expr *expr2 = p2->expr ();
  const class expr_hash_elt *stamp2 = p2->stamp ();

  /* This case should apply only when removing entries from the table.  */
  if (stamp1 == stamp2)
    return true;

  if (p1->hash () != p2->hash ())
    return false;

  /* In case of a collision, both RHS have to be identical and have the
     same VUSE operands.  */
  if (hashable_expr_equal_p (expr1, expr2)
      && types_compatible_p (expr1->type, expr2->type))
    return true;

  return false;
}

/* Given a conditional expression COND as a tree, initialize
   a hashable_expr expression EXPR.  The conditional must be a
   comparison or logical negation.  A constant or a variable is
   not permitted.  */

void
initialize_expr_from_cond (tree cond, struct hashable_expr *expr)
{
  expr->type = boolean_type_node;

  if (COMPARISON_CLASS_P (cond))
    {
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = TREE_CODE (cond);
      expr->ops.binary.opnd0 = TREE_OPERAND (cond, 0);
      expr->ops.binary.opnd1 = TREE_OPERAND (cond, 1);
    }
  else if (TREE_CODE (cond) == TRUTH_NOT_EXPR)
    {
      expr->kind = EXPR_UNARY;
      expr->ops.unary.op = TRUTH_NOT_EXPR;
      expr->ops.unary.opnd = TREE_OPERAND (cond, 0);
    }
  else
    gcc_unreachable ();
}

/* Build a cond_equivalence record indicating that the comparison
   CODE holds between operands OP0 and OP1 and push it to **P.  */

static void
build_and_record_new_cond (enum tree_code code,
                           tree op0, tree op1,
                           vec<cond_equivalence> *p,
                           bool val = true)
{
  cond_equivalence c;
  struct hashable_expr *cond = &c.cond;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  cond->type = boolean_type_node;
  cond->kind = EXPR_BINARY;
  cond->ops.binary.op = code;
  cond->ops.binary.opnd0 = op0;
  cond->ops.binary.opnd1 = op1;

  c.value = val ? boolean_true_node : boolean_false_node;
  p->safe_push (c);
}

/* Record that COND is true and INVERTED is false into the edge information
   structure.  Also record that any conditions dominated by COND are true
   as well.

   For example, if a < b is true, then a <= b must also be true.  */

void
record_conditions (vec<cond_equivalence> *p, tree cond, tree inverted)
{
  tree op0, op1;
  cond_equivalence c;

  if (!COMPARISON_CLASS_P (cond))
    return;

  op0 = TREE_OPERAND (cond, 0);
  op1 = TREE_OPERAND (cond, 1);

  switch (TREE_CODE (cond))
    {
    case LT_EXPR:
    case GT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
          build_and_record_new_cond (LTGT_EXPR, op0, op1, p);
        }

      build_and_record_new_cond ((TREE_CODE (cond) == LT_EXPR
                                  ? LE_EXPR : GE_EXPR),
                                 op0, op1, p);
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      build_and_record_new_cond (EQ_EXPR, op0, op1, p, false);
      break;

    case GE_EXPR:
    case LE_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
      break;

    case EQ_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
      build_and_record_new_cond (LE_EXPR, op0, op1, p);
      build_and_record_new_cond (GE_EXPR, op0, op1, p);
      break;

    case UNORDERED_EXPR:
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNLE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNGE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNEQ_EXPR, op0, op1, p);
      build_and_record_new_cond (UNLT_EXPR, op0, op1, p);
      build_and_record_new_cond (UNGT_EXPR, op0, op1, p);
      break;

    case UNLT_EXPR:
    case UNGT_EXPR:
      build_and_record_new_cond ((TREE_CODE (cond) == UNLT_EXPR
                                  ? UNLE_EXPR : UNGE_EXPR),
                                 op0, op1, p);
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      break;

    case UNEQ_EXPR:
      build_and_record_new_cond (UNLE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNGE_EXPR, op0, op1, p);
      break;

    case LTGT_EXPR:
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
      break;

    default:
      break;
    }

  /* Now record the original true and false conditions themselves.  */
  initialize_expr_from_cond (cond, &c.cond);
  c.value = boolean_true_node;
  p->safe_push (c);

  /* It is possible for INVERTED to be the negation of a comparison,
     and not a valid RHS or GIMPLE_COND condition.  This happens because
     invert_truthvalue may return such an expression when asked to invert
     a floating-point comparison.  These comparisons are not assumed to
     obey the trichotomy law.  */
  initialize_expr_from_cond (inverted, &c.cond);
  c.value = boolean_false_node;
  p->safe_push (c);
}
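
/* For example, for an integral comparison a_1 < b_2 this pushes
   a_1 <= b_2 == true, a_1 != b_2 == true, a_1 == b_2 == false,
   a_1 < b_2 == true, and finally the inverted condition
   a_1 >= b_2 == false, so any of those dominated tests can be
   folded for free.  */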