/* Scoped tables for SSA dominator optimizations.
   Copyright (C) 2013-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "function.h"
#include "basic-block.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "tree-pretty-print.h"
#include "tree-ssa-scopedtables.h"
#include "tree-ssa-threadedge.h"
#include "stor-layout.h"
#include "fold-const.h"
#include "tree-eh.h"
#include "internal-fn.h"
#include "tree-dfa.h"
#include "options.h"

static bool hashable_expr_equal_p (const struct hashable_expr *,
                                   const struct hashable_expr *);

/* Initialize local stacks for this optimizer and record equivalences
   upon entry to BB.  Equivalences can come from the edge traversed to
   reach BB or they may come from PHI nodes at the start of BB.  */

/* Pop items off the unwinding stack, removing each from the hash table
   until a marker is encountered.  */

void
avail_exprs_stack::pop_to_marker ()
{
  /* Remove all the expressions made available in this block.  */
  while (m_stack.length () > 0)
    {
      std::pair<expr_hash_elt_t, expr_hash_elt_t> victim = m_stack.pop ();
      expr_hash_elt **slot;

      if (victim.first == NULL)
        break;

      /* This must precede the actual removal from the hash table,
         as ELEMENT and the table entry may share a call argument
         vector which will be freed during removal.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "<<<< ");
          victim.first->print (dump_file);
        }

      slot = m_avail_exprs->find_slot (victim.first, NO_INSERT);
      gcc_assert (slot && *slot == victim.first);
      if (victim.second != NULL)
        {
          delete *slot;
          *slot = victim.second;
        }
      else
        m_avail_exprs->clear_slot (slot);
    }
}
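
/* Editorial example (illustrative, not from the original sources): the
   unwind stack holds <new, old> pairs.  A <elt, NULL> pair records a
   fresh insertion, so unwinding simply clears the slot; a <elt, old>
   pair records a replacement, so unwinding puts OLD back.  A pair whose
   first member is NULL acts as the per-block marker that stops the
   unwinding, assuming the marker-pushing counterpart pushes such a
   pair.  */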

/* Add <ELT1,ELT2> to the unwinding stack so they can be later removed
   from the hash table.  */

void
avail_exprs_stack::record_expr (class expr_hash_elt *elt1,
                                class expr_hash_elt *elt2,
                                char type)
{
  if (elt1 && dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "%c>>> ", type);
      elt1->print (dump_file);
    }

  m_stack.safe_push (std::pair<expr_hash_elt_t, expr_hash_elt_t> (elt1, elt2));
}

/* Helper for walk_non_aliased_vuses.  Determine if we arrived at
   the desired memory state.  */

static void *
vuse_eq (ao_ref *, tree vuse1, void *data)
{
  tree vuse2 = (tree) data;
  if (vuse1 == vuse2)
    return data;

  return NULL;
}
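
/* Note (editorial): walk_non_aliased_vuses stops the walk as soon as its
   callback returns non-NULL, so returning DATA here when VUSE1 equals the
   VUSE we are searching for signals that the walk reached the memory
   state recorded in the hash table.  */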

/* We looked for STMT in the hash table, but did not find it.

   If STMT is an assignment from a binary operator, we may know something
   about the operands' relationship to each other which would allow
   us to derive a constant value for the RHS of STMT.  */

tree
avail_exprs_stack::simplify_binary_operation (gimple *stmt,
                                              class expr_hash_elt element)
{
  if (is_gimple_assign (stmt))
    {
      struct hashable_expr *expr = element.expr ();
      if (expr->kind == EXPR_BINARY)
        {
          enum tree_code code = expr->ops.binary.op;

          switch (code)
            {
            /* For these cases, if we know the operands
               are equal, then we know the result.  */
            case MIN_EXPR:
            case MAX_EXPR:
            case BIT_IOR_EXPR:
            case BIT_AND_EXPR:
            case BIT_XOR_EXPR:
            case MINUS_EXPR:
            case TRUNC_DIV_EXPR:
            case CEIL_DIV_EXPR:
            case FLOOR_DIV_EXPR:
            case ROUND_DIV_EXPR:
            case EXACT_DIV_EXPR:
            case TRUNC_MOD_EXPR:
            case CEIL_MOD_EXPR:
            case FLOOR_MOD_EXPR:
            case ROUND_MOD_EXPR:
              {
                /* Build a simple equality expr and query the hash table
                   for it.  */
                struct hashable_expr expr;
                expr.type = boolean_type_node;
                expr.kind = EXPR_BINARY;
                expr.ops.binary.op = EQ_EXPR;
                expr.ops.binary.opnd0 = gimple_assign_rhs1 (stmt);
                expr.ops.binary.opnd1 = gimple_assign_rhs2 (stmt);
                class expr_hash_elt element2 (&expr, NULL_TREE);
                expr_hash_elt **slot
                  = m_avail_exprs->find_slot (&element2, NO_INSERT);
                tree result_type = TREE_TYPE (gimple_assign_lhs (stmt));

                /* If the query was successful and returned a nonzero
                   result, then we know that the operands of the binary
                   expression are the same.  In many cases this allows
                   us to compute a constant result of the expression
                   at compile time, even if we do not know the exact
                   values of the operands.  */
                if (slot && *slot && integer_onep ((*slot)->lhs ()))
                  {
                    switch (code)
                      {
                      case MIN_EXPR:
                      case MAX_EXPR:
                      case BIT_IOR_EXPR:
                      case BIT_AND_EXPR:
                        return gimple_assign_rhs1 (stmt);

                      case MINUS_EXPR:
                        /* This is unsafe for certain floats even in non-IEEE
                           formats.  In IEEE, it is unsafe because it does
                           wrong for NaNs.  */
                        if (FLOAT_TYPE_P (result_type)
                            && HONOR_NANS (result_type))
                          break;
                        /* FALLTHRU */
                      case BIT_XOR_EXPR:
                      case TRUNC_MOD_EXPR:
                      case CEIL_MOD_EXPR:
                      case FLOOR_MOD_EXPR:
                      case ROUND_MOD_EXPR:
                        return build_zero_cst (result_type);

                      case TRUNC_DIV_EXPR:
                      case CEIL_DIV_EXPR:
                      case FLOOR_DIV_EXPR:
                      case ROUND_DIV_EXPR:
                      case EXACT_DIV_EXPR:
                        /* Avoid _Fract types where we can't build 1.  */
                        if (ALL_FRACT_MODE_P (TYPE_MODE (result_type)))
                          break;
                        return build_one_cst (result_type);

                      default:
                        gcc_unreachable ();
                      }
                  }
                break;
              }

            default:
              break;
            }
        }
    }
  return NULL_TREE;
}
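
/* Worked example (illustrative): given

     _3 = a_1 - b_2;

   and a previously recorded equivalence stating that "a_1 == b_2" is
   true, the EQ_EXPR lookup above succeeds and the MINUS_EXPR arm returns
   a zero constant of _3's type.  Similarly a_1 / b_2 folds to 1, and
   MIN/MAX/AND/IOR of equal operands fold to the first operand.  */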

/* Search for an existing instance of STMT in the AVAIL_EXPRS_STACK table.
   If found, return its LHS.  Otherwise insert STMT in the table and
   return NULL_TREE.

   Also, when an expression is first inserted in the table, it is also
   added to AVAIL_EXPRS_STACK, so that it can be removed when
   we finish processing this block and its children.  */

tree
avail_exprs_stack::lookup_avail_expr (gimple *stmt, bool insert, bool tbaa_p)
{
  expr_hash_elt **slot;
  tree lhs;

  /* Get LHS of phi, assignment, or call; else NULL_TREE.  */
  if (gimple_code (stmt) == GIMPLE_PHI)
    lhs = gimple_phi_result (stmt);
  else
    lhs = gimple_get_lhs (stmt);

  class expr_hash_elt element (stmt, lhs);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "LKUP ");
      element.print (dump_file);
    }

  /* Don't bother remembering constant assignments and copy operations.
     Constants and copy operations are handled by the constant/copy propagator
     in optimize_stmt.  */
  if (element.expr()->kind == EXPR_SINGLE
      && (TREE_CODE (element.expr()->ops.single.rhs) == SSA_NAME
          || is_gimple_min_invariant (element.expr()->ops.single.rhs)))
    return NULL_TREE;

  /* Finally try to find the expression in the main expression hash table.  */
  slot = m_avail_exprs->find_slot (&element, (insert ? INSERT : NO_INSERT));
  if (slot == NULL)
    {
      return NULL_TREE;
    }
  else if (*slot == NULL)
    {
      /* If we did not find the expression in the hash table, we may still
         be able to produce a result for some expressions.  */
      tree retval = avail_exprs_stack::simplify_binary_operation (stmt,
                                                                  element);

      /* We have, in effect, allocated *SLOT for ELEMENT at this point.
         We must initialize *SLOT to a real entry, even if we found a
         way to prove ELEMENT was a constant after not finding ELEMENT
         in the hash table.

         An uninitialized or empty slot is an indication no prior objects
         entered into the hash table had a hash collision with ELEMENT.

         If we fail to do so and had such entries in the table, they
         would become unreachable.  */
      class expr_hash_elt *element2 = new expr_hash_elt (element);
      *slot = element2;

      record_expr (element2, NULL, '2');
      return retval;
    }

  /* If we found a redundant memory operation do an alias walk to
     check if we can re-use it.  */
  if (gimple_vuse (stmt) != (*slot)->vop ())
    {
      tree vuse1 = (*slot)->vop ();
      tree vuse2 = gimple_vuse (stmt);
      /* If we have a load of a register and a candidate in the
         hash with vuse1 then try to reach its stmt by walking
         up the virtual use-def chain using walk_non_aliased_vuses.
         But don't do this when removing expressions from the hash.  */
      ao_ref ref;
      unsigned limit = param_sccvn_max_alias_queries_per_access;
      if (!(vuse1 && vuse2
            && gimple_assign_single_p (stmt)
            && TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME
            && (ao_ref_init (&ref, gimple_assign_rhs1 (stmt)),
                ref.base_alias_set = ref.ref_alias_set = tbaa_p ? -1 : 0, true)
            && walk_non_aliased_vuses (&ref, vuse2, true, vuse_eq, NULL, NULL,
                                       limit, vuse1) != NULL))
        {
          if (insert)
            {
              class expr_hash_elt *element2 = new expr_hash_elt (element);

              /* Insert the expr into the hash by replacing the current
                 entry and recording the value to restore in the
                 avail_exprs_stack.  */
              record_expr (element2, *slot, '2');
              *slot = element2;
            }
          return NULL_TREE;
        }
    }

  /* Extract the LHS of the assignment so that it can be used as the current
     definition of another variable.  */
  lhs = (*slot)->lhs ();

  /* Valueize the result.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      tree tem = SSA_NAME_VALUE (lhs);
      if (tem)
        lhs = tem;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "FIND: ");
      print_generic_expr (dump_file, lhs);
      fprintf (dump_file, "\n");
    }

  return lhs;
}
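
/* Usage sketch (illustrative): when the DOM walker visits

     x_1 = a_2 + b_3;
     ...
     y_4 = a_2 + b_3;

   the first lookup with INSERT set records the expression and returns
   NULL_TREE; the second lookup finds the existing entry and returns x_1,
   allowing the walker to replace y_4's definition with a copy of x_1.  */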

/* Enter condition equivalence P into the hash table.

   This indicates that a conditional expression has a known
   boolean value.  */

void
avail_exprs_stack::record_cond (cond_equivalence *p)
{
  class expr_hash_elt *element = new expr_hash_elt (&p->cond, p->value);
  expr_hash_elt **slot;

  slot = m_avail_exprs->find_slot_with_hash (element, element->hash (), INSERT);
  if (*slot == NULL)
    {
      *slot = element;
      record_expr (element, NULL, '1');
    }
  else
    delete element;
}
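
/* Illustrative use: on the true edge of "if (a_1 < b_2)" the DOM walker
   can record the cond_equivalence "a_1 < b_2 == true" (plus the derived
   equivalences built by record_conditions below) through this function,
   so later lookups of the same comparison fold directly to the recorded
   boolean value.  */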

/* Generate a hash value for a pair of expressions.  This can be used
   iteratively by passing a previous result in HSTATE.

   The same hash value is always returned for a given pair of expressions,
   regardless of the order in which they are presented.  This is useful in
   hashing the operands of commutative functions.  */

namespace inchash
{

static void
add_expr_commutative (const_tree t1, const_tree t2, hash &hstate)
{
  hash one, two;

  inchash::add_expr (t1, one);
  inchash::add_expr (t2, two);
  hstate.add_commutative (one, two);
}
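
/* Consequence (editorial note): for a commutative operator both operand
   orders hash identically, e.g. the pairs <a_1, b_2> and <b_2, a_1>
   produce the same value here, mirroring the symmetric operand check
   performed in hashable_expr_equal_p.  */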

/* Compute a hash value for a hashable_expr value EXPR and a
   previously accumulated hash value VAL.  If two hashable_expr
   values compare equal with hashable_expr_equal_p, they must
   hash to the same value, given an identical value of VAL.
   The logic is intended to follow inchash::add_expr in tree.c.  */

static void
add_hashable_expr (const struct hashable_expr *expr, hash &hstate)
{
  switch (expr->kind)
    {
    case EXPR_SINGLE:
      inchash::add_expr (expr->ops.single.rhs, hstate);
      break;

    case EXPR_UNARY:
      hstate.add_object (expr->ops.unary.op);

      /* Make sure to include signedness in the hash computation.
         Don't hash the type, that can lead to having nodes which
         compare equal according to operand_equal_p, but which
         have different hash codes.  */
      if (CONVERT_EXPR_CODE_P (expr->ops.unary.op)
          || expr->ops.unary.op == NON_LVALUE_EXPR)
        hstate.add_int (TYPE_UNSIGNED (expr->type));

      inchash::add_expr (expr->ops.unary.opnd, hstate);
      break;

    case EXPR_BINARY:
      hstate.add_object (expr->ops.binary.op);
      if (commutative_tree_code (expr->ops.binary.op))
        inchash::add_expr_commutative (expr->ops.binary.opnd0,
                                       expr->ops.binary.opnd1, hstate);
      else
        {
          inchash::add_expr (expr->ops.binary.opnd0, hstate);
          inchash::add_expr (expr->ops.binary.opnd1, hstate);
        }
      break;

    case EXPR_TERNARY:
      hstate.add_object (expr->ops.ternary.op);
      if (commutative_ternary_tree_code (expr->ops.ternary.op))
        inchash::add_expr_commutative (expr->ops.ternary.opnd0,
                                       expr->ops.ternary.opnd1, hstate);
      else
        {
          inchash::add_expr (expr->ops.ternary.opnd0, hstate);
          inchash::add_expr (expr->ops.ternary.opnd1, hstate);
        }
      inchash::add_expr (expr->ops.ternary.opnd2, hstate);
      break;

    case EXPR_CALL:
      {
        size_t i;
        enum tree_code code = CALL_EXPR;
        gcall *fn_from;

        hstate.add_object (code);
        fn_from = expr->ops.call.fn_from;
        if (gimple_call_internal_p (fn_from))
          hstate.merge_hash ((hashval_t) gimple_call_internal_fn (fn_from));
        else
          inchash::add_expr (gimple_call_fn (fn_from), hstate);
        for (i = 0; i < expr->ops.call.nargs; i++)
          inchash::add_expr (expr->ops.call.args[i], hstate);
      }
      break;

    case EXPR_PHI:
      {
        size_t i;

        for (i = 0; i < expr->ops.phi.nargs; i++)
          inchash::add_expr (expr->ops.phi.args[i], hstate);
      }
      break;

    default:
      gcc_unreachable ();
    }
}

}

/* Hashing and equality functions.  We compute a value number for expressions
   using the code of the expression and the SSA numbers of its operands.  */

static hashval_t
avail_expr_hash (class expr_hash_elt *p)
{
  const struct hashable_expr *expr = p->expr ();
  inchash::hash hstate;

  if (expr->kind == EXPR_SINGLE)
    {
      /* T could potentially be a switch index or a goto dest.  */
      tree t = expr->ops.single.rhs;
      if (TREE_CODE (t) == MEM_REF || handled_component_p (t))
        {
          /* Make equivalent statements of both these kinds hash together.
             Dealing with both MEM_REF and ARRAY_REF allows us not to care
             about equivalence with other statements not considered here.  */
          bool reverse;
          poly_int64 offset, size, max_size;
          tree base = get_ref_base_and_extent (t, &offset, &size, &max_size,
                                               &reverse);
          /* Strictly, we could try to normalize variable-sized accesses too,
             but here we just deal with the common case.  */
          if (known_size_p (max_size)
              && known_eq (size, max_size))
            {
              enum tree_code code = MEM_REF;
              hstate.add_object (code);
              inchash::add_expr (base, hstate,
                                 TREE_CODE (base) == MEM_REF
                                 ? OEP_ADDRESS_OF : 0);
              hstate.add_object (offset);
              hstate.add_object (size);
              return hstate.end ();
            }
        }
    }

  inchash::add_hashable_expr (expr, hstate);

  return hstate.end ();
}
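
/* Illustrative example: a component reference and a MEM_REF that
   get_ref_base_and_extent resolves to the same base, constant offset and
   size are both hashed as <MEM_REF, base, offset, size> above, so the two
   spellings of the same access can land in the same bucket and be unified
   by equal_mem_array_ref_p below.  */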

/* Compares trees T0 and T1 to see if they are MEM_REFs or ARRAY_REFs
   equivalent to each other.  (That is, they return the value of the same
   bit of memory.)

   Return TRUE if the two are so equivalent; FALSE if not (which could still
   mean the two are equivalent by other means).  */

static bool
equal_mem_array_ref_p (tree t0, tree t1)
{
  if (TREE_CODE (t0) != MEM_REF && ! handled_component_p (t0))
    return false;
  if (TREE_CODE (t1) != MEM_REF && ! handled_component_p (t1))
    return false;

  if (!types_compatible_p (TREE_TYPE (t0), TREE_TYPE (t1)))
    return false;

  bool rev0;
  poly_int64 off0, sz0, max0;
  tree base0 = get_ref_base_and_extent (t0, &off0, &sz0, &max0, &rev0);
  if (!known_size_p (max0)
      || maybe_ne (sz0, max0))
    return false;

  bool rev1;
  poly_int64 off1, sz1, max1;
  tree base1 = get_ref_base_and_extent (t1, &off1, &sz1, &max1, &rev1);
  if (!known_size_p (max1)
      || maybe_ne (sz1, max1))
    return false;

  if (rev0 != rev1 || maybe_ne (sz0, sz1) || maybe_ne (off0, off1))
    return false;

  return operand_equal_p (base0, base1,
                          (TREE_CODE (base0) == MEM_REF
                           || TREE_CODE (base0) == TARGET_MEM_REF)
                          && (TREE_CODE (base1) == MEM_REF
                              || TREE_CODE (base1) == TARGET_MEM_REF)
                          ? OEP_ADDRESS_OF : 0);
}
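
/* For instance (illustrative): an ARRAY_REF and a hand-written MEM_REF
   that resolve to the same base, bit offset and size compare equal here
   even though operand_equal_p would treat the two spellings as different
   trees; the reverse-storage-order flags must also agree.  */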

/* Compare two hashable_expr structures for equivalence.  They are
   considered equivalent when the expressions they denote must
   necessarily be equal.  The logic is intended to follow that of
   operand_equal_p in fold-const.c  */

static bool
hashable_expr_equal_p (const struct hashable_expr *expr0,
                       const struct hashable_expr *expr1)
{
  tree type0 = expr0->type;
  tree type1 = expr1->type;

  /* If either type is NULL, there is nothing to check.  */
  if ((type0 == NULL_TREE) ^ (type1 == NULL_TREE))
    return false;

  /* If the types don't have the same signedness, precision, and mode,
     then we can't consider them equal.  */
  if (type0 != type1
      && (TREE_CODE (type0) == ERROR_MARK
          || TREE_CODE (type1) == ERROR_MARK
          || TYPE_UNSIGNED (type0) != TYPE_UNSIGNED (type1)
          || TYPE_PRECISION (type0) != TYPE_PRECISION (type1)
          || TYPE_MODE (type0) != TYPE_MODE (type1)))
    return false;

  if (expr0->kind != expr1->kind)
    return false;

  switch (expr0->kind)
    {
    case EXPR_SINGLE:
      return equal_mem_array_ref_p (expr0->ops.single.rhs,
                                    expr1->ops.single.rhs)
             || operand_equal_p (expr0->ops.single.rhs,
                                 expr1->ops.single.rhs, 0);

    case EXPR_UNARY:
      if (expr0->ops.unary.op != expr1->ops.unary.op)
        return false;

      if ((CONVERT_EXPR_CODE_P (expr0->ops.unary.op)
           || expr0->ops.unary.op == NON_LVALUE_EXPR)
          && TYPE_UNSIGNED (expr0->type) != TYPE_UNSIGNED (expr1->type))
        return false;

      return operand_equal_p (expr0->ops.unary.opnd,
                              expr1->ops.unary.opnd, 0);

    case EXPR_BINARY:
      if (expr0->ops.binary.op != expr1->ops.binary.op)
        return false;

      if (operand_equal_p (expr0->ops.binary.opnd0,
                           expr1->ops.binary.opnd0, 0)
          && operand_equal_p (expr0->ops.binary.opnd1,
                              expr1->ops.binary.opnd1, 0))
        return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (expr0->ops.binary.op)
              && operand_equal_p (expr0->ops.binary.opnd0,
                                  expr1->ops.binary.opnd1, 0)
              && operand_equal_p (expr0->ops.binary.opnd1,
                                  expr1->ops.binary.opnd0, 0));

    case EXPR_TERNARY:
      if (expr0->ops.ternary.op != expr1->ops.ternary.op
          || !operand_equal_p (expr0->ops.ternary.opnd2,
                               expr1->ops.ternary.opnd2, 0))
        return false;

      /* BIT_INSERT_EXPR has an implicit operand as the type precision
         of op1.  Need to check to make sure they are the same.  */
      if (expr0->ops.ternary.op == BIT_INSERT_EXPR
          && TREE_CODE (expr0->ops.ternary.opnd1) == INTEGER_CST
          && TREE_CODE (expr1->ops.ternary.opnd1) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (expr0->ops.ternary.opnd1))
             != TYPE_PRECISION (TREE_TYPE (expr1->ops.ternary.opnd1)))
        return false;

      if (operand_equal_p (expr0->ops.ternary.opnd0,
                           expr1->ops.ternary.opnd0, 0)
          && operand_equal_p (expr0->ops.ternary.opnd1,
                              expr1->ops.ternary.opnd1, 0))
        return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_ternary_tree_code (expr0->ops.ternary.op)
              && operand_equal_p (expr0->ops.ternary.opnd0,
                                  expr1->ops.ternary.opnd1, 0)
              && operand_equal_p (expr0->ops.ternary.opnd1,
                                  expr1->ops.ternary.opnd0, 0));

    case EXPR_CALL:
      {
        size_t i;

        /* If the calls are to different functions, then they
           clearly cannot be equal.  */
        if (!gimple_call_same_target_p (expr0->ops.call.fn_from,
                                        expr1->ops.call.fn_from))
          return false;

        if (! expr0->ops.call.pure)
          return false;

        if (expr0->ops.call.nargs != expr1->ops.call.nargs)
          return false;

        for (i = 0; i < expr0->ops.call.nargs; i++)
          if (! operand_equal_p (expr0->ops.call.args[i],
                                 expr1->ops.call.args[i], 0))
            return false;

        if (stmt_could_throw_p (cfun, expr0->ops.call.fn_from))
          {
            int lp0 = lookup_stmt_eh_lp (expr0->ops.call.fn_from);
            int lp1 = lookup_stmt_eh_lp (expr1->ops.call.fn_from);
            if ((lp0 > 0 || lp1 > 0) && lp0 != lp1)
              return false;
          }

        return true;
      }

    case EXPR_PHI:
      {
        size_t i;

        if (expr0->ops.phi.nargs != expr1->ops.phi.nargs)
          return false;

        for (i = 0; i < expr0->ops.phi.nargs; i++)
          if (! operand_equal_p (expr0->ops.phi.args[i],
                                 expr1->ops.phi.args[i], 0))
            return false;

        return true;
      }

    default:
      gcc_unreachable ();
    }
}
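
/* Note (editorial): the commutative handling above means "a_1 + b_2" and
   "b_2 + a_1" compare equal, while "a_1 - b_2" and "b_2 - a_1" do not,
   since MINUS_EXPR is not a commutative_tree_code.  */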

/* Given a statement STMT, construct a hash table element.  */

expr_hash_elt::expr_hash_elt (gimple *stmt, tree orig_lhs)
{
  enum gimple_code code = gimple_code (stmt);
  struct hashable_expr *expr = this->expr ();

  if (code == GIMPLE_ASSIGN)
    {
      enum tree_code subcode = gimple_assign_rhs_code (stmt);

      switch (get_gimple_rhs_class (subcode))
        {
        case GIMPLE_SINGLE_RHS:
          expr->kind = EXPR_SINGLE;
          expr->type = TREE_TYPE (gimple_assign_rhs1 (stmt));
          expr->ops.single.rhs = gimple_assign_rhs1 (stmt);
          break;
        case GIMPLE_UNARY_RHS:
          expr->kind = EXPR_UNARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          if (CONVERT_EXPR_CODE_P (subcode))
            subcode = NOP_EXPR;
          expr->ops.unary.op = subcode;
          expr->ops.unary.opnd = gimple_assign_rhs1 (stmt);
          break;
        case GIMPLE_BINARY_RHS:
          expr->kind = EXPR_BINARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.binary.op = subcode;
          expr->ops.binary.opnd0 = gimple_assign_rhs1 (stmt);
          expr->ops.binary.opnd1 = gimple_assign_rhs2 (stmt);
          break;
        case GIMPLE_TERNARY_RHS:
          expr->kind = EXPR_TERNARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.ternary.op = subcode;
          expr->ops.ternary.opnd0 = gimple_assign_rhs1 (stmt);
          expr->ops.ternary.opnd1 = gimple_assign_rhs2 (stmt);
          expr->ops.ternary.opnd2 = gimple_assign_rhs3 (stmt);
          break;
        default:
          gcc_unreachable ();
        }
    }
  else if (code == GIMPLE_COND)
    {
      expr->type = boolean_type_node;
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = gimple_cond_code (stmt);
      expr->ops.binary.opnd0 = gimple_cond_lhs (stmt);
      expr->ops.binary.opnd1 = gimple_cond_rhs (stmt);
    }
  else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      size_t nargs = gimple_call_num_args (call_stmt);
      size_t i;

      gcc_assert (gimple_call_lhs (call_stmt));

      expr->type = TREE_TYPE (gimple_call_lhs (call_stmt));
      expr->kind = EXPR_CALL;
      expr->ops.call.fn_from = call_stmt;

      if (gimple_call_flags (call_stmt) & (ECF_CONST | ECF_PURE))
        expr->ops.call.pure = true;
      else
        expr->ops.call.pure = false;

      expr->ops.call.nargs = nargs;
      expr->ops.call.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        expr->ops.call.args[i] = gimple_call_arg (call_stmt, i);
    }
  else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
    {
      expr->type = TREE_TYPE (gimple_switch_index (swtch_stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_switch_index (swtch_stmt);
    }
  else if (code == GIMPLE_GOTO)
    {
      expr->type = TREE_TYPE (gimple_goto_dest (stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_goto_dest (stmt);
    }
  else if (code == GIMPLE_PHI)
    {
      size_t nargs = gimple_phi_num_args (stmt);
      size_t i;

      expr->type = TREE_TYPE (gimple_phi_result (stmt));
      expr->kind = EXPR_PHI;
      expr->ops.phi.nargs = nargs;
      expr->ops.phi.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        expr->ops.phi.args[i] = gimple_phi_arg_def (stmt, i);
    }
  else
    gcc_unreachable ();

  m_lhs = orig_lhs;
  m_vop = gimple_vuse (stmt);
  m_hash = avail_expr_hash (this);
  m_stamp = this;
}

/* Given a hashable_expr expression ORIG and an ORIG_LHS,
   construct a hash table element.  */

expr_hash_elt::expr_hash_elt (struct hashable_expr *orig, tree orig_lhs)
{
  m_expr = *orig;
  m_lhs = orig_lhs;
  m_vop = NULL_TREE;
  m_hash = avail_expr_hash (this);
  m_stamp = this;
}

/* Copy constructor for a hash table element.  */

expr_hash_elt::expr_hash_elt (class expr_hash_elt &old_elt)
{
  m_expr = old_elt.m_expr;
  m_lhs = old_elt.m_lhs;
  m_vop = old_elt.m_vop;
  m_hash = old_elt.m_hash;
  m_stamp = this;

  /* Now deep copy the malloc'd space for CALL and PHI args.  */
  if (old_elt.m_expr.kind == EXPR_CALL)
    {
      size_t nargs = old_elt.m_expr.ops.call.nargs;
      size_t i;

      m_expr.ops.call.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        m_expr.ops.call.args[i] = old_elt.m_expr.ops.call.args[i];
    }
  else if (old_elt.m_expr.kind == EXPR_PHI)
    {
      size_t nargs = old_elt.m_expr.ops.phi.nargs;
      size_t i;

      m_expr.ops.phi.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        m_expr.ops.phi.args[i] = old_elt.m_expr.ops.phi.args[i];
    }
}

/* Calls and PHIs have a variable number of arguments that are allocated
   on the heap.  Thus we have to have a special dtor to release them.  */

expr_hash_elt::~expr_hash_elt ()
{
  if (m_expr.kind == EXPR_CALL)
    free (m_expr.ops.call.args);
  else if (m_expr.kind == EXPR_PHI)
    free (m_expr.ops.phi.args);
}

/* Print a diagnostic dump of an expression hash table entry.  */

void
expr_hash_elt::print (FILE *stream)
{
  fprintf (stream, "STMT ");

  if (m_lhs)
    {
      print_generic_expr (stream, m_lhs);
      fprintf (stream, " = ");
    }

  switch (m_expr.kind)
    {
    case EXPR_SINGLE:
      print_generic_expr (stream, m_expr.ops.single.rhs);
      break;

    case EXPR_UNARY:
      fprintf (stream, "%s ", get_tree_code_name (m_expr.ops.unary.op));
      print_generic_expr (stream, m_expr.ops.unary.opnd);
      break;

    case EXPR_BINARY:
      print_generic_expr (stream, m_expr.ops.binary.opnd0);
      fprintf (stream, " %s ", get_tree_code_name (m_expr.ops.binary.op));
      print_generic_expr (stream, m_expr.ops.binary.opnd1);
      break;

    case EXPR_TERNARY:
      fprintf (stream, " %s <", get_tree_code_name (m_expr.ops.ternary.op));
      print_generic_expr (stream, m_expr.ops.ternary.opnd0);
      fputs (", ", stream);
      print_generic_expr (stream, m_expr.ops.ternary.opnd1);
      fputs (", ", stream);
      print_generic_expr (stream, m_expr.ops.ternary.opnd2);
      fputs (">", stream);
      break;

    case EXPR_CALL:
      {
        size_t i;
        size_t nargs = m_expr.ops.call.nargs;
        gcall *fn_from;

        fn_from = m_expr.ops.call.fn_from;
        if (gimple_call_internal_p (fn_from))
          fprintf (stream, ".%s",
                   internal_fn_name (gimple_call_internal_fn (fn_from)));
        else
          print_generic_expr (stream, gimple_call_fn (fn_from));
        fprintf (stream, " (");
        for (i = 0; i < nargs; i++)
          {
            print_generic_expr (stream, m_expr.ops.call.args[i]);
            if (i + 1 < nargs)
              fprintf (stream, ", ");
          }
        fprintf (stream, ")");
      }
      break;

    case EXPR_PHI:
      {
        size_t i;
        size_t nargs = m_expr.ops.phi.nargs;

        fprintf (stream, "PHI <");
        for (i = 0; i < nargs; i++)
          {
            print_generic_expr (stream, m_expr.ops.phi.args[i]);
            if (i + 1 < nargs)
              fprintf (stream, ", ");
          }
        fprintf (stream, ">");
      }
      break;
    }

  if (m_vop)
    {
      fprintf (stream, " with ");
      print_generic_expr (stream, m_vop);
    }

  fprintf (stream, "\n");
}
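
/* Sample dump line (illustrative): a redundant load recorded together
   with its virtual operand might print as

     STMT x_1 = MEM[(int *)p_2] with .MEM_7

   which pairs with the LKUP/FIND lines and the <<<</>>>> unwind markers
   emitted elsewhere in this file.  */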

/* Pop entries off the stack until we hit the NULL marker.
   For each entry popped, use the SRC/DEST pair to restore
   SRC to its prior value.  */

void
const_and_copies::pop_to_marker (void)
{
  while (m_stack.length () > 0)
    {
      tree prev_value, dest;

      dest = m_stack.pop ();

      /* A NULL value indicates we should stop unwinding, otherwise
         pop off the next entry as they're recorded in pairs.  */
      if (dest == NULL)
        break;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "<<<< COPY ");
          print_generic_expr (dump_file, dest);
          fprintf (dump_file, " = ");
          print_generic_expr (dump_file, SSA_NAME_VALUE (dest));
          fprintf (dump_file, "\n");
        }

      prev_value = m_stack.pop ();
      set_ssa_name_value (dest, prev_value);
    }
}

/* Record that X has the value Y and that X's previous value is PREV_X.

   This variant does not follow the value chain for Y.  */

void
const_and_copies::record_const_or_copy_raw (tree x, tree y, tree prev_x)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "0>>> COPY ");
      print_generic_expr (dump_file, x);
      fprintf (dump_file, " = ");
      print_generic_expr (dump_file, y);
      fprintf (dump_file, "\n");
    }

  set_ssa_name_value (x, y);
  m_stack.reserve (2);
  m_stack.quick_push (prev_x);
  m_stack.quick_push (x);
}
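
/* Illustrative round trip: record_const_or_copy_raw (x_1, zero, NULL_TREE)
   sets SSA_NAME_VALUE (x_1) to zero and pushes the pair [NULL_TREE, x_1];
   when const_and_copies::pop_to_marker later pops x_1 and then NULL_TREE,
   it restores SSA_NAME_VALUE (x_1) to its previous (here: unset) value.  */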

/* Record that X has the value Y.  */

void
const_and_copies::record_const_or_copy (tree x, tree y)
{
  record_const_or_copy (x, y, SSA_NAME_VALUE (x));
}

/* Record that X has the value Y and that X's previous value is PREV_X.

   This variant follows Y's value chain.  */

void
const_and_copies::record_const_or_copy (tree x, tree y, tree prev_x)
{
  /* Y may be NULL if we are invalidating entries in the table.  */
  if (y && TREE_CODE (y) == SSA_NAME)
    {
      tree tmp = SSA_NAME_VALUE (y);
      y = tmp ? tmp : y;
    }

  record_const_or_copy_raw (x, y, prev_x);
}

bool
expr_elt_hasher::equal (const value_type &p1, const compare_type &p2)
{
  const struct hashable_expr *expr1 = p1->expr ();
  const class expr_hash_elt *stamp1 = p1->stamp ();
  const struct hashable_expr *expr2 = p2->expr ();
  const class expr_hash_elt *stamp2 = p2->stamp ();

  /* This case should apply only when removing entries from the table.  */
  if (stamp1 == stamp2)
    return true;

  if (p1->hash () != p2->hash ())
    return false;

  /* In case of a collision, both RHS have to be identical and have the
     same VUSE operands.  */
  if (hashable_expr_equal_p (expr1, expr2)
      && types_compatible_p (expr1->type, expr2->type))
    return true;

  return false;
}

/* Given a conditional expression COND as a tree, initialize
   a hashable_expr expression EXPR.  The conditional must be a
   comparison or logical negation.  A constant or a variable is
   not permitted.  */

void
initialize_expr_from_cond (tree cond, struct hashable_expr *expr)
{
  expr->type = boolean_type_node;

  if (COMPARISON_CLASS_P (cond))
    {
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = TREE_CODE (cond);
      expr->ops.binary.opnd0 = TREE_OPERAND (cond, 0);
      expr->ops.binary.opnd1 = TREE_OPERAND (cond, 1);
    }
  else if (TREE_CODE (cond) == TRUTH_NOT_EXPR)
    {
      expr->kind = EXPR_UNARY;
      expr->ops.unary.op = TRUTH_NOT_EXPR;
      expr->ops.unary.opnd = TREE_OPERAND (cond, 0);
    }
  else
    gcc_unreachable ();
}

/* Build a cond_equivalence record indicating that the comparison
   CODE holds between operands OP0 and OP1 and push it onto *P.  */

static void
build_and_record_new_cond (enum tree_code code,
                           tree op0, tree op1,
                           vec<cond_equivalence> *p,
                           bool val = true)
{
  cond_equivalence c;
  struct hashable_expr *cond = &c.cond;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  cond->type = boolean_type_node;
  cond->kind = EXPR_BINARY;
  cond->ops.binary.op = code;
  cond->ops.binary.opnd0 = op0;
  cond->ops.binary.opnd1 = op1;

  c.value = val ? boolean_true_node : boolean_false_node;
  p->safe_push (c);
}

/* Record that COND is true and INVERTED is false into the edge information
   structure.  Also record that any conditions dominated by COND are true
   as well.

   For example, if a < b is true, then a <= b must also be true.  */

void
record_conditions (vec<cond_equivalence> *p, tree cond, tree inverted)
{
  tree op0, op1;
  cond_equivalence c;

  if (!COMPARISON_CLASS_P (cond))
    return;

  op0 = TREE_OPERAND (cond, 0);
  op1 = TREE_OPERAND (cond, 1);

  switch (TREE_CODE (cond))
    {
    case LT_EXPR:
    case GT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
          build_and_record_new_cond (LTGT_EXPR, op0, op1, p);
        }

      build_and_record_new_cond ((TREE_CODE (cond) == LT_EXPR
                                  ? LE_EXPR : GE_EXPR),
                                 op0, op1, p);
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      build_and_record_new_cond (EQ_EXPR, op0, op1, p, false);
      break;

    case GE_EXPR:
    case LE_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
        }
      break;

    case EQ_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
        }
      build_and_record_new_cond (LE_EXPR, op0, op1, p);
      build_and_record_new_cond (GE_EXPR, op0, op1, p);
      break;

    case UNORDERED_EXPR:
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNLE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNGE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNEQ_EXPR, op0, op1, p);
      build_and_record_new_cond (UNLT_EXPR, op0, op1, p);
      build_and_record_new_cond (UNGT_EXPR, op0, op1, p);
      break;

    case UNLT_EXPR:
    case UNGT_EXPR:
      build_and_record_new_cond ((TREE_CODE (cond) == UNLT_EXPR
                                  ? UNLE_EXPR : UNGE_EXPR),
                                 op0, op1, p);
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      break;

    case UNEQ_EXPR:
      build_and_record_new_cond (UNLE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNGE_EXPR, op0, op1, p);
      break;

    case LTGT_EXPR:
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
      break;

    default:
      break;
    }

  /* Now store the original true and false conditions into the first
     two slots.  */
  initialize_expr_from_cond (cond, &c.cond);
  c.value = boolean_true_node;
  p->safe_push (c);

  /* It is possible for INVERTED to be the negation of a comparison,
     and not a valid RHS or GIMPLE_COND condition.  This happens because
     invert_truthvalue may return such an expression when asked to invert
     a floating-point comparison.  These comparisons are not assumed to
     obey the trichotomy law.  */
  initialize_expr_from_cond (inverted, &c.cond);
  c.value = boolean_false_node;
  p->safe_push (c);
}
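
/* Worked example (illustrative): for an integral "a_1 < b_2" this pushes,
   in order: a_1 <= b_2 == true, a_1 != b_2 == true, a_1 == b_2 == false,
   then the original condition a_1 < b_2 == true, and finally the inverted
   condition (e.g. a_1 >= b_2) == false.  For floating-point operands the
   ORDERED and LTGT equivalences are recorded first.  */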