/* Scoped tables for SSA dominator optimizations.
   Copyright (C) 2013-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "function.h"
#include "basic-block.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "tree-pretty-print.h"
#include "tree-ssa-scopedtables.h"
#include "tree-ssa-threadedge.h"
#include "stor-layout.h"
#include "fold-const.h"
#include "tree-eh.h"
#include "internal-fn.h"
#include "tree-dfa.h"
#include "options.h"
#include "params.h"

static bool hashable_expr_equal_p (const struct hashable_expr *,
                                   const struct hashable_expr *);
/* Initialize local stacks for this optimizer and record equivalences
   upon entry to BB.  Equivalences can come from the edge traversed to
   reach BB or they may come from PHI nodes at the start of BB.  */

/* Pop items off the unwinding stack, removing each from the hash table
   until a marker is encountered.  */
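/* The unwinding stack holds <new entry, replaced entry> pairs.  A pair
   whose first member is NULL is the block marker.  When the second
   member of a popped pair is non-NULL, the popped entry had displaced
   an older table entry, which is restored below.  */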
void
avail_exprs_stack::pop_to_marker ()
{
  /* Remove all the expressions made available in this block.  */
  while (m_stack.length () > 0)
    {
      std::pair<expr_hash_elt_t, expr_hash_elt_t> victim = m_stack.pop ();
      expr_hash_elt **slot;

      if (victim.first == NULL)
        break;

      /* This must precede the actual removal from the hash table,
         as ELEMENT and the table entry may share a call argument
         vector which will be freed during removal.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "<<<< ");
          victim.first->print (dump_file);
        }

      slot = m_avail_exprs->find_slot (victim.first, NO_INSERT);
      gcc_assert (slot && *slot == victim.first);
      if (victim.second != NULL)
        {
          delete *slot;
          *slot = victim.second;
        }
      else
        m_avail_exprs->clear_slot (slot);
    }
}
/* Add <ELT1,ELT2> to the unwinding stack so they can be later removed
   from the hash table.  */

void
avail_exprs_stack::record_expr (class expr_hash_elt *elt1,
                                class expr_hash_elt *elt2,
                                char type)
{
  if (elt1 && dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "%c>>> ", type);
      elt1->print (dump_file);
    }

  m_stack.safe_push (std::pair<expr_hash_elt_t, expr_hash_elt_t> (elt1, elt2));
}
/* Helper for walk_non_aliased_vuses.  Determine if we arrived at
   the desired memory state.  */

static void *
vuse_eq (ao_ref *, tree vuse1, void *data)
{
  tree vuse2 = (tree) data;
  if (vuse1 == vuse2)
    return data;

  return NULL;
}
/* We looked for STMT in the hash table, but did not find it.

   If STMT is an assignment from a binary operator, we may know something
   about the operands' relationship to each other which would allow
   us to derive a constant value for the RHS of STMT.  */
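/* For example, if the table records that "a_1 == b_2" is known to be
   true, then "x_3 = a_1 - b_2" must compute zero for integer operands,
   even though the values of a_1 and b_2 are unknown.  (The SSA names
   here are purely illustrative.)  */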
tree
avail_exprs_stack::simplify_binary_operation (gimple *stmt,
                                              class expr_hash_elt element)
{
  if (is_gimple_assign (stmt))
    {
      struct hashable_expr *expr = element.expr ();
      if (expr->kind == EXPR_BINARY)
        {
          enum tree_code code = expr->ops.binary.op;

          switch (code)
            {
            /* For these cases, if we know the operands
               are equal, then we know the result.  */
            case MIN_EXPR:
            case MAX_EXPR:
            case BIT_IOR_EXPR:
            case BIT_AND_EXPR:
            case BIT_XOR_EXPR:
            case MINUS_EXPR:
            case TRUNC_DIV_EXPR:
            case CEIL_DIV_EXPR:
            case FLOOR_DIV_EXPR:
            case ROUND_DIV_EXPR:
            case EXACT_DIV_EXPR:
            case TRUNC_MOD_EXPR:
            case CEIL_MOD_EXPR:
            case FLOOR_MOD_EXPR:
            case ROUND_MOD_EXPR:
              {
                /* Build a simple equality expr and query the hash table
                   for it.  */
                struct hashable_expr expr;
                expr.type = boolean_type_node;
                expr.kind = EXPR_BINARY;
                expr.ops.binary.op = EQ_EXPR;
                expr.ops.binary.opnd0 = gimple_assign_rhs1 (stmt);
                expr.ops.binary.opnd1 = gimple_assign_rhs2 (stmt);
                class expr_hash_elt element2 (&expr, NULL_TREE);
                expr_hash_elt **slot
                  = m_avail_exprs->find_slot (&element2, NO_INSERT);
                tree result_type = TREE_TYPE (gimple_assign_lhs (stmt));

                /* If the query was successful and returned a nonzero
                   result, then we know that the operands of the binary
                   expression are the same.  In many cases this allows
                   us to compute a constant result of the expression
                   at compile time, even if we do not know the exact
                   values of the operands.  */
                if (slot && *slot && integer_onep ((*slot)->lhs ()))
                  {
                    switch (code)
                      {
                      case MIN_EXPR:
                      case MAX_EXPR:
                      case BIT_IOR_EXPR:
                      case BIT_AND_EXPR:
                        return gimple_assign_rhs1 (stmt);

                      case MINUS_EXPR:
                        /* This is unsafe for certain floats even in non-IEEE
                           formats.  In IEEE, it is unsafe because it gives
                           the wrong result for NaNs.  */
                        if (FLOAT_TYPE_P (result_type)
                            && HONOR_NANS (result_type))
                          break;
                        /* FALLTHRU */
                      case BIT_XOR_EXPR:
                      case TRUNC_MOD_EXPR:
                      case CEIL_MOD_EXPR:
                      case FLOOR_MOD_EXPR:
                      case ROUND_MOD_EXPR:
                        return build_zero_cst (result_type);

                      case TRUNC_DIV_EXPR:
                      case CEIL_DIV_EXPR:
                      case FLOOR_DIV_EXPR:
                      case ROUND_DIV_EXPR:
                      case EXACT_DIV_EXPR:
                        /* Avoid _Fract types where we can't build 1.  */
                        if (ALL_FRACT_MODE_P (TYPE_MODE (result_type)))
                          break;
                        return build_one_cst (result_type);

                      default:
                        gcc_unreachable ();
                      }
                  }
                break;
              }

            default:
              break;
            }
        }
    }

  return NULL_TREE;
}
/* Search for an existing instance of STMT in the AVAIL_EXPRS_STACK table.
   If found, return its LHS.  Otherwise insert STMT in the table and
   return NULL_TREE.

   Also, when an expression is first inserted in the table, it is
   added to AVAIL_EXPRS_STACK, so that it can be removed when
   we finish processing this block and its children.  */
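/* For instance, when a second "tmp_5 = x_1 + y_2" appears in a block
   dominated by an identical computation, the lookup returns the LHS of
   the earlier statement so the caller can turn the redundant statement
   into a copy.  (The SSA names are illustrative.)  */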
tree
avail_exprs_stack::lookup_avail_expr (gimple *stmt, bool insert, bool tbaa_p)
{
  expr_hash_elt **slot;
  tree lhs;

  /* Get LHS of phi, assignment, or call; else NULL_TREE.  */
  if (gimple_code (stmt) == GIMPLE_PHI)
    lhs = gimple_phi_result (stmt);
  else
    lhs = gimple_get_lhs (stmt);

  class expr_hash_elt element (stmt, lhs);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "LKUP ");
      element.print (dump_file);
    }

  /* Don't bother remembering constant assignments and copy operations.
     Constants and copy operations are handled by the constant/copy propagator
     in optimize_stmt.  */
  if (element.expr()->kind == EXPR_SINGLE
      && (TREE_CODE (element.expr()->ops.single.rhs) == SSA_NAME
          || is_gimple_min_invariant (element.expr()->ops.single.rhs)))
    return NULL_TREE;

  /* Finally try to find the expression in the main expression hash table.  */
  slot = m_avail_exprs->find_slot (&element, (insert ? INSERT : NO_INSERT));
  if (slot == NULL)
    {
      return NULL_TREE;
    }
  else if (*slot == NULL)
    {
      /* If we did not find the expression in the hash table, we may still
         be able to produce a result for some expressions.  */
      tree retval = avail_exprs_stack::simplify_binary_operation (stmt,
                                                                  element);

      /* We have, in effect, allocated *SLOT for ELEMENT at this point.
         We must initialize *SLOT to a real entry, even if we found a
         way to prove ELEMENT was a constant after not finding ELEMENT
         in the hash table.

         An uninitialized or empty slot is an indication no prior objects
         entered into the hash table had a hash collision with ELEMENT.

         If we fail to do so and had such entries in the table, they
         would become unreachable.  */
      class expr_hash_elt *element2 = new expr_hash_elt (element);
      *slot = element2;

      record_expr (element2, NULL, '2');
      return retval;
    }

  /* If we found a redundant memory operation do an alias walk to
     check if we can re-use it.  */
  if (gimple_vuse (stmt) != (*slot)->vop ())
    {
      tree vuse1 = (*slot)->vop ();
      tree vuse2 = gimple_vuse (stmt);
      /* If we have a load of a register and a candidate in the
         hash with vuse1 then try to reach its stmt by walking
         up the virtual use-def chain using walk_non_aliased_vuses.
         But don't do this when removing expressions from the hash.  */
      ao_ref ref;
      unsigned limit = PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS);
      if (!(vuse1 && vuse2
            && gimple_assign_single_p (stmt)
            && TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME
            && (ao_ref_init (&ref, gimple_assign_rhs1 (stmt)),
                ref.base_alias_set = ref.ref_alias_set = tbaa_p ? -1 : 0, true)
            && walk_non_aliased_vuses (&ref, vuse2, vuse_eq, NULL, NULL,
                                       limit, vuse1) != NULL))
        {
          if (insert)
            {
              class expr_hash_elt *element2 = new expr_hash_elt (element);

              /* Insert the expr into the hash by replacing the current
                 entry and recording the value to restore in the
                 avail_exprs_stack.  */
              record_expr (element2, *slot, '2');
              *slot = element2;
            }
          return NULL_TREE;
        }
    }

  /* Extract the LHS of the assignment so that it can be used as the current
     definition of another variable.  */
  lhs = (*slot)->lhs ();

  /* Valueize the result.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      tree tem = SSA_NAME_VALUE (lhs);
      if (tem)
        lhs = tem;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "FIND: ");
      print_generic_expr (dump_file, lhs);
      fprintf (dump_file, "\n");
    }

  return lhs;
}
/* Enter condition equivalence P into the hash table.

   This indicates that a conditional expression has a known
   boolean value.  */
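/* For example, recording that "a_1 < b_2" has the value true lets a
   later lookup of that same condition fold to true immediately.  (The
   SSA names are illustrative.)  */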
void
avail_exprs_stack::record_cond (cond_equivalence *p)
{
  class expr_hash_elt *element = new expr_hash_elt (&p->cond, p->value);
  expr_hash_elt **slot;

  slot = m_avail_exprs->find_slot_with_hash (element, element->hash (), INSERT);
  if (*slot == NULL)
    {
      *slot = element;
      record_expr (element, NULL, '1');
    }
  else
    delete element;
}
/* Generate a hash value for a pair of expressions.  This can be used
   iteratively by passing a previous result in HSTATE.

   The same hash value is always returned for a given pair of expressions,
   regardless of the order in which they are presented.  This is useful in
   hashing the operands of commutative functions.  */
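/* Concretely, hashing the operand pair (a, b) of "a + b" produces the
   same value as hashing (b, a) of "b + a", so both spellings of a
   commutative expression land in the same hash bucket.  */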
namespace inchash
{

static void
add_expr_commutative (const_tree t1, const_tree t2, hash &hstate)
{
  hash one, two;

  inchash::add_expr (t1, one);
  inchash::add_expr (t2, two);
  hstate.add_commutative (one, two);
}
/* Compute a hash value for a hashable_expr value EXPR and a
   previously accumulated hash value VAL.  If two hashable_expr
   values compare equal with hashable_expr_equal_p, they must
   hash to the same value, given an identical value of VAL.
   The logic is intended to follow inchash::add_expr in tree.c.  */
static void
add_hashable_expr (const struct hashable_expr *expr, hash &hstate)
{
  switch (expr->kind)
    {
    case EXPR_SINGLE:
      inchash::add_expr (expr->ops.single.rhs, hstate);
      break;

    case EXPR_UNARY:
      hstate.add_object (expr->ops.unary.op);

      /* Make sure to include signedness in the hash computation.
         Don't hash the type, that can lead to having nodes which
         compare equal according to operand_equal_p, but which
         have different hash codes.  */
      if (CONVERT_EXPR_CODE_P (expr->ops.unary.op)
          || expr->ops.unary.op == NON_LVALUE_EXPR)
        hstate.add_int (TYPE_UNSIGNED (expr->type));

      inchash::add_expr (expr->ops.unary.opnd, hstate);
      break;

    case EXPR_BINARY:
      hstate.add_object (expr->ops.binary.op);
      if (commutative_tree_code (expr->ops.binary.op))
        inchash::add_expr_commutative (expr->ops.binary.opnd0,
                                       expr->ops.binary.opnd1, hstate);
      else
        {
          inchash::add_expr (expr->ops.binary.opnd0, hstate);
          inchash::add_expr (expr->ops.binary.opnd1, hstate);
        }
      break;

    case EXPR_TERNARY:
      hstate.add_object (expr->ops.ternary.op);
      if (commutative_ternary_tree_code (expr->ops.ternary.op))
        inchash::add_expr_commutative (expr->ops.ternary.opnd0,
                                       expr->ops.ternary.opnd1, hstate);
      else
        {
          inchash::add_expr (expr->ops.ternary.opnd0, hstate);
          inchash::add_expr (expr->ops.ternary.opnd1, hstate);
        }
      inchash::add_expr (expr->ops.ternary.opnd2, hstate);
      break;

    case EXPR_CALL:
      {
        size_t i;
        enum tree_code code = CALL_EXPR;
        gcall *fn_from;

        hstate.add_object (code);
        fn_from = expr->ops.call.fn_from;
        if (gimple_call_internal_p (fn_from))
          hstate.merge_hash ((hashval_t) gimple_call_internal_fn (fn_from));
        else
          inchash::add_expr (gimple_call_fn (fn_from), hstate);
        for (i = 0; i < expr->ops.call.nargs; i++)
          inchash::add_expr (expr->ops.call.args[i], hstate);
      }
      break;

    case EXPR_PHI:
      {
        size_t i;

        for (i = 0; i < expr->ops.phi.nargs; i++)
          inchash::add_expr (expr->ops.phi.args[i], hstate);
      }
      break;

    default:
      gcc_unreachable ();
    }
}

}
/* Hashing and equality functions.  We compute a value number for expressions
   using the code of the expression and the SSA numbers of its operands.  */

static hashval_t
avail_expr_hash (class expr_hash_elt *p)
{
  const struct hashable_expr *expr = p->expr ();
  inchash::hash hstate;

  if (expr->kind == EXPR_SINGLE)
    {
      /* T could potentially be a switch index or a goto dest.  */
      tree t = expr->ops.single.rhs;
      if (TREE_CODE (t) == MEM_REF || handled_component_p (t))
        {
          /* Make equivalent statements of both these kinds hash together.
             Dealing with both MEM_REF and ARRAY_REF allows us not to care
             about equivalence with other statements not considered here.  */
          bool reverse;
          poly_int64 offset, size, max_size;
          tree base = get_ref_base_and_extent (t, &offset, &size, &max_size,
                                               &reverse);
          /* Strictly, we could try to normalize variable-sized accesses too,
             but here we just deal with the common case.  */
          if (known_size_p (max_size)
              && known_eq (size, max_size))
            {
              enum tree_code code = MEM_REF;
              hstate.add_object (code);
              inchash::add_expr (base, hstate);
              hstate.add_object (offset);
              hstate.add_object (size);
              return hstate.end ();
            }
        }
    }

  inchash::add_hashable_expr (expr, hstate);

  return hstate.end ();
}
/* Compares trees T0 and T1 to see if they are MEM_REFs or ARRAY_REFs
   equivalent to each other.  (That is, they return the same bits of
   memory.)

   Return TRUE if the two are so equivalent; FALSE if not (which could still
   mean the two are equivalent by other means).  */
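/* For example, an access written "a[2]" and one written
   "MEM[(int *)&a + 8]" read the same bits when the element type is a
   4-byte int, so the two should compare equal here.  (The offsets are
   illustrative.)  */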
static bool
equal_mem_array_ref_p (tree t0, tree t1)
{
  if (TREE_CODE (t0) != MEM_REF && ! handled_component_p (t0))
    return false;
  if (TREE_CODE (t1) != MEM_REF && ! handled_component_p (t1))
    return false;

  if (!types_compatible_p (TREE_TYPE (t0), TREE_TYPE (t1)))
    return false;

  bool rev0;
  poly_int64 off0, sz0, max0;
  tree base0 = get_ref_base_and_extent (t0, &off0, &sz0, &max0, &rev0);
  if (!known_size_p (max0)
      || maybe_ne (sz0, max0))
    return false;

  bool rev1;
  poly_int64 off1, sz1, max1;
  tree base1 = get_ref_base_and_extent (t1, &off1, &sz1, &max1, &rev1);
  if (!known_size_p (max1)
      || maybe_ne (sz1, max1))
    return false;

  if (rev0 != rev1 || maybe_ne (sz0, sz1) || maybe_ne (off0, off1))
    return false;

  return operand_equal_p (base0, base1, 0);
}
/* Compare two hashable_expr structures for equivalence.  They are
   considered equivalent when the expressions they denote must
   necessarily be equal.  The logic is intended to follow that of
   operand_equal_p in fold-const.c.  */
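/* Note that commutative binary and ternary codes are allowed to match
   with their operands in either order; this mirrors the
   order-insensitive hashing above, preserving the hash/equality
   contract.  */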
static bool
hashable_expr_equal_p (const struct hashable_expr *expr0,
                       const struct hashable_expr *expr1)
{
  tree type0 = expr0->type;
  tree type1 = expr1->type;

  /* If either type is NULL, there is nothing to check.  */
  if ((type0 == NULL_TREE) ^ (type1 == NULL_TREE))
    return false;

  /* If the two types don't have the same signedness, precision, and mode,
     we can't consider them equal.  */
  if (type0 != type1
      && (TREE_CODE (type0) == ERROR_MARK
          || TREE_CODE (type1) == ERROR_MARK
          || TYPE_UNSIGNED (type0) != TYPE_UNSIGNED (type1)
          || TYPE_PRECISION (type0) != TYPE_PRECISION (type1)
          || TYPE_MODE (type0) != TYPE_MODE (type1)))
    return false;

  if (expr0->kind != expr1->kind)
    return false;

  switch (expr0->kind)
    {
    case EXPR_SINGLE:
      return equal_mem_array_ref_p (expr0->ops.single.rhs,
                                    expr1->ops.single.rhs)
             || operand_equal_p (expr0->ops.single.rhs,
                                 expr1->ops.single.rhs, 0);

    case EXPR_UNARY:
      if (expr0->ops.unary.op != expr1->ops.unary.op)
        return false;

      if ((CONVERT_EXPR_CODE_P (expr0->ops.unary.op)
           || expr0->ops.unary.op == NON_LVALUE_EXPR)
          && TYPE_UNSIGNED (expr0->type) != TYPE_UNSIGNED (expr1->type))
        return false;

      return operand_equal_p (expr0->ops.unary.opnd,
                              expr1->ops.unary.opnd, 0);

    case EXPR_BINARY:
      if (expr0->ops.binary.op != expr1->ops.binary.op)
        return false;

      if (operand_equal_p (expr0->ops.binary.opnd0,
                           expr1->ops.binary.opnd0, 0)
          && operand_equal_p (expr0->ops.binary.opnd1,
                              expr1->ops.binary.opnd1, 0))
        return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (expr0->ops.binary.op)
              && operand_equal_p (expr0->ops.binary.opnd0,
                                  expr1->ops.binary.opnd1, 0)
              && operand_equal_p (expr0->ops.binary.opnd1,
                                  expr1->ops.binary.opnd0, 0));

    case EXPR_TERNARY:
      if (expr0->ops.ternary.op != expr1->ops.ternary.op
          || !operand_equal_p (expr0->ops.ternary.opnd2,
                               expr1->ops.ternary.opnd2, 0))
        return false;

      /* BIT_INSERT_EXPR has an implicit operand as the type precision
         of op1.  Need to check to make sure they are the same.  */
      if (expr0->ops.ternary.op == BIT_INSERT_EXPR
          && TREE_CODE (expr0->ops.ternary.opnd1) == INTEGER_CST
          && TREE_CODE (expr1->ops.ternary.opnd1) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (expr0->ops.ternary.opnd1))
             != TYPE_PRECISION (TREE_TYPE (expr1->ops.ternary.opnd1)))
        return false;

      if (operand_equal_p (expr0->ops.ternary.opnd0,
                           expr1->ops.ternary.opnd0, 0)
          && operand_equal_p (expr0->ops.ternary.opnd1,
                              expr1->ops.ternary.opnd1, 0))
        return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_ternary_tree_code (expr0->ops.ternary.op)
              && operand_equal_p (expr0->ops.ternary.opnd0,
                                  expr1->ops.ternary.opnd1, 0)
              && operand_equal_p (expr0->ops.ternary.opnd1,
                                  expr1->ops.ternary.opnd0, 0));

    case EXPR_CALL:
      {
        size_t i;

        /* If the calls are to different functions, then they
           clearly cannot be equal.  */
        if (!gimple_call_same_target_p (expr0->ops.call.fn_from,
                                        expr1->ops.call.fn_from))
          return false;

        if (! expr0->ops.call.pure)
          return false;

        if (expr0->ops.call.nargs != expr1->ops.call.nargs)
          return false;

        for (i = 0; i < expr0->ops.call.nargs; i++)
          if (! operand_equal_p (expr0->ops.call.args[i],
                                 expr1->ops.call.args[i], 0))
            return false;

        if (stmt_could_throw_p (cfun, expr0->ops.call.fn_from))
          {
            int lp0 = lookup_stmt_eh_lp (expr0->ops.call.fn_from);
            int lp1 = lookup_stmt_eh_lp (expr1->ops.call.fn_from);
            if ((lp0 > 0 || lp1 > 0) && lp0 != lp1)
              return false;
          }

        return true;
      }

    case EXPR_PHI:
      {
        size_t i;

        if (expr0->ops.phi.nargs != expr1->ops.phi.nargs)
          return false;

        for (i = 0; i < expr0->ops.phi.nargs; i++)
          if (! operand_equal_p (expr0->ops.phi.args[i],
                                 expr1->ops.phi.args[i], 0))
            return false;

        return true;
      }

    default:
      gcc_unreachable ();
    }
}
/* Given a statement STMT, construct a hash table element.  */

expr_hash_elt::expr_hash_elt (gimple *stmt, tree orig_lhs)
{
  enum gimple_code code = gimple_code (stmt);
  struct hashable_expr *expr = this->expr ();

  if (code == GIMPLE_ASSIGN)
    {
      enum tree_code subcode = gimple_assign_rhs_code (stmt);

      switch (get_gimple_rhs_class (subcode))
        {
        case GIMPLE_SINGLE_RHS:
          expr->kind = EXPR_SINGLE;
          expr->type = TREE_TYPE (gimple_assign_rhs1 (stmt));
          expr->ops.single.rhs = gimple_assign_rhs1 (stmt);
          break;
        case GIMPLE_UNARY_RHS:
          expr->kind = EXPR_UNARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          if (CONVERT_EXPR_CODE_P (subcode))
            subcode = NOP_EXPR;
          expr->ops.unary.op = subcode;
          expr->ops.unary.opnd = gimple_assign_rhs1 (stmt);
          break;
        case GIMPLE_BINARY_RHS:
          expr->kind = EXPR_BINARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.binary.op = subcode;
          expr->ops.binary.opnd0 = gimple_assign_rhs1 (stmt);
          expr->ops.binary.opnd1 = gimple_assign_rhs2 (stmt);
          break;
        case GIMPLE_TERNARY_RHS:
          expr->kind = EXPR_TERNARY;
          expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
          expr->ops.ternary.op = subcode;
          expr->ops.ternary.opnd0 = gimple_assign_rhs1 (stmt);
          expr->ops.ternary.opnd1 = gimple_assign_rhs2 (stmt);
          expr->ops.ternary.opnd2 = gimple_assign_rhs3 (stmt);
          break;
        default:
          gcc_unreachable ();
        }
    }
  else if (code == GIMPLE_COND)
    {
      expr->type = boolean_type_node;
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = gimple_cond_code (stmt);
      expr->ops.binary.opnd0 = gimple_cond_lhs (stmt);
      expr->ops.binary.opnd1 = gimple_cond_rhs (stmt);
    }
  else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      size_t nargs = gimple_call_num_args (call_stmt);
      size_t i;

      gcc_assert (gimple_call_lhs (call_stmt));

      expr->type = TREE_TYPE (gimple_call_lhs (call_stmt));
      expr->kind = EXPR_CALL;
      expr->ops.call.fn_from = call_stmt;

      if (gimple_call_flags (call_stmt) & (ECF_CONST | ECF_PURE))
        expr->ops.call.pure = true;
      else
        expr->ops.call.pure = false;

      expr->ops.call.nargs = nargs;
      expr->ops.call.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        expr->ops.call.args[i] = gimple_call_arg (call_stmt, i);
    }
  else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
    {
      expr->type = TREE_TYPE (gimple_switch_index (swtch_stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_switch_index (swtch_stmt);
    }
  else if (code == GIMPLE_GOTO)
    {
      expr->type = TREE_TYPE (gimple_goto_dest (stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_goto_dest (stmt);
    }
  else if (code == GIMPLE_PHI)
    {
      size_t nargs = gimple_phi_num_args (stmt);
      size_t i;

      expr->type = TREE_TYPE (gimple_phi_result (stmt));
      expr->kind = EXPR_PHI;
      expr->ops.phi.nargs = nargs;
      expr->ops.phi.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        expr->ops.phi.args[i] = gimple_phi_arg_def (stmt, i);
    }
  else
    gcc_unreachable ();

  m_lhs = orig_lhs;
  m_vop = gimple_vuse (stmt);
  m_hash = avail_expr_hash (this);
  m_stamp = this;
}
/* Given a hashable_expr expression ORIG and an ORIG_LHS,
   construct a hash table element.  */

expr_hash_elt::expr_hash_elt (struct hashable_expr *orig, tree orig_lhs)
{
  m_expr = *orig;
  m_lhs = orig_lhs;
  m_vop = NULL_TREE;
  m_hash = avail_expr_hash (this);
  m_stamp = this;
}
/* Copy constructor for a hash table element.  */

expr_hash_elt::expr_hash_elt (class expr_hash_elt &old_elt)
{
  m_expr = old_elt.m_expr;
  m_lhs = old_elt.m_lhs;
  m_vop = old_elt.m_vop;
  m_hash = old_elt.m_hash;
  m_stamp = this;

  /* Now deep copy the malloc'd space for CALL and PHI args.  */
  if (old_elt.m_expr.kind == EXPR_CALL)
    {
      size_t nargs = old_elt.m_expr.ops.call.nargs;
      size_t i;

      m_expr.ops.call.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        m_expr.ops.call.args[i] = old_elt.m_expr.ops.call.args[i];
    }
  else if (old_elt.m_expr.kind == EXPR_PHI)
    {
      size_t nargs = old_elt.m_expr.ops.phi.nargs;
      size_t i;

      m_expr.ops.phi.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
        m_expr.ops.phi.args[i] = old_elt.m_expr.ops.phi.args[i];
    }
}
/* Calls and PHIs have a variable number of arguments that are allocated
   on the heap.  Thus we have to have a special dtor to release them.  */

expr_hash_elt::~expr_hash_elt ()
{
  if (m_expr.kind == EXPR_CALL)
    free (m_expr.ops.call.args);
  else if (m_expr.kind == EXPR_PHI)
    free (m_expr.ops.phi.args);
}
/* Print a diagnostic dump of an expression hash table entry.  */

void
expr_hash_elt::print (FILE *stream)
{
  fprintf (stream, "STMT ");

  if (m_lhs)
    {
      print_generic_expr (stream, m_lhs);
      fprintf (stream, " = ");
    }

  switch (m_expr.kind)
    {
    case EXPR_SINGLE:
      print_generic_expr (stream, m_expr.ops.single.rhs);
      break;

    case EXPR_UNARY:
      fprintf (stream, "%s ", get_tree_code_name (m_expr.ops.unary.op));
      print_generic_expr (stream, m_expr.ops.unary.opnd);
      break;

    case EXPR_BINARY:
      print_generic_expr (stream, m_expr.ops.binary.opnd0);
      fprintf (stream, " %s ", get_tree_code_name (m_expr.ops.binary.op));
      print_generic_expr (stream, m_expr.ops.binary.opnd1);
      break;

    case EXPR_TERNARY:
      fprintf (stream, " %s <", get_tree_code_name (m_expr.ops.ternary.op));
      print_generic_expr (stream, m_expr.ops.ternary.opnd0);
      fputs (", ", stream);
      print_generic_expr (stream, m_expr.ops.ternary.opnd1);
      fputs (", ", stream);
      print_generic_expr (stream, m_expr.ops.ternary.opnd2);
      fputs (">", stream);
      break;

    case EXPR_CALL:
      {
        size_t i;
        size_t nargs = m_expr.ops.call.nargs;
        gcall *fn_from;

        fn_from = m_expr.ops.call.fn_from;
        if (gimple_call_internal_p (fn_from))
          fprintf (stream, ".%s",
                   internal_fn_name (gimple_call_internal_fn (fn_from)));
        else
          print_generic_expr (stream, gimple_call_fn (fn_from));
        fprintf (stream, " (");
        for (i = 0; i < nargs; i++)
          {
            print_generic_expr (stream, m_expr.ops.call.args[i]);
            if (i + 1 < nargs)
              fprintf (stream, ", ");
          }
        fprintf (stream, ")");
      }
      break;

    case EXPR_PHI:
      {
        size_t i;
        size_t nargs = m_expr.ops.phi.nargs;

        fprintf (stream, "PHI <");
        for (i = 0; i < nargs; i++)
          {
            print_generic_expr (stream, m_expr.ops.phi.args[i]);
            if (i + 1 < nargs)
              fprintf (stream, ", ");
          }
        fprintf (stream, ">");
      }
      break;
    }

  if (m_vop)
    {
      fprintf (stream, " with ");
      print_generic_expr (stream, m_vop);
    }

  fprintf (stream, "\n");
}
/* Pop entries off the stack until we hit the NULL marker.
   For each entry popped, use the SRC/DEST pair to restore
   SRC to its prior value.  */

void
const_and_copies::pop_to_marker (void)
{
  while (m_stack.length () > 0)
    {
      tree prev_value, dest;

      dest = m_stack.pop ();

      /* A NULL value indicates we should stop unwinding, otherwise
         pop off the next entry as they're recorded in pairs.  */
      if (dest == NULL)
        break;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "<<<< COPY ");
          print_generic_expr (dump_file, dest);
          fprintf (dump_file, " = ");
          print_generic_expr (dump_file, SSA_NAME_VALUE (dest));
          fprintf (dump_file, "\n");
        }

      prev_value = m_stack.pop ();
      set_ssa_name_value (dest, prev_value);
    }
}
/* Record that X has the value Y and that X's previous value is PREV_X.

   This variant does not follow the value chain for Y.  */

void
const_and_copies::record_const_or_copy_raw (tree x, tree y, tree prev_x)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "0>>> COPY ");
      print_generic_expr (dump_file, x);
      fprintf (dump_file, " = ");
      print_generic_expr (dump_file, y);
      fprintf (dump_file, "\n");
    }

  set_ssa_name_value (x, y);
  m_stack.reserve (2);
  m_stack.quick_push (prev_x);
  m_stack.quick_push (x);
}
/* Record that X has the value Y.  */

void
const_and_copies::record_const_or_copy (tree x, tree y)
{
  record_const_or_copy (x, y, SSA_NAME_VALUE (x));
}
/* Record that X has the value Y and that X's previous value is PREV_X.

   This variant follows Y's value chain.  */

void
const_and_copies::record_const_or_copy (tree x, tree y, tree prev_x)
{
  /* Y may be NULL if we are invalidating entries in the table.  */
  if (y && TREE_CODE (y) == SSA_NAME)
    {
      tree tmp = SSA_NAME_VALUE (y);
      y = tmp ? tmp : y;
    }

  record_const_or_copy_raw (x, y, prev_x);
}
bool
expr_elt_hasher::equal (const value_type &p1, const compare_type &p2)
{
  const struct hashable_expr *expr1 = p1->expr ();
  const struct expr_hash_elt *stamp1 = p1->stamp ();
  const struct hashable_expr *expr2 = p2->expr ();
  const struct expr_hash_elt *stamp2 = p2->stamp ();

  /* This case should apply only when removing entries from the table.  */
  if (stamp1 == stamp2)
    return true;

  if (p1->hash () != p2->hash ())
    return false;

  /* In case of a collision, both RHS have to be identical and have the
     same VUSE operands.  */
  if (hashable_expr_equal_p (expr1, expr2)
      && types_compatible_p (expr1->type, expr2->type))
    return true;

  return false;
}
/* Given a conditional expression COND as a tree, initialize
   a hashable_expr expression EXPR.  The conditional must be a
   comparison or logical negation.  A constant or a variable is
   not permitted.  */
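/* For example, the condition "a_1 < b_2" becomes an EXPR_BINARY with
   code LT_EXPR, while "!flag_3" becomes an EXPR_UNARY with code
   TRUTH_NOT_EXPR.  (The SSA names are illustrative.)  */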
void
initialize_expr_from_cond (tree cond, struct hashable_expr *expr)
{
  expr->type = boolean_type_node;

  if (COMPARISON_CLASS_P (cond))
    {
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = TREE_CODE (cond);
      expr->ops.binary.opnd0 = TREE_OPERAND (cond, 0);
      expr->ops.binary.opnd1 = TREE_OPERAND (cond, 1);
    }
  else if (TREE_CODE (cond) == TRUTH_NOT_EXPR)
    {
      expr->kind = EXPR_UNARY;
      expr->ops.unary.op = TRUTH_NOT_EXPR;
      expr->ops.unary.opnd = TREE_OPERAND (cond, 0);
    }
  else
    gcc_unreachable ();
}
/* Build a cond_equivalence record indicating that the comparison
   CODE holds between operands OP0 and OP1 and push it to **P.  */
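/* For example, build_and_record_new_cond (LE_EXPR, op0, op1, p) records
   "op0 <= op1" as known true; passing false for VAL records the
   condition as known false instead.  */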
static void
build_and_record_new_cond (enum tree_code code,
                           tree op0, tree op1,
                           vec<cond_equivalence> *p,
                           bool val = true)
{
  cond_equivalence c;
  struct hashable_expr *cond = &c.cond;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  cond->type = boolean_type_node;
  cond->kind = EXPR_BINARY;
  cond->ops.binary.op = code;
  cond->ops.binary.opnd0 = op0;
  cond->ops.binary.opnd1 = op1;

  c.value = val ? boolean_true_node : boolean_false_node;
  p->safe_push (c);
}
/* Record that COND is true and INVERTED is false into the edge information
   structure.  Also record that any conditions dominated by COND are true
   as well.

   For example, if a < b is true, then a <= b must also be true.  */
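/* Continuing that example: from "a < b" being true we record "a <= b"
   and "a != b" as true and "a == b" as false; if the operands are
   floating point we also record "ORDERED (a, b)" and "LTGT (a, b)"
   as true.  */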
void
record_conditions (vec<cond_equivalence> *p, tree cond, tree inverted)
{
  tree op0, op1;
  cond_equivalence c;

  if (!COMPARISON_CLASS_P (cond))
    return;

  op0 = TREE_OPERAND (cond, 0);
  op1 = TREE_OPERAND (cond, 1);

  switch (TREE_CODE (cond))
    {
    case LT_EXPR:
    case GT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
          build_and_record_new_cond (LTGT_EXPR, op0, op1, p);
        }

      build_and_record_new_cond ((TREE_CODE (cond) == LT_EXPR
                                  ? LE_EXPR : GE_EXPR),
                                 op0, op1, p);
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      build_and_record_new_cond (EQ_EXPR, op0, op1, p, false);
      break;

    case GE_EXPR:
    case LE_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
        }
      break;

    case EQ_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
        {
          build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
        }
      build_and_record_new_cond (LE_EXPR, op0, op1, p);
      build_and_record_new_cond (GE_EXPR, op0, op1, p);
      break;

    case UNORDERED_EXPR:
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNLE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNGE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNEQ_EXPR, op0, op1, p);
      build_and_record_new_cond (UNLT_EXPR, op0, op1, p);
      build_and_record_new_cond (UNGT_EXPR, op0, op1, p);
      break;

    case UNLT_EXPR:
    case UNGT_EXPR:
      build_and_record_new_cond ((TREE_CODE (cond) == UNLT_EXPR
                                  ? UNLE_EXPR : UNGE_EXPR),
                                 op0, op1, p);
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      break;

    case UNEQ_EXPR:
      build_and_record_new_cond (UNLE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNGE_EXPR, op0, op1, p);
      break;

    case LTGT_EXPR:
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
      break;

    default:
      break;
    }

  /* Now push the original true and false conditions onto the vector.  */
  initialize_expr_from_cond (cond, &c.cond);
  c.value = boolean_true_node;
  p->safe_push (c);

  /* It is possible for INVERTED to be the negation of a comparison,
     and not a valid RHS or GIMPLE_COND condition.  This happens because
     invert_truthvalue may return such an expression when asked to invert
     a floating-point comparison.  These comparisons are not assumed to
     obey the trichotomy law.  */
  initialize_expr_from_cond (inverted, &c.cond);
  c.value = boolean_false_node;
  p->safe_push (c);
}