/* Support routines for the scoped hash tables used by the SSA
   dominator optimizers.
   Copyright (C) 2013-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "function.h"
#include "basic-block.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "tree-pretty-print.h"
#include "tree-ssa-scopedtables.h"
#include "tree-ssa-threadedge.h"
#include "stor-layout.h"
#include "fold-const.h"
#include "tree-eh.h"
#include "internal-fn.h"
#include "tree-dfa.h"
#include "options.h"
#include "params.h"

static bool hashable_expr_equal_p (const struct hashable_expr *,
				   const struct hashable_expr *);

/* Initialize local stacks for this optimizer and record equivalences
   upon entry to BB.  Equivalences can come from the edge traversed to
   reach BB or they may come from PHI nodes at the start of BB.  */

/* Pop items off the unwinding stack, removing each from the hash table
   until a marker is encountered.  */
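
/* Each stack entry is a <NEW, OLD> pair.  A NULL OLD means NEW was a
   fresh insertion and is simply removed from the table; a non-NULL OLD
   means NEW replaced OLD, so OLD is put back.  A pair whose first
   element is NULL is the marker that stops the unwind for the current
   block.  */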

void
avail_exprs_stack::pop_to_marker ()
{
  /* Remove all the expressions made available in this block.  */
  while (m_stack.length () > 0)
    {
      std::pair<expr_hash_elt_t, expr_hash_elt_t> victim = m_stack.pop ();
      expr_hash_elt **slot;

      if (victim.first == NULL)
	break;

      /* This must precede the actual removal from the hash table,
	 as ELEMENT and the table entry may share a call argument
	 vector which will be freed during removal.  */
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "<<<< ");
	  victim.first->print (dump_file);
	}

      slot = m_avail_exprs->find_slot (victim.first, NO_INSERT);
      gcc_assert (slot && *slot == victim.first);
      if (victim.second != NULL)
	{
	  delete *slot;
	  *slot = victim.second;
	}
      else
	m_avail_exprs->clear_slot (slot);
    }
}

/* Add <ELT1,ELT2> to the unwinding stack so they can be later removed
   from the hash table.  */

void
avail_exprs_stack::record_expr (class expr_hash_elt *elt1,
				class expr_hash_elt *elt2,
				char type)
{
  if (elt1 && dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "%c>>> ", type);
      elt1->print (dump_file);
    }

  m_stack.safe_push (std::pair<expr_hash_elt_t, expr_hash_elt_t> (elt1, elt2));
}

/* Helper for walk_non_aliased_vuses.  Determine if we arrived at
   the desired memory state.  */
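
/* Returning DATA signals that the desired VUSE was reached, returning
   NULL continues the walk up the virtual use-def chain, and the
   (void *)-1 sentinel tells the walker to give up after too many
   statements have been inspected.  */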

static void *
vuse_eq (ao_ref *, tree vuse1, unsigned int cnt, void *data)
{
  tree vuse2 = (tree) data;
  if (vuse1 == vuse2)
    return data;

  /* This bounds the stmt walks we perform on reference lookups
     to O(1) instead of O(N) where N is the number of dominating
     stores leading to a candidate.  We re-use the SCCVN param
     for this as it is basically the same complexity.  */
  if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
    return (void *)-1;

  return NULL;
}

/* We looked for STMT in the hash table, but did not find it.

   If STMT is an assignment from a binary operator, we may know something
   about the operands' relationship to each other which would allow
   us to derive a constant value for the RHS of STMT.  */
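
/* For example, in

     if (a_1 == b_2)
       c_3 = a_1 - b_2;

   the true arm records the equivalence a_1 == b_2 in the table, so
   c_3 folds to 0 even though the runtime values are unknown; likewise
   a_1 / b_2 folds to 1 and a_1 & b_2 to a_1.  */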

tree
avail_exprs_stack::simplify_binary_operation (gimple *stmt,
					      class expr_hash_elt element)
{
  if (is_gimple_assign (stmt))
    {
      struct hashable_expr *expr = element.expr ();
      if (expr->kind == EXPR_BINARY)
	{
	  enum tree_code code = expr->ops.binary.op;

	  switch (code)
	    {
	    /* For these cases, if we know the operands
	       are equal, then we know the result.  */
	    case MIN_EXPR:
	    case MAX_EXPR:
	    case BIT_IOR_EXPR:
	    case BIT_AND_EXPR:
	    case BIT_XOR_EXPR:
	    case MINUS_EXPR:
	    case TRUNC_DIV_EXPR:
	    case CEIL_DIV_EXPR:
	    case FLOOR_DIV_EXPR:
	    case ROUND_DIV_EXPR:
	    case EXACT_DIV_EXPR:
	    case TRUNC_MOD_EXPR:
	    case CEIL_MOD_EXPR:
	    case FLOOR_MOD_EXPR:
	    case ROUND_MOD_EXPR:
	      {
		/* Build a simple equality expr and query the hash table
		   for it.  */
		struct hashable_expr expr;
		expr.type = boolean_type_node;
		expr.kind = EXPR_BINARY;
		expr.ops.binary.op = EQ_EXPR;
		expr.ops.binary.opnd0 = gimple_assign_rhs1 (stmt);
		expr.ops.binary.opnd1 = gimple_assign_rhs2 (stmt);
		class expr_hash_elt element2 (&expr, NULL_TREE);
		expr_hash_elt **slot
		  = m_avail_exprs->find_slot (&element2, NO_INSERT);
		tree result_type = TREE_TYPE (gimple_assign_lhs (stmt));

		/* If the query was successful and returned a nonzero
		   result, then we know that the operands of the binary
		   expression are the same.  In many cases this allows
		   us to compute a constant result of the expression
		   at compile time, even if we do not know the exact
		   values of the operands.  */
		if (slot && *slot && integer_onep ((*slot)->lhs ()))
		  {
		    switch (code)
		      {
		      case MIN_EXPR:
		      case MAX_EXPR:
		      case BIT_IOR_EXPR:
		      case BIT_AND_EXPR:
			return gimple_assign_rhs1 (stmt);

		      case BIT_XOR_EXPR:
		      case MINUS_EXPR:
		      case TRUNC_MOD_EXPR:
		      case CEIL_MOD_EXPR:
		      case FLOOR_MOD_EXPR:
		      case ROUND_MOD_EXPR:
			return build_zero_cst (result_type);

		      case TRUNC_DIV_EXPR:
		      case CEIL_DIV_EXPR:
		      case FLOOR_DIV_EXPR:
		      case ROUND_DIV_EXPR:
		      case EXACT_DIV_EXPR:
			return build_one_cst (result_type);

		      default:
			gcc_unreachable ();
		      }
		  }
		break;
	      }

	    default:
	      break;
	    }
	}
    }
  return NULL_TREE;
}

/* Search for an existing instance of STMT in the AVAIL_EXPRS_STACK table.
   If found, return its LHS.  Otherwise insert STMT in the table and
   return NULL_TREE.

   Also, when an expression is first inserted in the table, it is also
   added to AVAIL_EXPRS_STACK, so that it can be removed when
   we finish processing this block and its children.  */
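
/* For example, when processing

     x_1 = a_2 + b_3;
     ...
     y_4 = a_2 + b_3;

   the first lookup (with INSERT) records the expression and returns
   NULL_TREE, while the second returns x_1, allowing the caller to
   replace y_4 with x_1.  */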

tree
avail_exprs_stack::lookup_avail_expr (gimple *stmt, bool insert, bool tbaa_p)
{
  expr_hash_elt **slot;
  tree lhs;

  /* Get LHS of phi, assignment, or call; else NULL_TREE.  */
  if (gimple_code (stmt) == GIMPLE_PHI)
    lhs = gimple_phi_result (stmt);
  else
    lhs = gimple_get_lhs (stmt);

  class expr_hash_elt element (stmt, lhs);

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "LKUP ");
      element.print (dump_file);
    }

  /* Don't bother remembering constant assignments and copy operations.
     Constants and copy operations are handled by the constant/copy propagator
     in optimize_stmt.  */
  if (element.expr()->kind == EXPR_SINGLE
      && (TREE_CODE (element.expr()->ops.single.rhs) == SSA_NAME
	  || is_gimple_min_invariant (element.expr()->ops.single.rhs)))
    return NULL_TREE;

  /* Finally try to find the expression in the main expression hash table.  */
  slot = m_avail_exprs->find_slot (&element, (insert ? INSERT : NO_INSERT));
  if (slot == NULL)
    {
      return NULL_TREE;
    }
  else if (*slot == NULL)
    {
      /* If we did not find the expression in the hash table, we may still
	 be able to produce a result for some expressions.  */
      tree alt = avail_exprs_stack::simplify_binary_operation (stmt, element);
      if (alt)
	return alt;

      class expr_hash_elt *element2 = new expr_hash_elt (element);
      *slot = element2;

      record_expr (element2, NULL, '2');
      return NULL_TREE;
    }

  /* If we found a redundant memory operation do an alias walk to
     check if we can re-use it.  */
  if (gimple_vuse (stmt) != (*slot)->vop ())
    {
      tree vuse1 = (*slot)->vop ();
      tree vuse2 = gimple_vuse (stmt);
      /* If we have a load of a register and a candidate in the
	 hash with vuse1 then try to reach its stmt by walking
	 up the virtual use-def chain using walk_non_aliased_vuses.
	 But don't do this when removing expressions from the hash.  */
      ao_ref ref;
      if (!(vuse1 && vuse2
	    && gimple_assign_single_p (stmt)
	    && TREE_CODE (gimple_assign_lhs (stmt)) == SSA_NAME
	    && (ao_ref_init (&ref, gimple_assign_rhs1 (stmt)),
		ref.base_alias_set = ref.ref_alias_set = tbaa_p ? -1 : 0, true)
	    && walk_non_aliased_vuses (&ref, vuse2,
				       vuse_eq, NULL, NULL, vuse1) != NULL))
	{
	  if (insert)
	    {
	      class expr_hash_elt *element2 = new expr_hash_elt (element);

	      /* Insert the expr into the hash by replacing the current
		 entry and recording the value to restore in the
		 avail_exprs_stack.  */
	      record_expr (element2, *slot, '2');
	      *slot = element2;
	    }
	  return NULL_TREE;
	}
    }

  /* Extract the LHS of the assignment so that it can be used as the current
     definition of another variable.  */
  lhs = (*slot)->lhs ();

  /* Valueize the result.  */
  if (TREE_CODE (lhs) == SSA_NAME)
    {
      tree tem = SSA_NAME_VALUE (lhs);
      if (tem)
	lhs = tem;
    }

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "FIND: ");
      print_generic_expr (dump_file, lhs);
      fprintf (dump_file, "\n");
    }

  return lhs;
}

/* Enter condition equivalence P into the hash table.

   This indicates that a conditional expression has a known
   boolean value.  */

void
avail_exprs_stack::record_cond (cond_equivalence *p)
{
  class expr_hash_elt *element = new expr_hash_elt (&p->cond, p->value);
  expr_hash_elt **slot;

  slot = m_avail_exprs->find_slot_with_hash (element, element->hash (), INSERT);
  if (*slot == NULL)
    {
      *slot = element;
      record_expr (element, NULL, '1');
    }
  else
    delete element;
}

/* Generate a hash value for a pair of expressions.  This can be used
   iteratively by passing a previous result in HSTATE.

   The same hash value is always returned for a given pair of expressions,
   regardless of the order in which they are presented.  This is useful in
   hashing the operands of commutative functions.  */
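
/* As a result, a_1 + b_2 and b_2 + a_1 hash identically, so the two
   forms can be found as duplicates of one another in the table.  */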

namespace inchash
{

static void
add_expr_commutative (const_tree t1, const_tree t2, hash &hstate)
{
  hash one, two;

  inchash::add_expr (t1, one);
  inchash::add_expr (t2, two);
  hstate.add_commutative (one, two);
}

/* Compute a hash value for a hashable_expr value EXPR and a
   previously accumulated hash value VAL.  If two hashable_expr
   values compare equal with hashable_expr_equal_p, they must
   hash to the same value, given an identical value of VAL.
   The logic is intended to follow inchash::add_expr in tree.c.  */

static void
add_hashable_expr (const struct hashable_expr *expr, hash &hstate)
{
  switch (expr->kind)
    {
    case EXPR_SINGLE:
      inchash::add_expr (expr->ops.single.rhs, hstate);
      break;

    case EXPR_UNARY:
      hstate.add_object (expr->ops.unary.op);

      /* Make sure to include signedness in the hash computation.
	 Don't hash the type, that can lead to having nodes which
	 compare equal according to operand_equal_p, but which
	 have different hash codes.  */
      if (CONVERT_EXPR_CODE_P (expr->ops.unary.op)
	  || expr->ops.unary.op == NON_LVALUE_EXPR)
	hstate.add_int (TYPE_UNSIGNED (expr->type));

      inchash::add_expr (expr->ops.unary.opnd, hstate);
      break;

    case EXPR_BINARY:
      hstate.add_object (expr->ops.binary.op);
      if (commutative_tree_code (expr->ops.binary.op))
	inchash::add_expr_commutative (expr->ops.binary.opnd0,
				       expr->ops.binary.opnd1, hstate);
      else
	{
	  inchash::add_expr (expr->ops.binary.opnd0, hstate);
	  inchash::add_expr (expr->ops.binary.opnd1, hstate);
	}
      break;

    case EXPR_TERNARY:
      hstate.add_object (expr->ops.ternary.op);
      if (commutative_ternary_tree_code (expr->ops.ternary.op))
	inchash::add_expr_commutative (expr->ops.ternary.opnd0,
				       expr->ops.ternary.opnd1, hstate);
      else
	{
	  inchash::add_expr (expr->ops.ternary.opnd0, hstate);
	  inchash::add_expr (expr->ops.ternary.opnd1, hstate);
	}
      inchash::add_expr (expr->ops.ternary.opnd2, hstate);
      break;

    case EXPR_CALL:
      {
	size_t i;
	enum tree_code code = CALL_EXPR;
	gcall *fn_from;

	hstate.add_object (code);
	fn_from = expr->ops.call.fn_from;
	if (gimple_call_internal_p (fn_from))
	  hstate.merge_hash ((hashval_t) gimple_call_internal_fn (fn_from));
	else
	  inchash::add_expr (gimple_call_fn (fn_from), hstate);
	for (i = 0; i < expr->ops.call.nargs; i++)
	  inchash::add_expr (expr->ops.call.args[i], hstate);
      }
      break;

    case EXPR_PHI:
      {
	size_t i;

	for (i = 0; i < expr->ops.phi.nargs; i++)
	  inchash::add_expr (expr->ops.phi.args[i], hstate);
      }
      break;

    default:
      gcc_unreachable ();
    }
}

}

/* Hashing and equality functions.  We compute a value number for expressions
   using the code of the expression and the SSA numbers of its operands.  */

static hashval_t
avail_expr_hash (class expr_hash_elt *p)
{
  const struct hashable_expr *expr = p->expr ();
  inchash::hash hstate;

  if (expr->kind == EXPR_SINGLE)
    {
      /* T could potentially be a switch index or a goto dest.  */
      tree t = expr->ops.single.rhs;
      if (TREE_CODE (t) == MEM_REF || handled_component_p (t))
	{
	  /* Make equivalent statements of both these kinds hash together.
	     Dealing with both MEM_REF and ARRAY_REF allows us not to care
	     about equivalence with other statements not considered here.  */
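	  /* For instance, with a declaration like int a[10] (and 32-bit
	     int), the ARRAY_REF a[2] and the equivalent MEM_REF
	     MEM[(int *)&a + 8B] both decompose to base A, bit offset 64
	     and bit size 32, and thus receive the same hash below.  */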
	  bool reverse;
	  HOST_WIDE_INT offset, size, max_size;
	  tree base = get_ref_base_and_extent (t, &offset, &size, &max_size,
					       &reverse);
	  /* Strictly, we could try to normalize variable-sized accesses too,
	     but here we just deal with the common case.  */
	  if (size != -1
	      && size == max_size)
	    {
	      enum tree_code code = MEM_REF;
	      hstate.add_object (code);
	      inchash::add_expr (base, hstate);
	      hstate.add_object (offset);
	      hstate.add_object (size);
	      return hstate.end ();
	    }
	}
    }

  inchash::add_hashable_expr (expr, hstate);

  return hstate.end ();
}

/* Compares trees T0 and T1 to see if they are MEM_REFs or ARRAY_REFs
   equivalent to each other.  (That is, they return the value of the
   same bit of memory.)

   Return TRUE if the two are so equivalent; FALSE if not (which could still
   mean the two are equivalent by other means).  */

static bool
equal_mem_array_ref_p (tree t0, tree t1)
{
  if (TREE_CODE (t0) != MEM_REF && ! handled_component_p (t0))
    return false;
  if (TREE_CODE (t1) != MEM_REF && ! handled_component_p (t1))
    return false;

  if (!types_compatible_p (TREE_TYPE (t0), TREE_TYPE (t1)))
    return false;

  bool rev0;
  HOST_WIDE_INT off0, sz0, max0;
  tree base0 = get_ref_base_and_extent (t0, &off0, &sz0, &max0, &rev0);
  if (sz0 == -1
      || sz0 != max0)
    return false;

  bool rev1;
  HOST_WIDE_INT off1, sz1, max1;
  tree base1 = get_ref_base_and_extent (t1, &off1, &sz1, &max1, &rev1);
  if (sz1 == -1
      || sz1 != max1)
    return false;

  if (rev0 != rev1)
    return false;

  /* Types were compatible, so this is a sanity check.  */
  gcc_assert (sz0 == sz1);

  return (off0 == off1) && operand_equal_p (base0, base1, 0);
}

/* Compare two hashable_expr structures for equivalence.  They are
   considered equivalent when the expressions they denote must
   necessarily be equal.  The logic is intended to follow that of
   operand_equal_p in fold-const.c.  */

static bool
hashable_expr_equal_p (const struct hashable_expr *expr0,
		       const struct hashable_expr *expr1)
{
  tree type0 = expr0->type;
  tree type1 = expr1->type;

  /* If only one of the types is NULL, the two can't be equivalent;
     if both are NULL, there is nothing more to check here.  */
  if ((type0 == NULL_TREE) ^ (type1 == NULL_TREE))
    return false;

  /* If the types don't have the same signedness, precision, and mode,
     then we can't consider them equal.  */
  if (type0 != type1
      && (TREE_CODE (type0) == ERROR_MARK
	  || TREE_CODE (type1) == ERROR_MARK
	  || TYPE_UNSIGNED (type0) != TYPE_UNSIGNED (type1)
	  || TYPE_PRECISION (type0) != TYPE_PRECISION (type1)
	  || TYPE_MODE (type0) != TYPE_MODE (type1)))
    return false;

  if (expr0->kind != expr1->kind)
    return false;

  switch (expr0->kind)
    {
    case EXPR_SINGLE:
      return equal_mem_array_ref_p (expr0->ops.single.rhs,
				    expr1->ops.single.rhs)
	     || operand_equal_p (expr0->ops.single.rhs,
				 expr1->ops.single.rhs, 0);
    case EXPR_UNARY:
      if (expr0->ops.unary.op != expr1->ops.unary.op)
	return false;

      if ((CONVERT_EXPR_CODE_P (expr0->ops.unary.op)
	   || expr0->ops.unary.op == NON_LVALUE_EXPR)
	  && TYPE_UNSIGNED (expr0->type) != TYPE_UNSIGNED (expr1->type))
	return false;

      return operand_equal_p (expr0->ops.unary.opnd,
			      expr1->ops.unary.opnd, 0);

    case EXPR_BINARY:
      if (expr0->ops.binary.op != expr1->ops.binary.op)
	return false;

      if (operand_equal_p (expr0->ops.binary.opnd0,
			   expr1->ops.binary.opnd0, 0)
	  && operand_equal_p (expr0->ops.binary.opnd1,
			      expr1->ops.binary.opnd1, 0))
	return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_tree_code (expr0->ops.binary.op)
	      && operand_equal_p (expr0->ops.binary.opnd0,
				  expr1->ops.binary.opnd1, 0)
	      && operand_equal_p (expr0->ops.binary.opnd1,
				  expr1->ops.binary.opnd0, 0));

    case EXPR_TERNARY:
      if (expr0->ops.ternary.op != expr1->ops.ternary.op
	  || !operand_equal_p (expr0->ops.ternary.opnd2,
			       expr1->ops.ternary.opnd2, 0))
	return false;

      /* BIT_INSERT_EXPR has an implicit operand as the type precision
	 of op1.  Need to check to make sure they are the same.  */
      if (expr0->ops.ternary.op == BIT_INSERT_EXPR
	  && TREE_CODE (expr0->ops.ternary.opnd1) == INTEGER_CST
	  && TREE_CODE (expr1->ops.ternary.opnd1) == INTEGER_CST
	  && TYPE_PRECISION (TREE_TYPE (expr0->ops.ternary.opnd1))
	     != TYPE_PRECISION (TREE_TYPE (expr1->ops.ternary.opnd1)))
	return false;

      if (operand_equal_p (expr0->ops.ternary.opnd0,
			   expr1->ops.ternary.opnd0, 0)
	  && operand_equal_p (expr0->ops.ternary.opnd1,
			      expr1->ops.ternary.opnd1, 0))
	return true;

      /* For commutative ops, allow the other order.  */
      return (commutative_ternary_tree_code (expr0->ops.ternary.op)
	      && operand_equal_p (expr0->ops.ternary.opnd0,
				  expr1->ops.ternary.opnd1, 0)
	      && operand_equal_p (expr0->ops.ternary.opnd1,
				  expr1->ops.ternary.opnd0, 0));

    case EXPR_CALL:
      {
	size_t i;

	/* If the calls are to different functions, then they
	   clearly cannot be equal.  */
	if (!gimple_call_same_target_p (expr0->ops.call.fn_from,
					expr1->ops.call.fn_from))
	  return false;

	if (! expr0->ops.call.pure)
	  return false;

	if (expr0->ops.call.nargs != expr1->ops.call.nargs)
	  return false;

	for (i = 0; i < expr0->ops.call.nargs; i++)
	  if (! operand_equal_p (expr0->ops.call.args[i],
				 expr1->ops.call.args[i], 0))
	    return false;

	if (stmt_could_throw_p (expr0->ops.call.fn_from))
	  {
	    int lp0 = lookup_stmt_eh_lp (expr0->ops.call.fn_from);
	    int lp1 = lookup_stmt_eh_lp (expr1->ops.call.fn_from);
	    if ((lp0 > 0 || lp1 > 0) && lp0 != lp1)
	      return false;
	  }

	return true;
      }

    case EXPR_PHI:
      {
	size_t i;

	if (expr0->ops.phi.nargs != expr1->ops.phi.nargs)
	  return false;

	for (i = 0; i < expr0->ops.phi.nargs; i++)
	  if (! operand_equal_p (expr0->ops.phi.args[i],
				 expr1->ops.phi.args[i], 0))
	    return false;

	return true;
      }

    default:
      gcc_unreachable ();
    }
}

/* Given a statement STMT, construct a hash table element.  */

expr_hash_elt::expr_hash_elt (gimple *stmt, tree orig_lhs)
{
  enum gimple_code code = gimple_code (stmt);
  struct hashable_expr *expr = this->expr ();

  if (code == GIMPLE_ASSIGN)
    {
      enum tree_code subcode = gimple_assign_rhs_code (stmt);

      switch (get_gimple_rhs_class (subcode))
	{
	case GIMPLE_SINGLE_RHS:
	  expr->kind = EXPR_SINGLE;
	  expr->type = TREE_TYPE (gimple_assign_rhs1 (stmt));
	  expr->ops.single.rhs = gimple_assign_rhs1 (stmt);
	  break;
	case GIMPLE_UNARY_RHS:
	  expr->kind = EXPR_UNARY;
	  expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
	  if (CONVERT_EXPR_CODE_P (subcode))
	    subcode = NOP_EXPR;
	  expr->ops.unary.op = subcode;
	  expr->ops.unary.opnd = gimple_assign_rhs1 (stmt);
	  break;
	case GIMPLE_BINARY_RHS:
	  expr->kind = EXPR_BINARY;
	  expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
	  expr->ops.binary.op = subcode;
	  expr->ops.binary.opnd0 = gimple_assign_rhs1 (stmt);
	  expr->ops.binary.opnd1 = gimple_assign_rhs2 (stmt);
	  break;
	case GIMPLE_TERNARY_RHS:
	  expr->kind = EXPR_TERNARY;
	  expr->type = TREE_TYPE (gimple_assign_lhs (stmt));
	  expr->ops.ternary.op = subcode;
	  expr->ops.ternary.opnd0 = gimple_assign_rhs1 (stmt);
	  expr->ops.ternary.opnd1 = gimple_assign_rhs2 (stmt);
	  expr->ops.ternary.opnd2 = gimple_assign_rhs3 (stmt);
	  break;
	default:
	  gcc_unreachable ();
	}
    }
  else if (code == GIMPLE_COND)
    {
      expr->type = boolean_type_node;
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = gimple_cond_code (stmt);
      expr->ops.binary.opnd0 = gimple_cond_lhs (stmt);
      expr->ops.binary.opnd1 = gimple_cond_rhs (stmt);
    }
  else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
    {
      size_t nargs = gimple_call_num_args (call_stmt);
      size_t i;

      gcc_assert (gimple_call_lhs (call_stmt));

      expr->type = TREE_TYPE (gimple_call_lhs (call_stmt));
      expr->kind = EXPR_CALL;
      expr->ops.call.fn_from = call_stmt;

      if (gimple_call_flags (call_stmt) & (ECF_CONST | ECF_PURE))
	expr->ops.call.pure = true;
      else
	expr->ops.call.pure = false;

      expr->ops.call.nargs = nargs;
      expr->ops.call.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
	expr->ops.call.args[i] = gimple_call_arg (call_stmt, i);
    }
  else if (gswitch *swtch_stmt = dyn_cast <gswitch *> (stmt))
    {
      expr->type = TREE_TYPE (gimple_switch_index (swtch_stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_switch_index (swtch_stmt);
    }
  else if (code == GIMPLE_GOTO)
    {
      expr->type = TREE_TYPE (gimple_goto_dest (stmt));
      expr->kind = EXPR_SINGLE;
      expr->ops.single.rhs = gimple_goto_dest (stmt);
    }
  else if (code == GIMPLE_PHI)
    {
      size_t nargs = gimple_phi_num_args (stmt);
      size_t i;

      expr->type = TREE_TYPE (gimple_phi_result (stmt));
      expr->kind = EXPR_PHI;
      expr->ops.phi.nargs = nargs;
      expr->ops.phi.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
	expr->ops.phi.args[i] = gimple_phi_arg_def (stmt, i);
    }
  else
    gcc_unreachable ();

  m_lhs = orig_lhs;
  m_vop = gimple_vuse (stmt);
  m_hash = avail_expr_hash (this);
  m_stamp = this;
}

/* Given a hashable_expr expression ORIG and an ORIG_LHS,
   construct a hash table element.  */

expr_hash_elt::expr_hash_elt (struct hashable_expr *orig, tree orig_lhs)
{
  m_expr = *orig;
  m_lhs = orig_lhs;
  m_vop = NULL_TREE;
  m_hash = avail_expr_hash (this);
  m_stamp = this;
}

/* Copy constructor for a hash table element.  */

expr_hash_elt::expr_hash_elt (class expr_hash_elt &old_elt)
{
  m_expr = old_elt.m_expr;
  m_lhs = old_elt.m_lhs;
  m_vop = old_elt.m_vop;
  m_hash = old_elt.m_hash;
  m_stamp = this;

  /* Now deep copy the malloc'd space for CALL and PHI args.  */
  if (old_elt.m_expr.kind == EXPR_CALL)
    {
      size_t nargs = old_elt.m_expr.ops.call.nargs;
      size_t i;

      m_expr.ops.call.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
	m_expr.ops.call.args[i] = old_elt.m_expr.ops.call.args[i];
    }
  else if (old_elt.m_expr.kind == EXPR_PHI)
    {
      size_t nargs = old_elt.m_expr.ops.phi.nargs;
      size_t i;

      m_expr.ops.phi.args = XCNEWVEC (tree, nargs);
      for (i = 0; i < nargs; i++)
	m_expr.ops.phi.args[i] = old_elt.m_expr.ops.phi.args[i];
    }
}

/* Calls and PHIs have a variable number of arguments that are allocated
   on the heap.  Thus we have to have a special dtor to release them.  */

expr_hash_elt::~expr_hash_elt ()
{
  if (m_expr.kind == EXPR_CALL)
    free (m_expr.ops.call.args);
  else if (m_expr.kind == EXPR_PHI)
    free (m_expr.ops.phi.args);
}

/* Print a diagnostic dump of an expression hash table entry.  */

void
expr_hash_elt::print (FILE *stream)
{
  fprintf (stream, "STMT ");

  if (m_lhs)
    {
      print_generic_expr (stream, m_lhs);
      fprintf (stream, " = ");
    }

  switch (m_expr.kind)
    {
    case EXPR_SINGLE:
      print_generic_expr (stream, m_expr.ops.single.rhs);
      break;

    case EXPR_UNARY:
      fprintf (stream, "%s ", get_tree_code_name (m_expr.ops.unary.op));
      print_generic_expr (stream, m_expr.ops.unary.opnd);
      break;

    case EXPR_BINARY:
      print_generic_expr (stream, m_expr.ops.binary.opnd0);
      fprintf (stream, " %s ", get_tree_code_name (m_expr.ops.binary.op));
      print_generic_expr (stream, m_expr.ops.binary.opnd1);
      break;

    case EXPR_TERNARY:
      fprintf (stream, " %s <", get_tree_code_name (m_expr.ops.ternary.op));
      print_generic_expr (stream, m_expr.ops.ternary.opnd0);
      fputs (", ", stream);
      print_generic_expr (stream, m_expr.ops.ternary.opnd1);
      fputs (", ", stream);
      print_generic_expr (stream, m_expr.ops.ternary.opnd2);
      fputs (">", stream);
      break;

    case EXPR_CALL:
      {
	size_t i;
	size_t nargs = m_expr.ops.call.nargs;
	gcall *fn_from;

	fn_from = m_expr.ops.call.fn_from;
	if (gimple_call_internal_p (fn_from))
	  fputs (internal_fn_name (gimple_call_internal_fn (fn_from)),
		 stream);
	else
	  print_generic_expr (stream, gimple_call_fn (fn_from));
	fprintf (stream, " (");
	for (i = 0; i < nargs; i++)
	  {
	    print_generic_expr (stream, m_expr.ops.call.args[i]);
	    if (i + 1 < nargs)
	      fprintf (stream, ", ");
	  }
	fprintf (stream, ")");
      }
      break;

    case EXPR_PHI:
      {
	size_t i;
	size_t nargs = m_expr.ops.phi.nargs;

	fprintf (stream, "PHI <");
	for (i = 0; i < nargs; i++)
	  {
	    print_generic_expr (stream, m_expr.ops.phi.args[i]);
	    if (i + 1 < nargs)
	      fprintf (stream, ", ");
	  }
	fprintf (stream, ">");
      }
      break;
    }

  if (m_vop)
    {
      fprintf (stream, " with ");
      print_generic_expr (stream, m_vop);
    }

  fprintf (stream, "\n");
}

/* Pop entries off the stack until we hit the NULL marker.
   For each entry popped, use the SRC/DEST pair to restore
   SRC to its prior value.  */
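
/* Entries are pushed in <PREV_VALUE, DEST> order, so DEST is popped
   first and its saved previous value second; a single NULL entry on
   the stack serves as the per-block marker.  */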

void
const_and_copies::pop_to_marker (void)
{
  while (m_stack.length () > 0)
    {
      tree prev_value, dest;

      dest = m_stack.pop ();

      /* A NULL value indicates we should stop unwinding, otherwise
	 pop off the next entry as they're recorded in pairs.  */
      if (dest == NULL)
	break;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "<<<< COPY ");
	  print_generic_expr (dump_file, dest);
	  fprintf (dump_file, " = ");
	  print_generic_expr (dump_file, SSA_NAME_VALUE (dest));
	  fprintf (dump_file, "\n");
	}

      prev_value = m_stack.pop ();
      set_ssa_name_value (dest, prev_value);
    }
}

/* Record that X has the value Y and that X's previous value is PREV_X.

   This variant does not follow the value chain for Y.  */

void
const_and_copies::record_const_or_copy_raw (tree x, tree y, tree prev_x)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "0>>> COPY ");
      print_generic_expr (dump_file, x);
      fprintf (dump_file, " = ");
      print_generic_expr (dump_file, y);
      fprintf (dump_file, "\n");
    }

  set_ssa_name_value (x, y);
  m_stack.reserve (2);
  m_stack.quick_push (prev_x);
  m_stack.quick_push (x);
}

/* Record that X has the value Y.  */

void
const_and_copies::record_const_or_copy (tree x, tree y)
{
  record_const_or_copy (x, y, SSA_NAME_VALUE (x));
}

/* Record that X has the value Y and that X's previous value is PREV_X.

   This variant follows Y's value chain.  */
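
/* That is, if Y itself already has a recorded value Z, then X is
   recorded as having value Z rather than Y, which keeps copy chains
   short.  */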

void
const_and_copies::record_const_or_copy (tree x, tree y, tree prev_x)
{
  /* Y may be NULL if we are invalidating entries in the table.  */
  if (y && TREE_CODE (y) == SSA_NAME)
    {
      tree tmp = SSA_NAME_VALUE (y);
      y = tmp ? tmp : y;
    }

  record_const_or_copy_raw (x, y, prev_x);
}

bool
expr_elt_hasher::equal (const value_type &p1, const compare_type &p2)
{
  const struct hashable_expr *expr1 = p1->expr ();
  const struct expr_hash_elt *stamp1 = p1->stamp ();
  const struct hashable_expr *expr2 = p2->expr ();
  const struct expr_hash_elt *stamp2 = p2->stamp ();

  /* This case should apply only when removing entries from the table.  */
  if (stamp1 == stamp2)
    return true;

  if (p1->hash () != p2->hash ())
    return false;

  /* In case of a collision, both RHS have to be identical and have the
     same VUSE operands.  */
  if (hashable_expr_equal_p (expr1, expr2)
      && types_compatible_p (expr1->type, expr2->type))
    return true;

  return false;
}

/* Given a conditional expression COND as a tree, initialize
   a hashable_expr expression EXPR.  The conditional must be a
   comparison or logical negation.  A constant or a variable is
   not permitted.  */

void
initialize_expr_from_cond (tree cond, struct hashable_expr *expr)
{
  expr->type = boolean_type_node;

  if (COMPARISON_CLASS_P (cond))
    {
      expr->kind = EXPR_BINARY;
      expr->ops.binary.op = TREE_CODE (cond);
      expr->ops.binary.opnd0 = TREE_OPERAND (cond, 0);
      expr->ops.binary.opnd1 = TREE_OPERAND (cond, 1);
    }
  else if (TREE_CODE (cond) == TRUTH_NOT_EXPR)
    {
      expr->kind = EXPR_UNARY;
      expr->ops.unary.op = TRUTH_NOT_EXPR;
      expr->ops.unary.opnd = TREE_OPERAND (cond, 0);
    }
  else
    gcc_unreachable ();
}

/* Build a cond_equivalence record indicating that the comparison
   CODE holds between operands OP0 and OP1 and push it to **P.  */

static void
build_and_record_new_cond (enum tree_code code,
			   tree op0, tree op1,
			   vec<cond_equivalence> *p,
			   bool val = true)
{
  cond_equivalence c;
  struct hashable_expr *cond = &c.cond;

  gcc_assert (TREE_CODE_CLASS (code) == tcc_comparison);

  cond->type = boolean_type_node;
  cond->kind = EXPR_BINARY;
  cond->ops.binary.op = code;
  cond->ops.binary.opnd0 = op0;
  cond->ops.binary.opnd1 = op1;

  c.value = val ? boolean_true_node : boolean_false_node;
  p->safe_push (c);
}

/* Record that COND is true and INVERTED is false into the edge information
   structure.  Also record that any conditions dominated by COND are true
   as well.

   For example, if a < b is true, then a <= b must also be true.  */

void
record_conditions (vec<cond_equivalence> *p, tree cond, tree inverted)
{
  tree op0, op1;
  cond_equivalence c;

  if (!COMPARISON_CLASS_P (cond))
    return;

  op0 = TREE_OPERAND (cond, 0);
  op1 = TREE_OPERAND (cond, 1);

  switch (TREE_CODE (cond))
    {
    case LT_EXPR:
    case GT_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
	{
	  build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
	  build_and_record_new_cond (LTGT_EXPR, op0, op1, p);
	}

      build_and_record_new_cond ((TREE_CODE (cond) == LT_EXPR
				  ? LE_EXPR : GE_EXPR),
				 op0, op1, p);
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      build_and_record_new_cond (EQ_EXPR, op0, op1, p, false);
      break;

    case GE_EXPR:
    case LE_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
	{
	  build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
	}
      break;

    case EQ_EXPR:
      if (FLOAT_TYPE_P (TREE_TYPE (op0)))
	{
	  build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
	}
      build_and_record_new_cond (LE_EXPR, op0, op1, p);
      build_and_record_new_cond (GE_EXPR, op0, op1, p);
      break;

    case UNORDERED_EXPR:
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNLE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNGE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNEQ_EXPR, op0, op1, p);
      build_and_record_new_cond (UNLT_EXPR, op0, op1, p);
      build_and_record_new_cond (UNGT_EXPR, op0, op1, p);
      break;

    case UNLT_EXPR:
    case UNGT_EXPR:
      build_and_record_new_cond ((TREE_CODE (cond) == UNLT_EXPR
				  ? UNLE_EXPR : UNGE_EXPR),
				 op0, op1, p);
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      break;

    case UNEQ_EXPR:
      build_and_record_new_cond (UNLE_EXPR, op0, op1, p);
      build_and_record_new_cond (UNGE_EXPR, op0, op1, p);
      break;

    case LTGT_EXPR:
      build_and_record_new_cond (NE_EXPR, op0, op1, p);
      build_and_record_new_cond (ORDERED_EXPR, op0, op1, p);
      break;

    default:
      break;
    }

  /* Now store the original true and false conditions into the first
     two slots.  */
  initialize_expr_from_cond (cond, &c.cond);
  c.value = boolean_true_node;
  p->safe_push (c);

  /* It is possible for INVERTED to be the negation of a comparison,
     and not a valid RHS or GIMPLE_COND condition.  This happens because
     invert_truthvalue may return such an expression when asked to invert
     a floating-point comparison.  These comparisons are not assumed to
     obey the trichotomy law.  */
  initialize_expr_from_cond (inverted, &c.cond);
  c.value = boolean_false_node;
  p->safe_push (c);
}