fixes for darwin. Remove remnant references to vec_s.
gcc/tree-ssa-sccvn.c
/* SCC value numbering for trees
   Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011, 2012
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "dumpfile.h"
#include "hashtab.h"
#include "alloc-pool.h"
#include "flags.h"
#include "bitmap.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
#include "gimple-fold.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In straight line code,
   it is equivalent to a regular hash based value numbering that is
   performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCCs, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;
static htab_t constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static vec<tree> sccstack;


/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

static vec<vn_ssa_aux_t> vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;
/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = vn_ssa_aux_table[SSA_NAME_VERSION (name)];
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = value;
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= vn_ssa_aux_table.length ())
    vn_ssa_aux_table.safe_grow (SSA_NAME_VERSION (name) + 1);
  vn_ssa_aux_table[SSA_NAME_VERSION (name)] = newinfo;
  return newinfo;
}
/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;
  enum tree_code code;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is not an assignment use it directly.  */
  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  code = gimple_assign_rhs_code (def_stmt);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      if ((code == REALPART_EXPR
           || code == IMAGPART_EXPR
           || code == VIEW_CONVERT_EXPR)
          && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
                                      0)) == SSA_NAME)
        expr = fold_build1 (code,
                            gimple_expr_type (def_stmt),
                            TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (code,
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (code,
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt),
                          gimple_assign_rhs2 (def_stmt));
      break;

    case tcc_exceptional:
      if (code == CONSTRUCTOR
          && TREE_CODE
               (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
        expr = gimple_assign_rhs1 (def_stmt);
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
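
/* Illustration (hedged, not from the original sources): if x_3's
   value number is y_2 and y_2 is defined by y_2 = a_1 + 1, then
   vn_get_expr_for (x_3) rebuilds and caches the tree a_1 + 1, giving
   later simplification a concrete expression instead of a bare SSA
   name.  */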
/* Return the vn_kind the expression computed by the stmt should be
   associated with.  */

enum vn_kind
vn_get_stmt_kind (gimple stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      return VN_REFERENCE;
    case GIMPLE_PHI:
      return VN_PHI;
    case GIMPLE_ASSIGN:
      {
        enum tree_code code = gimple_assign_rhs_code (stmt);
        tree rhs1 = gimple_assign_rhs1 (stmt);
        switch (get_gimple_rhs_class (code))
          {
          case GIMPLE_UNARY_RHS:
          case GIMPLE_BINARY_RHS:
          case GIMPLE_TERNARY_RHS:
            return VN_NARY;
          case GIMPLE_SINGLE_RHS:
            switch (TREE_CODE_CLASS (code))
              {
              case tcc_reference:
                /* VOP-less references can go through unary case.  */
                if ((code == REALPART_EXPR
                     || code == IMAGPART_EXPR
                     || code == VIEW_CONVERT_EXPR
                     || code == BIT_FIELD_REF)
                    && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
                  return VN_NARY;

                /* Fallthrough.  */
              case tcc_declaration:
                return VN_REFERENCE;

              case tcc_constant:
                return VN_CONSTANT;

              default:
                if (code == ADDR_EXPR)
                  return (is_gimple_min_invariant (rhs1)
                          ? VN_CONSTANT : VN_REFERENCE);
                else if (code == CONSTRUCTOR)
                  return VN_NARY;
                return VN_NONE;
              }
          default:
            return VN_NONE;
          }
      }
    default:
      return VN_NONE;
    }
}
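
/* For instance (illustrative only): x_1 = y_2 + 1 is VN_NARY,
   x_1 = a.b is VN_REFERENCE, a PHI is VN_PHI, x_1 = &a is VN_CONSTANT
   since the address is invariant, while x_1 = &a.b[i_3] is
   VN_REFERENCE because it is not.  */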
/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  phi->phiargs.release ();
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  vr->operands.release ();
}

/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, INSERT);
  if (*slot)
    return ((vn_constant_t)*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = (void *) vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}
/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
          /* We do not care for differences in type qualification.  */
          && (vro1->type == vro2->type
              || (vro1->type && vro2->type
                  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
                                         TYPE_MAIN_VARIANT (vro2->type))))
          && expressions_equal_p (vro1->op0, vro2->op0)
          && expressions_equal_p (vro1->op1, vro2->op1)
          && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
{
  result = iterative_hash_hashval_t (vro1->opcode, result);
  if (vro1->op0)
    result = iterative_hash_expr (vro1->op0, result);
  if (vro1->op1)
    result = iterative_hash_expr (vro1->op1, result);
  if (vro1->op2)
    result = iterative_hash_expr (vro1->op2, result);
  return result;
}
/* Return the hashcode for a given reference operation P1.  */

static hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}

/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
        deref = true;
      else if (vro->opcode != ADDR_EXPR)
        deref = false;
      if (vro->off != -1)
        {
          if (off == -1)
            off = 0;
          off += vro->off;
        }
      else
        {
          if (off != -1
              && off != 0)
            result = iterative_hash_hashval_t (off, result);
          off = -1;
          if (deref
              && vro->opcode == ADDR_EXPR)
            {
              if (vro->op0)
                {
                  tree op = TREE_OPERAND (vro->op0, 0);
                  result = iterative_hash_hashval_t (TREE_CODE (op), result);
                  result = iterative_hash_expr (op, result);
                }
            }
          else
            result = vn_reference_op_compute_hash (vro, result);
        }
    }
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
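
/* Note (a hedged sketch of the intent): runs of operands with a known
   constant offset are folded into one accumulated value before being
   mixed into the hash, and an ADDR_EXPR feeding a dereference hashes
   the underlying decl instead of the address tree.  Thus equivalent
   access paths spelled differently, e.g. a direct component access
   and the MEM_REF form produced by valueization, can still land in
   the same bucket.  */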
/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

int
vn_reference_eq (const void *p1, const void *p2)
{
  unsigned i, j;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
        return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
           && (TYPE_PRECISION (vr1->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
           && (TYPE_PRECISION (vr2->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; vr1->operands.iterate (i, &vro1); i++)
        {
          if (vro1->opcode == MEM_REF)
            deref1 = true;
          if (vro1->off == -1)
            break;
          off1 += vro1->off;
        }
      for (; vr2->operands.iterate (j, &vro2); j++)
        {
          if (vro2->opcode == MEM_REF)
            deref2 = true;
          if (vro2->off == -1)
            break;
          off2 += vro2->off;
        }
      if (off1 != off2)
        return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
        {
          memset (&tem1, 0, sizeof (tem1));
          tem1.op0 = TREE_OPERAND (vro1->op0, 0);
          tem1.type = TREE_TYPE (tem1.op0);
          tem1.opcode = TREE_CODE (tem1.op0);
          vro1 = &tem1;
          deref1 = false;
        }
      if (deref2 && vro2->opcode == ADDR_EXPR)
        {
          memset (&tem2, 0, sizeof (tem2));
          tem2.op0 = TREE_OPERAND (vro2->op0, 0);
          tem2.type = TREE_TYPE (tem2.op0);
          tem2.opcode = TREE_CODE (tem2.op0);
          vro2 = &tem2;
          deref2 = false;
        }
      if (deref1 != deref2)
        return false;
      if (!vn_reference_op_eq (vro1, vro2))
        return false;
      ++j;
      ++i;
    }
  while (vr1->operands.length () != i
         || vr2->operands.length () != j);

  return true;
}
/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      result->safe_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      result->safe_push (temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      result->safe_push (temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
        {
        case MODIFY_EXPR:
          temp.op0 = TREE_OPERAND (ref, 1);
          break;
        case WITH_SIZE_EXPR:
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.off = 0;
          break;
        case MEM_REF:
          /* The base address gets its own vn_reference_op_s structure.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          if (host_integerp (TREE_OPERAND (ref, 1), 0))
            temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
          break;
        case BIT_FIELD_REF:
          /* Record bits and position.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          break;
        case COMPONENT_REF:
          /* The field decl is enough to unambiguously specify the field,
             a matching type is not necessary and a mismatching type
             is always a spurious difference.  */
          temp.type = NULL_TREE;
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          {
            tree this_offset = component_ref_field_offset (ref);
            if (this_offset
                && TREE_CODE (this_offset) == INTEGER_CST)
              {
                tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
                if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
                  {
                    double_int off
                      = tree_to_double_int (this_offset)
                        + tree_to_double_int (bit_offset)
                          .arshift (BITS_PER_UNIT == 8
                                    ? 3 : exact_log2 (BITS_PER_UNIT),
                                    HOST_BITS_PER_DOUBLE_INT);
                    if (off.fits_shwi ())
                      temp.off = off.low;
                  }
              }
          }
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* Record index as operand.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          /* Always record lower bounds and element size.  */
          temp.op1 = array_ref_low_bound (ref);
          temp.op2 = array_ref_element_size (ref);
          if (TREE_CODE (temp.op0) == INTEGER_CST
              && TREE_CODE (temp.op1) == INTEGER_CST
              && TREE_CODE (temp.op2) == INTEGER_CST)
            {
              double_int off = tree_to_double_int (temp.op0);
              off += -tree_to_double_int (temp.op1);
              off *= tree_to_double_int (temp.op2);
              if (off.fits_shwi ())
                temp.off = off.low;
            }
          break;
        case VAR_DECL:
          if (DECL_HARD_REGISTER (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthru.  */
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
          /* Canonicalize decls to MEM[&decl] which is what we end up with
             when valueizing MEM[ptr] with ptr = &decl.  */
          temp.opcode = MEM_REF;
          temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
          temp.off = 0;
          result->safe_push (temp);
          temp.opcode = ADDR_EXPR;
          temp.op0 = build_fold_addr_expr (ref);
          temp.type = TREE_TYPE (temp.op0);
          temp.off = -1;
          break;
        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case FIXED_CST:
        case CONSTRUCTOR:
        case SSA_NAME:
          temp.op0 = ref;
          break;
        case ADDR_EXPR:
          if (is_gimple_min_invariant (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthrough.  */
          /* These are only interesting for their operands, their
             existence, and their type.  They will never be the last
             ref in the chain of references (IE they require an
             operand), so we don't have to put anything
             for op* as it will be handled by the iteration.  */
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          temp.off = 0;
          break;
        case IMAGPART_EXPR:
          /* This is only interesting for its constant offset.  */
          temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
          break;
        default:
          gcc_unreachable ();
        }
      result->safe_push (temp);

      if (REFERENCE_CLASS_P (ref)
          || TREE_CODE (ref) == MODIFY_EXPR
          || TREE_CODE (ref) == WITH_SIZE_EXPR
          || (TREE_CODE (ref) == ADDR_EXPR
              && !is_gimple_min_invariant (ref)))
        ref = TREE_OPERAND (ref, 0);
      else
        ref = NULL_TREE;
    }
}
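
/* A hedged worked example: for the load a.b[i_1], with a a VAR_DECL,
   the resulting vector is roughly

     { ARRAY_REF, op0 = i_1, op1 = low bound, op2 = element size }
     { COMPONENT_REF, op0 = FIELD_DECL b, off = byte offset of b }
     { MEM_REF, op0 = 0, off = 0 }
     { ADDR_EXPR, op0 = &a }

   i.e. outermost component first, with the decl canonicalized to
   MEM[&a] so the access looks the same as one through a pointer that
   valueizes to &a.  */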
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
                               alias_set_type set, tree type,
                               vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = &ops[0];
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      enum machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
        size_tree = TYPE_SIZE (type);
      else
        size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!host_integerp (size_tree, 1))
        size = -1;
      else
        size = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (ops, i, op)
    {
      switch (op->opcode)
        {
        /* These may be in the reference ops, but we cannot do anything
           sensible with them here.  */
        case ADDR_EXPR:
          /* Apart from ADDR_EXPR arguments to MEM_REF.  */
          if (base != NULL_TREE
              && TREE_CODE (base) == MEM_REF
              && op->op0
              && DECL_P (TREE_OPERAND (op->op0, 0)))
            {
              vn_reference_op_t pop = &ops[i-1];
              base = TREE_OPERAND (op->op0, 0);
              if (pop->off == -1)
                {
                  max_size = -1;
                  offset = 0;
                }
              else
                offset += pop->off * BITS_PER_UNIT;
              op0_p = NULL;
              break;
            }
          /* Fallthru.  */
        case CALL_EXPR:
          return false;

        /* Record the base objects.  */
        case MEM_REF:
          base_alias_set = get_deref_alias_set (op->op0);
          *op0_p = build2 (MEM_REF, op->type,
                           NULL_TREE, op->op0);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case VAR_DECL:
        case PARM_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          *op0_p = op->op0;
          op0_p = NULL;
          break;

        /* And now the usual component-reference style ops.  */
        case BIT_FIELD_REF:
          offset += tree_low_cst (op->op1, 0);
          break;

        case COMPONENT_REF:
          {
            tree field = op->op0;
            /* We do not have a complete COMPONENT_REF tree here so we
               cannot use component_ref_field_offset.  Do the interesting
               parts manually.  */

            if (op->op1
                || !host_integerp (DECL_FIELD_OFFSET (field), 1))
              max_size = -1;
            else
              {
                offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
                           * BITS_PER_UNIT);
                offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
              }
            break;
          }

        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* We recorded the lower bound and the element size.  */
          if (!host_integerp (op->op0, 0)
              || !host_integerp (op->op1, 0)
              || !host_integerp (op->op2, 0))
            max_size = -1;
          else
            {
              HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
              hindex -= TREE_INT_CST_LOW (op->op1);
              hindex *= TREE_INT_CST_LOW (op->op2);
              hindex *= BITS_PER_UNIT;
              offset += hindex;
            }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          offset += size;
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case CONST_DECL:
          return false;

        default:
          return false;
        }
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  return true;
}
/* Copy the operations that make up the call statement CALL into RESULT,
   a vector of vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
                              vec<vn_reference_op_s> *result)
{
  vn_reference_op_s temp;
  unsigned i;
  tree lhs = gimple_call_lhs (call);

  /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
     different.  By adding the lhs here in the vector, we ensure that the
     hashcode is different, guaranteeing a different value number.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    {
      memset (&temp, 0, sizeof (temp));
      temp.opcode = MODIFY_EXPR;
      temp.type = TREE_TYPE (lhs);
      temp.op0 = lhs;
      temp.off = -1;
      result->safe_push (temp);
    }

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  temp.off = -1;
  result->safe_push (temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}
/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static vec<vn_reference_op_s>
create_reference_ops_from_ref (tree ref)
{
  vec<vn_reference_op_s> result = vec<vn_reference_op_s>();

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static vec<vn_reference_op_s>
create_reference_ops_from_call (gimple call)
{
  vec<vn_reference_op_s> result = vec<vn_reference_op_s>();

  copy_reference_ops_from_call (call, &result);
  return result;
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
void
vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
                            unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  tree addr_base;
  HOST_WIDE_INT addr_offset = 0;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
                                             &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != op->op0)
    {
      double_int off = tree_to_double_int (mem_op->op0);
      off = off.sext (TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
      off += double_int::from_shwi (addr_offset);
      mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (host_integerp (mem_op->op0, 0))
        mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
      else
        mem_op->off = -1;
    }
}
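
/* Sketch (offsets illustrative): given the operand pair
   MEM_REF off 4, ADDR_EXPR <&a.b> with b at byte offset 8, the pair
   is rewritten to MEM_REF off 12, ADDR_EXPR <&a>; the component
   offset migrates into the MEM_REF constant.  */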
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static void
vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
                                     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  gimple def_stmt;
  enum tree_code code;
  double_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return;

  off = tree_to_double_int (mem_op->op0);
  off = off.sext (TYPE_PRECISION (TREE_TYPE (mem_op->op0)));

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
                                                 &addr_offset);
      if (!addr_base
          || TREE_CODE (addr_base) != MEM_REF)
        return;

      off += double_int::from_shwi (addr_offset);
      off += mem_ref_offset (addr_base);
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
          || TREE_CODE (ptroff) != INTEGER_CST)
        return;

      off += tree_to_double_int (ptroff);
      op->op0 = ptr;
    }

  mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (host_integerp (mem_op->op0, 0))
    mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
}
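
/* Sketch: for ptr_2 = ptr_1 + 4 and the operand pair MEM_REF off 2,
   SSA_NAME ptr_2, the constant folds into the MEM_REF giving
   MEM_REF off 6 over ptr_1, and the walk recurses in case ptr_1 is
   itself defined by an address computation (constants are
   illustrative).  */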
/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  vec<vn_reference_op_s> operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = &operands[0];
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && operands.length () >= 2
      && operands.length () <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = &operands[1];
      if (operands.length () > 2)
        arg1 = &operands[2];
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
          || (arg0->opcode == ADDR_EXPR
              && is_gimple_min_invariant (arg0->op0)))
        anyconst = true;
      if (arg1
          && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
              || (arg1->opcode == ADDR_EXPR
                  && is_gimple_min_invariant (arg1->op0))))
        anyconst = true;
      if (anyconst)
        {
          tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
                                         arg1 ? 2 : 1,
                                         arg0->op0,
                                         arg1 ? arg1->op0 : NULL);
          if (folded
              && TREE_CODE (folded) == NOP_EXPR)
            folded = TREE_OPERAND (folded, 0);
          if (folded
              && is_gimple_min_invariant (folded))
            return folded;
        }
    }

  /* Simplify reads from constant strings.  */
  else if (op->opcode == ARRAY_REF
           && TREE_CODE (op->op0) == INTEGER_CST
           && integer_zerop (op->op1)
           && operands.length () == 2)
    {
      vn_reference_op_t arg0;
      arg0 = &operands[1];
      if (arg0->opcode == STRING_CST
          && (TYPE_MODE (op->type)
              == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
          && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
          && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
          && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
        return build_int_cst_type (op->type,
                                   (TREE_STRING_POINTER (arg0->op0)
                                    [TREE_INT_CST_LOW (op->op0)]));
    }

  return NULL_TREE;
}
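
/* Illustration: a reference decomposed from "abc"[1] is an ARRAY_REF
   with constant index over a STRING_CST, so the second branch above
   short-circuits the lookup to the character constant 'b' without
   touching the hash tables.  */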
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static vec<vn_reference_op_s>
valueize_refs_1 (vec<vn_reference_op_s> orig, bool *valueized_anything)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
          || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
        {
          tree tem = SSA_VAL (vro->op0);
          if (tem != vro->op0)
            {
              *valueized_anything = true;
              vro->op0 = tem;
            }
          /* If it transforms from an SSA_NAME to a constant, update
             the opcode.  */
          if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
            vro->opcode = TREE_CODE (vro->op0);
        }
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
        {
          tree tem = SSA_VAL (vro->op1);
          if (tem != vro->op1)
            {
              *valueized_anything = true;
              vro->op1 = tem;
            }
        }
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
        {
          tree tem = SSA_VAL (vro->op2);
          if (tem != vro->op2)
            {
              *valueized_anything = true;
              vro->op2 = tem;
            }
        }
      /* If it transforms from an SSA_NAME to an address, fold with
         a preceding indirect reference.  */
      if (i > 0
          && vro->op0
          && TREE_CODE (vro->op0) == ADDR_EXPR
          && orig[i - 1].opcode == MEM_REF)
        vn_reference_fold_indirect (&orig, &i);
      else if (i > 0
               && vro->opcode == SSA_NAME
               && orig[i - 1].opcode == MEM_REF)
        vn_reference_maybe_forwprop_address (&orig, &i);
      /* If it transforms a non-constant ARRAY_REF into a constant
         one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
               && vro->off == -1
               && TREE_CODE (vro->op0) == INTEGER_CST
               && TREE_CODE (vro->op1) == INTEGER_CST
               && TREE_CODE (vro->op2) == INTEGER_CST)
        {
          double_int off = tree_to_double_int (vro->op0);
          off += -tree_to_double_int (vro->op1);
          off *= tree_to_double_int (vro->op2);
          if (off.fits_shwi ())
            vro->off = off.low;
        }
    }

  return orig;
}

static vec<vn_reference_op_s>
valueize_refs (vec<vn_reference_op_s> orig)
{
  bool tem;
  return valueize_refs_1 (orig, &tem);
}
static vec<vn_reference_op_s> shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return vec<vn_reference_op_s>();
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
                                              valueized_anything);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return vec<vn_reference_op_s>();
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}
/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
        *vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}
static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse,
                       unsigned int cnt, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  void **slot;
  hashval_t hash;

  /* This bounds the stmt walks we perform on reference lookups
     to O(1) instead of O(N) where N is the number of dominating
     stores.  */
  if (cnt > (unsigned) PARAM_VALUE (PARAM_SCCVN_MAX_ALIAS_QUERIES_PER_ACCESS))
    return (void *)-1;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = SSA_VAL (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}
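
/* The subtract-then-add fixup above works because
   vn_reference_compute_hash mixes the vuse in additively
   (result += SSA_NAME_VERSION (vuse)), so the hash can be retargeted
   to a new VUSE without rehashing all the operands.  */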
/* Lookup an existing or insert a new vn_reference entry into the
   value table for the VUSE, SET, TYPE, OPERANDS reference which
   has the value VALUE which is either a constant or an SSA name.  */

static vn_reference_t
vn_reference_lookup_or_insert_for_pieces (tree vuse,
                                          alias_set_type set,
                                          tree type,
                                          vec<vn_reference_op_s,
                                              va_heap> operands,
                                          tree value)
{
  struct vn_reference_s vr1;
  vn_reference_t result;
  unsigned value_id;
  vr1.vuse = vuse;
  vr1.operands = operands;
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if (vn_reference_lookup_1 (&vr1, &result))
    return result;
  if (TREE_CODE (value) == SSA_NAME)
    value_id = VN_INFO (value)->value_id;
  else
    value_id = get_or_alloc_constant_value_id (value);
  return vn_reference_insert_pieces (vuse, set, type,
                                     operands.copy (), value, value_id);
}
/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree base;
  HOST_WIDE_INT offset, maxsize;
  static vec<vn_reference_op_s>
    lhs_ops = vec<vn_reference_op_s>();
  ao_ref lhs_ref;
  bool lhs_ref_ok = false;

  /* First try to disambiguate after value-replacing in the definition's LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      vec<vn_reference_op_s> tem;
      tree lhs = gimple_assign_lhs (def_stmt);
      bool valueized_anything = false;
      /* Avoid re-allocation overhead.  */
      lhs_ops.truncate (0);
      copy_reference_ops_from_ref (lhs, &lhs_ops);
      tem = lhs_ops;
      lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
      gcc_assert (lhs_ops == tem);
      if (valueized_anything)
        {
          lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
                                                      get_alias_set (lhs),
                                                      TREE_TYPE (lhs), lhs_ops);
          if (lhs_ref_ok
              && !refs_may_alias_p_1 (ref, &lhs_ref, true))
            return NULL;
        }
      else
        {
          ao_ref_init (&lhs_ref, lhs);
          lhs_ref_ok = true;
        }
    }

  base = ao_ref_base (ref);
  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* We can't deduce anything useful from clobbers.  */
  if (gimple_clobber_p (def_stmt))
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && host_integerp (gimple_call_arg (def_stmt, 2), 1)
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
          == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
          && maxsize2 != -1
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = build_zero_cst (vr->type);
          return vn_reference_lookup_or_insert_for_pieces
                   (vuse, vr->set, vr->type, vr->operands, val);
        }
    }
  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
           && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = build_zero_cst (vr->type);
          return vn_reference_lookup_or_insert_for_pieces
                   (vuse, vr->set, vr->type, vr->operands, val);
        }
    }
  /* 3) Assignment from a constant.  We can use folds native encode/interpret
     routines to extract the assigned bits.  */
  else if (vn_walk_kind == VN_WALKREWRITE
           && CHAR_BIT == 8 && BITS_PER_UNIT == 8
           && ref->size == maxsize
           && maxsize % BITS_PER_UNIT == 0
           && offset % BITS_PER_UNIT == 0
           && is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
          && maxsize2 == size2
          && size2 % BITS_PER_UNIT == 0
          && offset2 % BITS_PER_UNIT == 0
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          /* We support up to 512-bit values (for V8DFmode).  */
          unsigned char buffer[64];
          int len;

          len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
                                    buffer, sizeof (buffer));
          if (len > 0)
            {
              tree val = native_interpret_expr (vr->type,
                                                buffer
                                                + ((offset - offset2)
                                                   / BITS_PER_UNIT),
                                                ref->size / BITS_PER_UNIT);
              if (val)
                return vn_reference_lookup_or_insert_for_pieces
                         (vuse, vr->set, vr->type, vr->operands, val);
            }
        }
    }
  /* 4) Assignment from an SSA name which definition we may be able
     to access pieces from.  */
  else if (ref->size == maxsize
           && is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
    {
      tree rhs1 = gimple_assign_rhs1 (def_stmt);
      gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
      if (is_gimple_assign (def_stmt2)
          && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
              || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
          && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
        {
          tree base2;
          HOST_WIDE_INT offset2, size2, maxsize2, off;
          base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                           &offset2, &size2, &maxsize2);
          off = offset - offset2;
          if (maxsize2 != -1
              && maxsize2 == size2
              && operand_equal_p (base, base2, 0)
              && offset2 <= offset
              && offset2 + size2 >= offset + maxsize)
            {
              tree val = NULL_TREE;
              HOST_WIDE_INT elsz
                = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
              if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
                {
                  if (off == 0)
                    val = gimple_assign_rhs1 (def_stmt2);
                  else if (off == elsz)
                    val = gimple_assign_rhs2 (def_stmt2);
                }
              else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
                       && off % elsz == 0)
                {
                  tree ctor = gimple_assign_rhs1 (def_stmt2);
                  unsigned i = off / elsz;
                  if (i < CONSTRUCTOR_NELTS (ctor))
                    {
                      constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
                      if (TREE_CODE (TREE_TYPE (rhs1)) == VECTOR_TYPE)
                        {
                          if (TREE_CODE (TREE_TYPE (elt->value))
                              != VECTOR_TYPE)
                            val = elt->value;
                        }
                    }
                }
              if (val)
                return vn_reference_lookup_or_insert_for_pieces
                         (vuse, vr->set, vr->type, vr->operands, val);
            }
        }
    }
  /* 5) For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
           && gimple_assign_single_p (def_stmt)
           && (DECL_P (gimple_assign_rhs1 (def_stmt))
               || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
               || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      int i, j;
      vec<vn_reference_op_s>
          rhs = vec<vn_reference_op_s>();
      vn_reference_op_t vro;
      ao_ref r;

      if (!lhs_ref_ok)
        return (void *)-1;

      /* See if the assignment kills REF.  */
      base2 = ao_ref_base (&lhs_ref);
      offset2 = lhs_ref.offset;
      size2 = lhs_ref.size;
      maxsize2 = lhs_ref.max_size;
      if (maxsize2 == -1
          || (base != base2 && !operand_equal_p (base, base2, 0))
          || offset2 > offset
          || offset2 + size2 < offset + maxsize)
        return (void *)-1;

      /* Find the common base of ref and the lhs.  lhs_ops already
         contains valueized operands for the lhs.  */
      i = vr->operands.length () - 1;
      j = lhs_ops.length () - 1;
      while (j >= 0 && i >= 0
             && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
        {
          i--;
          j--;
        }

      /* ??? The innermost op should always be a MEM_REF and we already
         checked that the assignment to the lhs kills vr.  Thus for
         aggregate copies using char[] types the vn_reference_op_eq
         may fail when comparing types for compatibility.  But we really
         don't care here - further lookups with the rewritten operands
         will simply fail if we messed up types too badly.  */
      if (j == 0 && i >= 0
          && lhs_ops[0].opcode == MEM_REF
          && lhs_ops[0].off != -1
          && (lhs_ops[0].off == vr->operands[i].off))
        i--, j--;

      /* i now points to the first additional op.
         ??? LHS may not be completely contained in VR, one or more
         VIEW_CONVERT_EXPRs could be in its way.  We could at least
         try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
        return (void *)-1;

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      if (i + 1 + rhs.length () > vr->operands.length ())
        {
          vec<vn_reference_op_s> old = vr->operands;
          vr->operands.safe_grow (i + 1 + rhs.length ());
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references = vec<vn_reference_op_s>();
        }
      else
        vr->operands.truncate (i + 1 + rhs.length ());
      FOR_EACH_VEC_ELT (rhs, j, vro)
        vr->operands[i + 1 + j] = *vro;
      rhs.release ();
      vr->operands = valueize_refs (vr->operands);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
        return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }
  /* 6) For memcpy copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
           && is_gimple_reg_type (vr->type)
           /* ??? Handle BCOPY as well.  */
           && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
               || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
               || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
           && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
               || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
           && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
               || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
           && host_integerp (gimple_call_arg (def_stmt, 2), 1))
    {
      tree lhs, rhs;
      ao_ref r;
      HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
      vn_reference_op_s op;
      HOST_WIDE_INT at;


      /* Only handle non-variable, addressable refs.  */
      if (ref->size != maxsize
          || offset % BITS_PER_UNIT != 0
          || ref->size % BITS_PER_UNIT != 0)
        return (void *)-1;

      /* Extract a pointer base and an offset for the destination.  */
      lhs = gimple_call_arg (def_stmt, 0);
      lhs_offset = 0;
      if (TREE_CODE (lhs) == SSA_NAME)
        lhs = SSA_VAL (lhs);
      if (TREE_CODE (lhs) == ADDR_EXPR)
        {
          tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
                                                    &lhs_offset);
          if (!tem)
            return (void *)-1;
          if (TREE_CODE (tem) == MEM_REF
              && host_integerp (TREE_OPERAND (tem, 1), 1))
            {
              lhs = TREE_OPERAND (tem, 0);
              lhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
            }
          else if (DECL_P (tem))
            lhs = build_fold_addr_expr (tem);
          else
            return (void *)-1;
        }
      if (TREE_CODE (lhs) != SSA_NAME
          && TREE_CODE (lhs) != ADDR_EXPR)
        return (void *)-1;

      /* Extract a pointer base and an offset for the source.  */
      rhs = gimple_call_arg (def_stmt, 1);
      rhs_offset = 0;
      if (TREE_CODE (rhs) == SSA_NAME)
        rhs = SSA_VAL (rhs);
      if (TREE_CODE (rhs) == ADDR_EXPR)
        {
          tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
                                                    &rhs_offset);
          if (!tem)
            return (void *)-1;
          if (TREE_CODE (tem) == MEM_REF
              && host_integerp (TREE_OPERAND (tem, 1), 1))
            {
              rhs = TREE_OPERAND (tem, 0);
              rhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
            }
          else if (DECL_P (tem))
            rhs = build_fold_addr_expr (tem);
          else
            return (void *)-1;
        }
      if (TREE_CODE (rhs) != SSA_NAME
          && TREE_CODE (rhs) != ADDR_EXPR)
        return (void *)-1;

      copy_size = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2));

      /* The bases of the destination and the references have to agree.  */
      if ((TREE_CODE (base) != MEM_REF
           && !DECL_P (base))
          || (TREE_CODE (base) == MEM_REF
              && (TREE_OPERAND (base, 0) != lhs
                  || !host_integerp (TREE_OPERAND (base, 1), 1)))
          || (DECL_P (base)
              && (TREE_CODE (lhs) != ADDR_EXPR
                  || TREE_OPERAND (lhs, 0) != base)))
        return (void *)-1;

      /* And the access has to be contained within the memcpy destination.  */
      at = offset / BITS_PER_UNIT;
      if (TREE_CODE (base) == MEM_REF)
        at += TREE_INT_CST_LOW (TREE_OPERAND (base, 1));
      if (lhs_offset > at
          || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
        return (void *)-1;

      /* Make room for 2 operands in the new reference.  */
      if (vr->operands.length () < 2)
        {
          vec<vn_reference_op_s> old = vr->operands;
          vr->operands.safe_grow_cleared (2);
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references.create (0);
        }
      else
        vr->operands.truncate (2);

      /* The looked-through reference is a simple MEM_REF.  */
      memset (&op, 0, sizeof (op));
      op.type = vr->type;
      op.opcode = MEM_REF;
      op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
      op.off = at - lhs_offset + rhs_offset;
      vr->operands[0] = op;
      op.type = TREE_TYPE (rhs);
      op.opcode = TREE_CODE (rhs);
      op.op0 = rhs;
      op.off = -1;
      vr->operands[1] = op;
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
        return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}
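
/* Worked example for case 6) above (a sketch; names and offsets are
   illustrative): given

       memcpy (&d, &s, 16);
       x_1 = d.f;   // f at byte offset 4

   the lookup of d.f is rewritten to the operand pair
   MEM_REF off 4, ADDR_EXPR <&s>, i.e. MEM[&s, 4], and the walk
   continues upward so that a dominating store to s.f can supply the
   value.  */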
/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
                            vec<vn_reference_op_s> operands,
                            vn_reference_t *vnresult, vn_lookup_kind kind)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;
  tree cst;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  shared_lookup_references.truncate (0);
  shared_lookup_references.safe_grow (operands.length ());
  memcpy (shared_lookup_references.address (),
          operands.address (),
          sizeof (vn_reference_op_s)
          * operands.length ());
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  vn_reference_lookup_1 (&vr1, vnresult);
  if (!*vnresult
      && kind != VN_NOWALK
      && vr1.vuse)
    {
      ao_ref r;
      vn_walk_kind = kind;
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
        *vnresult =
          (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                  vn_reference_lookup_2,
                                                  vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        vr1.operands.release ();
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}
1943 /* Lookup OP in the current hash table, and return the resulting value
1944 number if it exists in the hash table. Return NULL_TREE if it does
1945 not exist in the hash table or if the result field of the structure
1946 was NULL.. VNRESULT will be filled in with the vn_reference_t
1947 stored in the hashtable if one exists. */
1949 tree
1950 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
1951 vn_reference_t *vnresult)
1953 vec<vn_reference_op_s> operands;
1954 struct vn_reference_s vr1;
1955 tree cst;
1956 bool valuezied_anything;
1958 if (vnresult)
1959 *vnresult = NULL;
1961 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1962 vr1.operands = operands
1963 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
1964 vr1.type = TREE_TYPE (op);
1965 vr1.set = get_alias_set (op);
1966 vr1.hashcode = vn_reference_compute_hash (&vr1);
1967 if ((cst = fully_constant_vn_reference_p (&vr1)))
1968 return cst;
1970 if (kind != VN_NOWALK
1971 && vr1.vuse)
1973 vn_reference_t wvnresult;
1974 ao_ref r;
1975 /* Make sure to use a valueized reference if we valueized anything.
1976 Otherwise preserve the full reference for advanced TBAA. */
1977 if (!valueized_anything
1978 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
1979 vr1.operands))
1980 ao_ref_init (&r, op);
1981 vn_walk_kind = kind;
1982 wvnresult =
1983 (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
1984 vn_reference_lookup_2,
1985 vn_reference_lookup_3, &vr1);
1986 if (vr1.operands != operands)
1987 vr1.operands.release ();
1988 if (wvnresult)
1990 if (vnresult)
1991 *vnresult = wvnresult;
1992 return wvnresult->result;
1995 return NULL_TREE;
1998 return vn_reference_lookup_1 (&vr1, vnresult);
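/* A typical use, as in visit_reference_op_load below:

     result = vn_reference_lookup (op, gimple_vuse (stmt),
				   default_vn_walk_kind, NULL);

   where a NULL_TREE return means no earlier occurrence of the
   reference is known.  */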
2002 /* Insert OP into the current hash table with a value number of
2003 RESULT, and return the resulting reference structure we created. */
2005 vn_reference_t
2006 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
2008 void **slot;
2009 vn_reference_t vr1;
2011 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
2012 if (TREE_CODE (result) == SSA_NAME)
2013 vr1->value_id = VN_INFO (result)->value_id;
2014 else
2015 vr1->value_id = get_or_alloc_constant_value_id (result);
2016 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2017 vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
2018 vr1->type = TREE_TYPE (op);
2019 vr1->set = get_alias_set (op);
2020 vr1->hashcode = vn_reference_compute_hash (vr1);
2021 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
2022 vr1->result_vdef = vdef;
2024 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
2025 INSERT);
2027 /* Because we lookup stores using vuses, and value number failures
2028 using the vdefs (see visit_reference_op_store for how and why),
2029 it's possible that on failure we may try to insert an already
2030 inserted store. This is not wrong, there is no ssa name for a
2031 store that we could use as a differentiator anyway. Thus, unlike
2032 the other lookup functions, you cannot gcc_assert (!*slot)
2033 here. */
2035 /* But free the old slot in case of a collision. */
2036 if (*slot)
2037 free_reference (*slot);
2039 *slot = vr1;
2040 return vr1;
2043 /* Insert a reference by its pieces into the current hash table with
2044 a value number of RESULT. Return the resulting reference
2045 structure we created. */
2047 vn_reference_t
2048 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
2049 vec<vn_reference_op_s> operands,
2050 tree result, unsigned int value_id)
2053 void **slot;
2054 vn_reference_t vr1;
2056 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
2057 vr1->value_id = value_id;
2058 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2059 vr1->operands = valueize_refs (operands);
2060 vr1->type = type;
2061 vr1->set = set;
2062 vr1->hashcode = vn_reference_compute_hash (vr1);
2063 if (result && TREE_CODE (result) == SSA_NAME)
2064 result = SSA_VAL (result);
2065 vr1->result = result;
2067 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
2068 INSERT);
2070 /* At this point we should have all the things inserted that we have
2071 seen before, and we should never try inserting something that
2072 already exists. */
2073 gcc_assert (!*slot);
2074 if (*slot)
2075 free_reference (*slot);
2077 *slot = vr1;
2078 return vr1;
2081 /* Compute and return the hash value for nary operation VNO1. */
2083 hashval_t
2084 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
2086 hashval_t hash;
2087 unsigned i;
2089 for (i = 0; i < vno1->length; ++i)
2090 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
2091 vno1->op[i] = SSA_VAL (vno1->op[i]);
2093 if (vno1->length == 2
2094 && commutative_tree_code (vno1->opcode)
2095 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2097 tree temp = vno1->op[0];
2098 vno1->op[0] = vno1->op[1];
2099 vno1->op[1] = temp;
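/* After this canonicalization, commutative expressions such as
   a + b and b + a hash identically and compare equal in
   vn_nary_op_eq below.  */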
2102 hash = iterative_hash_hashval_t (vno1->opcode, 0);
2103 for (i = 0; i < vno1->length; ++i)
2104 hash = iterative_hash_expr (vno1->op[i], hash);
2106 return hash;
2109 /* Return the computed hashcode for nary operation P1. */
2111 static hashval_t
2112 vn_nary_op_hash (const void *p1)
2114 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
2115 return vno1->hashcode;
2118 /* Compare nary operations P1 and P2 and return true if they are
2119 equivalent. */
2121 static int
2122 vn_nary_op_eq (const void *p1, const void *p2)
2124 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
2125 const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
2126 unsigned i;
2128 if (vno1->hashcode != vno2->hashcode)
2129 return false;
2131 if (vno1->length != vno2->length)
2132 return false;
2134 if (vno1->opcode != vno2->opcode
2135 || !types_compatible_p (vno1->type, vno2->type))
2136 return false;
2138 for (i = 0; i < vno1->length; ++i)
2139 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2140 return false;
2142 return true;
2145 /* Initialize VNO from the pieces provided. */
2147 static void
2148 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2149 enum tree_code code, tree type, tree *ops)
2151 vno->opcode = code;
2152 vno->length = length;
2153 vno->type = type;
2154 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2157 /* Initialize VNO from OP. */
2159 static void
2160 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2162 unsigned i;
2164 vno->opcode = TREE_CODE (op);
2165 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2166 vno->type = TREE_TYPE (op);
2167 for (i = 0; i < vno->length; ++i)
2168 vno->op[i] = TREE_OPERAND (op, i);
2171 /* Return the number of operands for a vn_nary ops structure from STMT. */
2173 static unsigned int
2174 vn_nary_length_from_stmt (gimple stmt)
2176 switch (gimple_assign_rhs_code (stmt))
2178 case REALPART_EXPR:
2179 case IMAGPART_EXPR:
2180 case VIEW_CONVERT_EXPR:
2181 return 1;
2183 case BIT_FIELD_REF:
2184 return 3;
2186 case CONSTRUCTOR:
2187 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2189 default:
2190 return gimple_num_ops (stmt) - 1;
2194 /* Initialize VNO from STMT. */
2196 static void
2197 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
2199 unsigned i;
2201 vno->opcode = gimple_assign_rhs_code (stmt);
2202 vno->type = gimple_expr_type (stmt);
2203 switch (vno->opcode)
2205 case REALPART_EXPR:
2206 case IMAGPART_EXPR:
2207 case VIEW_CONVERT_EXPR:
2208 vno->length = 1;
2209 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2210 break;
2212 case BIT_FIELD_REF:
2213 vno->length = 3;
2214 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2215 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
2216 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
2217 break;
2219 case CONSTRUCTOR:
2220 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2221 for (i = 0; i < vno->length; ++i)
2222 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2223 break;
2225 default:
2226 gcc_checking_assert (!gimple_assign_single_p (stmt));
2227 vno->length = gimple_num_ops (stmt) - 1;
2228 for (i = 0; i < vno->length; ++i)
2229 vno->op[i] = gimple_op (stmt, i + 1);
2233 /* Compute the hashcode for VNO and look for it in the hash table;
2234 return the resulting value number if it exists in the hash table.
2235 Return NULL_TREE if it does not exist in the hash table or if the
2236 result field of the operation is NULL. VNRESULT will contain the
2237 vn_nary_op_t from the hashtable if it exists. */
2239 static tree
2240 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2242 void **slot;
2244 if (vnresult)
2245 *vnresult = NULL;
2247 vno->hashcode = vn_nary_op_compute_hash (vno);
2248 slot = htab_find_slot_with_hash (current_info->nary, vno, vno->hashcode,
2249 NO_INSERT);
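/* During optimistic SCC iteration also consult the valid table,
   where expressions from already-finished SCCs and inserted names
   live.  */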
2250 if (!slot && current_info == optimistic_info)
2251 slot = htab_find_slot_with_hash (valid_info->nary, vno, vno->hashcode,
2252 NO_INSERT);
2253 if (!slot)
2254 return NULL_TREE;
2255 if (vnresult)
2256 *vnresult = (vn_nary_op_t)*slot;
2257 return ((vn_nary_op_t)*slot)->result;
2260 /* Lookup an n-ary operation by its pieces and return the resulting value
2261 number if it exists in the hash table. Return NULL_TREE if it does
2262 not exist in the hash table or if the result field of the operation
2263 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2264 if it exists. */
2266 tree
2267 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2268 tree type, tree *ops, vn_nary_op_t *vnresult)
2270 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2271 sizeof_vn_nary_op (length));
2272 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2273 return vn_nary_op_lookup_1 (vno1, vnresult);
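/* For example (a sketch; X, Y and TYPE stand for arbitrary operands):

     tree ops[2] = { x, y };
     tree val = vn_nary_op_lookup_pieces (2, PLUS_EXPR, type, ops, NULL);

   asks whether some equivalent of x + y already has a value number,
   returning it or NULL_TREE.  */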
2276 /* Lookup OP in the current hash table, and return the resulting value
2277 number if it exists in the hash table. Return NULL_TREE if it does
2278 not exist in the hash table or if the result field of the operation
2279 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2280 if it exists. */
2282 tree
2283 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2285 vn_nary_op_t vno1
2286 = XALLOCAVAR (struct vn_nary_op_s,
2287 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2288 init_vn_nary_op_from_op (vno1, op);
2289 return vn_nary_op_lookup_1 (vno1, vnresult);
2292 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2293 value number if it exists in the hash table. Return NULL_TREE if
2294 it does not exist in the hash table. VNRESULT will contain the
2295 vn_nary_op_t from the hashtable if it exists. */
2297 tree
2298 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
2300 vn_nary_op_t vno1
2301 = XALLOCAVAR (struct vn_nary_op_s,
2302 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2303 init_vn_nary_op_from_stmt (vno1, stmt);
2304 return vn_nary_op_lookup_1 (vno1, vnresult);
2307 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2309 static vn_nary_op_t
2310 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2312 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2315 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2316 obstack. */
2318 static vn_nary_op_t
2319 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2321 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2322 &current_info->nary_obstack);
2324 vno1->value_id = value_id;
2325 vno1->length = length;
2326 vno1->result = result;
2328 return vno1;
2331 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2332 VNO->HASHCODE first. */
2334 static vn_nary_op_t
2335 vn_nary_op_insert_into (vn_nary_op_t vno, htab_t table, bool compute_hash)
2337 void **slot;
2339 if (compute_hash)
2340 vno->hashcode = vn_nary_op_compute_hash (vno);
2342 slot = htab_find_slot_with_hash (table, vno, vno->hashcode, INSERT);
2343 gcc_assert (!*slot);
2345 *slot = vno;
2346 return vno;
2349 /* Insert an n-ary operation into the current hash table using its
2350 pieces. Return the vn_nary_op_t structure we created and put in
2351 the hashtable. */
2353 vn_nary_op_t
2354 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2355 tree type, tree *ops,
2356 tree result, unsigned int value_id)
2358 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2359 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2360 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2363 /* Insert OP into the current hash table with a value number of
2364 RESULT. Return the vn_nary_op_t structure we created and put in
2365 the hashtable. */
2367 vn_nary_op_t
2368 vn_nary_op_insert (tree op, tree result)
2370 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2371 vn_nary_op_t vno1;
2373 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2374 init_vn_nary_op_from_op (vno1, op);
2375 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2378 /* Insert the rhs of STMT into the current hash table with a value number of
2379 RESULT. */
2381 vn_nary_op_t
2382 vn_nary_op_insert_stmt (gimple stmt, tree result)
2384 vn_nary_op_t vno1
2385 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2386 result, VN_INFO (result)->value_id);
2387 init_vn_nary_op_from_stmt (vno1, stmt);
2388 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2391 /* Compute a hashcode for PHI operation VP1 and return it. */
2393 static inline hashval_t
2394 vn_phi_compute_hash (vn_phi_t vp1)
2396 hashval_t result;
2397 int i;
2398 tree phi1op;
2399 tree type;
2401 result = vp1->block->index;
2403 /* If all PHI arguments are constants we need to distinguish
2404 the PHI node via its type. */
2405 type = TREE_TYPE (vp1->phiargs[0]);
2406 result += (INTEGRAL_TYPE_P (type)
2407 + (INTEGRAL_TYPE_P (type)
2408 ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));
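/* VN_TOP arguments are ignored by vn_phi_eq, so they must not
   contribute to the hash either.  */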
2410 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2412 if (phi1op == VN_TOP)
2413 continue;
2414 result = iterative_hash_expr (phi1op, result);
2417 return result;
2420 /* Return the computed hashcode for phi operation P1. */
2422 static hashval_t
2423 vn_phi_hash (const void *p1)
2425 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2426 return vp1->hashcode;
2429 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2431 static int
2432 vn_phi_eq (const void *p1, const void *p2)
2434 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2435 const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
2437 if (vp1->hashcode != vp2->hashcode)
2438 return false;
2440 if (vp1->block == vp2->block)
2442 int i;
2443 tree phi1op;
2445 /* If the PHI nodes do not have compatible types
2446 they are not the same. */
2447 if (!types_compatible_p (TREE_TYPE (vp1->phiargs[0]),
2448 TREE_TYPE (vp2->phiargs[0])))
2449 return false;
2451 /* Any phi in the same block will have its arguments in the
2452 same edge order, because of how we store phi nodes. */
2453 FOR_EACH_VEC_ELT (vp1->phiargs, i, phi1op)
2455 tree phi2op = vp2->phiargs[i];
2456 if (phi1op == VN_TOP || phi2op == VN_TOP)
2457 continue;
2458 if (!expressions_equal_p (phi1op, phi2op))
2459 return false;
2461 return true;
2463 return false;
2466 static vec<tree> shared_lookup_phiargs;
2468 /* Lookup PHI in the current hash table, and return the resulting
2469 value number if it exists in the hash table. Return NULL_TREE if
2470 it does not exist in the hash table. */
2472 static tree
2473 vn_phi_lookup (gimple phi)
2475 void **slot;
2476 struct vn_phi_s vp1;
2477 unsigned i;
2479 shared_lookup_phiargs.truncate (0);
2481 /* Canonicalize the SSA_NAMEs to their value number. */
2482 for (i = 0; i < gimple_phi_num_args (phi); i++)
2484 tree def = PHI_ARG_DEF (phi, i);
2485 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2486 shared_lookup_phiargs.safe_push (def);
2488 vp1.phiargs = shared_lookup_phiargs;
2489 vp1.block = gimple_bb (phi);
2490 vp1.hashcode = vn_phi_compute_hash (&vp1);
2491 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
2492 NO_INSERT);
2493 if (!slot && current_info == optimistic_info)
2494 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
2495 NO_INSERT);
2496 if (!slot)
2497 return NULL_TREE;
2498 return ((vn_phi_t)*slot)->result;
2501 /* Insert PHI into the current hash table with a value number of
2502 RESULT. */
2504 static vn_phi_t
2505 vn_phi_insert (gimple phi, tree result)
2507 void **slot;
2508 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
2509 unsigned i;
2510 vec<tree> args = vec<tree>();
2512 /* Canonicalize the SSA_NAMEs to their value number. */
2513 for (i = 0; i < gimple_phi_num_args (phi); i++)
2515 tree def = PHI_ARG_DEF (phi, i);
2516 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2517 args.safe_push (def);
2519 vp1->value_id = VN_INFO (result)->value_id;
2520 vp1->phiargs = args;
2521 vp1->block = gimple_bb (phi);
2522 vp1->result = result;
2523 vp1->hashcode = vn_phi_compute_hash (vp1);
2525 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
2526 INSERT);
2528 /* Because we iterate over phi operations more than once, it's
2529 possible the slot might already exist here, hence no assert. */
2530 *slot = vp1;
2531 return vp1;
2535 /* Print set of components in strongly connected component SCC to OUT. */
2537 static void
2538 print_scc (FILE *out, vec<tree> scc)
2540 tree var;
2541 unsigned int i;
2543 fprintf (out, "SCC consists of:");
2544 FOR_EACH_VEC_ELT (scc, i, var)
2546 fprintf (out, " ");
2547 print_generic_expr (out, var, 0);
2549 fprintf (out, "\n");
2552 /* Set the value number of FROM to TO, return true if it has changed
2553 as a result. */
2555 static inline bool
2556 set_ssa_val_to (tree from, tree to)
2558 tree currval = SSA_VAL (from);
2560 if (from != to)
2562 if (currval == from)
2564 if (dump_file && (dump_flags & TDF_DETAILS))
2566 fprintf (dump_file, "Not changing value number of ");
2567 print_generic_expr (dump_file, from, 0);
2568 fprintf (dump_file, " from VARYING to ");
2569 print_generic_expr (dump_file, to, 0);
2570 fprintf (dump_file, "\n");
2572 return false;
2574 else if (TREE_CODE (to) == SSA_NAME
2575 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2576 to = from;
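/* Do not use a name occurring in an abnormal PHI as the value
   number: copies that would later be needed on abnormal edges
   cannot be coalesced, so fall back to FROM itself.  */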
2579 /* The only thing we allow as value numbers are VN_TOP, ssa_names
2580 and invariants. So assert that here. */
2581 gcc_assert (to != NULL_TREE
2582 && (to == VN_TOP
2583 || TREE_CODE (to) == SSA_NAME
2584 || is_gimple_min_invariant (to)));
2586 if (dump_file && (dump_flags & TDF_DETAILS))
2588 fprintf (dump_file, "Setting value number of ");
2589 print_generic_expr (dump_file, from, 0);
2590 fprintf (dump_file, " to ");
2591 print_generic_expr (dump_file, to, 0);
2594 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
2596 VN_INFO (from)->valnum = to;
2597 if (dump_file && (dump_flags & TDF_DETAILS))
2598 fprintf (dump_file, " (changed)\n");
2599 return true;
2601 if (dump_file && (dump_flags & TDF_DETAILS))
2602 fprintf (dump_file, "\n");
2603 return false;
2606 /* Mark as processed all the definitions in the defining stmt of USE, or
2607 the USE itself. */
2609 static void
2610 mark_use_processed (tree use)
2612 ssa_op_iter iter;
2613 def_operand_p defp;
2614 gimple stmt = SSA_NAME_DEF_STMT (use);
2616 if (SSA_NAME_IS_DEFAULT_DEF (use) || gimple_code (stmt) == GIMPLE_PHI)
2618 VN_INFO (use)->use_processed = true;
2619 return;
2622 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2624 tree def = DEF_FROM_PTR (defp);
2626 VN_INFO (def)->use_processed = true;
2630 /* Set all definitions in STMT to value number to themselves.
2631 Return true if a value number changed. */
2633 static bool
2634 defs_to_varying (gimple stmt)
2636 bool changed = false;
2637 ssa_op_iter iter;
2638 def_operand_p defp;
2640 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2642 tree def = DEF_FROM_PTR (defp);
2643 changed |= set_ssa_val_to (def, def);
2645 return changed;
2648 static bool expr_has_constants (tree expr);
2649 static tree valueize_expr (tree expr);
2651 /* Visit a copy between LHS and RHS, return true if the value number
2652 changed. */
2654 static bool
2655 visit_copy (tree lhs, tree rhs)
2657 /* Follow chains of copies to their destination. */
2658 while (TREE_CODE (rhs) == SSA_NAME
2659 && SSA_VAL (rhs) != rhs)
2660 rhs = SSA_VAL (rhs);
2662 /* The copy may have a more interesting constant filled expression
2663 (we don't, since we know our RHS is just an SSA name). */
2664 if (TREE_CODE (rhs) == SSA_NAME)
2666 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2667 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2670 return set_ssa_val_to (lhs, rhs);
2673 /* Visit a nary operator RHS, value number it, and return true if the
2674 value number of LHS has changed as a result. */
2676 static bool
2677 visit_nary_op (tree lhs, gimple stmt)
2679 bool changed = false;
2680 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2682 if (result)
2683 changed = set_ssa_val_to (lhs, result);
2684 else
2686 changed = set_ssa_val_to (lhs, lhs);
2687 vn_nary_op_insert_stmt (stmt, lhs);
2690 return changed;
2693 /* Visit a call STMT storing into LHS. Return true if the value number
2694 of the LHS has changed as a result. */
2696 static bool
2697 visit_reference_op_call (tree lhs, gimple stmt)
2699 bool changed = false;
2700 struct vn_reference_s vr1;
2701 vn_reference_t vnresult = NULL;
2702 tree vuse = gimple_vuse (stmt);
2703 tree vdef = gimple_vdef (stmt);
2705 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
2706 if (lhs && TREE_CODE (lhs) != SSA_NAME)
2707 lhs = NULL_TREE;
2709 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2710 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
2711 vr1.type = gimple_expr_type (stmt);
2712 vr1.set = 0;
2713 vr1.hashcode = vn_reference_compute_hash (&vr1);
2714 vn_reference_lookup_1 (&vr1, &vnresult);
2716 if (vnresult)
2718 if (vnresult->result_vdef)
2719 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
2721 if (!vnresult->result && lhs)
2722 vnresult->result = lhs;
2724 if (vnresult->result && lhs)
2726 changed |= set_ssa_val_to (lhs, vnresult->result);
2728 if (VN_INFO (vnresult->result)->has_constants)
2729 VN_INFO (lhs)->has_constants = true;
2732 else
2734 void **slot;
2735 vn_reference_t vr2;
2736 if (vdef)
2737 changed |= set_ssa_val_to (vdef, vdef);
2738 if (lhs)
2739 changed |= set_ssa_val_to (lhs, lhs);
2740 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
2741 vr2->vuse = vr1.vuse;
2742 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
2743 vr2->type = vr1.type;
2744 vr2->set = vr1.set;
2745 vr2->hashcode = vr1.hashcode;
2746 vr2->result = lhs;
2747 vr2->result_vdef = vdef;
2748 slot = htab_find_slot_with_hash (current_info->references,
2749 vr2, vr2->hashcode, INSERT);
2750 if (*slot)
2751 free_reference (*slot);
2752 *slot = vr2;
2755 return changed;
2758 /* Visit a load from a reference operator RHS, part of STMT, value number it,
2759 and return true if the value number of the LHS has changed as a result. */
2761 static bool
2762 visit_reference_op_load (tree lhs, tree op, gimple stmt)
2764 bool changed = false;
2765 tree last_vuse;
2766 tree result;
2768 last_vuse = gimple_vuse (stmt);
2769 last_vuse_ptr = &last_vuse;
2770 result = vn_reference_lookup (op, gimple_vuse (stmt),
2771 default_vn_walk_kind, NULL);
2772 last_vuse_ptr = NULL;
2774 /* If we have a VCE, try looking up its operand as it might be stored in
2775 a different type. */
2776 if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
2777 result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
2778 default_vn_walk_kind, NULL);
2780 /* We handle type-punning through unions by value-numbering based
2781 on offset and size of the access. Be prepared to handle a
2782 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
2783 if (result
2784 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
2786 /* We will be setting the value number of lhs to the value number
2787 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
2788 So first simplify and lookup this expression to see if it
2789 is already available. */
2790 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
2791 if ((CONVERT_EXPR_P (val)
2792 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
2793 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
2795 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
2796 if ((CONVERT_EXPR_P (tem)
2797 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
2798 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
2799 TREE_TYPE (val), tem)))
2800 val = tem;
2802 result = val;
2803 if (!is_gimple_min_invariant (val)
2804 && TREE_CODE (val) != SSA_NAME)
2805 result = vn_nary_op_lookup (val, NULL);
2806 /* If the expression is not yet available, value-number lhs to
2807 a new SSA_NAME we create. */
2808 if (!result)
2810 result = make_temp_ssa_name (TREE_TYPE (lhs), gimple_build_nop (),
2811 "vntemp");
2812 /* Initialize value-number information properly. */
2813 VN_INFO_GET (result)->valnum = result;
2814 VN_INFO (result)->value_id = get_next_value_id ();
2815 VN_INFO (result)->expr = val;
2816 VN_INFO (result)->has_constants = expr_has_constants (val);
2817 VN_INFO (result)->needs_insertion = true;
2818 /* As all "inserted" statements are singleton SCCs, insert
2819 to the valid table. This is strictly needed to
2820 avoid re-generating new value SSA_NAMEs for the same
2821 expression during SCC iteration over and over (the
2822 optimistic table gets cleared after each iteration).
2823 We do not need to insert into the optimistic table, as
2824 lookups there will fall back to the valid table. */
2825 if (current_info == optimistic_info)
2827 current_info = valid_info;
2828 vn_nary_op_insert (val, result);
2829 current_info = optimistic_info;
2831 else
2832 vn_nary_op_insert (val, result);
2833 if (dump_file && (dump_flags & TDF_DETAILS))
2835 fprintf (dump_file, "Inserting name ");
2836 print_generic_expr (dump_file, result, 0);
2837 fprintf (dump_file, " for expression ");
2838 print_generic_expr (dump_file, val, 0);
2839 fprintf (dump_file, "\n");
2844 if (result)
2846 changed = set_ssa_val_to (lhs, result);
2847 if (TREE_CODE (result) == SSA_NAME
2848 && VN_INFO (result)->has_constants)
2850 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
2851 VN_INFO (lhs)->has_constants = true;
2854 else
2856 changed = set_ssa_val_to (lhs, lhs);
2857 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
2860 return changed;
2864 /* Visit a store to a reference operator LHS, part of STMT, value number it,
2865 and return true if the value number of the LHS has changed as a result. */
2867 static bool
2868 visit_reference_op_store (tree lhs, tree op, gimple stmt)
2870 bool changed = false;
2871 vn_reference_t vnresult = NULL;
2872 tree result, assign;
2873 bool resultsame = false;
2874 tree vuse = gimple_vuse (stmt);
2875 tree vdef = gimple_vdef (stmt);
2877 /* First we want to lookup using the *vuses* from the store and see
1878 whether the last store to this location with the same address
2879 had the same value.
2881 The vuses represent the memory state before the store. If the
2882 memory state, address, and value of the store is the same as the
2883 last store to this location, then this store will produce the
2884 same memory state as that store.
2886 In this case the vdef versions for this store are value numbered to those
2887 vuse versions, since they represent the same memory state after
2888 this store.
2890 Otherwise, the vdefs for the store are used when inserting into
2891 the table, since the store generates a new memory state. */
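/* For example, given *p_1 = x_2; ... *p_1 = x_2; with no intervening
   clobber of *p_1, the second store is redundant: its vdef gets the
   value of its vuse, and later passes can remove it.  */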
2893 result = vn_reference_lookup (lhs, vuse, VN_NOWALK, NULL);
2895 if (result)
2897 if (TREE_CODE (result) == SSA_NAME)
2898 result = SSA_VAL (result);
2899 if (TREE_CODE (op) == SSA_NAME)
2900 op = SSA_VAL (op);
2901 resultsame = expressions_equal_p (result, op);
2904 if (!result || !resultsame)
2906 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
2907 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult);
2908 if (vnresult)
2910 VN_INFO (vdef)->use_processed = true;
2911 return set_ssa_val_to (vdef, vnresult->result_vdef);
2915 if (!result || !resultsame)
2917 if (dump_file && (dump_flags & TDF_DETAILS))
2919 fprintf (dump_file, "No store match\n");
2920 fprintf (dump_file, "Value numbering store ");
2921 print_generic_expr (dump_file, lhs, 0);
2922 fprintf (dump_file, " to ");
2923 print_generic_expr (dump_file, op, 0);
2924 fprintf (dump_file, "\n");
2926 /* Have to set value numbers before insert, since insert is
2927 going to valueize the references in-place. */
2928 if (vdef)
2930 changed |= set_ssa_val_to (vdef, vdef);
2933 /* Do not insert structure copies into the tables. */
2934 if (is_gimple_min_invariant (op)
2935 || is_gimple_reg (op))
2936 vn_reference_insert (lhs, op, vdef, NULL);
2938 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
2939 vn_reference_insert (assign, lhs, vuse, vdef);
2941 else
2943 /* We had a match, so value number the vdef to have the value
2944 number of the vuse it came from. */
2946 if (dump_file && (dump_flags & TDF_DETAILS))
2947 fprintf (dump_file, "Store matched earlier value,"
2948 "value numbering store vdefs to matching vuses.\n");
2950 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
2953 return changed;
2956 /* Visit and value number PHI, return true if the value number
2957 changed. */
2959 static bool
2960 visit_phi (gimple phi)
2962 bool changed = false;
2963 tree result;
2964 tree sameval = VN_TOP;
2965 bool allsame = true;
2966 unsigned i;
2968 /* TODO: We could check for this in init_scc_vn, and replace this
2969 with a gcc_assert. */
2970 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
2971 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2973 /* See if all non-TOP arguments have the same value. TOP is
2974 equivalent to everything, so we can ignore it. */
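/* E.g. x_2 = PHI <x_1, VN_TOP> gets the value of x_1; the VN_TOP
   argument typically comes from a not yet visited back edge.  */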
2975 for (i = 0; i < gimple_phi_num_args (phi); i++)
2977 tree def = PHI_ARG_DEF (phi, i);
2979 if (TREE_CODE (def) == SSA_NAME)
2980 def = SSA_VAL (def);
2981 if (def == VN_TOP)
2982 continue;
2983 if (sameval == VN_TOP)
2985 sameval = def;
2987 else
2989 if (!expressions_equal_p (def, sameval))
2991 allsame = false;
2992 break;
2997 /* If all value numbered to the same value, the phi node has that
2998 value. */
2999 if (allsame)
3001 if (is_gimple_min_invariant (sameval))
3003 VN_INFO (PHI_RESULT (phi))->has_constants = true;
3004 VN_INFO (PHI_RESULT (phi))->expr = sameval;
3006 else
3008 VN_INFO (PHI_RESULT (phi))->has_constants = false;
3009 VN_INFO (PHI_RESULT (phi))->expr = sameval;
3012 if (TREE_CODE (sameval) == SSA_NAME)
3013 return visit_copy (PHI_RESULT (phi), sameval);
3015 return set_ssa_val_to (PHI_RESULT (phi), sameval);
3018 /* Otherwise, see if it is equivalent to a phi node in this block. */
3019 result = vn_phi_lookup (phi);
3020 if (result)
3022 if (TREE_CODE (result) == SSA_NAME)
3023 changed = visit_copy (PHI_RESULT (phi), result);
3024 else
3025 changed = set_ssa_val_to (PHI_RESULT (phi), result);
3027 else
3029 vn_phi_insert (phi, PHI_RESULT (phi));
3030 VN_INFO (PHI_RESULT (phi))->has_constants = false;
3031 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
3032 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
3035 return changed;
3038 /* Return true if EXPR contains constants. */
3040 static bool
3041 expr_has_constants (tree expr)
3043 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
3045 case tcc_unary:
3046 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
3048 case tcc_binary:
3049 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
3050 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
3051 /* Constants inside reference ops are rarely interesting, but
3052 it can take a lot of looking to find them. */
3053 case tcc_reference:
3054 case tcc_declaration:
3055 return false;
3056 default:
3057 return is_gimple_min_invariant (expr);
3059 return false;
3062 /* Return true if STMT contains constants. */
3064 static bool
3065 stmt_has_constants (gimple stmt)
3067 if (gimple_code (stmt) != GIMPLE_ASSIGN)
3068 return false;
3070 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
3072 case GIMPLE_UNARY_RHS:
3073 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
3075 case GIMPLE_BINARY_RHS:
3076 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
3077 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
3078 case GIMPLE_TERNARY_RHS:
3079 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
3080 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))
3081 || is_gimple_min_invariant (gimple_assign_rhs3 (stmt)));
3082 case GIMPLE_SINGLE_RHS:
3083 /* Constants inside reference ops are rarely interesting, but
3084 it can take a lot of looking to find them. */
3085 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
3086 default:
3087 gcc_unreachable ();
3089 return false;
3092 /* Replace SSA_NAMES in expr with their value numbers, and return the
3093 result.
3094 This is performed in place. */
3096 static tree
3097 valueize_expr (tree expr)
3099 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
3101 case tcc_binary:
3102 TREE_OPERAND (expr, 1) = vn_valueize (TREE_OPERAND (expr, 1));
3103 /* Fallthru. */
3104 case tcc_unary:
3105 TREE_OPERAND (expr, 0) = vn_valueize (TREE_OPERAND (expr, 0));
3106 break;
3107 default:;
3109 return expr;
3112 /* Simplify the binary expression RHS, and return the result if
3113 simplified. */
3115 static tree
3116 simplify_binary_expression (gimple stmt)
3118 tree result = NULL_TREE;
3119 tree op0 = gimple_assign_rhs1 (stmt);
3120 tree op1 = gimple_assign_rhs2 (stmt);
3121 enum tree_code code = gimple_assign_rhs_code (stmt);
3123 /* This will not catch every single case we could combine, but will
3124 catch those with constants. The goal here is to simultaneously
3125 combine constants between expressions, but avoid infinite
3126 expansion of expressions during simplification. */
3127 if (TREE_CODE (op0) == SSA_NAME)
3129 if (VN_INFO (op0)->has_constants
3130 || TREE_CODE_CLASS (code) == tcc_comparison
3131 || code == COMPLEX_EXPR)
3132 op0 = valueize_expr (vn_get_expr_for (op0));
3133 else
3134 op0 = vn_valueize (op0);
3137 if (TREE_CODE (op1) == SSA_NAME)
3139 if (VN_INFO (op1)->has_constants
3140 || code == COMPLEX_EXPR)
3141 op1 = valueize_expr (vn_get_expr_for (op1));
3142 else
3143 op1 = vn_valueize (op1);
3146 /* Pointer plus constant can be represented as invariant address.
3147 Do so to allow further propagation, see also tree forwprop. */
3148 if (code == POINTER_PLUS_EXPR
3149 && host_integerp (op1, 1)
3150 && TREE_CODE (op0) == ADDR_EXPR
3151 && is_gimple_min_invariant (op0))
3152 return build_invariant_address (TREE_TYPE (op0),
3153 TREE_OPERAND (op0, 0),
3154 TREE_INT_CST_LOW (op1));
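/* E.g. the pointer arithmetic &a p+ 4 becomes the equivalent
   invariant address &MEM[(void *)&a + 4B], on which further
   lookups and propagation can match.  */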
3156 /* Avoid folding if nothing changed. */
3157 if (op0 == gimple_assign_rhs1 (stmt)
3158 && op1 == gimple_assign_rhs2 (stmt))
3159 return NULL_TREE;
3161 fold_defer_overflow_warnings ();
3163 result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
3164 if (result)
3165 STRIP_USELESS_TYPE_CONVERSION (result);
3167 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
3168 stmt, 0);
3170 /* Make sure result is not a complex expression consisting
3171 of operators of operators (i.e. (a + b) + (a + c)).
3172 Otherwise, we will end up with unbounded expressions if
3173 fold does anything at all. */
3174 if (result && valid_gimple_rhs_p (result))
3175 return result;
3177 return NULL_TREE;
3180 /* Simplify the unary expression RHS, and return the result if
3181 simplified. */
3183 static tree
3184 simplify_unary_expression (gimple stmt)
3186 tree result = NULL_TREE;
3187 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
3188 enum tree_code code = gimple_assign_rhs_code (stmt);
3190 /* We handle some tcc_reference codes here that are all
3191 GIMPLE_ASSIGN_SINGLE codes. */
3192 if (code == REALPART_EXPR
3193 || code == IMAGPART_EXPR
3194 || code == VIEW_CONVERT_EXPR
3195 || code == BIT_FIELD_REF)
3196 op0 = TREE_OPERAND (op0, 0);
3198 if (TREE_CODE (op0) != SSA_NAME)
3199 return NULL_TREE;
3201 orig_op0 = op0;
3202 if (VN_INFO (op0)->has_constants)
3203 op0 = valueize_expr (vn_get_expr_for (op0));
3204 else if (CONVERT_EXPR_CODE_P (code)
3205 || code == REALPART_EXPR
3206 || code == IMAGPART_EXPR
3207 || code == VIEW_CONVERT_EXPR
3208 || code == BIT_FIELD_REF)
3210 /* We want to do tree-combining on conversion-like expressions.
3211 Make sure we feed only SSA_NAMEs or constants to fold though. */
3212 tree tem = valueize_expr (vn_get_expr_for (op0));
3213 if (UNARY_CLASS_P (tem)
3214 || BINARY_CLASS_P (tem)
3215 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
3216 || TREE_CODE (tem) == SSA_NAME
3217 || TREE_CODE (tem) == CONSTRUCTOR
3218 || is_gimple_min_invariant (tem))
3219 op0 = tem;
3222 /* Avoid folding if nothing changed, but remember the expression. */
3223 if (op0 == orig_op0)
3224 return NULL_TREE;
3226 if (code == BIT_FIELD_REF)
3228 tree rhs = gimple_assign_rhs1 (stmt);
3229 result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
3230 op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
3232 else
3233 result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
3234 if (result)
3236 STRIP_USELESS_TYPE_CONVERSION (result);
3237 if (valid_gimple_rhs_p (result))
3238 return result;
3241 return NULL_TREE;
3244 /* Try to simplify RHS using equivalences and constant folding. */
3246 static tree
3247 try_to_simplify (gimple stmt)
3249 enum tree_code code = gimple_assign_rhs_code (stmt);
3250 tree tem;
3252 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
3253 in this case, there is no point in doing extra work. */
3254 if (code == SSA_NAME)
3255 return NULL_TREE;
3257 /* First try constant folding based on our current lattice. */
3258 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize);
3259 if (tem
3260 && (TREE_CODE (tem) == SSA_NAME
3261 || is_gimple_min_invariant (tem)))
3262 return tem;
3264 /* If that didn't work try combining multiple statements. */
3265 switch (TREE_CODE_CLASS (code))
3267 case tcc_reference:
3268 /* Fallthrough for some unary codes that can operate on registers. */
3269 if (!(code == REALPART_EXPR
3270 || code == IMAGPART_EXPR
3271 || code == VIEW_CONVERT_EXPR
3272 || code == BIT_FIELD_REF))
3273 break;
3274 /* We could do a little more with unary ops, if they expand
3275 into binary ops, but it's debatable whether it is worth it. */
3276 case tcc_unary:
3277 return simplify_unary_expression (stmt);
3279 case tcc_comparison:
3280 case tcc_binary:
3281 return simplify_binary_expression (stmt);
3283 default:
3284 break;
3287 return NULL_TREE;
3290 /* Visit and value number USE, return true if the value number
3291 changed. */
3293 static bool
3294 visit_use (tree use)
3296 bool changed = false;
3297 gimple stmt = SSA_NAME_DEF_STMT (use);
3299 mark_use_processed (use);
3301 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3302 if (dump_file && (dump_flags & TDF_DETAILS)
3303 && !SSA_NAME_IS_DEFAULT_DEF (use))
3305 fprintf (dump_file, "Value numbering ");
3306 print_generic_expr (dump_file, use, 0);
3307 fprintf (dump_file, " stmt = ");
3308 print_gimple_stmt (dump_file, stmt, 0, 0);
3311 /* Handle uninitialized uses. */
3312 if (SSA_NAME_IS_DEFAULT_DEF (use))
3313 changed = set_ssa_val_to (use, use);
3314 else
3316 if (gimple_code (stmt) == GIMPLE_PHI)
3317 changed = visit_phi (stmt);
3318 else if (gimple_has_volatile_ops (stmt))
3319 changed = defs_to_varying (stmt);
3320 else if (is_gimple_assign (stmt))
3322 enum tree_code code = gimple_assign_rhs_code (stmt);
3323 tree lhs = gimple_assign_lhs (stmt);
3324 tree rhs1 = gimple_assign_rhs1 (stmt);
3325 tree simplified;
3327 /* Shortcut for copies. Simplifying copies is pointless,
3328 since we copy the expression and value they represent. */
3329 if (code == SSA_NAME
3330 && TREE_CODE (lhs) == SSA_NAME)
3332 changed = visit_copy (lhs, rhs1);
3333 goto done;
3335 simplified = try_to_simplify (stmt);
3336 if (simplified)
3338 if (dump_file && (dump_flags & TDF_DETAILS))
3340 fprintf (dump_file, "RHS ");
3341 print_gimple_expr (dump_file, stmt, 0, 0);
3342 fprintf (dump_file, " simplified to ");
3343 print_generic_expr (dump_file, simplified, 0);
3344 if (TREE_CODE (lhs) == SSA_NAME)
3345 fprintf (dump_file, " has constants %d\n",
3346 expr_has_constants (simplified));
3347 else
3348 fprintf (dump_file, "\n");
3351 /* Setting value numbers to constants will occasionally
3352 screw up phi congruence because constants are not
3353 uniquely associated with a single ssa name that can be
3354 looked up. */
3355 if (simplified
3356 && is_gimple_min_invariant (simplified)
3357 && TREE_CODE (lhs) == SSA_NAME)
3359 VN_INFO (lhs)->expr = simplified;
3360 VN_INFO (lhs)->has_constants = true;
3361 changed = set_ssa_val_to (lhs, simplified);
3362 goto done;
3364 else if (simplified
3365 && TREE_CODE (simplified) == SSA_NAME
3366 && TREE_CODE (lhs) == SSA_NAME)
3368 changed = visit_copy (lhs, simplified);
3369 goto done;
3371 else if (simplified)
3373 if (TREE_CODE (lhs) == SSA_NAME)
3375 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
3376 /* We have to unshare the expression or else
3377 valueizing may change the IL stream. */
3378 VN_INFO (lhs)->expr = unshare_expr (simplified);
3381 else if (stmt_has_constants (stmt)
3382 && TREE_CODE (lhs) == SSA_NAME)
3383 VN_INFO (lhs)->has_constants = true;
3384 else if (TREE_CODE (lhs) == SSA_NAME)
3386 /* We reset expr and constantness here because we may
3387 have been value numbering optimistically, and
3388 iterating. They may become non-constant in this case,
3389 even if they were optimistically constant. */
3391 VN_INFO (lhs)->has_constants = false;
3392 VN_INFO (lhs)->expr = NULL_TREE;
3395 if ((TREE_CODE (lhs) == SSA_NAME
3396 /* We can substitute SSA_NAMEs that are live over
3397 abnormal edges with their constant value. */
3398 && !(gimple_assign_copy_p (stmt)
3399 && is_gimple_min_invariant (rhs1))
3400 && !(simplified
3401 && is_gimple_min_invariant (simplified))
3402 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3403 /* Stores or copies from SSA_NAMEs that are live over
3404 abnormal edges are a problem. */
3405 || (code == SSA_NAME
3406 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3407 changed = defs_to_varying (stmt);
3408 else if (REFERENCE_CLASS_P (lhs)
3409 || DECL_P (lhs))
3410 changed = visit_reference_op_store (lhs, rhs1, stmt);
3411 else if (TREE_CODE (lhs) == SSA_NAME)
3413 if ((gimple_assign_copy_p (stmt)
3414 && is_gimple_min_invariant (rhs1))
3415 || (simplified
3416 && is_gimple_min_invariant (simplified)))
3418 VN_INFO (lhs)->has_constants = true;
3419 if (simplified)
3420 changed = set_ssa_val_to (lhs, simplified);
3421 else
3422 changed = set_ssa_val_to (lhs, rhs1);
3424 else
3426 switch (vn_get_stmt_kind (stmt))
3428 case VN_NARY:
3429 changed = visit_nary_op (lhs, stmt);
3430 break;
3431 case VN_REFERENCE:
3432 changed = visit_reference_op_load (lhs, rhs1, stmt);
3433 break;
3434 default:
3435 changed = defs_to_varying (stmt);
3436 break;
3440 else
3441 changed = defs_to_varying (stmt);
3443 else if (is_gimple_call (stmt))
3445 tree lhs = gimple_call_lhs (stmt);
3447 /* ??? We could try to simplify calls. */
3449 if (lhs && TREE_CODE (lhs) == SSA_NAME)
3451 if (stmt_has_constants (stmt))
3452 VN_INFO (lhs)->has_constants = true;
3453 else
3455 /* We reset expr and constantness here because we may
3456 have been value numbering optimistically, and
3457 iterating. They may become non-constant in this case,
3458 even if they were optimistically constant. */
3459 VN_INFO (lhs)->has_constants = false;
3460 VN_INFO (lhs)->expr = NULL_TREE;
3463 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3465 changed = defs_to_varying (stmt);
3466 goto done;
3470 if (!gimple_call_internal_p (stmt)
3471 && (/* Calls to the same function with the same vuse
3472 and the same operands do not necessarily return the same
3473 value, unless they're pure or const. */
3474 gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST)
3475 /* If calls have a vdef, subsequent calls won't have
3476 the same incoming vuse. So, if 2 calls with vdef have the
3477 same vuse, we know they're not subsequent.
3478 We can value number 2 calls to the same function with the
3479 same vuse and the same operands which are not subsequent
3480 the same, because there is no code in the program that can
3481 compare the 2 values. */
3482 || gimple_vdef (stmt)))
3483 changed = visit_reference_op_call (lhs, stmt);
3484 else
3485 changed = defs_to_varying (stmt);
3487 else
3488 changed = defs_to_varying (stmt);
3490 done:
3491 return changed;
3494 /* Compare two operands by reverse postorder index. */
3496 static int
3497 compare_ops (const void *pa, const void *pb)
3499 const tree opa = *((const tree *)pa);
3500 const tree opb = *((const tree *)pb);
3501 gimple opstmta = SSA_NAME_DEF_STMT (opa);
3502 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
3503 basic_block bba;
3504 basic_block bbb;
3506 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3507 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3508 else if (gimple_nop_p (opstmta))
3509 return -1;
3510 else if (gimple_nop_p (opstmtb))
3511 return 1;
3513 bba = gimple_bb (opstmta);
3514 bbb = gimple_bb (opstmtb);
3516 if (!bba && !bbb)
3517 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3518 else if (!bba)
3519 return -1;
3520 else if (!bbb)
3521 return 1;
3523 if (bba == bbb)
3525 if (gimple_code (opstmta) == GIMPLE_PHI
3526 && gimple_code (opstmtb) == GIMPLE_PHI)
3527 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3528 else if (gimple_code (opstmta) == GIMPLE_PHI)
3529 return -1;
3530 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3531 return 1;
3532 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3533 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3534 else
3535 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3537 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3540 /* Sort an array containing members of a strongly connected component
3541 SCC so that the members are ordered by RPO number.
3542 This means that when the sort is complete, iterating through the
3543 array will give you the members in RPO order. */
3545 static void
3546 sort_scc (vec<tree> scc)
3548 scc.qsort (compare_ops);
3551 /* Insert the no longer used nary ONARY into the hash table INFO. */
3553 static void
3554 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3556 size_t size = sizeof_vn_nary_op (onary->length);
3557 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3558 &info->nary_obstack);
3559 memcpy (nary, onary, size);
3560 vn_nary_op_insert_into (nary, info->nary, false);
3563 /* Insert the no longer used phi OPHI into the hash table INFO. */
3565 static void
3566 copy_phi (vn_phi_t ophi, vn_tables_t info)
3568 vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
3569 void **slot;
3570 memcpy (phi, ophi, sizeof (*phi));
3571 ophi->phiargs.create (0);
3572 slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT);
3573 gcc_assert (!*slot);
3574 *slot = phi;
3577 /* Insert the no longer used reference OREF into the hash table INFO. */
3579 static void
3580 copy_reference (vn_reference_t oref, vn_tables_t info)
3582 vn_reference_t ref;
3583 void **slot;
3584 ref = (vn_reference_t) pool_alloc (info->references_pool);
3585 memcpy (ref, oref, sizeof (*ref));
3586 oref->operands.create (0);
3587 slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
3588 INSERT);
3589 if (*slot)
3590 free_reference (*slot);
3591 *slot = ref;
3594 /* Process a strongly connected component in the SSA graph. */
3596 static void
3597 process_scc (vec<tree> scc)
3599 tree var;
3600 unsigned int i;
3601 unsigned int iterations = 0;
3602 bool changed = true;
3603 htab_iterator hi;
3604 vn_nary_op_t nary;
3605 vn_phi_t phi;
3606 vn_reference_t ref;
3608 /* If the SCC has a single member, just visit it. */
3609 if (scc.length () == 1)
3611 tree use = scc[0];
3612 if (VN_INFO (use)->use_processed)
3613 return;
3614 /* We need to make sure it doesn't form a cycle itself, which can
3615 happen for self-referential PHI nodes. In that case we would
3616 end up inserting an expression with VN_TOP operands into the
3617 valid table which makes us derive bogus equivalences later.
3618 The cheapest way to check this is to assume it for all PHI nodes. */
3619 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3620 /* Fallthru to iteration. */ ;
3621 else
3623 visit_use (use);
3624 return;
3628 /* Iterate over the SCC with the optimistic table until it stops
3629 changing. */
3630 current_info = optimistic_info;
3631 while (changed)
3633 changed = false;
3634 iterations++;
3635 if (dump_file && (dump_flags & TDF_DETAILS))
3636 fprintf (dump_file, "Starting iteration %d\n", iterations);
3637 /* As we are value-numbering optimistically we have to
3638 clear the expression tables and the simplified expressions
3639 in each iteration until we converge. */
3640 htab_empty (optimistic_info->nary);
3641 htab_empty (optimistic_info->phis);
3642 htab_empty (optimistic_info->references);
3643 obstack_free (&optimistic_info->nary_obstack, NULL);
3644 gcc_obstack_init (&optimistic_info->nary_obstack);
3645 empty_alloc_pool (optimistic_info->phis_pool);
3646 empty_alloc_pool (optimistic_info->references_pool);
3647 FOR_EACH_VEC_ELT (scc, i, var)
3648 VN_INFO (var)->expr = NULL_TREE;
3649 FOR_EACH_VEC_ELT (scc, i, var)
3650 changed |= visit_use (var);
3653 statistics_histogram_event (cfun, "SCC iterations", iterations);
3655 /* Finally, copy the contents of the no longer used optimistic
3656 table to the valid table. */
3657 FOR_EACH_HTAB_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hi)
3658 copy_nary (nary, valid_info);
3659 FOR_EACH_HTAB_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hi)
3660 copy_phi (phi, valid_info);
3661 FOR_EACH_HTAB_ELEMENT (optimistic_info->references, ref, vn_reference_t, hi)
3662 copy_reference (ref, valid_info);
3664 current_info = valid_info;
3668 /* Pop the components of the found SCC for NAME off the SCC stack
3669 and process them. Returns true if all went well, false if
3670 we run into resource limits. */
3672 static bool
3673 extract_and_process_scc_for_name (tree name)
3675 vec<tree> scc = vec<tree>();
3676 tree x;
3678 /* Found an SCC, pop the components off the SCC stack and
3679 process them. */
3680 do
3682 x = sccstack.pop ();
3684 VN_INFO (x)->on_sccstack = false;
3685 scc.safe_push (x);
3686 } while (x != name);
3688 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
3689 if (scc.length ()
3690 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3692 if (dump_file)
3693 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
3694 "SCC size %u exceeding %u\n", scc.length (),
3695 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
3697 scc.release ();
3698 return false;
3701 if (scc.length () > 1)
3702 sort_scc (scc);
3704 if (dump_file && (dump_flags & TDF_DETAILS))
3705 print_scc (dump_file, scc);
3707 process_scc (scc);
3709 scc.release ();
3711 return true;
3714 /* Depth first search on NAME to discover and process SCCs in the SSA
3715 graph.
3716 Execution of this algorithm relies on the fact that the SCCs are
3717 popped off the stack in topological order.
3718 Returns true if successful, false if we stopped processing SCCs due
3719 to resource constraints. */
3721 static bool
3722 DFS (tree name)
3724 vec<ssa_op_iter> itervec = vec<ssa_op_iter>();
3725 vec<tree> namevec = vec<tree>();
3726 use_operand_p usep = NULL;
3727 gimple defstmt;
3728 tree use;
3729 ssa_op_iter iter;
3731 start_over:
3732 /* SCC info */
3733 VN_INFO (name)->dfsnum = next_dfs_num++;
3734 VN_INFO (name)->visited = true;
3735 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3737 sccstack.safe_push (name);
3738 VN_INFO (name)->on_sccstack = true;
3739 defstmt = SSA_NAME_DEF_STMT (name);
3741 /* Recursively DFS on our operands, looking for SCC's. */
3742 if (!gimple_nop_p (defstmt))
3744 /* Push a new iterator. */
3745 if (gimple_code (defstmt) == GIMPLE_PHI)
3746 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
3747 else
3748 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
3750 else
3751 clear_and_done_ssa_iter (&iter);
3753 while (1)
3755 /* If we are done processing uses of a name, go up the stack
3756 of iterators and process SCCs as we found them. */
3757 if (op_iter_done (&iter))
3759 /* See if we found an SCC. */
3760 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3761 if (!extract_and_process_scc_for_name (name))
3763 namevec.release ();
3764 itervec.release ();
3765 return false;
3768 /* Check if we are done. */
3769 if (namevec.is_empty ())
3771 namevec.release ();
3772 itervec.release ();
3773 return true;
3776 /* Restore the last use walker and continue walking there. */
3777 use = name;
3778 name = namevec.pop ();
3779 memcpy (&iter, &itervec.last (),
3780 sizeof (ssa_op_iter));
3781 itervec.pop ();
3782 goto continue_walking;
3785 use = USE_FROM_PTR (usep);
3787 /* Since we handle phi nodes, we will sometimes get
3788 invariants in the use expression. */
3789 if (TREE_CODE (use) == SSA_NAME)
3791 if (! (VN_INFO (use)->visited))
3793 /* Recurse by pushing the current use walking state on
3794 the stack and starting over. */
3795 itervec.safe_push (iter);
3796 namevec.safe_push (name);
3797 name = use;
3798 goto start_over;
3800 continue_walking:
3801 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3802 VN_INFO (use)->low);
3804 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3805 && VN_INFO (use)->on_sccstack)
3807 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3808 VN_INFO (name)->low);
3812 usep = op_iter_next_use (&iter);
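/* The above is Tarjan's SCC algorithm implemented with an explicit
   pair of stacks (NAMEVEC and ITERVEC) instead of recursion, so that
   very deep SSA use-def chains cannot overflow the call stack.  */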
3816 /* Allocate a value number table. */
3818 static void
3819 allocate_vn_table (vn_tables_t table)
3821 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
3822 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
3823 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
3824 free_reference);
3826 gcc_obstack_init (&table->nary_obstack);
3827 table->phis_pool = create_alloc_pool ("VN phis",
3828 sizeof (struct vn_phi_s),
3829 30);
3830 table->references_pool = create_alloc_pool ("VN references",
3831 sizeof (struct vn_reference_s),
3832 30);
3835 /* Free a value number table. */
3837 static void
3838 free_vn_table (vn_tables_t table)
3840 htab_delete (table->phis);
3841 htab_delete (table->nary);
3842 htab_delete (table->references);
3843 obstack_free (&table->nary_obstack, NULL);
3844 free_alloc_pool (table->phis_pool);
3845 free_alloc_pool (table->references_pool);
3848 static void
3849 init_scc_vn (void)
3851 size_t i;
3852 int j;
3853 int *rpo_numbers_temp;
3855 calculate_dominance_info (CDI_DOMINATORS);
3856 sccstack.create (0);
3857 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
3858 free);
3860 constant_value_ids = BITMAP_ALLOC (NULL);
3862 next_dfs_num = 1;
3863 next_value_id = 1;
3865 vn_ssa_aux_table.create (num_ssa_names + 1);
3866 /* The create above only preallocates the space, it doesn't actually
3867 grow the vector to the right size, so do that explicitly here. */
3868 vn_ssa_aux_table.safe_grow_cleared (num_ssa_names + 1);
3869 gcc_obstack_init (&vn_ssa_aux_obstack);
3871 shared_lookup_phiargs.create (0);
3872 shared_lookup_references.create (0);
3873 rpo_numbers = XNEWVEC (int, last_basic_block);
3874 rpo_numbers_temp = XNEWVEC (int, n_basic_blocks - NUM_FIXED_BLOCKS);
3875 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
3877 /* RPO numbers is an array of rpo ordering, rpo[i] = bb means that
3878 the i'th block in RPO order is bb. We want to map bb's to RPO
3879 numbers, so we need to rearrange this array. */
3880 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
3881 rpo_numbers[rpo_numbers_temp[j]] = j;
3883 XDELETE (rpo_numbers_temp);
3885 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
3887 /* Create the VN_INFO structures, and initialize value numbers to
3888 TOP. */
3889 for (i = 0; i < num_ssa_names; i++)
3891 tree name = ssa_name (i);
3892 if (name)
3894 VN_INFO_GET (name)->valnum = VN_TOP;
3895 VN_INFO (name)->expr = NULL_TREE;
3896 VN_INFO (name)->value_id = 0;
3900 renumber_gimple_stmt_uids ();
3902 /* Create the valid and optimistic value numbering tables. */
3903 valid_info = XCNEW (struct vn_tables_s);
3904 allocate_vn_table (valid_info);
3905 optimistic_info = XCNEW (struct vn_tables_s);
3906 allocate_vn_table (optimistic_info);
3909 void
3910 free_scc_vn (void)
3912 size_t i;
3914 htab_delete (constant_to_value_id);
3915 BITMAP_FREE (constant_value_ids);
3916 shared_lookup_phiargs.release ();
3917 shared_lookup_references.release ();
3918 XDELETEVEC (rpo_numbers);
3920 for (i = 0; i < num_ssa_names; i++)
3922 tree name = ssa_name (i);
3923 if (name
3924 && VN_INFO (name)->needs_insertion)
3925 release_ssa_name (name);
3927 obstack_free (&vn_ssa_aux_obstack, NULL);
3928 vn_ssa_aux_table.release ();
3930 sccstack.release ();
3931 free_vn_table (valid_info);
3932 XDELETE (valid_info);
3933 free_vn_table (optimistic_info);
3934 XDELETE (optimistic_info);

/* Set *ID if we computed something useful in RESULT.  */

static void
set_value_id_for_result (tree result, unsigned int *id)
{
  if (result)
    {
      if (TREE_CODE (result) == SSA_NAME)
        *id = VN_INFO (result)->value_id;
      else if (is_gimple_min_invariant (result))
        *id = get_or_alloc_constant_value_id (result);
    }
}
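
/* Note the asymmetry above: for an SSA name RESULT, *ID becomes the
   name's existing value id; for a constant RESULT such as 42, *ID
   becomes the (possibly freshly allocated) constant value id; for a
   null or any other RESULT, *ID is deliberately left untouched.  */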

/* Set the value ids in the valid hash tables.  */

static void
set_hashtable_value_ids (void)
{
  htab_iterator hi;
  vn_nary_op_t vno;
  vn_reference_t vr;
  vn_phi_t vp;

  /* Now set the value ids of the things we had put in the hash
     table.  */

  FOR_EACH_HTAB_ELEMENT (valid_info->nary,
                         vno, vn_nary_op_t, hi)
    set_value_id_for_result (vno->result, &vno->value_id);

  FOR_EACH_HTAB_ELEMENT (valid_info->phis,
                         vp, vn_phi_t, hi)
    set_value_id_for_result (vp->result, &vp->value_id);

  FOR_EACH_HTAB_ELEMENT (valid_info->references,
                         vr, vn_reference_t, hi)
    set_value_id_for_result (vr->result, &vr->value_id);
}

/* Do SCCVN.  Returns true if it finished, false if we bailed out
   due to resource constraints.  DEFAULT_VN_WALK_KIND_ specifies how
   we walk the alias oracle during the VN process.  */

bool
run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
{
  size_t i;
  tree param;
  bool changed = true;

  default_vn_walk_kind = default_vn_walk_kind_;

  init_scc_vn ();
  current_info = valid_info;

  /* Parameter default definitions value-number to themselves.  */
  for (param = DECL_ARGUMENTS (current_function_decl);
       param;
       param = DECL_CHAIN (param))
    {
      tree def = ssa_default_def (cfun, param);
      if (def)
        VN_INFO (def)->valnum = def;
    }

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      if (name
          && VN_INFO (name)->visited == false
          && !has_zero_uses (name))
        if (!DFS (name))
          {
            free_scc_vn ();
            return false;
          }
    }
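
  /* DFS bails out when it runs into the resource limits alluded to in
     the function comment, in particular an SCC bigger than the
     --param sccvn-max-scc-size limit (see the SCC walk earlier in this
     file); the bail-out path above has already torn everything down
     with free_scc_vn by the time we return.  */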

  /* Initialize the value ids.  */

  for (i = 1; i < num_ssa_names; ++i)
    {
      tree name = ssa_name (i);
      vn_ssa_aux_t info;
      if (!name)
        continue;
      info = VN_INFO (name);
      if (info->valnum == name
          || info->valnum == VN_TOP)
        info->value_id = get_next_value_id ();
      else if (is_gimple_min_invariant (info->valnum))
        info->value_id = get_or_alloc_constant_value_id (info->valnum);
    }
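
  /* For example (hypothetical SSA names): a leader x_1 whose valnum is
     x_1 itself gets a fresh id here; y_2 whose valnum is the constant 4
     shares the constant's id; z_3 whose valnum is x_1 keeps id zero for
     now and is fixed up by the propagation loop below.  */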

  /* Propagate the value ids until nothing changes.  */
  while (changed)
    {
      changed = false;
      for (i = 1; i < num_ssa_names; ++i)
        {
          tree name = ssa_name (i);
          vn_ssa_aux_t info;
          if (!name)
            continue;
          info = VN_INFO (name);
          if (TREE_CODE (info->valnum) == SSA_NAME
              && info->valnum != name
              && info->value_id != VN_INFO (info->valnum)->value_id)
            {
              changed = true;
              info->value_id = VN_INFO (info->valnum)->value_id;
            }
        }
    }
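
  /* Each pass of the loop only copies a value id from a name's valnum
     leader, and the ids of the leaders themselves were fixed by the
     initialization loop above, so this quickly reaches a fixed point;
     continuing the example, z_3 picks up x_1's fresh id on the first
     iteration.  */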

  set_hashtable_value_ids ();

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Value numbers:\n");
      for (i = 0; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          if (name
              && VN_INFO (name)->visited
              && SSA_VAL (name) != name)
            {
              print_generic_expr (dump_file, name, 0);
              fprintf (dump_file, " = ");
              print_generic_expr (dump_file, SSA_VAL (name), 0);
              fprintf (dump_file, "\n");
            }
        }
    }

  return true;
}
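
/* A sketch of how a client pass drives the entry points above,
   simplified from the PRE/FRE callers (not a verbatim copy of any call
   site):

     if (!run_scc_vn (VN_WALK))
       return 0;           (bailed out; no value numbers available)

     ... query SSA_VAL / VN_INFO and use the value ids ...

     free_scc_vn ();
*/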

/* Return the maximum value id we have ever seen.  */

unsigned int
get_max_value_id (void)
{
  return next_value_id;
}

/* Return the next unique value id.  */

unsigned int
get_next_value_id (void)
{
  return next_value_id++;
}

/* Compare two expressions E1 and E2 and return true if they are equal.  */

bool
expressions_equal_p (tree e1, tree e2)
{
  /* The obvious case.  */
  if (e1 == e2)
    return true;

  /* If only one of them is null, they cannot be equal.  */
  if (!e1 || !e2)
    return false;

  /* Now perform the actual comparison.  */
  if (TREE_CODE (e1) == TREE_CODE (e2)
      && operand_equal_p (e1, e2, OEP_PURE_SAME))
    return true;

  return false;
}
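
/* OEP_PURE_SAME above additionally lets operand_equal_p treat two calls
   to the same pure or const function with pairwise equal arguments as
   equal; for non-call operands the comparison is the usual structural
   one.  (A brief gloss of the flag, not a full specification.)  */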

/* Return true if the nary operation NARY may trap.  This is a copy
   of stmt_could_throw_1_p adjusted to the SCCVN IL.  */

bool
vn_nary_may_trap (vn_nary_op_t nary)
{
  tree type;
  tree rhs2 = NULL_TREE;
  bool honor_nans = false;
  bool honor_snans = false;
  bool fp_operation = false;
  bool honor_trapv = false;
  bool handled, ret;
  unsigned i;

  if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
      || TREE_CODE_CLASS (nary->opcode) == tcc_unary
      || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
    {
      type = nary->type;
      fp_operation = FLOAT_TYPE_P (type);
      if (fp_operation)
        {
          honor_nans = flag_trapping_math && !flag_finite_math_only;
          honor_snans = flag_signaling_nans != 0;
        }
      else if (INTEGRAL_TYPE_P (type)
               && TYPE_OVERFLOW_TRAPS (type))
        honor_trapv = true;
    }
  if (nary->length >= 2)
    rhs2 = nary->op[1];
  ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
                                       honor_trapv,
                                       honor_nans, honor_snans, rhs2,
                                       &handled);
  if (handled
      && ret)
    return true;

  for (i = 0; i < nary->length; ++i)
    if (tree_could_trap_p (nary->op[i]))
      return true;

  return false;
}
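
/* A few illustrative cases for the predicate above, following the flags
   it consults (hypothetical operations, not an exhaustive list): a
   signed addition in a TYPE_OVERFLOW_TRAPS type (-ftrapv) may trap; a
   floating-point comparison may trap on NaN operands when
   -ftrapping-math is active and -ffinite-math-only is not; an integer
   division may trap unless its divisor (RHS2 above) is a nonzero
   constant; and any operand that tree_could_trap_p flags, such as a
   possibly-faulting memory reference, makes the whole operation
   trapping.  */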