/* SCC value numbering for trees
   Copyright (C) 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "timevar.h"
#include "fibheap.h"
#include "hashtab.h"
#include "tree-iterator.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "flags.h"
#include "bitmap.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
#include "gimple-fold.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.
   TODO:

   1. We can iterate only the changing portions of the SCCs, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
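
/* A small worked example (for exposition only; the GIMPLE below is an
   assumed input, not code from this file).  In the SSA cycle

     # i_1 = PHI <0(preheader), i_3(latch)>
     i_2 = i_1 + 1;
     i_3 = i_2 - 1;

   i_1, i_2 and i_3 form a single SCC in the SSA graph.  The optimistic
   walk value numbers the cycle, folding (i_1 + 1) - 1 to i_1, and so
   discovers that i_3 always has the same value as i_1; a single
   non-iterating pass over the cycle could not conclude that.  Once the
   SCC's value numbers stop changing, the results are transferred to
   the valid table.  */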
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;

static htab_t constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;

/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;


DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;

/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
                                SSA_NAME_VERSION (name));
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), value);
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
    VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
                   SSA_NAME_VERSION (name) + 1);
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), newinfo);
  return newinfo;
}

/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;
  enum tree_code code;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is not an assignment use it directly.  */
  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  code = gimple_assign_rhs_code (def_stmt);
  switch (TREE_CODE_CLASS (code))
    {
    case tcc_reference:
      if ((code == REALPART_EXPR
           || code == IMAGPART_EXPR
           || code == VIEW_CONVERT_EXPR)
          && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (def_stmt),
                                      0)) == SSA_NAME)
        expr = fold_build1 (code,
                            gimple_expr_type (def_stmt),
                            TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (code,
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (code,
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt),
                          gimple_assign_rhs2 (def_stmt));
      break;

    case tcc_exceptional:
      if (code == CONSTRUCTOR
          && TREE_CODE
               (TREE_TYPE (gimple_assign_rhs1 (def_stmt))) == VECTOR_TYPE)
        expr = gimple_assign_rhs1 (def_stmt);
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
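
/* For example (illustrative): if a_5 has value number b_3 and b_3 is
   defined by b_3 = c_1 + 4, then vn_get_expr_for (a_5) builds and
   caches the expression c_1 + 4, which the simplification code can
   fold against other operands without chasing SSA definitions
   itself.  */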

/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  VEC_free (tree, heap, phi->phiargs);
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  VEC_free (vn_reference_op_s, heap, vr->operands);
}

/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}

/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, INSERT);
  if (*slot)
    return ((vn_constant_t)*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = (void *) vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}

/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
          /* We do not care for differences in type qualification.  */
          && (vro1->type == vro2->type
              || (vro1->type && vro2->type
                  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
                                         TYPE_MAIN_VARIANT (vro2->type))))
          && expressions_equal_p (vro1->op0, vro2->op0)
          && expressions_equal_p (vro1->op1, vro2->op1)
          && expressions_equal_p (vro1->op2, vro2->op2));
}

/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
{
  result = iterative_hash_hashval_t (vro1->opcode, result);
  if (vro1->op0)
    result = iterative_hash_expr (vro1->op0, result);
  if (vro1->op1)
    result = iterative_hash_expr (vro1->op1, result);
  if (vro1->op2)
    result = iterative_hash_expr (vro1->op2, result);
  return result;
}

/* Return the hashcode for a given reference operation P1.  */

static hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}

/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vn_reference_op_s, vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
        deref = true;
      else if (vro->opcode != ADDR_EXPR)
        deref = false;
      if (vro->off != -1)
        {
          if (off == -1)
            off = 0;
          off += vro->off;
        }
      else
        {
          if (off != -1
              && off != 0)
            result = iterative_hash_hashval_t (off, result);
          off = -1;
          if (deref
              && vro->opcode == ADDR_EXPR)
            {
              if (vro->op0)
                {
                  tree op = TREE_OPERAND (vro->op0, 0);
                  result = iterative_hash_hashval_t (TREE_CODE (op), result);
                  result = iterative_hash_expr (op, result);
                }
            }
          else
            result = vn_reference_op_compute_hash (vro, result);
        }
    }
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
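
/* Illustrative note on the hashing above: constant offsets of adjacent
   operands are accumulated in OFF and only mixed into the hash when an
   operand with unknown offset is reached, and a dereferenced ADDR_EXPR
   hashes just its base object.  Thus, assuming a field f at byte
   offset 4 of a decl s, the operand strings for MEM[&s + 4] and for
   s.f hash identically, which is required for vn_reference_eq below
   to ever compare them in the same bucket.  */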

/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

int
vn_reference_eq (const void *p1, const void *p2)
{
  unsigned i, j;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
        return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
           && (TYPE_PRECISION (vr1->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
           && (TYPE_PRECISION (vr2->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro1); i++)
        {
          if (vro1->opcode == MEM_REF)
            deref1 = true;
          if (vro1->off == -1)
            break;
          off1 += vro1->off;
        }
      for (; VEC_iterate (vn_reference_op_s, vr2->operands, j, vro2); j++)
        {
          if (vro2->opcode == MEM_REF)
            deref2 = true;
          if (vro2->off == -1)
            break;
          off2 += vro2->off;
        }
      if (off1 != off2)
        return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
        {
          memset (&tem1, 0, sizeof (tem1));
          tem1.op0 = TREE_OPERAND (vro1->op0, 0);
          tem1.type = TREE_TYPE (tem1.op0);
          tem1.opcode = TREE_CODE (tem1.op0);
          vro1 = &tem1;
        }
      if (deref2 && vro2->opcode == ADDR_EXPR)
        {
          memset (&tem2, 0, sizeof (tem2));
          tem2.op0 = TREE_OPERAND (vro2->op0, 0);
          tem2.type = TREE_TYPE (tem2.op0);
          tem2.opcode = TREE_CODE (tem2.op0);
          vro2 = &tem2;
        }
      if (!vn_reference_op_eq (vro1, vro2))
        return false;
      ++j;
      ++i;
    }
  while (VEC_length (vn_reference_op_s, vr1->operands) != i
         || VEC_length (vn_reference_op_s, vr2->operands) != j);

  return true;
}

/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
        {
        case MEM_REF:
          /* The base address gets its own vn_reference_op_s structure.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          if (host_integerp (TREE_OPERAND (ref, 1), 0))
            temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
          break;
        case BIT_FIELD_REF:
          /* Record bits and position.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          break;
        case COMPONENT_REF:
          /* The field decl is enough to unambiguously specify the field,
             a matching type is not necessary and a mismatching type
             is always a spurious difference.  */
          temp.type = NULL_TREE;
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          {
            tree this_offset = component_ref_field_offset (ref);
            if (this_offset
                && TREE_CODE (this_offset) == INTEGER_CST)
              {
                tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
                if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
                  {
                    double_int off
                      = double_int_add (tree_to_double_int (this_offset),
                                        double_int_rshift
                                          (tree_to_double_int (bit_offset),
                                           BITS_PER_UNIT == 8
                                           ? 3 : exact_log2 (BITS_PER_UNIT),
                                           HOST_BITS_PER_DOUBLE_INT, true));
                    if (double_int_fits_in_shwi_p (off))
                      temp.off = off.low;
                  }
              }
          }
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* Record index as operand.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          /* Always record lower bounds and element size.  */
          temp.op1 = array_ref_low_bound (ref);
          temp.op2 = array_ref_element_size (ref);
          if (TREE_CODE (temp.op0) == INTEGER_CST
              && TREE_CODE (temp.op1) == INTEGER_CST
              && TREE_CODE (temp.op2) == INTEGER_CST)
            {
              double_int off = tree_to_double_int (temp.op0);
              off = double_int_add (off,
                                    double_int_neg
                                      (tree_to_double_int (temp.op1)));
              off = double_int_mul (off, tree_to_double_int (temp.op2));
              if (double_int_fits_in_shwi_p (off))
                temp.off = off.low;
            }
          break;
        case VAR_DECL:
          if (DECL_HARD_REGISTER (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthru.  */
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
          /* Canonicalize decls to MEM[&decl] which is what we end up with
             when valueizing MEM[ptr] with ptr = &decl.  */
          temp.opcode = MEM_REF;
          temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
          temp.off = 0;
          VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
          temp.opcode = ADDR_EXPR;
          temp.op0 = build_fold_addr_expr (ref);
          temp.type = TREE_TYPE (temp.op0);
          temp.off = -1;
          break;
        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case FIXED_CST:
        case CONSTRUCTOR:
        case SSA_NAME:
          temp.op0 = ref;
          break;
        case ADDR_EXPR:
          if (is_gimple_min_invariant (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthrough.  */
          /* These are only interesting for their operands, their
             existence, and their type.  They will never be the last
             ref in the chain of references (IE they require an
             operand), so we don't have to put anything
             for op* as it will be handled by the iteration  */
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          temp.off = 0;
          break;
        case IMAGPART_EXPR:
          /* This is only interesting for its constant offset.  */
          temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
          break;
        default:
          gcc_unreachable ();
        }
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      if (REFERENCE_CLASS_P (ref)
          || (TREE_CODE (ref) == ADDR_EXPR
              && !is_gimple_min_invariant (ref)))
        ref = TREE_OPERAND (ref, 0);
      else
        ref = NULL_TREE;
    }
}
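
/* A worked decomposition (assumed example): for the reference s.a[i_1]
   where s is a VAR_DECL and a is a field of array type at byte offset
   8, the ops vector is pushed outermost-first as roughly

     { ARRAY_REF,     op0 = i_1, op1 = low bound, op2 = element size }
     { COMPONENT_REF, op0 = FIELD_DECL a, off = 8 }
     { MEM_REF,       op0 = 0, off = 0 }
     { ADDR_EXPR,     op0 = &s }

   with the decl canonicalized to MEM[&s] as described in the decl
   cases above.  */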

/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
                               alias_set_type set, tree type,
                               VEC (vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = VEC_index (vn_reference_op_s, ops, 0);
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      enum machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
        size_tree = TYPE_SIZE (type);
      else
        size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!host_integerp (size_tree, 1))
        size = -1;
      else
        size = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op)
    {
      switch (op->opcode)
        {
        /* These may be in the reference ops, but we cannot do anything
           sensible with them here.  */
        case ADDR_EXPR:
          /* Apart from ADDR_EXPR arguments to MEM_REF.  */
          if (base != NULL_TREE
              && TREE_CODE (base) == MEM_REF
              && op->op0
              && DECL_P (TREE_OPERAND (op->op0, 0)))
            {
              vn_reference_op_t pop = VEC_index (vn_reference_op_s, ops, i-1);
              base = TREE_OPERAND (op->op0, 0);
              if (pop->off == -1)
                {
                  max_size = -1;
                  offset = 0;
                }
              else
                offset += pop->off * BITS_PER_UNIT;
              op0_p = NULL;
              break;
            }
          /* Fallthru.  */
        case CALL_EXPR:
          return false;

        /* Record the base objects.  */
        case MEM_REF:
          base_alias_set = get_deref_alias_set (op->op0);
          *op0_p = build2 (MEM_REF, op->type,
                           NULL_TREE, op->op0);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case VAR_DECL:
        case PARM_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          *op0_p = op->op0;
          op0_p = NULL;
          break;

        /* And now the usual component-reference style ops.  */
        case BIT_FIELD_REF:
          offset += tree_low_cst (op->op1, 0);
          break;

        case COMPONENT_REF:
          {
            tree field = op->op0;
            /* We do not have a complete COMPONENT_REF tree here so we
               cannot use component_ref_field_offset.  Do the interesting
               parts manually.  */

            if (op->op1
                || !host_integerp (DECL_FIELD_OFFSET (field), 1))
              max_size = -1;
            else
              {
                offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
                           * BITS_PER_UNIT);
                offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
              }
            break;
          }

        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* We recorded the lower bound and the element size.  */
          if (!host_integerp (op->op0, 0)
              || !host_integerp (op->op1, 0)
              || !host_integerp (op->op2, 0))
            max_size = -1;
          else
            {
              HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
              hindex -= TREE_INT_CST_LOW (op->op1);
              hindex *= TREE_INT_CST_LOW (op->op2);
              hindex *= BITS_PER_UNIT;
              offset += hindex;
            }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          offset += size;
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case CONST_DECL:
          return false;

        default:
          return false;
        }
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);

  return true;
}

/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
                              VEC(vn_reference_op_s, heap) **result)
{
  vn_reference_op_s temp;
  unsigned i;

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  temp.off = -1;
  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_ref (tree ref)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_call (gimple call)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
void
vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
                            unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
  vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
  tree addr_base;
  HOST_WIDE_INT addr_offset;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
                                             &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != op->op0)
    {
      double_int off = tree_to_double_int (mem_op->op0);
      off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
      off = double_int_add (off, shwi_to_double_int (addr_offset));
      mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (host_integerp (mem_op->op0, 0))
        mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
      else
        mem_op->off = -1;
    }
}
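
/* For example (illustrative): given the operand pair for MEM[&s.f, 4]
   where field f sits at byte offset 8 in s, the function above
   rewrites it to the pair for MEM[&s, 12], folding the component
   offset into the MEM_REF constant so that both spellings value
   number identically.  */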

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static void
vn_reference_maybe_forwprop_address (VEC (vn_reference_op_s, heap) **ops,
                                     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
  vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
  gimple def_stmt;
  enum tree_code code;
  double_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return;

  off = tree_to_double_int (mem_op->op0);
  off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
                                                 &addr_offset);
      if (!addr_base
          || TREE_CODE (addr_base) != MEM_REF)
        return;

      off = double_int_add (off, shwi_to_double_int (addr_offset));
      off = double_int_add (off, mem_ref_offset (addr_base));
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
          || TREE_CODE (ptroff) != INTEGER_CST)
        return;

      off = double_int_add (off, tree_to_double_int (ptroff));
      op->op0 = ptr;
    }

  mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (host_integerp (mem_op->op0, 0))
    mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
}
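
/* For example (illustrative): with

     p_1 = q_2 + 16;       (a POINTER_PLUS_EXPR)
     x_3 = MEM[p_1, 4];

   the function above rewrites the reference operands of the load to
   MEM[q_2, 20] and then recurses in case q_2 is itself defined by a
   foldable address computation.  */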

/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  VEC (vn_reference_op_s, heap) *operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = VEC_index (vn_reference_op_s, operands, 0);
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && VEC_length (vn_reference_op_s, operands) >= 2
      && VEC_length (vn_reference_op_s, operands) <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (VEC_length (vn_reference_op_s, operands) > 2)
        arg1 = VEC_index (vn_reference_op_s, operands, 2);
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
          || (arg0->opcode == ADDR_EXPR
              && is_gimple_min_invariant (arg0->op0)))
        anyconst = true;
      if (arg1
          && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
              || (arg1->opcode == ADDR_EXPR
                  && is_gimple_min_invariant (arg1->op0))))
        anyconst = true;
      if (anyconst)
        {
          tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
                                         arg1 ? 2 : 1,
                                         arg0->op0,
                                         arg1 ? arg1->op0 : NULL);
          if (folded
              && TREE_CODE (folded) == NOP_EXPR)
            folded = TREE_OPERAND (folded, 0);
          if (folded
              && is_gimple_min_invariant (folded))
            return folded;
        }
    }

  /* Simplify reads from constant strings.  */
  else if (op->opcode == ARRAY_REF
           && TREE_CODE (op->op0) == INTEGER_CST
           && integer_zerop (op->op1)
           && VEC_length (vn_reference_op_s, operands) == 2)
    {
      vn_reference_op_t arg0;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (arg0->opcode == STRING_CST
          && (TYPE_MODE (op->type)
              == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
          && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
          && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
          && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
        return build_int_cst_type (op->type,
                                   (TREE_STRING_POINTER (arg0->op0)
                                    [TREE_INT_CST_LOW (op->op0)]));
    }

  return NULL_TREE;
}
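
/* Illustrative cases for the function above (assumed inputs): a lookup
   of the reference "abc"[1] is folded to the character constant 'b' by
   the constant-string branch, and a call such as
   __builtin_strlen ("abc"), whose single argument operand is
   invariant, can fold to 3 via build_call_expr.  */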

/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static VEC (vn_reference_op_s, heap) *
valueize_refs_1 (VEC (vn_reference_op_s, heap) *orig, bool *valueized_anything)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (vn_reference_op_s, orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
          || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
        {
          tree tem = SSA_VAL (vro->op0);
          if (tem != vro->op0)
            {
              *valueized_anything = true;
              vro->op0 = tem;
            }
          /* If it transforms from an SSA_NAME to a constant, update
             the opcode.  */
          if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
            vro->opcode = TREE_CODE (vro->op0);
        }
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
        {
          tree tem = SSA_VAL (vro->op1);
          if (tem != vro->op1)
            {
              *valueized_anything = true;
              vro->op1 = tem;
            }
        }
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
        {
          tree tem = SSA_VAL (vro->op2);
          if (tem != vro->op2)
            {
              *valueized_anything = true;
              vro->op2 = tem;
            }
        }
      /* If it transforms from an SSA_NAME to an address, fold with
         a preceding indirect reference.  */
      if (i > 0
          && vro->op0
          && TREE_CODE (vro->op0) == ADDR_EXPR
          && VEC_index (vn_reference_op_s,
                        orig, i - 1)->opcode == MEM_REF)
        vn_reference_fold_indirect (&orig, &i);
      else if (i > 0
               && vro->opcode == SSA_NAME
               && VEC_index (vn_reference_op_s,
                             orig, i - 1)->opcode == MEM_REF)
        vn_reference_maybe_forwprop_address (&orig, &i);
      /* If it transforms a non-constant ARRAY_REF into a constant
         one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
               && vro->off == -1
               && TREE_CODE (vro->op0) == INTEGER_CST
               && TREE_CODE (vro->op1) == INTEGER_CST
               && TREE_CODE (vro->op2) == INTEGER_CST)
        {
          double_int off = tree_to_double_int (vro->op0);
          off = double_int_add (off,
                                double_int_neg
                                  (tree_to_double_int (vro->op1)));
          off = double_int_mul (off, tree_to_double_int (vro->op2));
          if (double_int_fits_in_shwi_p (off))
            vro->off = off.low;
        }
    }

  return orig;
}

static VEC (vn_reference_op_s, heap) *
valueize_refs (VEC (vn_reference_op_s, heap) *orig)
{
  bool tem;
  return valueize_refs_1 (orig, &tem);
}

static VEC(vn_reference_op_s, heap) *shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs_1 (shared_lookup_references,
                                              valueized_anything);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
        *vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  void **slot;
  hashval_t hash;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = SSA_VAL (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}
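
/* Note for the fixup above: vn_reference_compute_hash mixes the vuse
   in by simply adding SSA_NAME_VERSION (vr1->vuse) at the very end,
   which is what allows retargeting VR to a new VUSE by subtracting the
   old name version and adding the new one instead of rehashing all
   operands.  */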

/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree base;
  HOST_WIDE_INT offset, maxsize;
  static VEC (vn_reference_op_s, heap) *lhs_ops = NULL;
  ao_ref lhs_ref;
  bool lhs_ref_ok = false;

  /* First try to disambiguate after value-replacing in the definitions LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      VEC (vn_reference_op_s, heap) *tem;
      tree lhs = gimple_assign_lhs (def_stmt);
      bool valueized_anything = false;
      /* Avoid re-allocation overhead.  */
      VEC_truncate (vn_reference_op_s, lhs_ops, 0);
      copy_reference_ops_from_ref (lhs, &lhs_ops);
      tem = lhs_ops;
      lhs_ops = valueize_refs_1 (lhs_ops, &valueized_anything);
      gcc_assert (lhs_ops == tem);
      if (valueized_anything)
        {
          lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref,
                                                      get_alias_set (lhs),
                                                      TREE_TYPE (lhs), lhs_ops);
          if (lhs_ref_ok
              && !refs_may_alias_p_1 (ref, &lhs_ref, true))
            return NULL;
        }
      else
        {
          ao_ref_init (&lhs_ref, lhs);
          lhs_ref_ok = true;
        }
    }

  base = ao_ref_base (ref);
  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && host_integerp (gimple_call_arg (def_stmt, 2), 1)
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
          == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
          && maxsize2 != -1
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = build_zero_cst (vr->type);
          unsigned int value_id = get_or_alloc_constant_value_id (val);
          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
                                             val, value_id);
        }
    }

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
           && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = build_zero_cst (vr->type);
          unsigned int value_id = get_or_alloc_constant_value_id (val);
          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
                                             val, value_id);
        }
    }

  /* 3) Assignment from a constant.  We can use folds native encode/interpret
     routines to extract the assigned bits.  */
  else if (CHAR_BIT == 8 && BITS_PER_UNIT == 8
           && ref->size == maxsize
           && maxsize % BITS_PER_UNIT == 0
           && offset % BITS_PER_UNIT == 0
           && is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt)))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
          && maxsize2 == size2
          && size2 % BITS_PER_UNIT == 0
          && offset2 % BITS_PER_UNIT == 0
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          /* We support up to 512-bit values (for V8DFmode).  */
          unsigned char buffer[64];
          int len;

          len = native_encode_expr (gimple_assign_rhs1 (def_stmt),
                                    buffer, sizeof (buffer));
          if (len > 0)
            {
              tree val = native_interpret_expr (vr->type,
                                                buffer
                                                + ((offset - offset2)
                                                   / BITS_PER_UNIT),
                                                ref->size / BITS_PER_UNIT);
              if (val)
                {
                  unsigned int value_id = get_or_alloc_constant_value_id (val);
                  return vn_reference_insert_pieces
                           (vuse, vr->set, vr->type,
                            VEC_copy (vn_reference_op_s, heap, vr->operands),
                            val, value_id);
                }
            }
        }
    }

  /* 4) Assignment from an SSA name which definition we may be able
     to access pieces from.  */
  else if (ref->size == maxsize
           && is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
    {
      tree rhs1 = gimple_assign_rhs1 (def_stmt);
      gimple def_stmt2 = SSA_NAME_DEF_STMT (rhs1);
      if (is_gimple_assign (def_stmt2)
          && (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR
              || gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR)
          && types_compatible_p (vr->type, TREE_TYPE (TREE_TYPE (rhs1))))
        {
          tree base2;
          HOST_WIDE_INT offset2, size2, maxsize2, off;
          base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                           &offset2, &size2, &maxsize2);
          off = offset - offset2;
          if (maxsize2 != -1
              && maxsize2 == size2
              && operand_equal_p (base, base2, 0)
              && offset2 <= offset
              && offset2 + size2 >= offset + maxsize)
            {
              tree val = NULL_TREE;
              HOST_WIDE_INT elsz
                = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (TREE_TYPE (rhs1))));
              if (gimple_assign_rhs_code (def_stmt2) == COMPLEX_EXPR)
                {
                  if (off == 0)
                    val = gimple_assign_rhs1 (def_stmt2);
                  else if (off == elsz)
                    val = gimple_assign_rhs2 (def_stmt2);
                }
              else if (gimple_assign_rhs_code (def_stmt2) == CONSTRUCTOR
                       && off % elsz == 0)
                {
                  tree ctor = gimple_assign_rhs1 (def_stmt2);
                  unsigned i = off / elsz;
                  if (i < CONSTRUCTOR_NELTS (ctor))
                    {
                      constructor_elt *elt = CONSTRUCTOR_ELT (ctor, i);
                      if (compare_tree_int (elt->index, i) == 0)
                        val = elt->value;
                    }
                }
              if (val)
                {
                  unsigned int value_id = get_or_alloc_constant_value_id (val);
                  return vn_reference_insert_pieces
                           (vuse, vr->set, vr->type,
                            VEC_copy (vn_reference_op_s, heap, vr->operands),
                            val, value_id);
                }
            }
        }
    }

  /* 5) For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
           && gimple_assign_single_p (def_stmt)
           && (DECL_P (gimple_assign_rhs1 (def_stmt))
               || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
               || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      int i, j;
      VEC (vn_reference_op_s, heap) *rhs = NULL;
      vn_reference_op_t vro;
      ao_ref r;

      if (!lhs_ref_ok)
        return (void *)-1;

      /* See if the assignment kills REF.  */
      base2 = ao_ref_base (&lhs_ref);
      offset2 = lhs_ref.offset;
      size2 = lhs_ref.size;
      maxsize2 = lhs_ref.max_size;
      if (maxsize2 == -1
          || (base != base2 && !operand_equal_p (base, base2, 0))
          || offset2 > offset
          || offset2 + size2 < offset + maxsize)
        return (void *)-1;

      /* Find the common base of ref and the lhs.  lhs_ops already
         contains valueized operands for the lhs.  */
      i = VEC_length (vn_reference_op_s, vr->operands) - 1;
      j = VEC_length (vn_reference_op_s, lhs_ops) - 1;
      while (j >= 0 && i >= 0
             && vn_reference_op_eq (VEC_index (vn_reference_op_s,
                                               vr->operands, i),
                                    VEC_index (vn_reference_op_s, lhs_ops, j)))
        {
          i--;
          j--;
        }

      /* ??? The innermost op should always be a MEM_REF and we already
         checked that the assignment to the lhs kills vr.  Thus for
         aggregate copies using char[] types the vn_reference_op_eq
         may fail when comparing types for compatibility.  But we really
         don't care here - further lookups with the rewritten operands
         will simply fail if we messed up types too badly.  */
      if (j == 0 && i >= 0
          && VEC_index (vn_reference_op_s, lhs_ops, 0)->opcode == MEM_REF
          && VEC_index (vn_reference_op_s, lhs_ops, 0)->off != -1
          && (VEC_index (vn_reference_op_s, lhs_ops, 0)->off
              == VEC_index (vn_reference_op_s, vr->operands, i)->off))
        i--, j--;

      /* i now points to the first additional op.
         ??? LHS may not be completely contained in VR, one or more
         VIEW_CONVERT_EXPRs could be in its way.  We could at least
         try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
        return (void *)-1;

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      if (i + 1 + VEC_length (vn_reference_op_s, rhs)
          > VEC_length (vn_reference_op_s, vr->operands))
        {
          VEC (vn_reference_op_s, heap) *old = vr->operands;
          VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
                         i + 1 + VEC_length (vn_reference_op_s, rhs));
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references = NULL;
        }
      else
        VEC_truncate (vn_reference_op_s, vr->operands,
                      i + 1 + VEC_length (vn_reference_op_s, rhs));
      FOR_EACH_VEC_ELT (vn_reference_op_s, rhs, j, vro)
        VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
      VEC_free (vn_reference_op_s, heap, rhs);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
        return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* 6) For memcpy copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
           && is_gimple_reg_type (vr->type)
           /* ??? Handle BCOPY as well.  */
           && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
               || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
               || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
           && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
               || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
           && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
               || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
           && host_integerp (gimple_call_arg (def_stmt, 2), 1))
    {
      tree lhs, rhs;
      ao_ref r;
      HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
      vn_reference_op_s op;
      HOST_WIDE_INT at;

      /* Only handle non-variable, addressable refs.  */
      if (ref->size != maxsize
          || offset % BITS_PER_UNIT != 0
          || ref->size % BITS_PER_UNIT != 0)
        return (void *)-1;

      /* Extract a pointer base and an offset for the destination.  */
      lhs = gimple_call_arg (def_stmt, 0);
      lhs_offset = 0;
      if (TREE_CODE (lhs) == SSA_NAME)
        lhs = SSA_VAL (lhs);
      if (TREE_CODE (lhs) == ADDR_EXPR)
        {
          tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
                                                    &lhs_offset);
          if (!tem)
            return (void *)-1;
          if (TREE_CODE (tem) == MEM_REF
              && host_integerp (TREE_OPERAND (tem, 1), 1))
            {
              lhs = TREE_OPERAND (tem, 0);
              lhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
            }
          else if (DECL_P (tem))
            lhs = build_fold_addr_expr (tem);
          else
            return (void *)-1;
        }
      if (TREE_CODE (lhs) != SSA_NAME
          && TREE_CODE (lhs) != ADDR_EXPR)
        return (void *)-1;

      /* Extract a pointer base and an offset for the source.  */
      rhs = gimple_call_arg (def_stmt, 1);
      rhs_offset = 0;
      if (TREE_CODE (rhs) == SSA_NAME)
        rhs = SSA_VAL (rhs);
      if (TREE_CODE (rhs) == ADDR_EXPR)
        {
          tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
                                                    &rhs_offset);
          if (!tem)
            return (void *)-1;
          if (TREE_CODE (tem) == MEM_REF
              && host_integerp (TREE_OPERAND (tem, 1), 1))
            {
              rhs = TREE_OPERAND (tem, 0);
              rhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
            }
          else if (DECL_P (tem))
            rhs = build_fold_addr_expr (tem);
          else
            return (void *)-1;
        }
      if (TREE_CODE (rhs) != SSA_NAME
          && TREE_CODE (rhs) != ADDR_EXPR)
        return (void *)-1;

      copy_size = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2));

      /* The bases of the destination and the references have to agree.  */
      if ((TREE_CODE (base) != MEM_REF
           && !DECL_P (base))
          || (TREE_CODE (base) == MEM_REF
              && (TREE_OPERAND (base, 0) != lhs
                  || !host_integerp (TREE_OPERAND (base, 1), 1)))
          || (DECL_P (base)
              && (TREE_CODE (lhs) != ADDR_EXPR
                  || TREE_OPERAND (lhs, 0) != base)))
        return (void *)-1;

      /* And the access has to be contained within the memcpy destination.  */
      at = offset / BITS_PER_UNIT;
      if (TREE_CODE (base) == MEM_REF)
        at += TREE_INT_CST_LOW (TREE_OPERAND (base, 1));
      if (lhs_offset > at
          || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
        return (void *)-1;

      /* Make room for 2 operands in the new reference.  */
      if (VEC_length (vn_reference_op_s, vr->operands) < 2)
        {
          VEC (vn_reference_op_s, heap) *old = vr->operands;
          VEC_safe_grow (vn_reference_op_s, heap, vr->operands, 2);
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references = NULL;
        }
      else
        VEC_truncate (vn_reference_op_s, vr->operands, 2);

      /* The looked-through reference is a simple MEM_REF.  */
      memset (&op, 0, sizeof (op));
      op.type = vr->type;
      op.opcode = MEM_REF;
      op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
      op.off = at - lhs_offset + rhs_offset;
      VEC_replace (vn_reference_op_s, vr->operands, 0, &op);
      op.type = TREE_TYPE (rhs);
      op.opcode = TREE_CODE (rhs);
      op.op0 = rhs;
      op.off = -1;
      VEC_replace (vn_reference_op_s, vr->operands, 1, &op);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
        return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}
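
/* An illustrative walk (assumed example): given

     memset (&a, 0, sizeof (a));
     ...
     x_1 = a.f;

   looking up a.f from the load's VUSE reaches the memset via case 1)
   above, which notices that the memset covers the read entirely,
   records a zero constant of the read's type as its value, and lets
   the load be replaced by 0 without walking further up the VUSE
   chain.  */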

/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
                            VEC (vn_reference_op_s, heap) *operands,
                            vn_reference_t *vnresult, vn_lookup_kind kind)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;
  tree cst;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
                 VEC_length (vn_reference_op_s, operands));
  memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
          VEC_address (vn_reference_op_s, operands),
          sizeof (vn_reference_op_s)
          * VEC_length (vn_reference_op_s, operands));
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  vn_reference_lookup_1 (&vr1, vnresult);
  if (!*vnresult
      && kind != VN_NOWALK
      && vr1.vuse)
    {
      ao_ref r;
      vn_walk_kind = kind;
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
        *vnresult =
          (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                  vn_reference_lookup_2,
                                                  vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}

/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  */

tree
vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
                     vn_reference_t *vnresult)
{
  VEC (vn_reference_op_s, heap) *operands;
  struct vn_reference_s vr1;
  tree cst;
  bool valueized_anything;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = operands
    = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
  vr1.type = TREE_TYPE (op);
  vr1.set = get_alias_set (op);
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  if (kind != VN_NOWALK
      && vr1.vuse)
    {
      vn_reference_t wvnresult;
      ao_ref r;
      /* Make sure to use a valueized reference if we valueized anything.
         Otherwise preserve the full reference for advanced TBAA.  */
      if (!valueized_anything
          || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.type,
                                             vr1.operands))
        ao_ref_init (&r, op);
      vn_walk_kind = kind;
      wvnresult =
        (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                vn_reference_lookup_2,
                                                vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
      if (wvnresult)
        {
          if (vnresult)
            *vnresult = wvnresult;
          return wvnresult->result;
        }

      return NULL_TREE;
    }

  return vn_reference_lookup_1 (&vr1, vnresult);
}
1909 /* Insert OP into the current hash table with a value number of
1910 RESULT, and return the resulting reference structure we created. */
1912 vn_reference_t
1913 vn_reference_insert (tree op, tree result, tree vuse)
1915 void **slot;
1916 vn_reference_t vr1;
1918 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1919 if (TREE_CODE (result) == SSA_NAME)
1920 vr1->value_id = VN_INFO (result)->value_id;
1921 else
1922 vr1->value_id = get_or_alloc_constant_value_id (result);
1923 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1924 vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
1925 vr1->type = TREE_TYPE (op);
1926 vr1->set = get_alias_set (op);
1927 vr1->hashcode = vn_reference_compute_hash (vr1);
1928 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
1930 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
1931 INSERT);
1933 /* Because we look up stores using vuses, and value number failures
1934 using the vdefs (see visit_reference_op_store for how and why),
1935 it's possible that on failure we may try to insert an already
1936 inserted store. This is not wrong; there is no SSA name for a
1937 store that we could use as a differentiator anyway. Thus, unlike
1938 the other lookup functions, you cannot gcc_assert (!*slot)
1939 here. */
1941 /* But free the old slot in case of a collision. */
1942 if (*slot)
1943 free_reference (*slot);
1945 *slot = vr1;
1946 return vr1;
1949 /* Insert a reference by its pieces into the current hash table with
1950 a value number of RESULT. Return the resulting reference
1951 structure we created. */
1953 vn_reference_t
1954 vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
1955 VEC (vn_reference_op_s, heap) *operands,
1956 tree result, unsigned int value_id)
1959 void **slot;
1960 vn_reference_t vr1;
1962 vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
1963 vr1->value_id = value_id;
1964 vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
1965 vr1->operands = valueize_refs (operands);
1966 vr1->type = type;
1967 vr1->set = set;
1968 vr1->hashcode = vn_reference_compute_hash (vr1);
1969 if (result && TREE_CODE (result) == SSA_NAME)
1970 result = SSA_VAL (result);
1971 vr1->result = result;
1973 slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
1974 INSERT);
1976 /* At this point we should have all the things inserted that we have
1977 seen before, and we should never try inserting something that
1978 already exists. */
1979 gcc_assert (!*slot);
1980 if (*slot)
1981 free_reference (*slot);
1983 *slot = vr1;
1984 return vr1;
1987 /* Compute and return the hash value for nary operation VNO1. */
1989 hashval_t
1990 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
1992 hashval_t hash;
1993 unsigned i;
1995 for (i = 0; i < vno1->length; ++i)
1996 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
1997 vno1->op[i] = SSA_VAL (vno1->op[i]);
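/* Put commutative operands into a canonical order so that, e.g.,
   a + b and b + a hash (and later compare) the same.  */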
1999 if (vno1->length == 2
2000 && commutative_tree_code (vno1->opcode)
2001 && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
2003 tree temp = vno1->op[0];
2004 vno1->op[0] = vno1->op[1];
2005 vno1->op[1] = temp;
2008 hash = iterative_hash_hashval_t (vno1->opcode, 0);
2009 for (i = 0; i < vno1->length; ++i)
2010 hash = iterative_hash_expr (vno1->op[i], hash);
2012 return hash;
2015 /* Return the computed hashcode for nary operation P1. */
2017 static hashval_t
2018 vn_nary_op_hash (const void *p1)
2020 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
2021 return vno1->hashcode;
2024 /* Compare nary operations P1 and P2 and return true if they are
2025 equivalent. */
2027 int
2028 vn_nary_op_eq (const void *p1, const void *p2)
2030 const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
2031 const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
2032 unsigned i;
2034 if (vno1->hashcode != vno2->hashcode)
2035 return false;
2037 if (vno1->length != vno2->length)
2038 return false;
2040 if (vno1->opcode != vno2->opcode
2041 || !types_compatible_p (vno1->type, vno2->type))
2042 return false;
2044 for (i = 0; i < vno1->length; ++i)
2045 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
2046 return false;
2048 return true;
2051 /* Initialize VNO from the pieces provided. */
2053 static void
2054 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
2055 enum tree_code code, tree type, tree *ops)
2057 vno->opcode = code;
2058 vno->length = length;
2059 vno->type = type;
2060 memcpy (&vno->op[0], ops, sizeof (tree) * length);
2063 /* Initialize VNO from OP. */
2065 static void
2066 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
2068 unsigned i;
2070 vno->opcode = TREE_CODE (op);
2071 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
2072 vno->type = TREE_TYPE (op);
2073 for (i = 0; i < vno->length; ++i)
2074 vno->op[i] = TREE_OPERAND (op, i);
2077 /* Return the number of operands for a vn_nary ops structure from STMT. */
2079 static unsigned int
2080 vn_nary_length_from_stmt (gimple stmt)
2082 switch (gimple_assign_rhs_code (stmt))
2084 case REALPART_EXPR:
2085 case IMAGPART_EXPR:
2086 case VIEW_CONVERT_EXPR:
2087 return 1;
2089 case CONSTRUCTOR:
2090 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2092 default:
2093 return gimple_num_ops (stmt) - 1;
2097 /* Initialize VNO from STMT. */
2099 static void
2100 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
2102 unsigned i;
2104 vno->opcode = gimple_assign_rhs_code (stmt);
2105 vno->type = gimple_expr_type (stmt);
2106 switch (vno->opcode)
2108 case REALPART_EXPR:
2109 case IMAGPART_EXPR:
2110 case VIEW_CONVERT_EXPR:
2111 vno->length = 1;
2112 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
2113 break;
2115 case CONSTRUCTOR:
2116 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
2117 for (i = 0; i < vno->length; ++i)
2118 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
2119 break;
2121 default:
2122 vno->length = gimple_num_ops (stmt) - 1;
2123 for (i = 0; i < vno->length; ++i)
2124 vno->op[i] = gimple_op (stmt, i + 1);
2128 /* Compute the hashcode for VNO and look for it in the hash table;
2129 return the resulting value number if it exists in the hash table.
2130 Return NULL_TREE if it does not exist in the hash table or if the
2131 result field of the operation is NULL. VNRESULT will contain the
2132 vn_nary_op_t from the hashtable if it exists. */
2134 static tree
2135 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
2137 void **slot;
2139 if (vnresult)
2140 *vnresult = NULL;
2142 vno->hashcode = vn_nary_op_compute_hash (vno);
2143 slot = htab_find_slot_with_hash (current_info->nary, vno, vno->hashcode,
2144 NO_INSERT);
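/* When value numbering an SCC optimistically, fall back to the valid
   table on a miss; results proven in earlier SCCs live only there.  */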
2145 if (!slot && current_info == optimistic_info)
2146 slot = htab_find_slot_with_hash (valid_info->nary, vno, vno->hashcode,
2147 NO_INSERT);
2148 if (!slot)
2149 return NULL_TREE;
2150 if (vnresult)
2151 *vnresult = (vn_nary_op_t)*slot;
2152 return ((vn_nary_op_t)*slot)->result;
2155 /* Lookup an n-ary operation by its pieces and return the resulting value
2156 number if it exists in the hash table. Return NULL_TREE if it does
2157 not exist in the hash table or if the result field of the operation
2158 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2159 if it exists. */
2161 tree
2162 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
2163 tree type, tree *ops, vn_nary_op_t *vnresult)
2165 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
2166 sizeof_vn_nary_op (length));
2167 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2168 return vn_nary_op_lookup_1 (vno1, vnresult);
2171 /* Lookup OP in the current hash table, and return the resulting value
2172 number if it exists in the hash table. Return NULL_TREE if it does
2173 not exist in the hash table or if the result field of the operation
2174 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
2175 if it exists. */
2177 tree
2178 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
2180 vn_nary_op_t vno1
2181 = XALLOCAVAR (struct vn_nary_op_s,
2182 sizeof_vn_nary_op (TREE_CODE_LENGTH (TREE_CODE (op))));
2183 init_vn_nary_op_from_op (vno1, op);
2184 return vn_nary_op_lookup_1 (vno1, vnresult);
2187 /* Lookup the rhs of STMT in the current hash table, and return the resulting
2188 value number if it exists in the hash table. Return NULL_TREE if
2189 it does not exist in the hash table. VNRESULT will contain the
2190 vn_nary_op_t from the hashtable if it exists. */
2192 tree
2193 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
2195 vn_nary_op_t vno1
2196 = XALLOCAVAR (struct vn_nary_op_s,
2197 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
2198 init_vn_nary_op_from_stmt (vno1, stmt);
2199 return vn_nary_op_lookup_1 (vno1, vnresult);
2202 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
2204 static vn_nary_op_t
2205 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
2207 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
2210 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
2211 obstack. */
2213 static vn_nary_op_t
2214 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2216 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2217 &current_info->nary_obstack);
2219 vno1->value_id = value_id;
2220 vno1->length = length;
2221 vno1->result = result;
2223 return vno1;
2226 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2227 VNO->HASHCODE first. */
2229 static vn_nary_op_t
2230 vn_nary_op_insert_into (vn_nary_op_t vno, htab_t table, bool compute_hash)
2232 void **slot;
2234 if (compute_hash)
2235 vno->hashcode = vn_nary_op_compute_hash (vno);
2237 slot = htab_find_slot_with_hash (table, vno, vno->hashcode, INSERT);
2238 gcc_assert (!*slot);
2240 *slot = vno;
2241 return vno;
2244 /* Insert an n-ary operation into the current hash table using its
2245 pieces. Return the vn_nary_op_t structure we created and put in
2246 the hashtable. */
2248 vn_nary_op_t
2249 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2250 tree type, tree *ops,
2251 tree result, unsigned int value_id)
2253 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
2254 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
2255 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2258 /* Insert OP into the current hash table with a value number of
2259 RESULT. Return the vn_nary_op_t structure we created and put in
2260 the hashtable. */
2262 vn_nary_op_t
2263 vn_nary_op_insert (tree op, tree result)
2265 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2266 vn_nary_op_t vno1;
2268 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2269 init_vn_nary_op_from_op (vno1, op);
2270 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2273 /* Insert the rhs of STMT into the current hash table with a value number of
2274 RESULT. */
2276 vn_nary_op_t
2277 vn_nary_op_insert_stmt (gimple stmt, tree result)
2279 vn_nary_op_t vno1
2280 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
2281 result, VN_INFO (result)->value_id);
2282 init_vn_nary_op_from_stmt (vno1, stmt);
2283 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2286 /* Compute a hashcode for PHI operation VP1 and return it. */
2288 static inline hashval_t
2289 vn_phi_compute_hash (vn_phi_t vp1)
2291 hashval_t result;
2292 int i;
2293 tree phi1op;
2294 tree type;
2296 result = vp1->block->index;
2298 /* If all PHI arguments are constants we need to distinguish
2299 the PHI node via its type. */
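/* Mixing in precision and signedness keeps, e.g., an int PHI and an
   unsigned int PHI with identical constant arguments apart.  */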
2300 type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
2301 result += (INTEGRAL_TYPE_P (type)
2302 + (INTEGRAL_TYPE_P (type)
2303 ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));
2305 FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
2307 if (phi1op == VN_TOP)
2308 continue;
2309 result = iterative_hash_expr (phi1op, result);
2312 return result;
2315 /* Return the computed hashcode for phi operation P1. */
2317 static hashval_t
2318 vn_phi_hash (const void *p1)
2320 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2321 return vp1->hashcode;
2324 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2326 static int
2327 vn_phi_eq (const void *p1, const void *p2)
2329 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2330 const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
2332 if (vp1->hashcode != vp2->hashcode)
2333 return false;
2335 if (vp1->block == vp2->block)
2337 int i;
2338 tree phi1op;
2340 /* If the PHI nodes do not have compatible types
2341 they are not the same. */
2342 if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
2343 TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
2344 return false;
2346 /* Any phi in the same block will have its arguments in the
2347 same edge order, because of how we store phi nodes. */
2348 FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
2350 tree phi2op = VEC_index (tree, vp2->phiargs, i);
2351 if (phi1op == VN_TOP || phi2op == VN_TOP)
2352 continue;
2353 if (!expressions_equal_p (phi1op, phi2op))
2354 return false;
2356 return true;
2358 return false;
2361 static VEC(tree, heap) *shared_lookup_phiargs;
2363 /* Lookup PHI in the current hash table, and return the resulting
2364 value number if it exists in the hash table. Return NULL_TREE if
2365 it does not exist in the hash table. */
2367 static tree
2368 vn_phi_lookup (gimple phi)
2370 void **slot;
2371 struct vn_phi_s vp1;
2372 unsigned i;
2374 VEC_truncate (tree, shared_lookup_phiargs, 0);
2376 /* Canonicalize the SSA_NAME's to their value number. */
2377 for (i = 0; i < gimple_phi_num_args (phi); i++)
2379 tree def = PHI_ARG_DEF (phi, i);
2380 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2381 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
2383 vp1.phiargs = shared_lookup_phiargs;
2384 vp1.block = gimple_bb (phi);
2385 vp1.hashcode = vn_phi_compute_hash (&vp1);
2386 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
2387 NO_INSERT);
2388 if (!slot && current_info == optimistic_info)
2389 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
2390 NO_INSERT);
2391 if (!slot)
2392 return NULL_TREE;
2393 return ((vn_phi_t)*slot)->result;
2396 /* Insert PHI into the current hash table with a value number of
2397 RESULT. */
2399 static vn_phi_t
2400 vn_phi_insert (gimple phi, tree result)
2402 void **slot;
2403 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
2404 unsigned i;
2405 VEC (tree, heap) *args = NULL;
2407 /* Canonicalize the SSA_NAME's to their value number. */
2408 for (i = 0; i < gimple_phi_num_args (phi); i++)
2410 tree def = PHI_ARG_DEF (phi, i);
2411 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2412 VEC_safe_push (tree, heap, args, def);
2414 vp1->value_id = VN_INFO (result)->value_id;
2415 vp1->phiargs = args;
2416 vp1->block = gimple_bb (phi);
2417 vp1->result = result;
2418 vp1->hashcode = vn_phi_compute_hash (vp1);
2420 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
2421 INSERT);
2423 /* Because we iterate over phi operations more than once, it's
2424 possible the slot might already exist here, hence no assert. */
2425 *slot = vp1;
2426 return vp1;
2430 /* Print set of components in strongly connected component SCC to OUT. */
2432 static void
2433 print_scc (FILE *out, VEC (tree, heap) *scc)
2435 tree var;
2436 unsigned int i;
2438 fprintf (out, "SCC consists of: ");
2439 FOR_EACH_VEC_ELT (tree, scc, i, var)
2441 print_generic_expr (out, var, 0);
2442 fprintf (out, " ");
2444 fprintf (out, "\n");
2447 /* Set the value number of FROM to TO, return true if it has changed
2448 as a result. */
2450 static inline bool
2451 set_ssa_val_to (tree from, tree to)
2453 tree currval = SSA_VAL (from);
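/* A name whose current value number is itself is VARYING; the code
   below never moves it back up the lattice.  */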
2455 if (from != to)
2457 if (currval == from)
2459 if (dump_file && (dump_flags & TDF_DETAILS))
2461 fprintf (dump_file, "Not changing value number of ");
2462 print_generic_expr (dump_file, from, 0);
2463 fprintf (dump_file, " from VARYING to ");
2464 print_generic_expr (dump_file, to, 0);
2465 fprintf (dump_file, "\n");
2467 return false;
2469 else if (TREE_CODE (to) == SSA_NAME
2470 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2471 to = from;
2474 /* The only things we allow as value numbers are VN_TOP, SSA names
2475 and invariants. So assert that here. */
2476 gcc_assert (to != NULL_TREE
2477 && (to == VN_TOP
2478 || TREE_CODE (to) == SSA_NAME
2479 || is_gimple_min_invariant (to)));
2481 if (dump_file && (dump_flags & TDF_DETAILS))
2483 fprintf (dump_file, "Setting value number of ");
2484 print_generic_expr (dump_file, from, 0);
2485 fprintf (dump_file, " to ");
2486 print_generic_expr (dump_file, to, 0);
2489 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
2491 VN_INFO (from)->valnum = to;
2492 if (dump_file && (dump_flags & TDF_DETAILS))
2493 fprintf (dump_file, " (changed)\n");
2494 return true;
2496 if (dump_file && (dump_flags & TDF_DETAILS))
2497 fprintf (dump_file, "\n");
2498 return false;
2501 /* Value number all definitions in STMT to themselves.
2502 Return true if a value number changed. */
2504 static bool
2505 defs_to_varying (gimple stmt)
2507 bool changed = false;
2508 ssa_op_iter iter;
2509 def_operand_p defp;
2511 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2513 tree def = DEF_FROM_PTR (defp);
2515 VN_INFO (def)->use_processed = true;
2516 changed |= set_ssa_val_to (def, def);
2518 return changed;
2521 static bool expr_has_constants (tree expr);
2522 static tree valueize_expr (tree expr);
2524 /* Visit a copy between LHS and RHS, return true if the value number
2525 changed. */
2527 static bool
2528 visit_copy (tree lhs, tree rhs)
2530 /* Follow chains of copies to their destination. */
2531 while (TREE_CODE (rhs) == SSA_NAME
2532 && SSA_VAL (rhs) != rhs)
2533 rhs = SSA_VAL (rhs);
2535 /* The copy may have a more interesting constant-filled expression
2536 (we don't, since we know our RHS is just an SSA name). */
2537 if (TREE_CODE (rhs) == SSA_NAME)
2539 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2540 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2543 return set_ssa_val_to (lhs, rhs);
2546 /* Visit a nary operator RHS, value number it, and return true if the
2547 value number of LHS has changed as a result. */
2549 static bool
2550 visit_nary_op (tree lhs, gimple stmt)
2552 bool changed = false;
2553 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2555 if (result)
2556 changed = set_ssa_val_to (lhs, result);
2557 else
2559 changed = set_ssa_val_to (lhs, lhs);
2560 vn_nary_op_insert_stmt (stmt, lhs);
2563 return changed;
2566 /* Visit a call STMT storing into LHS. Return true if the value number
2567 of the LHS has changed as a result. */
2569 static bool
2570 visit_reference_op_call (tree lhs, gimple stmt)
2572 bool changed = false;
2573 struct vn_reference_s vr1;
2574 tree result;
2575 tree vuse = gimple_vuse (stmt);
2577 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2578 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
2579 vr1.type = gimple_expr_type (stmt);
2580 vr1.set = 0;
2581 vr1.hashcode = vn_reference_compute_hash (&vr1);
2582 result = vn_reference_lookup_1 (&vr1, NULL);
2583 if (result)
2585 changed = set_ssa_val_to (lhs, result);
2586 if (TREE_CODE (result) == SSA_NAME
2587 && VN_INFO (result)->has_constants)
2588 VN_INFO (lhs)->has_constants = true;
2590 else
2592 void **slot;
2593 vn_reference_t vr2;
2594 changed = set_ssa_val_to (lhs, lhs);
2595 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
2596 vr2->vuse = vr1.vuse;
2597 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
2598 vr2->type = vr1.type;
2599 vr2->set = vr1.set;
2600 vr2->hashcode = vr1.hashcode;
2601 vr2->result = lhs;
2602 slot = htab_find_slot_with_hash (current_info->references,
2603 vr2, vr2->hashcode, INSERT);
2604 if (*slot)
2605 free_reference (*slot);
2606 *slot = vr2;
2609 return changed;
2612 /* Visit a load from a reference operator RHS, part of STMT, value number it,
2613 and return true if the value number of the LHS has changed as a result. */
2615 static bool
2616 visit_reference_op_load (tree lhs, tree op, gimple stmt)
2618 bool changed = false;
2619 tree last_vuse;
2620 tree result;
2622 last_vuse = gimple_vuse (stmt);
2623 last_vuse_ptr = &last_vuse;
2624 result = vn_reference_lookup (op, gimple_vuse (stmt),
2625 default_vn_walk_kind, NULL);
2626 last_vuse_ptr = NULL;
2628 /* If we have a VCE, try looking up its operand as it might be stored in
2629 a different type. */
2630 if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
2631 result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
2632 default_vn_walk_kind, NULL);
2634 /* We handle type-punning through unions by value-numbering based
2635 on offset and size of the access. Be prepared to handle a
2636 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
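/* For example, given
     union { int i; float f; } u;
     u.i = 1;
     x = u.f;
   the load of u.f finds the stored int value by offset and size, and
   we re-interpret it via VIEW_CONVERT_EXPR <float> (...).  */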
2637 if (result
2638 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
2640 /* We will be setting the value number of lhs to the value number
2641 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
2642 So first simplify and lookup this expression to see if it
2643 is already available. */
2644 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
2645 if ((CONVERT_EXPR_P (val)
2646 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
2647 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
2649 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
2650 if ((CONVERT_EXPR_P (tem)
2651 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
2652 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
2653 TREE_TYPE (val), tem)))
2654 val = tem;
2656 result = val;
2657 if (!is_gimple_min_invariant (val)
2658 && TREE_CODE (val) != SSA_NAME)
2659 result = vn_nary_op_lookup (val, NULL);
2660 /* If the expression is not yet available, value-number lhs to
2661 a new SSA_NAME we create. */
2662 if (!result)
2664 result = make_ssa_name (SSA_NAME_VAR (lhs), gimple_build_nop ());
2665 /* Initialize value-number information properly. */
2666 VN_INFO_GET (result)->valnum = result;
2667 VN_INFO (result)->value_id = get_next_value_id ();
2668 VN_INFO (result)->expr = val;
2669 VN_INFO (result)->has_constants = expr_has_constants (val);
2670 VN_INFO (result)->needs_insertion = true;
2671 /* As all "inserted" statements are singleton SCCs, insert
2672 to the valid table. This is strictly needed to
2673 avoid re-generating new value SSA_NAMEs for the same
2674 expression during SCC iteration over and over (the
2675 optimistic table gets cleared after each iteration).
2676 We do not need to insert into the optimistic table, as
2677 lookups there will fall back to the valid table. */
2678 if (current_info == optimistic_info)
2680 current_info = valid_info;
2681 vn_nary_op_insert (val, result);
2682 current_info = optimistic_info;
2684 else
2685 vn_nary_op_insert (val, result);
2686 if (dump_file && (dump_flags & TDF_DETAILS))
2688 fprintf (dump_file, "Inserting name ");
2689 print_generic_expr (dump_file, result, 0);
2690 fprintf (dump_file, " for expression ");
2691 print_generic_expr (dump_file, val, 0);
2692 fprintf (dump_file, "\n");
2697 if (result)
2699 changed = set_ssa_val_to (lhs, result);
2700 if (TREE_CODE (result) == SSA_NAME
2701 && VN_INFO (result)->has_constants)
2703 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
2704 VN_INFO (lhs)->has_constants = true;
2707 else
2709 changed = set_ssa_val_to (lhs, lhs);
2710 vn_reference_insert (op, lhs, last_vuse);
2713 return changed;
2717 /* Visit a store to a reference operator LHS, part of STMT, value number it,
2718 and return true if the value number of the LHS has changed as a result. */
2720 static bool
2721 visit_reference_op_store (tree lhs, tree op, gimple stmt)
2723 bool changed = false;
2724 tree result;
2725 bool resultsame = false;
2727 /* First we want to look up using the *vuses* from the store and see
2728 if the last store to this location with the same address had
2729 the same value.
2731 The vuses represent the memory state before the store. If the
2732 memory state, address, and value of the store are the same as the
2733 last store to this location, then this store will produce the
2734 same memory state as that store.
2736 In this case the vdef versions for this store are value numbered to those
2737 vuse versions, since they represent the same memory state after
2738 this store.
2740 Otherwise, the vdefs for the store are used when inserting into
2741 the table, since the store generates a new memory state. */
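/* An illustrative example, in GIMPLE dump syntax:
     # .MEM_2 = VDEF <.MEM_1>
     *p_1 = x_3;
     ...
     # .MEM_4 = VDEF <.MEM_2>
     *p_1 = x_3;
   The second store writes the value the location already has, so
   .MEM_4 can be value numbered to .MEM_2.  */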
2743 result = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_NOWALK, NULL);
2745 if (result)
2747 if (TREE_CODE (result) == SSA_NAME)
2748 result = SSA_VAL (result);
2749 if (TREE_CODE (op) == SSA_NAME)
2750 op = SSA_VAL (op);
2751 resultsame = expressions_equal_p (result, op);
2754 if (!result || !resultsame)
2756 tree vdef;
2758 if (dump_file && (dump_flags & TDF_DETAILS))
2760 fprintf (dump_file, "No store match\n");
2761 fprintf (dump_file, "Value numbering store ");
2762 print_generic_expr (dump_file, lhs, 0);
2763 fprintf (dump_file, " to ");
2764 print_generic_expr (dump_file, op, 0);
2765 fprintf (dump_file, "\n");
2767 /* Have to set value numbers before insert, since insert is
2768 going to valueize the references in-place. */
2769 if ((vdef = gimple_vdef (stmt)))
2771 VN_INFO (vdef)->use_processed = true;
2772 changed |= set_ssa_val_to (vdef, vdef);
2775 /* Do not insert structure copies into the tables. */
2776 if (is_gimple_min_invariant (op)
2777 || is_gimple_reg (op))
2778 vn_reference_insert (lhs, op, vdef);
2780 else
2782 /* We had a match, so value number the vdef to have the value
2783 number of the vuse it came from. */
2784 tree def, use;
2786 if (dump_file && (dump_flags & TDF_DETAILS))
2787 fprintf (dump_file, "Store matched earlier value,"
2788 "value numbering store vdefs to matching vuses.\n");
2790 def = gimple_vdef (stmt);
2791 use = gimple_vuse (stmt);
2793 VN_INFO (def)->use_processed = true;
2794 changed |= set_ssa_val_to (def, SSA_VAL (use));
2797 return changed;
2800 /* Visit and value number PHI, return true if the value number
2801 changed. */
2803 static bool
2804 visit_phi (gimple phi)
2806 bool changed = false;
2807 tree result;
2808 tree sameval = VN_TOP;
2809 bool allsame = true;
2810 unsigned i;
2812 /* TODO: We could check for this in init_scc_vn, and replace this
2813 with a gcc_assert. */
2814 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
2815 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2817 /* See if all non-TOP arguments have the same value. TOP is
2818 equivalent to everything, so we can ignore it. */
2819 for (i = 0; i < gimple_phi_num_args (phi); i++)
2821 tree def = PHI_ARG_DEF (phi, i);
2823 if (TREE_CODE (def) == SSA_NAME)
2824 def = SSA_VAL (def);
2825 if (def == VN_TOP)
2826 continue;
2827 if (sameval == VN_TOP)
2829 sameval = def;
2831 else
2833 if (!expressions_equal_p (def, sameval))
2835 allsame = false;
2836 break;
2841 /* If all value numbered to the same value, the phi node has that
2842 value. */
2843 if (allsame)
2845 if (is_gimple_min_invariant (sameval))
2847 VN_INFO (PHI_RESULT (phi))->has_constants = true;
2848 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2850 else
2852 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2853 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2856 if (TREE_CODE (sameval) == SSA_NAME)
2857 return visit_copy (PHI_RESULT (phi), sameval);
2859 return set_ssa_val_to (PHI_RESULT (phi), sameval);
2862 /* Otherwise, see if it is equivalent to a phi node in this block. */
2863 result = vn_phi_lookup (phi);
2864 if (result)
2866 if (TREE_CODE (result) == SSA_NAME)
2867 changed = visit_copy (PHI_RESULT (phi), result);
2868 else
2869 changed = set_ssa_val_to (PHI_RESULT (phi), result);
2871 else
2873 vn_phi_insert (phi, PHI_RESULT (phi));
2874 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2875 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
2876 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2879 return changed;
2882 /* Return true if EXPR contains constants. */
2884 static bool
2885 expr_has_constants (tree expr)
2887 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2889 case tcc_unary:
2890 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
2892 case tcc_binary:
2893 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
2894 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
2895 /* Constants inside reference ops are rarely interesting, but
2896 it can take a lot of looking to find them. */
2897 case tcc_reference:
2898 case tcc_declaration:
2899 return false;
2900 default:
2901 return is_gimple_min_invariant (expr);
2903 return false;
2906 /* Return true if STMT contains constants. */
2908 static bool
2909 stmt_has_constants (gimple stmt)
2911 if (gimple_code (stmt) != GIMPLE_ASSIGN)
2912 return false;
2914 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2916 case GIMPLE_UNARY_RHS:
2917 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2919 case GIMPLE_BINARY_RHS:
2920 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2921 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
2922 case GIMPLE_TERNARY_RHS:
2923 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2924 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))
2925 || is_gimple_min_invariant (gimple_assign_rhs3 (stmt)));
2926 case GIMPLE_SINGLE_RHS:
2927 /* Constants inside reference ops are rarely interesting, but
2928 it can take a lot of looking to find them. */
2929 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2930 default:
2931 gcc_unreachable ();
2933 return false;
2936 /* Replace SSA_NAMES in expr with their value numbers, and return the
2937 result.
2938 This is performed in place. */
2940 static tree
2941 valueize_expr (tree expr)
2943 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2945 case tcc_binary:
2946 TREE_OPERAND (expr, 1) = vn_valueize (TREE_OPERAND (expr, 1));
2947 /* Fallthru. */
2948 case tcc_unary:
2949 TREE_OPERAND (expr, 0) = vn_valueize (TREE_OPERAND (expr, 0));
2950 break;
2951 default:;
2953 return expr;
2956 /* Simplify the binary expression RHS, and return the result if
2957 simplified. */
2959 static tree
2960 simplify_binary_expression (gimple stmt)
2962 tree result = NULL_TREE;
2963 tree op0 = gimple_assign_rhs1 (stmt);
2964 tree op1 = gimple_assign_rhs2 (stmt);
2965 enum tree_code code = gimple_assign_rhs_code (stmt);
2967 /* This will not catch every single case we could combine, but will
2968 catch those with constants. The goal here is to simultaneously
2969 combine constants between expressions, but avoid infinite
2970 expansion of expressions during simplification. */
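/* E.g. given x_1 = a_2 + 1 and the use y_3 = x_1 + 2, substituting
   the constant-containing expression for x_1 lets fold turn the RHS
   into a_2 + 3.  */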
2971 if (TREE_CODE (op0) == SSA_NAME)
2973 if (VN_INFO (op0)->has_constants
2974 || TREE_CODE_CLASS (code) == tcc_comparison
2975 || code == COMPLEX_EXPR)
2976 op0 = valueize_expr (vn_get_expr_for (op0));
2977 else
2978 op0 = vn_valueize (op0);
2981 if (TREE_CODE (op1) == SSA_NAME)
2983 if (VN_INFO (op1)->has_constants
2984 || code == COMPLEX_EXPR)
2985 op1 = valueize_expr (vn_get_expr_for (op1));
2986 else
2987 op1 = vn_valueize (op1);
2990 /* Pointer plus constant can be represented as an invariant address.
2991 Do so to allow further propagation; see also tree forwprop. */
2992 if (code == POINTER_PLUS_EXPR
2993 && host_integerp (op1, 1)
2994 && TREE_CODE (op0) == ADDR_EXPR
2995 && is_gimple_min_invariant (op0))
2996 return build_invariant_address (TREE_TYPE (op0),
2997 TREE_OPERAND (op0, 0),
2998 TREE_INT_CST_LOW (op1));
3000 /* Avoid folding if nothing changed. */
3001 if (op0 == gimple_assign_rhs1 (stmt)
3002 && op1 == gimple_assign_rhs2 (stmt))
3003 return NULL_TREE;
3005 fold_defer_overflow_warnings ();
3007 result = fold_binary (code, gimple_expr_type (stmt), op0, op1);
3008 if (result)
3009 STRIP_USELESS_TYPE_CONVERSION (result);
3011 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
3012 stmt, 0);
3014 /* Make sure the result is not a complex expression consisting
3015 of operations on operations (e.g. (a + b) + (a + c)).
3016 Otherwise, we will end up with unbounded expressions if
3017 fold does anything at all. */
3018 if (result && valid_gimple_rhs_p (result))
3019 return result;
3021 return NULL_TREE;
3024 /* Simplify the unary expression RHS, and return the result if
3025 simplified. */
3027 static tree
3028 simplify_unary_expression (gimple stmt)
3030 tree result = NULL_TREE;
3031 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
3032 enum tree_code code = gimple_assign_rhs_code (stmt);
3034 /* We handle some tcc_reference codes here that are all
3035 GIMPLE_ASSIGN_SINGLE codes. */
3036 if (code == REALPART_EXPR
3037 || code == IMAGPART_EXPR
3038 || code == VIEW_CONVERT_EXPR
3039 || code == BIT_FIELD_REF)
3040 op0 = TREE_OPERAND (op0, 0);
3042 if (TREE_CODE (op0) != SSA_NAME)
3043 return NULL_TREE;
3045 orig_op0 = op0;
3046 if (VN_INFO (op0)->has_constants)
3047 op0 = valueize_expr (vn_get_expr_for (op0));
3048 else if (CONVERT_EXPR_CODE_P (code)
3049 || code == REALPART_EXPR
3050 || code == IMAGPART_EXPR
3051 || code == VIEW_CONVERT_EXPR
3052 || code == BIT_FIELD_REF)
3054 /* We want to do tree-combining on conversion-like expressions.
3055 Make sure we feed only SSA_NAMEs or constants to fold though. */
3056 tree tem = valueize_expr (vn_get_expr_for (op0));
3057 if (UNARY_CLASS_P (tem)
3058 || BINARY_CLASS_P (tem)
3059 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
3060 || TREE_CODE (tem) == SSA_NAME
3061 || TREE_CODE (tem) == CONSTRUCTOR
3062 || is_gimple_min_invariant (tem))
3063 op0 = tem;
3066 /* Avoid folding if nothing changed, but remember the expression. */
3067 if (op0 == orig_op0)
3068 return NULL_TREE;
3070 if (code == BIT_FIELD_REF)
3072 tree rhs = gimple_assign_rhs1 (stmt);
3073 result = fold_ternary (BIT_FIELD_REF, TREE_TYPE (rhs),
3074 op0, TREE_OPERAND (rhs, 1), TREE_OPERAND (rhs, 2));
3076 else
3077 result = fold_unary_ignore_overflow (code, gimple_expr_type (stmt), op0);
3078 if (result)
3080 STRIP_USELESS_TYPE_CONVERSION (result);
3081 if (valid_gimple_rhs_p (result))
3082 return result;
3085 return NULL_TREE;
3088 /* Try to simplify RHS using equivalences and constant folding. */
3090 static tree
3091 try_to_simplify (gimple stmt)
3093 enum tree_code code = gimple_assign_rhs_code (stmt);
3094 tree tem;
3096 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
3097 in this case; there is no point in doing extra work. */
3098 if (code == SSA_NAME)
3099 return NULL_TREE;
3101 /* First try constant folding based on our current lattice. */
3102 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize);
3103 if (tem
3104 && (TREE_CODE (tem) == SSA_NAME
3105 || is_gimple_min_invariant (tem)))
3106 return tem;
3108 /* If that didn't work try combining multiple statements. */
3109 switch (TREE_CODE_CLASS (code))
3111 case tcc_reference:
3112 /* Fallthrough for some unary codes that can operate on registers. */
3113 if (!(code == REALPART_EXPR
3114 || code == IMAGPART_EXPR
3115 || code == VIEW_CONVERT_EXPR
3116 || code == BIT_FIELD_REF))
3117 break;
3118 /* We could do a little more with unary ops, if they expand
3119 into binary ops, but it's debatable whether it is worth it. */
3120 case tcc_unary:
3121 return simplify_unary_expression (stmt);
3123 case tcc_comparison:
3124 case tcc_binary:
3125 return simplify_binary_expression (stmt);
3127 default:
3128 break;
3131 return NULL_TREE;
3134 /* Visit and value number USE, return true if the value number
3135 changed. */
3137 static bool
3138 visit_use (tree use)
3140 bool changed = false;
3141 gimple stmt = SSA_NAME_DEF_STMT (use);
3143 VN_INFO (use)->use_processed = true;
3145 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
3146 if (dump_file && (dump_flags & TDF_DETAILS)
3147 && !SSA_NAME_IS_DEFAULT_DEF (use))
3149 fprintf (dump_file, "Value numbering ");
3150 print_generic_expr (dump_file, use, 0);
3151 fprintf (dump_file, " stmt = ");
3152 print_gimple_stmt (dump_file, stmt, 0, 0);
3155 /* Handle uninitialized uses. */
3156 if (SSA_NAME_IS_DEFAULT_DEF (use))
3157 changed = set_ssa_val_to (use, use);
3158 else
3160 if (gimple_code (stmt) == GIMPLE_PHI)
3161 changed = visit_phi (stmt);
3162 else if (!gimple_has_lhs (stmt)
3163 || gimple_has_volatile_ops (stmt)
3164 || stmt_could_throw_p (stmt))
3165 changed = defs_to_varying (stmt);
3166 else if (is_gimple_assign (stmt))
3168 enum tree_code code = gimple_assign_rhs_code (stmt);
3169 tree lhs = gimple_assign_lhs (stmt);
3170 tree rhs1 = gimple_assign_rhs1 (stmt);
3171 tree simplified;
3173 /* Shortcut for copies. Simplifying copies is pointless,
3174 since we copy the expression and value they represent. */
3175 if (code == SSA_NAME
3176 && TREE_CODE (lhs) == SSA_NAME)
3178 changed = visit_copy (lhs, rhs1);
3179 goto done;
3181 simplified = try_to_simplify (stmt);
3182 if (simplified)
3184 if (dump_file && (dump_flags & TDF_DETAILS))
3186 fprintf (dump_file, "RHS ");
3187 print_gimple_expr (dump_file, stmt, 0, 0);
3188 fprintf (dump_file, " simplified to ");
3189 print_generic_expr (dump_file, simplified, 0);
3190 if (TREE_CODE (lhs) == SSA_NAME)
3191 fprintf (dump_file, " has constants %d\n",
3192 expr_has_constants (simplified));
3193 else
3194 fprintf (dump_file, "\n");
3197 /* Setting value numbers to constants will occasionally
3198 screw up phi congruence because constants are not
3199 uniquely associated with a single ssa name that can be
3200 looked up. */
3201 if (simplified
3202 && is_gimple_min_invariant (simplified)
3203 && TREE_CODE (lhs) == SSA_NAME)
3205 VN_INFO (lhs)->expr = simplified;
3206 VN_INFO (lhs)->has_constants = true;
3207 changed = set_ssa_val_to (lhs, simplified);
3208 goto done;
3210 else if (simplified
3211 && TREE_CODE (simplified) == SSA_NAME
3212 && TREE_CODE (lhs) == SSA_NAME)
3214 changed = visit_copy (lhs, simplified);
3215 goto done;
3217 else if (simplified)
3219 if (TREE_CODE (lhs) == SSA_NAME)
3221 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
3222 /* We have to unshare the expression or else
3223 valueizing may change the IL stream. */
3224 VN_INFO (lhs)->expr = unshare_expr (simplified);
3227 else if (stmt_has_constants (stmt)
3228 && TREE_CODE (lhs) == SSA_NAME)
3229 VN_INFO (lhs)->has_constants = true;
3230 else if (TREE_CODE (lhs) == SSA_NAME)
3232 /* We reset expr and constantness here because we may
3233 have been value numbering optimistically, and
3234 iterating. They may become non-constant in this case,
3235 even if they were optimistically constant. */
3237 VN_INFO (lhs)->has_constants = false;
3238 VN_INFO (lhs)->expr = NULL_TREE;
3241 if ((TREE_CODE (lhs) == SSA_NAME
3242 /* We can substitute SSA_NAMEs that are live over
3243 abnormal edges with their constant value. */
3244 && !(gimple_assign_copy_p (stmt)
3245 && is_gimple_min_invariant (rhs1))
3246 && !(simplified
3247 && is_gimple_min_invariant (simplified))
3248 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3249 /* Stores or copies from SSA_NAMEs that are live over
3250 abnormal edges are a problem. */
3251 || (code == SSA_NAME
3252 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
3253 changed = defs_to_varying (stmt);
3254 else if (REFERENCE_CLASS_P (lhs)
3255 || DECL_P (lhs))
3256 changed = visit_reference_op_store (lhs, rhs1, stmt);
3257 else if (TREE_CODE (lhs) == SSA_NAME)
3259 if ((gimple_assign_copy_p (stmt)
3260 && is_gimple_min_invariant (rhs1))
3261 || (simplified
3262 && is_gimple_min_invariant (simplified)))
3264 VN_INFO (lhs)->has_constants = true;
3265 if (simplified)
3266 changed = set_ssa_val_to (lhs, simplified);
3267 else
3268 changed = set_ssa_val_to (lhs, rhs1);
3270 else
3272 switch (get_gimple_rhs_class (code))
3274 case GIMPLE_UNARY_RHS:
3275 case GIMPLE_BINARY_RHS:
3276 case GIMPLE_TERNARY_RHS:
3277 changed = visit_nary_op (lhs, stmt);
3278 break;
3279 case GIMPLE_SINGLE_RHS:
3280 switch (TREE_CODE_CLASS (code))
3282 case tcc_reference:
3283 /* VOP-less references can go through unary case. */
3284 if ((code == REALPART_EXPR
3285 || code == IMAGPART_EXPR
3286 || code == VIEW_CONVERT_EXPR
3287 || code == BIT_FIELD_REF)
3288 && TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME)
3290 changed = visit_nary_op (lhs, stmt);
3291 break;
3293 /* Fallthrough. */
3294 case tcc_declaration:
3295 changed = visit_reference_op_load (lhs, rhs1, stmt);
3296 break;
3297 default:
3298 if (code == ADDR_EXPR)
3300 changed = visit_nary_op (lhs, stmt);
3301 break;
3303 else if (code == CONSTRUCTOR)
3305 changed = visit_nary_op (lhs, stmt);
3306 break;
3308 changed = defs_to_varying (stmt);
3310 break;
3311 default:
3312 changed = defs_to_varying (stmt);
3313 break;
3317 else
3318 changed = defs_to_varying (stmt);
3320 else if (is_gimple_call (stmt))
3322 tree lhs = gimple_call_lhs (stmt);
3324 /* ??? We could try to simplify calls. */
3326 if (stmt_has_constants (stmt)
3327 && TREE_CODE (lhs) == SSA_NAME)
3328 VN_INFO (lhs)->has_constants = true;
3329 else if (TREE_CODE (lhs) == SSA_NAME)
3331 /* We reset expr and constantness here because we may
3332 have been value numbering optimistically, and
3333 iterating. They may become non-constant in this case,
3334 even if they were optimistically constant. */
3335 VN_INFO (lhs)->has_constants = false;
3336 VN_INFO (lhs)->expr = NULL_TREE;
3339 if (TREE_CODE (lhs) == SSA_NAME
3340 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3341 changed = defs_to_varying (stmt);
3342 /* ??? We should handle stores from calls. */
3343 else if (TREE_CODE (lhs) == SSA_NAME)
3345 if (!gimple_call_internal_p (stmt)
3346 && gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
3347 changed = visit_reference_op_call (lhs, stmt);
3348 else
3349 changed = defs_to_varying (stmt);
3351 else
3352 changed = defs_to_varying (stmt);
3355 done:
3356 return changed;
3359 /* Compare two operands by reverse postorder index. */
3361 static int
3362 compare_ops (const void *pa, const void *pb)
3364 const tree opa = *((const tree *)pa);
3365 const tree opb = *((const tree *)pb);
3366 gimple opstmta = SSA_NAME_DEF_STMT (opa);
3367 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
3368 basic_block bba;
3369 basic_block bbb;
3371 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3372 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3373 else if (gimple_nop_p (opstmta))
3374 return -1;
3375 else if (gimple_nop_p (opstmtb))
3376 return 1;
3378 bba = gimple_bb (opstmta);
3379 bbb = gimple_bb (opstmtb);
3381 if (!bba && !bbb)
3382 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3383 else if (!bba)
3384 return -1;
3385 else if (!bbb)
3386 return 1;
3388 if (bba == bbb)
3390 if (gimple_code (opstmta) == GIMPLE_PHI
3391 && gimple_code (opstmtb) == GIMPLE_PHI)
3392 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3393 else if (gimple_code (opstmta) == GIMPLE_PHI)
3394 return -1;
3395 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3396 return 1;
3397 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3398 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3399 else
3400 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3402 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3405 /* Sort an array containing members of a strongly connected component
3406 SCC so that the members are ordered by RPO number.
3407 This means that when the sort is complete, iterating through the
3408 array will give you the members in RPO order. */
3410 static void
3411 sort_scc (VEC (tree, heap) *scc)
3413 VEC_qsort (tree, scc, compare_ops);
3416 /* Insert the no longer used nary ONARY into the nary table of INFO. */
3418 static void
3419 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3421 size_t size = sizeof_vn_nary_op (onary->length);
3422 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3423 &info->nary_obstack);
3424 memcpy (nary, onary, size);
3425 vn_nary_op_insert_into (nary, info->nary, false);
3428 /* Insert the no longer used phi OPHI into the phi table of INFO. */
3430 static void
3431 copy_phi (vn_phi_t ophi, vn_tables_t info)
3433 vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
3434 void **slot;
3435 memcpy (phi, ophi, sizeof (*phi));
3436 ophi->phiargs = NULL;
3437 slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT);
3438 gcc_assert (!*slot);
3439 *slot = phi;
3442 /* Insert the no longer used reference OREF into the reference table of INFO. */
3444 static void
3445 copy_reference (vn_reference_t oref, vn_tables_t info)
3447 vn_reference_t ref;
3448 void **slot;
3449 ref = (vn_reference_t) pool_alloc (info->references_pool);
3450 memcpy (ref, oref, sizeof (*ref));
3451 oref->operands = NULL;
3452 slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
3453 INSERT);
3454 if (*slot)
3455 free_reference (*slot);
3456 *slot = ref;
3459 /* Process a strongly connected component in the SSA graph. */
3461 static void
3462 process_scc (VEC (tree, heap) *scc)
3464 tree var;
3465 unsigned int i;
3466 unsigned int iterations = 0;
3467 bool changed = true;
3468 htab_iterator hi;
3469 vn_nary_op_t nary;
3470 vn_phi_t phi;
3471 vn_reference_t ref;
3473 /* If the SCC has a single member, just visit it. */
3474 if (VEC_length (tree, scc) == 1)
3476 tree use = VEC_index (tree, scc, 0);
3477 if (VN_INFO (use)->use_processed)
3478 return;
3479 /* We need to make sure it doesn't form a cycle itself, which can
3480 happen for self-referential PHI nodes. In that case we would
3481 end up inserting an expression with VN_TOP operands into the
3482 valid table which makes us derive bogus equivalences later.
3483 The cheapest way to check this is to assume it for all PHI nodes. */
3484 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3485 /* Fallthru to iteration. */ ;
3486 else
3488 visit_use (use);
3489 return;
3493 /* Iterate over the SCC with the optimistic table until it stops
3494 changing. */
3495 current_info = optimistic_info;
3496 while (changed)
3498 changed = false;
3499 iterations++;
3500 if (dump_file && (dump_flags & TDF_DETAILS))
3501 fprintf (dump_file, "Starting iteration %d\n", iterations);
3502 /* As we are value-numbering optimistically we have to
3503 clear the expression tables and the simplified expressions
3504 in each iteration until we converge. */
3505 htab_empty (optimistic_info->nary);
3506 htab_empty (optimistic_info->phis);
3507 htab_empty (optimistic_info->references);
3508 obstack_free (&optimistic_info->nary_obstack, NULL);
3509 gcc_obstack_init (&optimistic_info->nary_obstack);
3510 empty_alloc_pool (optimistic_info->phis_pool);
3511 empty_alloc_pool (optimistic_info->references_pool);
3512 FOR_EACH_VEC_ELT (tree, scc, i, var)
3513 VN_INFO (var)->expr = NULL_TREE;
3514 FOR_EACH_VEC_ELT (tree, scc, i, var)
3515 changed |= visit_use (var);
3518 statistics_histogram_event (cfun, "SCC iterations", iterations);
3520 /* Finally, copy the contents of the no longer used optimistic
3521 table to the valid table. */
3522 FOR_EACH_HTAB_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hi)
3523 copy_nary (nary, valid_info);
3524 FOR_EACH_HTAB_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hi)
3525 copy_phi (phi, valid_info);
3526 FOR_EACH_HTAB_ELEMENT (optimistic_info->references, ref, vn_reference_t, hi)
3527 copy_reference (ref, valid_info);
3529 current_info = valid_info;
3532 DEF_VEC_O(ssa_op_iter);
3533 DEF_VEC_ALLOC_O(ssa_op_iter,heap);
3535 /* Pop the components of the found SCC for NAME off the SCC stack
3536 and process them. Returns true if all went well, false if
3537 we run into resource limits. */
3539 static bool
3540 extract_and_process_scc_for_name (tree name)
3542 VEC (tree, heap) *scc = NULL;
3543 tree x;
3545 /* Found an SCC, pop the components off the SCC stack and
3546 process them. */
3549 x = VEC_pop (tree, sccstack);
3551 VN_INFO (x)->on_sccstack = false;
3552 VEC_safe_push (tree, heap, scc, x);
3553 } while (x != name);
3555 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
3556 if (VEC_length (tree, scc)
3557 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3559 if (dump_file)
3560 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
3561 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
3562 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
3563 return false;
3566 if (VEC_length (tree, scc) > 1)
3567 sort_scc (scc);
3569 if (dump_file && (dump_flags & TDF_DETAILS))
3570 print_scc (dump_file, scc);
3572 process_scc (scc);
3574 VEC_free (tree, heap, scc);
3576 return true;
3579 /* Depth first search on NAME to discover and process SCC's in the SSA
3580 graph.
3581 Execution of this algorithm relies on the fact that the SCC's are
3582 popped off the stack in topological order.
3583 Returns true if successful, false if we stopped processing SCC's due
3584 to resource constraints. */
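/* This is a Tarjan-style SCC walk made iterative: instead of recursing
   we keep explicit stacks of names and operand iterators, so deep
   use-def chains cannot overflow the call stack.  */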
3586 static bool
3587 DFS (tree name)
3589 VEC(ssa_op_iter, heap) *itervec = NULL;
3590 VEC(tree, heap) *namevec = NULL;
3591 use_operand_p usep = NULL;
3592 gimple defstmt;
3593 tree use;
3594 ssa_op_iter iter;
3596 start_over:
3597 /* SCC info */
3598 VN_INFO (name)->dfsnum = next_dfs_num++;
3599 VN_INFO (name)->visited = true;
3600 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3602 VEC_safe_push (tree, heap, sccstack, name);
3603 VN_INFO (name)->on_sccstack = true;
3604 defstmt = SSA_NAME_DEF_STMT (name);
3606 /* Recursively DFS on our operands, looking for SCC's. */
3607 if (!gimple_nop_p (defstmt))
3609 /* Push a new iterator. */
3610 if (gimple_code (defstmt) == GIMPLE_PHI)
3611 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
3612 else
3613 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
3615 else
3616 clear_and_done_ssa_iter (&iter);
3618 while (1)
3620 /* If we are done processing uses of a name, go up the stack
3621 of iterators and process SCCs as we found them. */
3622 if (op_iter_done (&iter))
3624 /* See if we found an SCC. */
3625 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3626 if (!extract_and_process_scc_for_name (name))
3628 VEC_free (tree, heap, namevec);
3629 VEC_free (ssa_op_iter, heap, itervec);
3630 return false;
3633 /* Check if we are done. */
3634 if (VEC_empty (tree, namevec))
3636 VEC_free (tree, heap, namevec);
3637 VEC_free (ssa_op_iter, heap, itervec);
3638 return true;
3641 /* Restore the last use walker and continue walking there. */
3642 use = name;
3643 name = VEC_pop (tree, namevec);
3644 memcpy (&iter, VEC_last (ssa_op_iter, itervec),
3645 sizeof (ssa_op_iter));
3646 VEC_pop (ssa_op_iter, itervec);
3647 goto continue_walking;
3650 use = USE_FROM_PTR (usep);
3652 /* Since we handle phi nodes, we will sometimes get
3653 invariants in the use expression. */
3654 if (TREE_CODE (use) == SSA_NAME)
3656 if (! (VN_INFO (use)->visited))
3658 /* Recurse by pushing the current use walking state on
3659 the stack and starting over. */
3660 VEC_safe_push (ssa_op_iter, heap, itervec, &iter);
3661 VEC_safe_push (tree, heap, namevec, name);
3662 name = use;
3663 goto start_over;
3665 continue_walking:
3666 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3667 VN_INFO (use)->low);
3669 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3670 && VN_INFO (use)->on_sccstack)
3672 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3673 VN_INFO (name)->low);
3677 usep = op_iter_next_use (&iter);
3681 /* Allocate a value number table. */
3683 static void
3684 allocate_vn_table (vn_tables_t table)
3686 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
3687 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
3688 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
3689 free_reference);
3691 gcc_obstack_init (&table->nary_obstack);
3692 table->phis_pool = create_alloc_pool ("VN phis",
3693 sizeof (struct vn_phi_s),
3694 30);
3695 table->references_pool = create_alloc_pool ("VN references",
3696 sizeof (struct vn_reference_s),
3697 30);
3700 /* Free a value number table. */
3702 static void
3703 free_vn_table (vn_tables_t table)
3705 htab_delete (table->phis);
3706 htab_delete (table->nary);
3707 htab_delete (table->references);
3708 obstack_free (&table->nary_obstack, NULL);
3709 free_alloc_pool (table->phis_pool);
3710 free_alloc_pool (table->references_pool);
3713 static void
3714 init_scc_vn (void)
3716 size_t i;
3717 int j;
3718 int *rpo_numbers_temp;
3720 calculate_dominance_info (CDI_DOMINATORS);
3721 sccstack = NULL;
3722 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
3723 free);
3725 constant_value_ids = BITMAP_ALLOC (NULL);
3727 next_dfs_num = 1;
3728 next_value_id = 1;
3730 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
3731 /* VEC_alloc doesn't actually grow it to the right size; it just
3732 preallocates the space to do so. */
3733 VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
3734 gcc_obstack_init (&vn_ssa_aux_obstack);
3736 shared_lookup_phiargs = NULL;
3737 shared_lookup_references = NULL;
3738 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3739 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3740 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
3742 /* RPO_NUMBERS_TEMP is an array in RPO order: rpo[i] = bb means that
3743 the i'th block in RPO order is bb. We want to map bb's to RPO
3744 numbers, so we need to invert this array. */
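/* E.g. if rpo_numbers_temp is {4, 7, 5, ...}, then rpo_numbers[4] = 0,
   rpo_numbers[7] = 1, rpo_numbers[5] = 2, and so on.  */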
3745 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
3746 rpo_numbers[rpo_numbers_temp[j]] = j;
3748 XDELETE (rpo_numbers_temp);
3750 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
3752 /* Create the VN_INFO structures, and initialize value numbers to
3753 TOP. */
3754 for (i = 0; i < num_ssa_names; i++)
3756 tree name = ssa_name (i);
3757 if (name)
3759 VN_INFO_GET (name)->valnum = VN_TOP;
3760 VN_INFO (name)->expr = NULL_TREE;
3761 VN_INFO (name)->value_id = 0;
3765 renumber_gimple_stmt_uids ();
3767 /* Create the valid and optimistic value numbering tables. */
3768 valid_info = XCNEW (struct vn_tables_s);
3769 allocate_vn_table (valid_info);
3770 optimistic_info = XCNEW (struct vn_tables_s);
3771 allocate_vn_table (optimistic_info);
3774 void
3775 free_scc_vn (void)
3777 size_t i;
3779 htab_delete (constant_to_value_id);
3780 BITMAP_FREE (constant_value_ids);
3781 VEC_free (tree, heap, shared_lookup_phiargs);
3782 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
3783 XDELETEVEC (rpo_numbers);
3785 for (i = 0; i < num_ssa_names; i++)
3787 tree name = ssa_name (i);
3788 if (name
3789 && VN_INFO (name)->needs_insertion)
3790 release_ssa_name (name);
3792 obstack_free (&vn_ssa_aux_obstack, NULL);
3793 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
3795 VEC_free (tree, heap, sccstack);
3796 free_vn_table (valid_info);
3797 XDELETE (valid_info);
3798 free_vn_table (optimistic_info);
3799 XDELETE (optimistic_info);
3802 /* Set *ID if we computed something useful in RESULT. */
3804 static void
3805 set_value_id_for_result (tree result, unsigned int *id)
3807 if (result)
3809 if (TREE_CODE (result) == SSA_NAME)
3810 *id = VN_INFO (result)->value_id;
3811 else if (is_gimple_min_invariant (result))
3812 *id = get_or_alloc_constant_value_id (result);
3816 /* Set the value ids in the valid hash tables. */
3818 static void
3819 set_hashtable_value_ids (void)
3821 htab_iterator hi;
3822 vn_nary_op_t vno;
3823 vn_reference_t vr;
3824 vn_phi_t vp;
3826 /* Now set the value ids of the things we had put in the hash
3827 table. */
3829 FOR_EACH_HTAB_ELEMENT (valid_info->nary,
3830 vno, vn_nary_op_t, hi)
3831 set_value_id_for_result (vno->result, &vno->value_id);
3833 FOR_EACH_HTAB_ELEMENT (valid_info->phis,
3834 vp, vn_phi_t, hi)
3835 set_value_id_for_result (vp->result, &vp->value_id);
3837 FOR_EACH_HTAB_ELEMENT (valid_info->references,
3838 vr, vn_reference_t, hi)
3839 set_value_id_for_result (vr->result, &vr->value_id);
3842 /* Do SCCVN. Returns true if it finished, false if we bailed out
3843 due to resource constraints. DEFAULT_VN_WALK_KIND_ specifies
3844 how we use the alias oracle walking during the VN process. */
3846 bool
3847 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
3849 size_t i;
3850 tree param;
3851 bool changed = true;
3853 default_vn_walk_kind = default_vn_walk_kind_;
3855 init_scc_vn ();
3856 current_info = valid_info;
3858 for (param = DECL_ARGUMENTS (current_function_decl);
3859 param;
3860 param = DECL_CHAIN (param))
3862 if (gimple_default_def (cfun, param) != NULL)
3864 tree def = gimple_default_def (cfun, param);
3865 VN_INFO (def)->valnum = def;
3869 for (i = 1; i < num_ssa_names; ++i)
3871 tree name = ssa_name (i);
3872 if (name
3873 && VN_INFO (name)->visited == false
3874 && !has_zero_uses (name))
3875 if (!DFS (name))
3877 free_scc_vn ();
3878 return false;
3882 /* Initialize the value ids. */
3884 for (i = 1; i < num_ssa_names; ++i)
3886 tree name = ssa_name (i);
3887 vn_ssa_aux_t info;
3888 if (!name)
3889 continue;
3890 info = VN_INFO (name);
3891 if (info->valnum == name
3892 || info->valnum == VN_TOP)
3893 info->value_id = get_next_value_id ();
3894 else if (is_gimple_min_invariant (info->valnum))
3895 info->value_id = get_or_alloc_constant_value_id (info->valnum);
3898 /* Propagate until they stop changing. */
3899 while (changed)
3901 changed = false;
3902 for (i = 1; i < num_ssa_names; ++i)
3904 tree name = ssa_name (i);
3905 vn_ssa_aux_t info;
3906 if (!name)
3907 continue;
3908 info = VN_INFO (name);
3909 if (TREE_CODE (info->valnum) == SSA_NAME
3910 && info->valnum != name
3911 && info->value_id != VN_INFO (info->valnum)->value_id)
3913 changed = true;
3914 info->value_id = VN_INFO (info->valnum)->value_id;
3919 set_hashtable_value_ids ();
3921 if (dump_file && (dump_flags & TDF_DETAILS))
3923 fprintf (dump_file, "Value numbers:\n");
3924 for (i = 0; i < num_ssa_names; i++)
3926 tree name = ssa_name (i);
3927 if (name
3928 && VN_INFO (name)->visited
3929 && SSA_VAL (name) != name)
3931 print_generic_expr (dump_file, name, 0);
3932 fprintf (dump_file, " = ");
3933 print_generic_expr (dump_file, SSA_VAL (name), 0);
3934 fprintf (dump_file, "\n");
3939 return true;
3942 /* Return one more than the maximum value id we have ever seen. */
3944 unsigned int
3945 get_max_value_id (void)
3947 return next_value_id;
3950 /* Return the next unique value id. */
3952 unsigned int
3953 get_next_value_id (void)
3955 return next_value_id++;
3959 /* Compare two expressions E1 and E2 and return true if they are equal. */
3961 bool
3962 expressions_equal_p (tree e1, tree e2)
3964 /* The obvious case. */
3965 if (e1 == e2)
3966 return true;
3968 /* If only one of them is null, they cannot be equal. */
3969 if (!e1 || !e2)
3970 return false;
3972 /* Now perform the actual comparison. */
3973 if (TREE_CODE (e1) == TREE_CODE (e2)
3974 && operand_equal_p (e1, e2, OEP_PURE_SAME))
3975 return true;
3977 return false;
3981 /* Return true if the nary operation NARY may trap. This is a copy
3982 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
3984 bool
3985 vn_nary_may_trap (vn_nary_op_t nary)
3987 tree type;
3988 tree rhs2 = NULL_TREE;
3989 bool honor_nans = false;
3990 bool honor_snans = false;
3991 bool fp_operation = false;
3992 bool honor_trapv = false;
3993 bool handled, ret;
3994 unsigned i;
3996 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
3997 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
3998 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
4000 type = nary->type;
4001 fp_operation = FLOAT_TYPE_P (type);
4002 if (fp_operation)
4004 honor_nans = flag_trapping_math && !flag_finite_math_only;
4005 honor_snans = flag_signaling_nans != 0;
4007 else if (INTEGRAL_TYPE_P (type)
4008 && TYPE_OVERFLOW_TRAPS (type))
4009 honor_trapv = true;
4011 if (nary->length >= 2)
4012 rhs2 = nary->op[1];
4013 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
4014 honor_trapv,
4015 honor_nans, honor_snans, rhs2,
4016 &handled);
4017 if (handled
4018 && ret)
4019 return true;
4021 for (i = 0; i < nary->length; ++i)
4022 if (tree_could_trap_p (nary->op[i]))
4023 return true;
4025 return false;