/* SCC value numbering for trees
   Copyright (C) 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "basic-block.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "tree-flow.h"
#include "gimple.h"
#include "tree-dump.h"
#include "timevar.h"
#include "fibheap.h"
#include "hashtab.h"
#include "tree-iterator.h"
#include "alloc-pool.h"
#include "tree-pass.h"
#include "flags.h"
#include "bitmap.h"
#include "langhooks.h"
#include "cfgloop.h"
#include "params.h"
#include "tree-ssa-propagate.h"
#include "tree-ssa-sccvn.h"
#include "gimple-fold.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value Numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight line code, it is equivalent to a regular hash based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming from
   *outside of that SCC*, so we do not need to do anything special to
   ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCCs, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
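/* As an illustrative sketch (invented, not taken from the code below),
   consider two induction variables that each form one SSA cycle:

       # i_1 = PHI <0(preheader), i_2(latch)>
       # j_1 = PHI <0(preheader), j_2(latch)>
       i_2 = i_1 + 1;
       j_2 = j_1 + 1;

   The DFS pops {i_1, i_2} and {j_1, j_2} as SCCs whose external
   operands (the constants 0 and 1) already have value numbers.
   Numbering each SCC optimistically and re-iterating just that cycle
   until it stabilizes proves i_1 == j_1 and i_2 == j_2 without ever
   revisiting the rest of the function.  */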
/* The set of hashtables and alloc_pool's for their items.  */

typedef struct vn_tables_s
{
  htab_t nary;
  htab_t phis;
  htab_t references;
  struct obstack nary_obstack;
  alloc_pool phis_pool;
  alloc_pool references_pool;
} *vn_tables_t;
static htab_t constant_to_value_id;
static bitmap constant_value_ids;


/* Valid hashtables storing information we have proven to be
   correct.  */

static vn_tables_t valid_info;

/* Optimistic hashtables storing information we are making assumptions about
   during iterations.  */

static vn_tables_t optimistic_info;

/* Pointer to the set of hashtables that is currently being used.
   Should always point to either the optimistic_info, or the
   valid_info.  */

static vn_tables_t current_info;


/* Reverse post order index for each basic block.  */

static int *rpo_numbers;

#define SSA_VAL(x) (VN_INFO ((x))->valnum)

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;

/* Next DFS number and the stack for strongly connected component
   detection.  */

static unsigned int next_dfs_num;
static VEC (tree, heap) *sccstack;


DEF_VEC_P(vn_ssa_aux_t);
DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);

/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the VEC.  */

static VEC (vn_ssa_aux_t, heap) *vn_ssa_aux_table;
static struct obstack vn_ssa_aux_obstack;
/* Return the value numbering information for a given SSA name.  */

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t res = VEC_index (vn_ssa_aux_t, vn_ssa_aux_table,
                                SSA_NAME_VERSION (name));
  gcc_checking_assert (res);
  return res;
}

/* Set the value numbering info for a given SSA name to a given
   value.  */

static inline void
VN_INFO_SET (tree name, vn_ssa_aux_t value)
{
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), value);
}

/* Initialize the value numbering info for a given SSA name.
   This should be called just once for every SSA name.  */

vn_ssa_aux_t
VN_INFO_GET (tree name)
{
  vn_ssa_aux_t newinfo;

  newinfo = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  if (SSA_NAME_VERSION (name) >= VEC_length (vn_ssa_aux_t, vn_ssa_aux_table))
    VEC_safe_grow (vn_ssa_aux_t, heap, vn_ssa_aux_table,
                   SSA_NAME_VERSION (name) + 1);
  VEC_replace (vn_ssa_aux_t, vn_ssa_aux_table,
               SSA_NAME_VERSION (name), newinfo);
  return newinfo;
}
/* Get the representative expression for the SSA_NAME NAME.  Returns
   the representative SSA_NAME if there is no expression associated with it.  */

tree
vn_get_expr_for (tree name)
{
  vn_ssa_aux_t vn = VN_INFO (name);
  gimple def_stmt;
  tree expr = NULL_TREE;

  if (vn->valnum == VN_TOP)
    return name;

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Get to the information of the value of this SSA_NAME.  */
  vn = VN_INFO (vn->valnum);

  /* If the value-number is a constant it is the representative
     expression.  */
  if (TREE_CODE (vn->valnum) != SSA_NAME)
    return vn->valnum;

  /* Else if we have an expression, return it.  */
  if (vn->expr != NULL_TREE)
    return vn->expr;

  /* Otherwise use the defining statement to build the expression.  */
  def_stmt = SSA_NAME_DEF_STMT (vn->valnum);

  /* If the value number is a default-definition or a PHI result
     use it directly.  */
  if (gimple_nop_p (def_stmt)
      || gimple_code (def_stmt) == GIMPLE_PHI)
    return vn->valnum;

  if (!is_gimple_assign (def_stmt))
    return vn->valnum;

  /* FIXME tuples.  This is incomplete and likely will miss some
     simplifications.  */
  switch (TREE_CODE_CLASS (gimple_assign_rhs_code (def_stmt)))
    {
    case tcc_reference:
      if ((gimple_assign_rhs_code (def_stmt) == VIEW_CONVERT_EXPR
           || gimple_assign_rhs_code (def_stmt) == REALPART_EXPR
           || gimple_assign_rhs_code (def_stmt) == IMAGPART_EXPR)
          && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
        expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                            gimple_expr_type (def_stmt),
                            TREE_OPERAND (gimple_assign_rhs1 (def_stmt), 0));
      break;

    case tcc_unary:
      expr = fold_build1 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt));
      break;

    case tcc_binary:
      expr = fold_build2 (gimple_assign_rhs_code (def_stmt),
                          gimple_expr_type (def_stmt),
                          gimple_assign_rhs1 (def_stmt),
                          gimple_assign_rhs2 (def_stmt));
      break;

    default:;
    }
  if (expr == NULL_TREE)
    return vn->valnum;

  /* Cache the expression.  */
  vn->expr = expr;

  return expr;
}
/* Free a phi operation structure VP.  */

static void
free_phi (void *vp)
{
  vn_phi_t phi = (vn_phi_t) vp;
  VEC_free (tree, heap, phi->phiargs);
}

/* Free a reference operation structure VP.  */

static void
free_reference (void *vp)
{
  vn_reference_t vr = (vn_reference_t) vp;
  VEC_free (vn_reference_op_s, heap, vr->operands);
}

/* Hash table equality function for vn_constant_t.  */

static int
vn_constant_eq (const void *p1, const void *p2)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  const struct vn_constant_s *vc2 = (const struct vn_constant_s *) p2;

  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

/* Hash table hash function for vn_constant_t.  */

static hashval_t
vn_constant_hash (const void *p1)
{
  const struct vn_constant_s *vc1 = (const struct vn_constant_s *) p1;
  return vc1->hashcode;
}
/* Lookup a value id for CONSTANT and return it.  If it does not
   exist, return 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, NO_INSERT);
  if (slot)
    return ((vn_constant_t)*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, creating a new one if none exists
   yet, and return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  void **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = htab_find_slot_with_hash (constant_to_value_id, &vc,
                                   vc.hashcode, INSERT);
  if (*slot)
    return ((vn_constant_t)*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_value_id ();
  *slot = (void *) vcp;
  bitmap_set_bit (constant_value_ids, vcp->value_id);
  return vcp->value_id;
}

/* Return true if V is a value id for a constant.  */

bool
value_id_constant_p (unsigned int v)
{
  return bitmap_bit_p (constant_value_ids, v);
}
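/* Illustrative usage (hypothetical calls, not from this file): two
   structurally equal constants of compatible type share one id, so

     unsigned int id1
       = get_or_alloc_constant_value_id (build_int_cst (integer_type_node, 7));
     unsigned int id2
       = get_or_alloc_constant_value_id (build_int_cst (integer_type_node, 7));

   leaves id1 == id2, with value_id_constant_p (id1) true.  */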
/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return vro1->opcode == vro2->opcode
    && types_compatible_p (vro1->type, vro2->type)
    && expressions_equal_p (vro1->op0, vro2->op0)
    && expressions_equal_p (vro1->op1, vro2->op1)
    && expressions_equal_p (vro1->op2, vro2->op2);
}

/* Compute the hash for a reference operand VRO1.  */

static hashval_t
vn_reference_op_compute_hash (const vn_reference_op_t vro1, hashval_t result)
{
  result = iterative_hash_hashval_t (vro1->opcode, result);
  if (vro1->op0)
    result = iterative_hash_expr (vro1->op0, result);
  if (vro1->op1)
    result = iterative_hash_expr (vro1->op1, result);
  if (vro1->op2)
    result = iterative_hash_expr (vro1->op2, result);
  return result;
}
/* Return the hashcode for a given reference operation P1.  */

static hashval_t
vn_reference_hash (const void *p1)
{
  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  return vr1->hashcode;
}

/* Compute a hash for the reference operation VR1 and return it.  */

hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  hashval_t result = 0;
  int i;
  vn_reference_op_t vro;
  HOST_WIDE_INT off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vn_reference_op_s, vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
        deref = true;
      else if (vro->opcode != ADDR_EXPR)
        deref = false;
      if (vro->off != -1)
        {
          if (off == -1)
            off = 0;
          off += vro->off;
        }
      else
        {
          if (off != -1
              && off != 0)
            result = iterative_hash_hashval_t (off, result);
          off = -1;
          if (deref
              && vro->opcode == ADDR_EXPR)
            {
              if (vro->op0)
                {
                  tree op = TREE_OPERAND (vro->op0, 0);
                  result = iterative_hash_hashval_t (TREE_CODE (op), result);
                  result = iterative_hash_expr (op, result);
                }
            }
          else
            result = vn_reference_op_compute_hash (vro, result);
        }
    }
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
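/* A hedged illustration of why the hash folds constant offsets: the
   operand walk for  MEM[&a + 4]  ends in a MEM_REF/ADDR_EXPR pair,
   while  a.b  (with field b at byte offset 4) ends in a
   COMPONENT_REF/VAR_DECL pair; both accumulate the known offset 4 and
   then hash the underlying decl A the same way, so the two spellings
   of the same access land in the same hash bucket.  */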
/* Return true if reference operations P1 and P2 are equivalent.  This
   means they have the same set of operands and vuses.  */

int
vn_reference_eq (const void *p1, const void *p2)
{
  unsigned i, j;

  const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
  const_vn_reference_t const vr2 = (const_vn_reference_t) p2;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
    return false;

  if (INTEGRAL_TYPE_P (vr1->type)
      && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
        return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
           && (TYPE_PRECISION (vr1->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
           && (TYPE_PRECISION (vr2->type)
               != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      HOST_WIDE_INT off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      for (; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro1); i++)
        {
          if (vro1->opcode == MEM_REF)
            deref1 = true;
          if (vro1->off == -1)
            break;
          off1 += vro1->off;
        }
      for (; VEC_iterate (vn_reference_op_s, vr2->operands, j, vro2); j++)
        {
          if (vro2->opcode == MEM_REF)
            deref2 = true;
          if (vro2->off == -1)
            break;
          off2 += vro2->off;
        }
      if (off1 != off2)
        return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
        {
          memset (&tem1, 0, sizeof (tem1));
          tem1.op0 = TREE_OPERAND (vro1->op0, 0);
          tem1.type = TREE_TYPE (tem1.op0);
          tem1.opcode = TREE_CODE (tem1.op0);
          vro1 = &tem1;
        }
      if (deref2 && vro2->opcode == ADDR_EXPR)
        {
          memset (&tem2, 0, sizeof (tem2));
          tem2.op0 = TREE_OPERAND (vro2->op0, 0);
          tem2.type = TREE_TYPE (tem2.op0);
          tem2.opcode = TREE_CODE (tem2.op0);
          vro2 = &tem2;
        }
      if (!vn_reference_op_eq (vro1, vro2))
        return false;
      ++j;
      ++i;
    }
  while (VEC_length (vn_reference_op_s, vr1->operands) != i
         || VEC_length (vn_reference_op_s, vr2->operands) != j);

  return true;
}
/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
{
  if (TREE_CODE (ref) == TARGET_MEM_REF)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.op0 = TMR_INDEX (ref);
      temp.op1 = TMR_STEP (ref);
      temp.op2 = TMR_OFFSET (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = ERROR_MARK;
      temp.op0 = TMR_INDEX2 (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      memset (&temp, 0, sizeof (temp));
      temp.type = NULL_TREE;
      temp.opcode = TREE_CODE (TMR_BASE (ref));
      temp.op0 = TMR_BASE (ref);
      temp.off = -1;
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
      return;
    }

  /* For non-calls, store the information that makes up the address.  */

  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      /* We do not care for spurious type qualifications.  */
      temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
        {
        case MEM_REF:
          /* The base address gets its own vn_reference_op_s structure.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          if (host_integerp (TREE_OPERAND (ref, 1), 0))
            temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
          break;
        case BIT_FIELD_REF:
          /* Record bits and position.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          break;
        case COMPONENT_REF:
          /* The field decl is enough to unambiguously specify the field,
             a matching type is not necessary and a mismatching type
             is always a spurious difference.  */
          temp.type = NULL_TREE;
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
          {
            tree this_offset = component_ref_field_offset (ref);
            if (this_offset
                && TREE_CODE (this_offset) == INTEGER_CST)
              {
                tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
                if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
                  {
                    double_int off
                      = double_int_add (tree_to_double_int (this_offset),
                                        double_int_rshift
                                          (tree_to_double_int (bit_offset),
                                           BITS_PER_UNIT == 8
                                           ? 3 : exact_log2 (BITS_PER_UNIT),
                                           HOST_BITS_PER_DOUBLE_INT, true));
                    if (double_int_fits_in_shwi_p (off))
                      temp.off = off.low;
                  }
              }
          }
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* Record index as operand.  */
          temp.op0 = TREE_OPERAND (ref, 1);
          /* Always record lower bounds and element size.  */
          temp.op1 = array_ref_low_bound (ref);
          temp.op2 = array_ref_element_size (ref);
          if (TREE_CODE (temp.op0) == INTEGER_CST
              && TREE_CODE (temp.op1) == INTEGER_CST
              && TREE_CODE (temp.op2) == INTEGER_CST)
            {
              double_int off = tree_to_double_int (temp.op0);
              off = double_int_add (off,
                                    double_int_neg
                                      (tree_to_double_int (temp.op1)));
              off = double_int_mul (off, tree_to_double_int (temp.op2));
              if (double_int_fits_in_shwi_p (off))
                temp.off = off.low;
            }
          break;
        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case VAR_DECL:
        case PARM_DECL:
        case CONST_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          temp.op0 = ref;
          break;
        case ADDR_EXPR:
          if (is_gimple_min_invariant (ref))
            {
              temp.op0 = ref;
              break;
            }
          /* Fallthrough.  */
          /* These are only interesting for their operands, their
             existence, and their type.  They will never be the last
             ref in the chain of references (i.e. they require an
             operand), so we don't have to put anything
             for op* as it will be handled by the iteration.  */
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
          temp.off = 0;
          break;
        case IMAGPART_EXPR:
          /* This is only interesting for its constant offset.  */
          temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
          break;
        default:
          gcc_unreachable ();
        }
      VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

      if (REFERENCE_CLASS_P (ref)
          || (TREE_CODE (ref) == ADDR_EXPR
              && !is_gimple_min_invariant (ref)))
        ref = TREE_OPERAND (ref, 0);
      else
        ref = NULL_TREE;
    }
}
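/* Illustrative sketch (names invented): for a reference such as
   a.b[i_1] the loop above pushes, outermost first,

     { opcode = ARRAY_REF,     op0 = i_1, op1 = low bound, op2 = elt size }
     { opcode = COMPONENT_REF, op0 = FIELD_DECL for b }
     { opcode = VAR_DECL,      op0 = a }

   so two references compare equal exactly when their op vectors do.  */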
/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
                               alias_set_type set, tree type,
                               VEC (vn_reference_op_s, heap) *ops)
{
  vn_reference_op_t op;
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT max_size;
  HOST_WIDE_INT size = -1;
  tree size_tree = NULL_TREE;
  alias_set_type base_alias_set = -1;

  /* First get the final access size from just the outermost expression.  */
  op = VEC_index (vn_reference_op_s, ops, 0);
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  else
    {
      enum machine_mode mode = TYPE_MODE (type);
      if (mode == BLKmode)
        size_tree = TYPE_SIZE (type);
      else
        size = GET_MODE_BITSIZE (mode);
    }
  if (size_tree != NULL_TREE)
    {
      if (!host_integerp (size_tree, 1))
        size = -1;
      else
        size = TREE_INT_CST_LOW (size_tree);
    }

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (vn_reference_op_s, ops, i, op)
    {
      switch (op->opcode)
        {
        /* These may be in the reference ops, but we cannot do anything
           sensible with them here.  */
        case ADDR_EXPR:
          /* Apart from ADDR_EXPR arguments to MEM_REF.  */
          if (base != NULL_TREE
              && TREE_CODE (base) == MEM_REF
              && op->op0
              && DECL_P (TREE_OPERAND (op->op0, 0)))
            {
              vn_reference_op_t pop = VEC_index (vn_reference_op_s, ops, i-1);
              base = TREE_OPERAND (op->op0, 0);
              if (pop->off == -1)
                {
                  max_size = -1;
                  offset = 0;
                }
              else
                offset += pop->off * BITS_PER_UNIT;
              op0_p = NULL;
              break;
            }
          /* Fallthru.  */
        case CALL_EXPR:
          return false;

        /* Record the base objects.  */
        case MEM_REF:
          base_alias_set = get_deref_alias_set (op->op0);
          *op0_p = build2 (MEM_REF, op->type,
                           NULL_TREE, op->op0);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;

        case VAR_DECL:
        case PARM_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          *op0_p = op->op0;
          op0_p = NULL;
          break;

        /* And now the usual component-reference style ops.  */
        case BIT_FIELD_REF:
          offset += tree_low_cst (op->op1, 0);
          break;

        case COMPONENT_REF:
          {
            tree field = op->op0;
            /* We do not have a complete COMPONENT_REF tree here so we
               cannot use component_ref_field_offset.  Do the interesting
               parts manually.  */

            if (op->op1
                || !host_integerp (DECL_FIELD_OFFSET (field), 1))
              max_size = -1;
            else
              {
                offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
                           * BITS_PER_UNIT);
                offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
              }
            break;
          }

        case ARRAY_RANGE_REF:
        case ARRAY_REF:
          /* We recorded the lower bound and the element size.  */
          if (!host_integerp (op->op0, 0)
              || !host_integerp (op->op1, 0)
              || !host_integerp (op->op2, 0))
            max_size = -1;
          else
            {
              HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
              hindex -= TREE_INT_CST_LOW (op->op1);
              hindex *= TREE_INT_CST_LOW (op->op2);
              hindex *= BITS_PER_UNIT;
              offset += hindex;
            }
          break;

        case REALPART_EXPR:
          break;

        case IMAGPART_EXPR:
          offset += size;
          break;

        case VIEW_CONVERT_EXPR:
          break;

        case STRING_CST:
        case INTEGER_CST:
        case COMPLEX_CST:
        case VECTOR_CST:
        case REAL_CST:
        case CONSTRUCTOR:
        case CONST_DECL:
          return false;

        default:
          return false;
        }
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->offset = offset;
  ref->size = size;
  ref->max_size = max_size;
  ref->ref_alias_set = set;
  if (base_alias_set != -1)
    ref->base_alias_set = base_alias_set;
  else
    ref->base_alias_set = get_alias_set (base);

  return true;
}
/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

void
copy_reference_ops_from_call (gimple call,
                              VEC(vn_reference_op_s, heap) **result)
{
  vn_reference_op_s temp;
  unsigned i;

  /* Copy the type, opcode, function being called and static chain.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_return_type (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  temp.op1 = gimple_call_chain (call);
  temp.off = -1;
  VEC_safe_push (vn_reference_op_s, heap, *result, &temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_ref (tree ref)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_ref (ref, &result);
  return result;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is not shared.  */

static VEC(vn_reference_op_s, heap) *
create_reference_ops_from_call (gimple call)
{
  VEC (vn_reference_op_s, heap) *result = NULL;

  copy_reference_ops_from_call (call, &result);
  return result;
}
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
void
vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
                            unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
  vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
  tree addr_base;
  HOST_WIDE_INT addr_offset;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
                                             &addr_offset);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != op->op0)
    {
      double_int off = tree_to_double_int (mem_op->op0);
      off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
      off = double_int_add (off, shwi_to_double_int (addr_offset));
      mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (host_integerp (mem_op->op0, 0))
        mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
      else
        mem_op->off = -1;
    }
}
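/* For illustration (offsets invented): if the ops describe
   MEM[&s.f + 0] and field f sits at byte offset 8 within s, the
   routine above rewrites the pair into MEM[&s + 8], moving the
   component offset into the MEM_REF and re-basing the address on the
   containing object s.  */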
/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static void
vn_reference_maybe_forwprop_address (VEC (vn_reference_op_s, heap) **ops,
                                     unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
  vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
  gimple def_stmt;
  enum tree_code code;
  double_int off;

  def_stmt = SSA_NAME_DEF_STMT (op->op0);
  if (!is_gimple_assign (def_stmt))
    return;

  code = gimple_assign_rhs_code (def_stmt);
  if (code != ADDR_EXPR
      && code != POINTER_PLUS_EXPR)
    return;

  off = tree_to_double_int (mem_op->op0);
  off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  if (code == ADDR_EXPR)
    {
      tree addr, addr_base;
      HOST_WIDE_INT addr_offset;

      addr = gimple_assign_rhs1 (def_stmt);
      addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (addr, 0),
                                                 &addr_offset);
      if (!addr_base
          || TREE_CODE (addr_base) != MEM_REF)
        return;

      off = double_int_add (off, shwi_to_double_int (addr_offset));
      off = double_int_add (off, mem_ref_offset (addr_base));
      op->op0 = TREE_OPERAND (addr_base, 0);
    }
  else
    {
      tree ptr, ptroff;
      ptr = gimple_assign_rhs1 (def_stmt);
      ptroff = gimple_assign_rhs2 (def_stmt);
      if (TREE_CODE (ptr) != SSA_NAME
          || TREE_CODE (ptroff) != INTEGER_CST)
        return;

      off = double_int_add (off, tree_to_double_int (ptroff));
      op->op0 = ptr;
    }

  mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
  if (host_integerp (mem_op->op0, 0))
    mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
  else
    mem_op->off = -1;
  if (TREE_CODE (op->op0) == SSA_NAME)
    op->op0 = SSA_VAL (op->op0);
  if (TREE_CODE (op->op0) != SSA_NAME)
    op->opcode = TREE_CODE (op->op0);

  /* And recurse.  */
  if (TREE_CODE (op->op0) == SSA_NAME)
    vn_reference_maybe_forwprop_address (ops, i_p);
  else if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);
}
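/* A hedged example of the effect: given

     p_1 = q_2 + 16;           (POINTER_PLUS_EXPR)
     ... MEM[p_1 + 4] ...

   the ops are rewritten to describe MEM[q_2 + 20], and the routine
   recurses in case q_2 is itself defined by another address
   computation that can be folded the same way.  */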
/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  VEC (vn_reference_op_s, heap) *operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = VEC_index (vn_reference_op_s, operands, 0);
  if (op->opcode == CALL_EXPR
      && TREE_CODE (op->op0) == ADDR_EXPR
      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
      && DECL_BUILT_IN (TREE_OPERAND (op->op0, 0))
      && VEC_length (vn_reference_op_s, operands) >= 2
      && VEC_length (vn_reference_op_s, operands) <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (VEC_length (vn_reference_op_s, operands) > 2)
        arg1 = VEC_index (vn_reference_op_s, operands, 2);
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
          || (arg0->opcode == ADDR_EXPR
              && is_gimple_min_invariant (arg0->op0)))
        anyconst = true;
      if (arg1
          && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
              || (arg1->opcode == ADDR_EXPR
                  && is_gimple_min_invariant (arg1->op0))))
        anyconst = true;
      if (anyconst)
        {
          tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
                                         arg1 ? 2 : 1,
                                         arg0->op0,
                                         arg1 ? arg1->op0 : NULL);
          if (folded
              && TREE_CODE (folded) == NOP_EXPR)
            folded = TREE_OPERAND (folded, 0);
          if (folded
              && is_gimple_min_invariant (folded))
            return folded;
        }
    }

  /* Simplify reads from constant strings.  */
  else if (op->opcode == ARRAY_REF
           && TREE_CODE (op->op0) == INTEGER_CST
           && integer_zerop (op->op1)
           && VEC_length (vn_reference_op_s, operands) == 2)
    {
      vn_reference_op_t arg0;
      arg0 = VEC_index (vn_reference_op_s, operands, 1);
      if (arg0->opcode == STRING_CST
          && (TYPE_MODE (op->type)
              == TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0->op0))))
          && GET_MODE_CLASS (TYPE_MODE (op->type)) == MODE_INT
          && GET_MODE_SIZE (TYPE_MODE (op->type)) == 1
          && compare_tree_int (op->op0, TREE_STRING_LENGTH (arg0->op0)) < 0)
        return build_int_cst_type (op->type,
                                   (TREE_STRING_POINTER (arg0->op0)
                                    [TREE_INT_CST_LOW (op->op0)]));
    }

  return NULL_TREE;
}
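/* For instance (illustrative): a read such as  "foo"[1]  reaches the
   constant-string branch above with op->op0 == 1 and arg0 holding the
   STRING_CST, and folds to the character constant 'o'; an index at or
   past TREE_STRING_LENGTH falls through and returns NULL_TREE.  */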
/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  */

static VEC (vn_reference_op_s, heap) *
valueize_refs (VEC (vn_reference_op_s, heap) *orig)
{
  vn_reference_op_t vro;
  unsigned int i;

  FOR_EACH_VEC_ELT (vn_reference_op_s, orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
          || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
        {
          vro->op0 = SSA_VAL (vro->op0);
          /* If it transforms from an SSA_NAME to a constant, update
             the opcode.  */
          if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
            vro->opcode = TREE_CODE (vro->op0);
        }
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
        vro->op1 = SSA_VAL (vro->op1);
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
        vro->op2 = SSA_VAL (vro->op2);
      /* If it transforms from an SSA_NAME to an address, fold with
         a preceding indirect reference.  */
      if (i > 0
          && vro->op0
          && TREE_CODE (vro->op0) == ADDR_EXPR
          && VEC_index (vn_reference_op_s,
                        orig, i - 1)->opcode == MEM_REF)
        vn_reference_fold_indirect (&orig, &i);
      else if (i > 0
               && vro->opcode == SSA_NAME
               && VEC_index (vn_reference_op_s,
                             orig, i - 1)->opcode == MEM_REF)
        vn_reference_maybe_forwprop_address (&orig, &i);
      /* If it transforms a non-constant ARRAY_REF into a constant
         one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
               && vro->off == -1
               && TREE_CODE (vro->op0) == INTEGER_CST
               && TREE_CODE (vro->op1) == INTEGER_CST
               && TREE_CODE (vro->op2) == INTEGER_CST)
        {
          double_int off = tree_to_double_int (vro->op0);
          off = double_int_add (off,
                                double_int_neg
                                  (tree_to_double_int (vro->op1)));
          off = double_int_mul (off, tree_to_double_int (vro->op2));
          if (double_int_fits_in_shwi_p (off))
            vro->off = off.low;
        }
    }

  return orig;
}

static VEC(vn_reference_op_s, heap) *shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_ref (tree ref)
{
  if (!ref)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static VEC(vn_reference_op_s, heap) *
valueize_shared_reference_ops_from_call (gimple call)
{
  if (!call)
    return NULL;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  shared_lookup_references = valueize_refs (shared_lookup_references);
  return shared_lookup_references;
}
/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  void **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
        *vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}

static tree *last_vuse_ptr;
static vn_lookup_kind vn_walk_kind;
static vn_lookup_kind default_vn_walk_kind;

/* Callback for walk_non_aliased_vuses.  Adjusts the vn_reference_t VR_
   with the current VUSE and performs the expression lookup.  */

static void *
vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  void **slot;
  hashval_t hash;

  if (last_vuse_ptr)
    *last_vuse_ptr = vuse;

  /* Fixup vuse and hash.  */
  if (vr->vuse)
    vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
  vr->vuse = SSA_VAL (vuse);
  if (vr->vuse)
    vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);

  hash = vr->hashcode;
  slot = htab_find_slot_with_hash (current_info->references, vr,
                                   hash, NO_INSERT);
  if (!slot && current_info == optimistic_info)
    slot = htab_find_slot_with_hash (valid_info->references, vr,
                                     hash, NO_INSERT);
  if (slot)
    return *slot;

  return NULL;
}
/* Callback for walk_non_aliased_vuses.  Tries to perform a lookup
   from the statement defining VUSE and if not successful tries to
   translate *REFP and VR_ through an aggregate copy at the definition
   of VUSE.  */

static void *
vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
{
  vn_reference_t vr = (vn_reference_t)vr_;
  gimple def_stmt = SSA_NAME_DEF_STMT (vuse);
  tree base;
  HOST_WIDE_INT offset, maxsize;
  static VEC (vn_reference_op_s, heap) *lhs_ops = NULL;
  ao_ref lhs_ref;
  bool lhs_ref_ok = false;

  /* First try to disambiguate after value-replacing in the definitions LHS.  */
  if (is_gimple_assign (def_stmt))
    {
      VEC (vn_reference_op_s, heap) *tem;
      tree lhs = gimple_assign_lhs (def_stmt);
      /* Avoid re-allocation overhead.  */
      VEC_truncate (vn_reference_op_s, lhs_ops, 0);
      copy_reference_ops_from_ref (lhs, &lhs_ops);
      tem = lhs_ops;
      lhs_ops = valueize_refs (lhs_ops);
      gcc_assert (lhs_ops == tem);
      lhs_ref_ok = ao_ref_init_from_vn_reference (&lhs_ref, get_alias_set (lhs),
                                                  TREE_TYPE (lhs), lhs_ops);
      if (lhs_ref_ok
          && !refs_may_alias_p_1 (ref, &lhs_ref, true))
        return NULL;
    }

  base = ao_ref_base (ref);
  offset = ref->offset;
  maxsize = ref->max_size;

  /* If we cannot constrain the size of the reference we cannot
     test if anything kills it.  */
  if (maxsize == -1)
    return (void *)-1;

  /* def_stmt may-defs *ref.  See if we can derive a value for *ref
     from that definition.
     1) Memset.  */
  if (is_gimple_reg_type (vr->type)
      && gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
      && integer_zerop (gimple_call_arg (def_stmt, 1))
      && host_integerp (gimple_call_arg (def_stmt, 2), 1)
      && TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR)
    {
      tree ref2 = TREE_OPERAND (gimple_call_arg (def_stmt, 0), 0);
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
      size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
      if ((unsigned HOST_WIDE_INT)size2 / 8
          == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
          && maxsize2 != -1
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = build_zero_cst (vr->type);
          unsigned int value_id = get_or_alloc_constant_value_id (val);
          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
                                             val, value_id);
        }
    }

  /* 2) Assignment from an empty CONSTRUCTOR.  */
  else if (is_gimple_reg_type (vr->type)
           && gimple_assign_single_p (def_stmt)
           && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
           && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      base2 = get_ref_base_and_extent (gimple_assign_lhs (def_stmt),
                                       &offset2, &size2, &maxsize2);
      if (maxsize2 != -1
          && operand_equal_p (base, base2, 0)
          && offset2 <= offset
          && offset2 + size2 >= offset + maxsize)
        {
          tree val = build_zero_cst (vr->type);
          unsigned int value_id = get_or_alloc_constant_value_id (val);
          return vn_reference_insert_pieces (vuse, vr->set, vr->type,
                                             VEC_copy (vn_reference_op_s,
                                                       heap, vr->operands),
                                             val, value_id);
        }
    }

  /* 3) For aggregate copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
           && gimple_assign_single_p (def_stmt)
           && (DECL_P (gimple_assign_rhs1 (def_stmt))
               || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
               || handled_component_p (gimple_assign_rhs1 (def_stmt))))
    {
      tree base2;
      HOST_WIDE_INT offset2, size2, maxsize2;
      int i, j;
      VEC (vn_reference_op_s, heap) *rhs = NULL;
      vn_reference_op_t vro;
      ao_ref r;

      if (!lhs_ref_ok)
        return (void *)-1;

      /* See if the assignment kills REF.  */
      base2 = ao_ref_base (&lhs_ref);
      offset2 = lhs_ref.offset;
      size2 = lhs_ref.size;
      maxsize2 = lhs_ref.max_size;
      if (maxsize2 == -1
          || (base != base2 && !operand_equal_p (base, base2, 0))
          || offset2 > offset
          || offset2 + size2 < offset + maxsize)
        return (void *)-1;

      /* Find the common base of ref and the lhs.  lhs_ops already
         contains valueized operands for the lhs.  */
      i = VEC_length (vn_reference_op_s, vr->operands) - 1;
      j = VEC_length (vn_reference_op_s, lhs_ops) - 1;
      while (j >= 0 && i >= 0
             && vn_reference_op_eq (VEC_index (vn_reference_op_s,
                                               vr->operands, i),
                                    VEC_index (vn_reference_op_s, lhs_ops, j)))
        {
          i--;
          j--;
        }

      /* i now points to the first additional op.
         ??? LHS may not be completely contained in VR, one or more
         VIEW_CONVERT_EXPRs could be in its way.  We could at least
         try handling outermost VIEW_CONVERT_EXPRs.  */
      if (j != -1)
        return (void *)-1;

      /* Now re-write REF to be based on the rhs of the assignment.  */
      copy_reference_ops_from_ref (gimple_assign_rhs1 (def_stmt), &rhs);
      /* We need to pre-pend vr->operands[0..i] to rhs.  */
      if (i + 1 + VEC_length (vn_reference_op_s, rhs)
          > VEC_length (vn_reference_op_s, vr->operands))
        {
          VEC (vn_reference_op_s, heap) *old = vr->operands;
          VEC_safe_grow (vn_reference_op_s, heap, vr->operands,
                         i + 1 + VEC_length (vn_reference_op_s, rhs));
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references = NULL;
        }
      else
        VEC_truncate (vn_reference_op_s, vr->operands,
                      i + 1 + VEC_length (vn_reference_op_s, rhs));
      FOR_EACH_VEC_ELT (vn_reference_op_s, rhs, j, vro)
        VEC_replace (vn_reference_op_s, vr->operands, i + 1 + j, vro);
      VEC_free (vn_reference_op_s, heap, rhs);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
        return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* 4) For memcpy copies translate the reference through them if
     the copy kills ref.  */
  else if (vn_walk_kind == VN_WALKREWRITE
           && is_gimple_reg_type (vr->type)
           /* ???  Handle BCOPY as well.  */
           && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
               || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
               || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE))
           && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
               || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
           && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
               || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
           && host_integerp (gimple_call_arg (def_stmt, 2), 1))
    {
      tree lhs, rhs;
      ao_ref r;
      HOST_WIDE_INT rhs_offset, copy_size, lhs_offset;
      vn_reference_op_s op;
      HOST_WIDE_INT at;

      /* Only handle non-variable, addressable refs.  */
      if (ref->size != maxsize
          || offset % BITS_PER_UNIT != 0
          || ref->size % BITS_PER_UNIT != 0)
        return (void *)-1;

      /* Extract a pointer base and an offset for the destination.  */
      lhs = gimple_call_arg (def_stmt, 0);
      lhs_offset = 0;
      if (TREE_CODE (lhs) == SSA_NAME)
        lhs = SSA_VAL (lhs);
      if (TREE_CODE (lhs) == ADDR_EXPR)
        {
          tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
                                                    &lhs_offset);
          if (!tem)
            return (void *)-1;
          if (TREE_CODE (tem) == MEM_REF
              && host_integerp (TREE_OPERAND (tem, 1), 1))
            {
              lhs = TREE_OPERAND (tem, 0);
              lhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
            }
          else if (DECL_P (tem))
            lhs = build_fold_addr_expr (tem);
          else
            return (void *)-1;
        }
      if (TREE_CODE (lhs) != SSA_NAME
          && TREE_CODE (lhs) != ADDR_EXPR)
        return (void *)-1;

      /* Extract a pointer base and an offset for the source.  */
      rhs = gimple_call_arg (def_stmt, 1);
      rhs_offset = 0;
      if (TREE_CODE (rhs) == SSA_NAME)
        rhs = SSA_VAL (rhs);
      if (TREE_CODE (rhs) == ADDR_EXPR)
        {
          tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
                                                    &rhs_offset);
          if (!tem)
            return (void *)-1;
          if (TREE_CODE (tem) == MEM_REF
              && host_integerp (TREE_OPERAND (tem, 1), 1))
            {
              rhs = TREE_OPERAND (tem, 0);
              rhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
            }
          else if (DECL_P (tem))
            rhs = build_fold_addr_expr (tem);
          else
            return (void *)-1;
        }
      if (TREE_CODE (rhs) != SSA_NAME
          && TREE_CODE (rhs) != ADDR_EXPR)
        return (void *)-1;

      copy_size = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2));

      /* The bases of the destination and the references have to agree.  */
      if ((TREE_CODE (base) != MEM_REF
           && !DECL_P (base))
          || (TREE_CODE (base) == MEM_REF
              && (TREE_OPERAND (base, 0) != lhs
                  || !host_integerp (TREE_OPERAND (base, 1), 1)))
          || (DECL_P (base)
              && (TREE_CODE (lhs) != ADDR_EXPR
                  || TREE_OPERAND (lhs, 0) != base)))
        return (void *)-1;

      /* And the access has to be contained within the memcpy destination.  */
      at = offset / BITS_PER_UNIT;
      if (TREE_CODE (base) == MEM_REF)
        at += TREE_INT_CST_LOW (TREE_OPERAND (base, 1));
      if (lhs_offset > at
          || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
        return (void *)-1;

      /* Make room for 2 operands in the new reference.  */
      if (VEC_length (vn_reference_op_s, vr->operands) < 2)
        {
          VEC (vn_reference_op_s, heap) *old = vr->operands;
          VEC_safe_grow (vn_reference_op_s, heap, vr->operands, 2);
          if (old == shared_lookup_references
              && vr->operands != old)
            shared_lookup_references = NULL;
        }
      else
        VEC_truncate (vn_reference_op_s, vr->operands, 2);

      /* The looked-through reference is a simple MEM_REF.  */
      memset (&op, 0, sizeof (op));
      op.type = vr->type;
      op.opcode = MEM_REF;
      op.op0 = build_int_cst (ptr_type_node, at - rhs_offset);
      op.off = at - lhs_offset + rhs_offset;
      VEC_replace (vn_reference_op_s, vr->operands, 0, &op);
      op.type = TYPE_MAIN_VARIANT (TREE_TYPE (rhs));
      op.opcode = TREE_CODE (rhs);
      op.op0 = rhs;
      op.off = -1;
      VEC_replace (vn_reference_op_s, vr->operands, 1, &op);
      vr->hashcode = vn_reference_compute_hash (vr);

      /* Adjust *ref from the new operands.  */
      if (!ao_ref_init_from_vn_reference (&r, vr->set, vr->type, vr->operands))
        return (void *)-1;
      /* This can happen with bitfields.  */
      if (ref->size != r.size)
        return (void *)-1;
      *ref = r;

      /* Do not update last seen VUSE after translating.  */
      last_vuse_ptr = NULL;

      /* Keep looking for the adjusted *REF / VR pair.  */
      return NULL;
    }

  /* Bail out and stop walking.  */
  return (void *)-1;
}
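/* A worked illustration (not from the sources): given

     memset (&a, 0, sizeof (a));
     x_1 = a.f;

   walking the vuse of the load reaches the memset; case 1) above sees
   that the memset covers the load and records a.f as the constant 0
   with a constant value id, so later loads of a.f from the same
   memory state unify with it.  */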
/* Lookup a reference operation by its parts, in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

tree
vn_reference_lookup_pieces (tree vuse, alias_set_type set, tree type,
                            VEC (vn_reference_op_s, heap) *operands,
                            vn_reference_t *vnresult, vn_lookup_kind kind)
{
  struct vn_reference_s vr1;
  vn_reference_t tmp;
  tree cst;

  if (!vnresult)
    vnresult = &tmp;
  *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  VEC_truncate (vn_reference_op_s, shared_lookup_references, 0);
  VEC_safe_grow (vn_reference_op_s, heap, shared_lookup_references,
                 VEC_length (vn_reference_op_s, operands));
  memcpy (VEC_address (vn_reference_op_s, shared_lookup_references),
          VEC_address (vn_reference_op_s, operands),
          sizeof (vn_reference_op_s)
          * VEC_length (vn_reference_op_s, operands));
  vr1.operands = operands = shared_lookup_references
    = valueize_refs (shared_lookup_references);
  vr1.type = type;
  vr1.set = set;
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  vn_reference_lookup_1 (&vr1, vnresult);
  if (!*vnresult
      && kind != VN_NOWALK
      && vr1.vuse)
    {
      ao_ref r;
      vn_walk_kind = kind;
      if (ao_ref_init_from_vn_reference (&r, set, type, vr1.operands))
        *vnresult =
          (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                  vn_reference_lookup_2,
                                                  vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
    }

  if (*vnresult)
    return (*vnresult)->result;

  return NULL_TREE;
}
/* Lookup OP in the current hash table, and return the resulting value
   number if it exists in the hash table.  Return NULL_TREE if it does
   not exist in the hash table or if the result field of the structure
   was NULL.  VNRESULT will be filled in with the vn_reference_t
   stored in the hashtable if one exists.  */

tree
vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
                     vn_reference_t *vnresult)
{
  VEC (vn_reference_op_s, heap) *operands;
  struct vn_reference_s vr1;
  tree cst;

  if (vnresult)
    *vnresult = NULL;

  vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1.operands = operands = valueize_shared_reference_ops_from_ref (op);
  vr1.type = TREE_TYPE (op);
  vr1.set = get_alias_set (op);
  vr1.hashcode = vn_reference_compute_hash (&vr1);
  if ((cst = fully_constant_vn_reference_p (&vr1)))
    return cst;

  if (kind != VN_NOWALK
      && vr1.vuse)
    {
      vn_reference_t wvnresult;
      ao_ref r;
      ao_ref_init (&r, op);
      vn_walk_kind = kind;
      wvnresult =
        (vn_reference_t)walk_non_aliased_vuses (&r, vr1.vuse,
                                                vn_reference_lookup_2,
                                                vn_reference_lookup_3, &vr1);
      if (vr1.operands != operands)
        VEC_free (vn_reference_op_s, heap, vr1.operands);
      if (wvnresult)
        {
          if (vnresult)
            *vnresult = wvnresult;
          return wvnresult->result;
        }

      return NULL_TREE;
    }

  return vn_reference_lookup_1 (&vr1, vnresult);
}
/* Insert OP into the current hash table with a value number of
   RESULT, and return the resulting reference structure we created.  */

vn_reference_t
vn_reference_insert (tree op, tree result, tree vuse)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  if (TREE_CODE (result) == SSA_NAME)
    vr1->value_id = VN_INFO (result)->value_id;
  else
    vr1->value_id = get_or_alloc_constant_value_id (result);
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (create_reference_ops_from_ref (op));
  vr1->type = TREE_TYPE (op);
  vr1->set = get_alias_set (op);
  vr1->hashcode = vn_reference_compute_hash (vr1);
  vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* Because we lookup stores using vuses, and value number failures
     using the vdefs (see visit_reference_op_store for how and why),
     it's possible that on failure we may try to insert an already
     inserted store.  This is not wrong, there is no ssa name for a
     store that we could use as a differentiator anyway.  Thus, unlike
     the other lookup functions, you cannot gcc_assert (!*slot)
     here.  */

  /* But free the old slot in case of a collision.  */
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
/* Insert a reference by its pieces into the current hash table with
   a value number of RESULT.  Return the resulting reference
   structure we created.  */

vn_reference_t
vn_reference_insert_pieces (tree vuse, alias_set_type set, tree type,
                            VEC (vn_reference_op_s, heap) *operands,
                            tree result, unsigned int value_id)
{
  void **slot;
  vn_reference_t vr1;

  vr1 = (vn_reference_t) pool_alloc (current_info->references_pool);
  vr1->value_id = value_id;
  vr1->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
  vr1->operands = valueize_refs (operands);
  vr1->type = type;
  vr1->set = set;
  vr1->hashcode = vn_reference_compute_hash (vr1);
  if (result && TREE_CODE (result) == SSA_NAME)
    result = SSA_VAL (result);
  vr1->result = result;

  slot = htab_find_slot_with_hash (current_info->references, vr1, vr1->hashcode,
                                   INSERT);

  /* At this point we should have all the things inserted that we have
     seen before, and we should never try inserting something that
     already exists.  */
  gcc_assert (!*slot);
  if (*slot)
    free_reference (*slot);

  *slot = vr1;
  return vr1;
}
/* Compute and return the hash value for nary operation VNO1.  */

hashval_t
vn_nary_op_compute_hash (const vn_nary_op_t vno1)
{
  hashval_t hash;
  unsigned i;

  for (i = 0; i < vno1->length; ++i)
    if (TREE_CODE (vno1->op[i]) == SSA_NAME)
      vno1->op[i] = SSA_VAL (vno1->op[i]);

  if (vno1->length == 2
      && commutative_tree_code (vno1->opcode)
      && tree_swap_operands_p (vno1->op[0], vno1->op[1], false))
    {
      tree temp = vno1->op[0];
      vno1->op[0] = vno1->op[1];
      vno1->op[1] = temp;
    }

  hash = iterative_hash_hashval_t (vno1->opcode, 0);
  for (i = 0; i < vno1->length; ++i)
    hash = iterative_hash_expr (vno1->op[i], hash);

  return hash;
}

/* Return the computed hashcode for nary operation P1.  */

static hashval_t
vn_nary_op_hash (const void *p1)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

int
vn_nary_op_eq (const void *p1, const void *p2)
{
  const_vn_nary_op_t const vno1 = (const_vn_nary_op_t) p1;
  const_vn_nary_op_t const vno2 = (const_vn_nary_op_t) p2;
  unsigned i;

  if (vno1->hashcode != vno2->hashcode)
    return false;

  if (vno1->opcode != vno2->opcode
      || !types_compatible_p (vno1->type, vno2->type))
    return false;

  for (i = 0; i < vno1->length; ++i)
    if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
      return false;

  return true;
}
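/* As a hedged example of the canonicalization in
   vn_nary_op_compute_hash: for a commutative opcode such as PLUS_EXPR,
   the operand swap means  a_1 + b_2  and  b_2 + a_1  hash and compare
   identically, so both spellings share a single hash table entry.  */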
1856 /* Initialize VNO from the pieces provided. */
1858 static void
1859 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
1860 enum tree_code code, tree type, tree op0,
1861 tree op1, tree op2, tree op3)
1863 vno->opcode = code;
1864 vno->length = length;
1865 vno->type = type;
1866 switch (length)
1868 /* The fallthrus here are deliberate. */
1869 case 4: vno->op[3] = op3;
1870 case 3: vno->op[2] = op2;
1871 case 2: vno->op[1] = op1;
1872 case 1: vno->op[0] = op0;
1873 default:
1874 break;
1878 /* Initialize VNO from OP. */
1880 static void
1881 init_vn_nary_op_from_op (vn_nary_op_t vno, tree op)
1883 unsigned i;
1885 vno->opcode = TREE_CODE (op);
1886 vno->length = TREE_CODE_LENGTH (TREE_CODE (op));
1887 vno->type = TREE_TYPE (op);
1888 for (i = 0; i < vno->length; ++i)
1889 vno->op[i] = TREE_OPERAND (op, i);
1892 /* Initialize VNO from STMT. */
1894 static void
1895 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gimple stmt)
1897 unsigned i;
1899 vno->opcode = gimple_assign_rhs_code (stmt);
1900 vno->length = gimple_num_ops (stmt) - 1;
1901 vno->type = gimple_expr_type (stmt);
1902 for (i = 0; i < vno->length; ++i)
1903 vno->op[i] = gimple_op (stmt, i + 1);
1904 if (vno->opcode == REALPART_EXPR
1905 || vno->opcode == IMAGPART_EXPR
1906 || vno->opcode == VIEW_CONVERT_EXPR)
1907 vno->op[0] = TREE_OPERAND (vno->op[0], 0);
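/* For instance (an illustrative sketch), for the assignment
   x_2 = REALPART_EXPR <c_1> the opcode is REALPART_EXPR and
   gimple_op (stmt, 1) is the tcc_reference tree wrapping c_1, so the
   code above strips that wrapper and records the underlying SSA name
   c_1 as op[0].  The nary entry then depends only on c_1, matching
   how such VOP-less references are dispatched to visit_nary_op
   during the walk.  */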
1910 /* Compute the hashcode for VNO and look for it in the hash table;
1911 return the resulting value number if it exists in the hash table.
1912 Return NULL_TREE if it does not exist in the hash table or if the
1913 result field of the operation is NULL. VNRESULT will contain the
1914 vn_nary_op_t from the hashtable if it exists. */
1916 static tree
1917 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
1919 void **slot;
1921 if (vnresult)
1922 *vnresult = NULL;
1924 vno->hashcode = vn_nary_op_compute_hash (vno);
1925 slot = htab_find_slot_with_hash (current_info->nary, vno, vno->hashcode,
1926 NO_INSERT);
1927 if (!slot && current_info == optimistic_info)
1928 slot = htab_find_slot_with_hash (valid_info->nary, vno, vno->hashcode,
1929 NO_INSERT);
1930 if (!slot)
1931 return NULL_TREE;
1932 if (vnresult)
1933 *vnresult = (vn_nary_op_t)*slot;
1934 return ((vn_nary_op_t)*slot)->result;
1937 /* Lookup an n-ary operation by its pieces and return the resulting value
1938 number if it exists in the hash table. Return NULL_TREE if it does
1939 not exist in the hash table or if the result field of the operation
1940 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
1941 if it exists. */
1943 tree
1944 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
1945 tree type, tree op0, tree op1, tree op2,
1946 tree op3, vn_nary_op_t *vnresult)
1948 struct vn_nary_op_s vno1;
1949 init_vn_nary_op_from_pieces (&vno1, length, code, type, op0, op1, op2, op3);
1950 return vn_nary_op_lookup_1 (&vno1, vnresult);
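/* A minimal usage sketch (the variable names here are made up): to
   ask whether "a + b" already has a value number without building a
   gimple statement first:

     vn_nary_op_t vnres;
     tree val = vn_nary_op_lookup_pieces (2, PLUS_EXPR, type, a, b,
                                          NULL_TREE, NULL_TREE, &vnres);

   VAL is the recorded result (or NULL_TREE), and VNRES points at the
   hash table entry itself when one exists.  */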
1953 /* Lookup OP in the current hash table, and return the resulting value
1954 number if it exists in the hash table. Return NULL_TREE if it does
1955 not exist in the hash table or if the result field of the operation
1956 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
1957 if it exists. */
1959 tree
1960 vn_nary_op_lookup (tree op, vn_nary_op_t *vnresult)
1962 struct vn_nary_op_s vno1;
1963 init_vn_nary_op_from_op (&vno1, op);
1964 return vn_nary_op_lookup_1 (&vno1, vnresult);
1967 /* Lookup the rhs of STMT in the current hash table, and return the resulting
1968 value number if it exists in the hash table. Return NULL_TREE if
1969 it does not exist in the hash table. VNRESULT will contain the
1970 vn_nary_op_t from the hashtable if it exists. */
1972 tree
1973 vn_nary_op_lookup_stmt (gimple stmt, vn_nary_op_t *vnresult)
1975 struct vn_nary_op_s vno1;
1976 init_vn_nary_op_from_stmt (&vno1, stmt);
1977 return vn_nary_op_lookup_1 (&vno1, vnresult);
1980 /* Return the size of a vn_nary_op_t with LENGTH operands. */
1982 static size_t
1983 sizeof_vn_nary_op (unsigned int length)
1985 return sizeof (struct vn_nary_op_s) - sizeof (tree) * (4 - length);
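/* The trailing op array in vn_nary_op_s has four slots, which is why
   the unused tail of 4 - LENGTH trees is subtracted above.  E.g. for
   LENGTH == 1 the allocation shrinks by 3 * sizeof (tree), and for
   LENGTH == 4 nothing is subtracted.  */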
1988 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
1990 static vn_nary_op_t
1991 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
1993 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
1996 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
1997 obstack. */
1999 static vn_nary_op_t
2000 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
2002 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length,
2003 &current_info->nary_obstack);
2005 vno1->value_id = value_id;
2006 vno1->length = length;
2007 vno1->result = result;
2009 return vno1;
2012 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
2013 VNO->HASHCODE first. */
2015 static vn_nary_op_t
2016 vn_nary_op_insert_into (vn_nary_op_t vno, htab_t table, bool compute_hash)
2018 void **slot;
2020 if (compute_hash)
2021 vno->hashcode = vn_nary_op_compute_hash (vno);
2023 slot = htab_find_slot_with_hash (table, vno, vno->hashcode, INSERT);
2024 gcc_assert (!*slot);
2026 *slot = vno;
2027 return vno;
2030 /* Insert an n-ary operation into the current hash table using its
2031 pieces. Return the vn_nary_op_t structure we created and put in
2032 the hashtable. */
2034 vn_nary_op_t
2035 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
2036 tree type, tree op0,
2037 tree op1, tree op2, tree op3,
2038 tree result,
2039 unsigned int value_id)
2041 vn_nary_op_t vno1;
2043 vno1 = alloc_vn_nary_op (length, result, value_id);
2044 init_vn_nary_op_from_pieces (vno1, length, code, type, op0, op1, op2, op3);
2045 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2048 /* Insert OP into the current hash table with a value number of
2049 RESULT. Return the vn_nary_op_t structure we created and put in
2050 the hashtable. */
2052 vn_nary_op_t
2053 vn_nary_op_insert (tree op, tree result)
2055 unsigned length = TREE_CODE_LENGTH (TREE_CODE (op));
2056 vn_nary_op_t vno1;
2058 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2059 init_vn_nary_op_from_op (vno1, op);
2060 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2063 /* Insert the rhs of STMT into the current hash table with a value number of
2064 RESULT. */
2066 vn_nary_op_t
2067 vn_nary_op_insert_stmt (gimple stmt, tree result)
2069 unsigned length = gimple_num_ops (stmt) - 1;
2070 vn_nary_op_t vno1;
2072 vno1 = alloc_vn_nary_op (length, result, VN_INFO (result)->value_id);
2073 init_vn_nary_op_from_stmt (vno1, stmt);
2074 return vn_nary_op_insert_into (vno1, current_info->nary, true);
2077 /* Compute a hashcode for PHI operation VP1 and return it. */
2079 static inline hashval_t
2080 vn_phi_compute_hash (vn_phi_t vp1)
2082 hashval_t result;
2083 int i;
2084 tree phi1op;
2085 tree type;
2087 result = vp1->block->index;
2089 /* If all PHI arguments are constants we need to distinguish
2090 the PHI node via its type. */
2091 type = TREE_TYPE (VEC_index (tree, vp1->phiargs, 0));
2092 result += (INTEGRAL_TYPE_P (type)
2093 + (INTEGRAL_TYPE_P (type)
2094 ? TYPE_PRECISION (type) + TYPE_UNSIGNED (type) : 0));
2096 FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
2098 if (phi1op == VN_TOP)
2099 continue;
2100 result = iterative_hash_expr (phi1op, result);
2103 return result;
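/* For example (a sketch with made-up numbers): a PHI in basic block 7
   whose arguments are the int constants 0 and 1 starts from
   result = 7, adds 1 + TYPE_PRECISION (int) + 0 for the signed
   integral type, and then mixes both constants in through
   iterative_hash_expr.  Without the type component an int PHI <0, 1>
   and a long PHI <0, 1> in the same block would hash identically,
   leaving vn_phi_eq's type check as the only thing keeping them
   apart.  */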
2106 /* Return the computed hashcode for phi operation P1. */
2108 static hashval_t
2109 vn_phi_hash (const void *p1)
2111 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2112 return vp1->hashcode;
2115 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
2117 static int
2118 vn_phi_eq (const void *p1, const void *p2)
2120 const_vn_phi_t const vp1 = (const_vn_phi_t) p1;
2121 const_vn_phi_t const vp2 = (const_vn_phi_t) p2;
2123 if (vp1->hashcode != vp2->hashcode)
2124 return false;
2126 if (vp1->block == vp2->block)
2128 int i;
2129 tree phi1op;
2131 /* If the PHI nodes do not have compatible types
2132 they are not the same. */
2133 if (!types_compatible_p (TREE_TYPE (VEC_index (tree, vp1->phiargs, 0)),
2134 TREE_TYPE (VEC_index (tree, vp2->phiargs, 0))))
2135 return false;
2137 /* Any phi in the same block will have its arguments in the
2138 same edge order, because of how we store phi nodes. */
2139 FOR_EACH_VEC_ELT (tree, vp1->phiargs, i, phi1op)
2141 tree phi2op = VEC_index (tree, vp2->phiargs, i);
2142 if (phi1op == VN_TOP || phi2op == VN_TOP)
2143 continue;
2144 if (!expressions_equal_p (phi1op, phi2op))
2145 return false;
2147 return true;
2149 return false;
2152 static VEC(tree, heap) *shared_lookup_phiargs;
2154 /* Lookup PHI in the current hash table, and return the resulting
2155 value number if it exists in the hash table. Return NULL_TREE if
2156 it does not exist in the hash table. */
2158 static tree
2159 vn_phi_lookup (gimple phi)
2161 void **slot;
2162 struct vn_phi_s vp1;
2163 unsigned i;
2165 VEC_truncate (tree, shared_lookup_phiargs, 0);
2167 /* Canonicalize the SSA_NAME's to their value number. */
2168 for (i = 0; i < gimple_phi_num_args (phi); i++)
2170 tree def = PHI_ARG_DEF (phi, i);
2171 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2172 VEC_safe_push (tree, heap, shared_lookup_phiargs, def);
2174 vp1.phiargs = shared_lookup_phiargs;
2175 vp1.block = gimple_bb (phi);
2176 vp1.hashcode = vn_phi_compute_hash (&vp1);
2177 slot = htab_find_slot_with_hash (current_info->phis, &vp1, vp1.hashcode,
2178 NO_INSERT);
2179 if (!slot && current_info == optimistic_info)
2180 slot = htab_find_slot_with_hash (valid_info->phis, &vp1, vp1.hashcode,
2181 NO_INSERT);
2182 if (!slot)
2183 return NULL_TREE;
2184 return ((vn_phi_t)*slot)->result;
2187 /* Insert PHI into the current hash table with a value number of
2188 RESULT. */
2190 static vn_phi_t
2191 vn_phi_insert (gimple phi, tree result)
2193 void **slot;
2194 vn_phi_t vp1 = (vn_phi_t) pool_alloc (current_info->phis_pool);
2195 unsigned i;
2196 VEC (tree, heap) *args = NULL;
2198 /* Canonicalize the SSA_NAME's to their value number. */
2199 for (i = 0; i < gimple_phi_num_args (phi); i++)
2201 tree def = PHI_ARG_DEF (phi, i);
2202 def = TREE_CODE (def) == SSA_NAME ? SSA_VAL (def) : def;
2203 VEC_safe_push (tree, heap, args, def);
2205 vp1->value_id = VN_INFO (result)->value_id;
2206 vp1->phiargs = args;
2207 vp1->block = gimple_bb (phi);
2208 vp1->result = result;
2209 vp1->hashcode = vn_phi_compute_hash (vp1);
2211 slot = htab_find_slot_with_hash (current_info->phis, vp1, vp1->hashcode,
2212 INSERT);
2214 /* Because we iterate over phi operations more than once, it's
2215 possible the slot might already exist here, hence no assert. */
2216 *slot = vp1;
2217 return vp1;
2221 /* Print set of components in strongly connected component SCC to OUT. */
2223 static void
2224 print_scc (FILE *out, VEC (tree, heap) *scc)
2226 tree var;
2227 unsigned int i;
2229 fprintf (out, "SCC consists of: ");
2230 FOR_EACH_VEC_ELT (tree, scc, i, var)
2232 print_generic_expr (out, var, 0);
2233 fprintf (out, " ");
2235 fprintf (out, "\n");
2238 /* Set the value number of FROM to TO, return true if it has changed
2239 as a result. */
2241 static inline bool
2242 set_ssa_val_to (tree from, tree to)
2244 tree currval;
2246 if (from != to
2247 && TREE_CODE (to) == SSA_NAME
2248 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
2249 to = from;
2251 /* The only thing we allow as value numbers are VN_TOP, ssa_names
2252 and invariants. So assert that here. */
2253 gcc_assert (to != NULL_TREE
2254 && (to == VN_TOP
2255 || TREE_CODE (to) == SSA_NAME
2256 || is_gimple_min_invariant (to)));
2258 if (dump_file && (dump_flags & TDF_DETAILS))
2260 fprintf (dump_file, "Setting value number of ");
2261 print_generic_expr (dump_file, from, 0);
2262 fprintf (dump_file, " to ");
2263 print_generic_expr (dump_file, to, 0);
2266 currval = SSA_VAL (from);
2268 if (currval != to && !operand_equal_p (currval, to, OEP_PURE_SAME))
2270 VN_INFO (from)->valnum = to;
2271 if (dump_file && (dump_flags & TDF_DETAILS))
2272 fprintf (dump_file, " (changed)\n");
2273 return true;
2275 if (dump_file && (dump_flags & TDF_DETAILS))
2276 fprintf (dump_file, "\n");
2277 return false;
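/* As an illustration (hypothetical values): if a_1 still has value
   number VN_TOP and we discover a_1 = 5, set_ssa_val_to (a_1, 5)
   records the constant and returns true; a later identical call finds
   currval already equal to 5 and returns false, which is how the SCC
   iteration detects that it has reached a fixed point.  */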
2280 /* Set all definitions in STMT to value number themselves.
2281 Return true if a value number changed. */
2283 static bool
2284 defs_to_varying (gimple stmt)
2286 bool changed = false;
2287 ssa_op_iter iter;
2288 def_operand_p defp;
2290 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
2292 tree def = DEF_FROM_PTR (defp);
2294 VN_INFO (def)->use_processed = true;
2295 changed |= set_ssa_val_to (def, def);
2297 return changed;
2300 static bool expr_has_constants (tree expr);
2301 static tree valueize_expr (tree expr);
2303 /* Visit a copy between LHS and RHS, return true if the value number
2304 changed. */
2306 static bool
2307 visit_copy (tree lhs, tree rhs)
2309 /* Follow chains of copies to their destination. */
2310 while (TREE_CODE (rhs) == SSA_NAME
2311 && SSA_VAL (rhs) != rhs)
2312 rhs = SSA_VAL (rhs);
2314 /* The copy may have a more interesting constant-filled expression
2315 (we don't, since we know our RHS is just an SSA name). */
2316 if (TREE_CODE (rhs) == SSA_NAME)
2318 VN_INFO (lhs)->has_constants = VN_INFO (rhs)->has_constants;
2319 VN_INFO (lhs)->expr = VN_INFO (rhs)->expr;
2322 return set_ssa_val_to (lhs, rhs);
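/* E.g. (a sketch): given b_2 = a_1 and c_3 = b_2, once b_2 has been
   value numbered to a_1, visiting the second copy follows
   SSA_VAL (b_2) to a_1 and value numbers c_3 directly to a_1, so
   arbitrarily long copy chains collapse onto the name at their
   head.  */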
2325 /* Visit a nary operator RHS, value number it, and return true if the
2326 value number of LHS has changed as a result. */
2328 static bool
2329 visit_nary_op (tree lhs, gimple stmt)
2331 bool changed = false;
2332 tree result = vn_nary_op_lookup_stmt (stmt, NULL);
2334 if (result)
2335 changed = set_ssa_val_to (lhs, result);
2336 else
2338 changed = set_ssa_val_to (lhs, lhs);
2339 vn_nary_op_insert_stmt (stmt, lhs);
2342 return changed;
2345 /* Visit a call STMT storing into LHS. Return true if the value number
2346 of the LHS has changed as a result. */
2348 static bool
2349 visit_reference_op_call (tree lhs, gimple stmt)
2351 bool changed = false;
2352 struct vn_reference_s vr1;
2353 tree result;
2354 tree vuse = gimple_vuse (stmt);
2356 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2357 vr1.operands = valueize_shared_reference_ops_from_call (stmt);
2358 vr1.type = gimple_expr_type (stmt);
2359 vr1.set = 0;
2360 vr1.hashcode = vn_reference_compute_hash (&vr1);
2361 result = vn_reference_lookup_1 (&vr1, NULL);
2362 if (result)
2364 changed = set_ssa_val_to (lhs, result);
2365 if (TREE_CODE (result) == SSA_NAME
2366 && VN_INFO (result)->has_constants)
2367 VN_INFO (lhs)->has_constants = true;
2369 else
2371 void **slot;
2372 vn_reference_t vr2;
2373 changed = set_ssa_val_to (lhs, lhs);
2374 vr2 = (vn_reference_t) pool_alloc (current_info->references_pool);
2375 vr2->vuse = vr1.vuse;
2376 vr2->operands = valueize_refs (create_reference_ops_from_call (stmt));
2377 vr2->type = vr1.type;
2378 vr2->set = vr1.set;
2379 vr2->hashcode = vr1.hashcode;
2380 vr2->result = lhs;
2381 slot = htab_find_slot_with_hash (current_info->references,
2382 vr2, vr2->hashcode, INSERT);
2383 if (*slot)
2384 free_reference (*slot);
2385 *slot = vr2;
2388 return changed;
2391 /* Visit a load from a reference operator RHS, part of STMT, value number it,
2392 and return true if the value number of the LHS has changed as a result. */
2394 static bool
2395 visit_reference_op_load (tree lhs, tree op, gimple stmt)
2397 bool changed = false;
2398 tree last_vuse;
2399 tree result;
2401 last_vuse = gimple_vuse (stmt);
2402 last_vuse_ptr = &last_vuse;
2403 result = vn_reference_lookup (op, gimple_vuse (stmt),
2404 default_vn_walk_kind, NULL);
2405 last_vuse_ptr = NULL;
2407 /* If we have a VCE, try looking up its operand as it might be stored in
2408 a different type. */
2409 if (!result && TREE_CODE (op) == VIEW_CONVERT_EXPR)
2410 result = vn_reference_lookup (TREE_OPERAND (op, 0), gimple_vuse (stmt),
2411 default_vn_walk_kind, NULL);
2413 /* We handle type-punning through unions by value-numbering based
2414 on offset and size of the access. Be prepared to handle a
2415 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
2416 if (result
2417 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
2419 /* We will be setting the value number of lhs to the value number
2420 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
2421 So first simplify and lookup this expression to see if it
2422 is already available. */
2423 tree val = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
2424 if ((CONVERT_EXPR_P (val)
2425 || TREE_CODE (val) == VIEW_CONVERT_EXPR)
2426 && TREE_CODE (TREE_OPERAND (val, 0)) == SSA_NAME)
2428 tree tem = valueize_expr (vn_get_expr_for (TREE_OPERAND (val, 0)));
2429 if ((CONVERT_EXPR_P (tem)
2430 || TREE_CODE (tem) == VIEW_CONVERT_EXPR)
2431 && (tem = fold_unary_ignore_overflow (TREE_CODE (val),
2432 TREE_TYPE (val), tem)))
2433 val = tem;
2435 result = val;
2436 if (!is_gimple_min_invariant (val)
2437 && TREE_CODE (val) != SSA_NAME)
2438 result = vn_nary_op_lookup (val, NULL);
2439 /* If the expression is not yet available, value-number lhs to
2440 a new SSA_NAME we create. */
2441 if (!result)
2443 result = make_ssa_name (SSA_NAME_VAR (lhs), gimple_build_nop ());
2444 /* Initialize value-number information properly. */
2445 VN_INFO_GET (result)->valnum = result;
2446 VN_INFO (result)->value_id = get_next_value_id ();
2447 VN_INFO (result)->expr = val;
2448 VN_INFO (result)->has_constants = expr_has_constants (val);
2449 VN_INFO (result)->needs_insertion = true;
2450 /* As all "inserted" statements are singleton SCCs, insert
2451 into the valid table. This is strictly needed to
2452 avoid re-generating new value SSA_NAMEs for the same
2453 expression during SCC iteration over and over (the
2454 optimistic table gets cleared after each iteration).
2455 We do not need to insert into the optimistic table, as
2456 lookups there will fall back to the valid table. */
2457 if (current_info == optimistic_info)
2459 current_info = valid_info;
2460 vn_nary_op_insert (val, result);
2461 current_info = optimistic_info;
2463 else
2464 vn_nary_op_insert (val, result);
2465 if (dump_file && (dump_flags & TDF_DETAILS))
2467 fprintf (dump_file, "Inserting name ");
2468 print_generic_expr (dump_file, result, 0);
2469 fprintf (dump_file, " for expression ");
2470 print_generic_expr (dump_file, val, 0);
2471 fprintf (dump_file, "\n");
2476 if (result)
2478 changed = set_ssa_val_to (lhs, result);
2479 if (TREE_CODE (result) == SSA_NAME
2480 && VN_INFO (result)->has_constants)
2482 VN_INFO (lhs)->expr = VN_INFO (result)->expr;
2483 VN_INFO (lhs)->has_constants = true;
2486 else
2488 changed = set_ssa_val_to (lhs, lhs);
2489 vn_reference_insert (op, lhs, last_vuse);
2492 return changed;
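/* An example of the type-punning path above (a sketch): given
   union { float f; int i; } u, after u.f = f_1 a load x_2 = u.i finds
   the stored value f_1 of type float.  The types are not
   useless-convertible, so the code above builds
   VIEW_CONVERT_EXPR <int> (f_1) and, if that expression is not
   already available, inserts a fresh SSA name for it so the punned
   value still gets a reusable value number.  */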
2496 /* Visit a store to a reference operator LHS, part of STMT, value number it,
2497 and return true if the value number of the LHS has changed as a result. */
2499 static bool
2500 visit_reference_op_store (tree lhs, tree op, gimple stmt)
2502 bool changed = false;
2503 tree result;
2504 bool resultsame = false;
2506 /* First we want to lookup using the *vuses* from the store and see
2507 if the last store to this location with the same address had the
2508 same value.
2510 The vuses represent the memory state before the store. If the
2511 memory state, address, and value of the store are the same as those
2512 of the last store to this location, then this store will produce
2513 the same memory state as that store.
2515 In this case the vdef versions for this store are value numbered to those
2516 vuse versions, since they represent the same memory state after
2517 this store.
2519 Otherwise, the vdefs for the store are used when inserting into
2520 the table, since the store generates a new memory state. */
2522 result = vn_reference_lookup (lhs, gimple_vuse (stmt), VN_NOWALK, NULL);
2524 if (result)
2526 if (TREE_CODE (result) == SSA_NAME)
2527 result = SSA_VAL (result);
2528 if (TREE_CODE (op) == SSA_NAME)
2529 op = SSA_VAL (op);
2530 resultsame = expressions_equal_p (result, op);
2533 if (!result || !resultsame)
2535 tree vdef;
2537 if (dump_file && (dump_flags & TDF_DETAILS))
2539 fprintf (dump_file, "No store match\n");
2540 fprintf (dump_file, "Value numbering store ");
2541 print_generic_expr (dump_file, lhs, 0);
2542 fprintf (dump_file, " to ");
2543 print_generic_expr (dump_file, op, 0);
2544 fprintf (dump_file, "\n");
2546 /* Have to set value numbers before insert, since insert is
2547 going to valueize the references in-place. */
2548 if ((vdef = gimple_vdef (stmt)))
2550 VN_INFO (vdef)->use_processed = true;
2551 changed |= set_ssa_val_to (vdef, vdef);
2554 /* Do not insert structure copies into the tables. */
2555 if (is_gimple_min_invariant (op)
2556 || is_gimple_reg (op))
2557 vn_reference_insert (lhs, op, vdef);
2559 else
2561 /* We had a match, so value number the vdef to have the value
2562 number of the vuse it came from. */
2563 tree def, use;
2565 if (dump_file && (dump_flags & TDF_DETAILS))
2566 fprintf (dump_file, "Store matched earlier value, "
2567 "value numbering store vdefs to matching vuses.\n");
2569 def = gimple_vdef (stmt);
2570 use = gimple_vuse (stmt);
2572 VN_INFO (def)->use_processed = true;
2573 changed |= set_ssa_val_to (def, SSA_VAL (use));
2576 return changed;
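/* An example of the match case above (illustrative names): for two
   adjacent stores *p_1 = x_2 with no intervening memory change, the
   lookup for the second store using its vuse finds the value x_2
   recorded by the first, so resultsame is true and the second store's
   vdef is value numbered to its vuse, i.e. the store creates no new
   memory state as far as value numbering is concerned.  */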
2579 /* Visit and value number PHI, return true if the value number
2580 changed. */
2582 static bool
2583 visit_phi (gimple phi)
2585 bool changed = false;
2586 tree result;
2587 tree sameval = VN_TOP;
2588 bool allsame = true;
2589 unsigned i;
2591 /* TODO: We could check for this in init_sccvn, and replace this
2592 with a gcc_assert. */
2593 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
2594 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2596 /* See if all non-TOP arguments have the same value. TOP is
2597 equivalent to everything, so we can ignore it. */
2598 for (i = 0; i < gimple_phi_num_args (phi); i++)
2600 tree def = PHI_ARG_DEF (phi, i);
2602 if (TREE_CODE (def) == SSA_NAME)
2603 def = SSA_VAL (def);
2604 if (def == VN_TOP)
2605 continue;
2606 if (sameval == VN_TOP)
2608 sameval = def;
2610 else
2612 if (!expressions_equal_p (def, sameval))
2614 allsame = false;
2615 break;
2620 /* If all value numbered to the same value, the phi node has that
2621 value. */
2622 if (allsame)
2624 if (is_gimple_min_invariant (sameval))
2626 VN_INFO (PHI_RESULT (phi))->has_constants = true;
2627 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2629 else
2631 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2632 VN_INFO (PHI_RESULT (phi))->expr = sameval;
2635 if (TREE_CODE (sameval) == SSA_NAME)
2636 return visit_copy (PHI_RESULT (phi), sameval);
2638 return set_ssa_val_to (PHI_RESULT (phi), sameval);
2641 /* Otherwise, see if it is equivalent to a phi node in this block. */
2642 result = vn_phi_lookup (phi);
2643 if (result)
2645 if (TREE_CODE (result) == SSA_NAME)
2646 changed = visit_copy (PHI_RESULT (phi), result);
2647 else
2648 changed = set_ssa_val_to (PHI_RESULT (phi), result);
2650 else
2652 vn_phi_insert (phi, PHI_RESULT (phi));
2653 VN_INFO (PHI_RESULT (phi))->has_constants = false;
2654 VN_INFO (PHI_RESULT (phi))->expr = PHI_RESULT (phi);
2655 changed = set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
2658 return changed;
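/* Two illustrative cases for the code above: PHI <5, 5, VN_TOP> value
   numbers to the constant 5, because VN_TOP unifies with anything,
   while PHI <a_1, b_2> with genuinely different values falls through
   to vn_phi_lookup and can still be unified with another PHI in the
   same block whose arguments match edge by edge.  */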
2661 /* Return true if EXPR contains constants. */
2663 static bool
2664 expr_has_constants (tree expr)
2666 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2668 case tcc_unary:
2669 return is_gimple_min_invariant (TREE_OPERAND (expr, 0));
2671 case tcc_binary:
2672 return is_gimple_min_invariant (TREE_OPERAND (expr, 0))
2673 || is_gimple_min_invariant (TREE_OPERAND (expr, 1));
2674 /* Constants inside reference ops are rarely interesting, but
2675 it can take a lot of looking to find them. */
2676 case tcc_reference:
2677 case tcc_declaration:
2678 return false;
2679 default:
2680 return is_gimple_min_invariant (expr);
2682 return false;
2685 /* Return true if STMT contains constants. */
2687 static bool
2688 stmt_has_constants (gimple stmt)
2690 if (gimple_code (stmt) != GIMPLE_ASSIGN)
2691 return false;
2693 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
2695 case GIMPLE_UNARY_RHS:
2696 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2698 case GIMPLE_BINARY_RHS:
2699 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2700 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt)));
2701 case GIMPLE_TERNARY_RHS:
2702 return (is_gimple_min_invariant (gimple_assign_rhs1 (stmt))
2703 || is_gimple_min_invariant (gimple_assign_rhs2 (stmt))
2704 || is_gimple_min_invariant (gimple_assign_rhs3 (stmt)));
2705 case GIMPLE_SINGLE_RHS:
2706 /* Constants inside reference ops are rarely interesting, but
2707 it can take a lot of looking to find them. */
2708 return is_gimple_min_invariant (gimple_assign_rhs1 (stmt));
2709 default:
2710 gcc_unreachable ();
2712 return false;
2715 /* Replace SSA_NAMES in expr with their value numbers, and return the
2716 result.
2717 This is performed in place. */
2719 static tree
2720 valueize_expr (tree expr)
2722 switch (TREE_CODE_CLASS (TREE_CODE (expr)))
2724 case tcc_unary:
2725 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2726 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2727 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2728 break;
2729 case tcc_binary:
2730 if (TREE_CODE (TREE_OPERAND (expr, 0)) == SSA_NAME
2731 && SSA_VAL (TREE_OPERAND (expr, 0)) != VN_TOP)
2732 TREE_OPERAND (expr, 0) = SSA_VAL (TREE_OPERAND (expr, 0));
2733 if (TREE_CODE (TREE_OPERAND (expr, 1)) == SSA_NAME
2734 && SSA_VAL (TREE_OPERAND (expr, 1)) != VN_TOP)
2735 TREE_OPERAND (expr, 1) = SSA_VAL (TREE_OPERAND (expr, 1));
2736 break;
2737 default:
2738 break;
2740 return expr;
2743 /* Simplify the binary expression RHS, and return the result if
2744 simplified. */
2746 static tree
2747 simplify_binary_expression (gimple stmt)
2749 tree result = NULL_TREE;
2750 tree op0 = gimple_assign_rhs1 (stmt);
2751 tree op1 = gimple_assign_rhs2 (stmt);
2753 /* This will not catch every single case we could combine, but will
2754 catch those with constants. The goal here is to simultaneously
2755 combine constants between expressions, but avoid infinite
2756 expansion of expressions during simplification. */
2757 if (TREE_CODE (op0) == SSA_NAME)
2759 if (VN_INFO (op0)->has_constants
2760 || TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)) == tcc_comparison)
2761 op0 = valueize_expr (vn_get_expr_for (op0));
2762 else if (SSA_VAL (op0) != VN_TOP && SSA_VAL (op0) != op0)
2763 op0 = SSA_VAL (op0);
2766 if (TREE_CODE (op1) == SSA_NAME)
2768 if (VN_INFO (op1)->has_constants)
2769 op1 = valueize_expr (vn_get_expr_for (op1));
2770 else if (SSA_VAL (op1) != VN_TOP && SSA_VAL (op1) != op1)
2771 op1 = SSA_VAL (op1);
2774 /* Pointer plus constant can be represented as invariant address.
2775 Do so to allow further propagation; see also tree forwprop. */
2776 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
2777 && host_integerp (op1, 1)
2778 && TREE_CODE (op0) == ADDR_EXPR
2779 && is_gimple_min_invariant (op0))
2780 return build_invariant_address (TREE_TYPE (op0),
2781 TREE_OPERAND (op0, 0),
2782 TREE_INT_CST_LOW (op1));
2784 /* Avoid folding if nothing changed. */
2785 if (op0 == gimple_assign_rhs1 (stmt)
2786 && op1 == gimple_assign_rhs2 (stmt))
2787 return NULL_TREE;
2789 fold_defer_overflow_warnings ();
2791 result = fold_binary (gimple_assign_rhs_code (stmt),
2792 gimple_expr_type (stmt), op0, op1);
2793 if (result)
2794 STRIP_USELESS_TYPE_CONVERSION (result);
2796 fold_undefer_overflow_warnings (result && valid_gimple_rhs_p (result),
2797 stmt, 0);
2799 /* Make sure result is not a complex expression consisting
2800 of operators of operators (i.e. (a + b) + (a + c)).
2801 Otherwise, we will end up with unbounded expressions if
2802 fold does anything at all. */
2803 if (result && valid_gimple_rhs_p (result))
2804 return result;
2806 return NULL_TREE;
2809 /* Simplify the unary expression RHS, and return the result if
2810 simplified. */
2812 static tree
2813 simplify_unary_expression (gimple stmt)
2815 tree result = NULL_TREE;
2816 tree orig_op0, op0 = gimple_assign_rhs1 (stmt);
2818 /* We handle some tcc_reference codes here that are all
2819 GIMPLE_ASSIGN_SINGLE codes. */
2820 if (gimple_assign_rhs_code (stmt) == REALPART_EXPR
2821 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2822 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2823 op0 = TREE_OPERAND (op0, 0);
2825 if (TREE_CODE (op0) != SSA_NAME)
2826 return NULL_TREE;
2828 orig_op0 = op0;
2829 if (VN_INFO (op0)->has_constants)
2830 op0 = valueize_expr (vn_get_expr_for (op0));
2831 else if (gimple_assign_cast_p (stmt)
2832 || gimple_assign_rhs_code (stmt) == REALPART_EXPR
2833 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
2834 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
2836 /* We want to do tree-combining on conversion-like expressions.
2837 Make sure we feed only SSA_NAMEs or constants to fold though. */
2838 tree tem = valueize_expr (vn_get_expr_for (op0));
2839 if (UNARY_CLASS_P (tem)
2840 || BINARY_CLASS_P (tem)
2841 || TREE_CODE (tem) == VIEW_CONVERT_EXPR
2842 || TREE_CODE (tem) == SSA_NAME
2843 || is_gimple_min_invariant (tem))
2844 op0 = tem;
2847 /* Avoid folding if nothing changed, but remember the expression. */
2848 if (op0 == orig_op0)
2849 return NULL_TREE;
2851 result = fold_unary_ignore_overflow (gimple_assign_rhs_code (stmt),
2852 gimple_expr_type (stmt), op0);
2853 if (result)
2855 STRIP_USELESS_TYPE_CONVERSION (result);
2856 if (valid_gimple_rhs_p (result))
2857 return result;
2860 return NULL_TREE;
2863 /* Valueize NAME if it is an SSA name, otherwise just return it. */
2865 static inline tree
2866 vn_valueize (tree name)
2868 if (TREE_CODE (name) == SSA_NAME)
2870 tree tem = SSA_VAL (name);
2871 return tem == VN_TOP ? name : tem;
2873 return name;
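/* E.g. (a sketch): with SSA_VAL (x_4) == 3, vn_valueize (x_4) returns
   the constant 3, while a name still at VN_TOP valueizes to itself so
   that the folders driven from try_to_simplify never see the
   artificial VN_TOP node.  */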
2876 /* Try to simplify RHS using equivalences and constant folding. */
2878 static tree
2879 try_to_simplify (gimple stmt)
2881 tree tem;
2883 /* For stores we can end up simplifying an SSA_NAME rhs. Just return
2884 in this case; there is no point in doing extra work. */
2885 if (gimple_assign_copy_p (stmt)
2886 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME)
2887 return NULL_TREE;
2889 /* First try constant folding based on our current lattice. */
2890 tem = gimple_fold_stmt_to_constant (stmt, vn_valueize);
2891 if (tem)
2892 return tem;
2894 /* If that didn't work try combining multiple statements. */
2895 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
2897 case tcc_reference:
2898 /* Fallthrough for some codes that can operate on registers. */
2899 if (!(TREE_CODE (gimple_assign_rhs1 (stmt)) == REALPART_EXPR
2900 || TREE_CODE (gimple_assign_rhs1 (stmt)) == IMAGPART_EXPR
2901 || TREE_CODE (gimple_assign_rhs1 (stmt)) == VIEW_CONVERT_EXPR))
2902 break;
2903 /* We could do a little more with unary ops, if they expand
2904 into binary ops, but it's debatable whether it is worth it. */
2905 case tcc_unary:
2906 return simplify_unary_expression (stmt);
2908 case tcc_comparison:
2909 case tcc_binary:
2910 return simplify_binary_expression (stmt);
2912 default:
2913 break;
2916 return NULL_TREE;
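/* For instance (made-up names): if SSA_VAL (a_1) is the constant 4
   and the statement is b_2 = a_1 * 2, gimple_fold_stmt_to_constant
   folds the valueized form 4 * 2 and try_to_simplify returns the
   constant 8 without modifying the IL.  */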
2919 /* Visit and value number USE, return true if the value number
2920 changed. */
2922 static bool
2923 visit_use (tree use)
2925 bool changed = false;
2926 gimple stmt = SSA_NAME_DEF_STMT (use);
2928 VN_INFO (use)->use_processed = true;
2930 gcc_assert (!SSA_NAME_IN_FREE_LIST (use));
2931 if (dump_file && (dump_flags & TDF_DETAILS)
2932 && !SSA_NAME_IS_DEFAULT_DEF (use))
2934 fprintf (dump_file, "Value numbering ");
2935 print_generic_expr (dump_file, use, 0);
2936 fprintf (dump_file, " stmt = ");
2937 print_gimple_stmt (dump_file, stmt, 0, 0);
2940 /* Handle uninitialized uses. */
2941 if (SSA_NAME_IS_DEFAULT_DEF (use))
2942 changed = set_ssa_val_to (use, use);
2943 else
2945 if (gimple_code (stmt) == GIMPLE_PHI)
2946 changed = visit_phi (stmt);
2947 else if (!gimple_has_lhs (stmt)
2948 || gimple_has_volatile_ops (stmt)
2949 || stmt_could_throw_p (stmt))
2950 changed = defs_to_varying (stmt);
2951 else if (is_gimple_assign (stmt))
2953 tree lhs = gimple_assign_lhs (stmt);
2954 tree simplified;
2956 /* Shortcut for copies. Simplifying copies is pointless,
2957 since we copy the expression and value they represent. */
2958 if (gimple_assign_copy_p (stmt)
2959 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
2960 && TREE_CODE (lhs) == SSA_NAME)
2962 changed = visit_copy (lhs, gimple_assign_rhs1 (stmt));
2963 goto done;
2965 simplified = try_to_simplify (stmt);
2966 if (simplified)
2968 if (dump_file && (dump_flags & TDF_DETAILS))
2970 fprintf (dump_file, "RHS ");
2971 print_gimple_expr (dump_file, stmt, 0, 0);
2972 fprintf (dump_file, " simplified to ");
2973 print_generic_expr (dump_file, simplified, 0);
2974 if (TREE_CODE (lhs) == SSA_NAME)
2975 fprintf (dump_file, " has constants %d\n",
2976 expr_has_constants (simplified));
2977 else
2978 fprintf (dump_file, "\n");
2981 /* Setting value numbers to constants will occasionally
2982 screw up phi congruence because constants are not
2983 uniquely associated with a single ssa name that can be
2984 looked up. */
2985 if (simplified
2986 && is_gimple_min_invariant (simplified)
2987 && TREE_CODE (lhs) == SSA_NAME)
2989 VN_INFO (lhs)->expr = simplified;
2990 VN_INFO (lhs)->has_constants = true;
2991 changed = set_ssa_val_to (lhs, simplified);
2992 goto done;
2994 else if (simplified
2995 && TREE_CODE (simplified) == SSA_NAME
2996 && TREE_CODE (lhs) == SSA_NAME)
2998 changed = visit_copy (lhs, simplified);
2999 goto done;
3001 else if (simplified)
3003 if (TREE_CODE (lhs) == SSA_NAME)
3005 VN_INFO (lhs)->has_constants = expr_has_constants (simplified);
3006 /* We have to unshare the expression or else
3007 valueizing may change the IL stream. */
3008 VN_INFO (lhs)->expr = unshare_expr (simplified);
3011 else if (stmt_has_constants (stmt)
3012 && TREE_CODE (lhs) == SSA_NAME)
3013 VN_INFO (lhs)->has_constants = true;
3014 else if (TREE_CODE (lhs) == SSA_NAME)
3016 /* We reset expr and constantness here because we may
3017 have been value numbering optimistically, and
3018 iterating. They may become non-constant in this case,
3019 even if they were optimistically constant. */
3021 VN_INFO (lhs)->has_constants = false;
3022 VN_INFO (lhs)->expr = NULL_TREE;
3025 if ((TREE_CODE (lhs) == SSA_NAME
3026 /* We can substitute SSA_NAMEs that are live over
3027 abnormal edges with their constant value. */
3028 && !(gimple_assign_copy_p (stmt)
3029 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
3030 && !(simplified
3031 && is_gimple_min_invariant (simplified))
3032 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3033 /* Stores or copies from SSA_NAMEs that are live over
3034 abnormal edges are a problem. */
3035 || (gimple_assign_single_p (stmt)
3036 && TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
3037 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_assign_rhs1 (stmt))))
3038 changed = defs_to_varying (stmt);
3039 else if (REFERENCE_CLASS_P (lhs) || DECL_P (lhs))
3041 changed = visit_reference_op_store (lhs, gimple_assign_rhs1 (stmt), stmt);
3043 else if (TREE_CODE (lhs) == SSA_NAME)
3045 if ((gimple_assign_copy_p (stmt)
3046 && is_gimple_min_invariant (gimple_assign_rhs1 (stmt)))
3047 || (simplified
3048 && is_gimple_min_invariant (simplified)))
3050 VN_INFO (lhs)->has_constants = true;
3051 if (simplified)
3052 changed = set_ssa_val_to (lhs, simplified);
3053 else
3054 changed = set_ssa_val_to (lhs, gimple_assign_rhs1 (stmt));
3056 else
3058 switch (get_gimple_rhs_class (gimple_assign_rhs_code (stmt)))
3060 case GIMPLE_UNARY_RHS:
3061 case GIMPLE_BINARY_RHS:
3062 case GIMPLE_TERNARY_RHS:
3063 changed = visit_nary_op (lhs, stmt);
3064 break;
3065 case GIMPLE_SINGLE_RHS:
3066 switch (TREE_CODE_CLASS (gimple_assign_rhs_code (stmt)))
3068 case tcc_reference:
3069 /* VOP-less references can go through unary case. */
3070 if ((gimple_assign_rhs_code (stmt) == REALPART_EXPR
3071 || gimple_assign_rhs_code (stmt) == IMAGPART_EXPR
3072 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)
3073 && TREE_CODE (TREE_OPERAND (gimple_assign_rhs1 (stmt), 0)) == SSA_NAME)
3075 changed = visit_nary_op (lhs, stmt);
3076 break;
3078 /* Fallthrough. */
3079 case tcc_declaration:
3080 changed = visit_reference_op_load
3081 (lhs, gimple_assign_rhs1 (stmt), stmt);
3082 break;
3083 case tcc_expression:
3084 if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
3086 changed = visit_nary_op (lhs, stmt);
3087 break;
3089 /* Fallthrough. */
3090 default:
3091 changed = defs_to_varying (stmt);
3093 break;
3094 default:
3095 changed = defs_to_varying (stmt);
3096 break;
3100 else
3101 changed = defs_to_varying (stmt);
3103 else if (is_gimple_call (stmt))
3105 tree lhs = gimple_call_lhs (stmt);
3107 /* ??? We could try to simplify calls. */
3109 if (stmt_has_constants (stmt)
3110 && TREE_CODE (lhs) == SSA_NAME)
3111 VN_INFO (lhs)->has_constants = true;
3112 else if (TREE_CODE (lhs) == SSA_NAME)
3114 /* We reset expr and constantness here because we may
3115 have been value numbering optimistically, and
3116 iterating. They may become non-constant in this case,
3117 even if they were optimistically constant. */
3118 VN_INFO (lhs)->has_constants = false;
3119 VN_INFO (lhs)->expr = NULL_TREE;
3122 if (TREE_CODE (lhs) == SSA_NAME
3123 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
3124 changed = defs_to_varying (stmt);
3125 /* ??? We should handle stores from calls. */
3126 else if (TREE_CODE (lhs) == SSA_NAME)
3128 if (!gimple_call_internal_p (stmt)
3129 && gimple_call_flags (stmt) & (ECF_PURE | ECF_CONST))
3130 changed = visit_reference_op_call (lhs, stmt);
3131 else
3132 changed = defs_to_varying (stmt);
3134 else
3135 changed = defs_to_varying (stmt);
3138 done:
3139 return changed;
3142 /* Compare two operands by reverse postorder index. */
3144 static int
3145 compare_ops (const void *pa, const void *pb)
3147 const tree opa = *((const tree *)pa);
3148 const tree opb = *((const tree *)pb);
3149 gimple opstmta = SSA_NAME_DEF_STMT (opa);
3150 gimple opstmtb = SSA_NAME_DEF_STMT (opb);
3151 basic_block bba;
3152 basic_block bbb;
3154 if (gimple_nop_p (opstmta) && gimple_nop_p (opstmtb))
3155 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3156 else if (gimple_nop_p (opstmta))
3157 return -1;
3158 else if (gimple_nop_p (opstmtb))
3159 return 1;
3161 bba = gimple_bb (opstmta);
3162 bbb = gimple_bb (opstmtb);
3164 if (!bba && !bbb)
3165 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3166 else if (!bba)
3167 return -1;
3168 else if (!bbb)
3169 return 1;
3171 if (bba == bbb)
3173 if (gimple_code (opstmta) == GIMPLE_PHI
3174 && gimple_code (opstmtb) == GIMPLE_PHI)
3175 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3176 else if (gimple_code (opstmta) == GIMPLE_PHI)
3177 return -1;
3178 else if (gimple_code (opstmtb) == GIMPLE_PHI)
3179 return 1;
3180 else if (gimple_uid (opstmta) != gimple_uid (opstmtb))
3181 return gimple_uid (opstmta) - gimple_uid (opstmtb);
3182 else
3183 return SSA_NAME_VERSION (opa) - SSA_NAME_VERSION (opb);
3185 return rpo_numbers[bba->index] - rpo_numbers[bbb->index];
3188 /* Sort an array containing members of a strongly connected component
3189 SCC so that the members are ordered by RPO number.
3190 This means that when the sort is complete, iterating through the
3191 array will give you the members in RPO order. */
3193 static void
3194 sort_scc (VEC (tree, heap) *scc)
3196 VEC_qsort (tree, scc, compare_ops);
3199 /* Insert the no-longer-used nary ONARY into the hash INFO. */
3201 static void
3202 copy_nary (vn_nary_op_t onary, vn_tables_t info)
3204 size_t size = sizeof_vn_nary_op (onary->length);
3205 vn_nary_op_t nary = alloc_vn_nary_op_noinit (onary->length,
3206 &info->nary_obstack);
3207 memcpy (nary, onary, size);
3208 vn_nary_op_insert_into (nary, info->nary, false);
3211 /* Insert the no-longer-used phi OPHI into the hash INFO. */
3213 static void
3214 copy_phi (vn_phi_t ophi, vn_tables_t info)
3216 vn_phi_t phi = (vn_phi_t) pool_alloc (info->phis_pool);
3217 void **slot;
3218 memcpy (phi, ophi, sizeof (*phi));
3219 ophi->phiargs = NULL;
3220 slot = htab_find_slot_with_hash (info->phis, phi, phi->hashcode, INSERT);
3221 gcc_assert (!*slot);
3222 *slot = phi;
3225 /* Insert the no-longer-used reference OREF into the hash INFO. */
3227 static void
3228 copy_reference (vn_reference_t oref, vn_tables_t info)
3230 vn_reference_t ref;
3231 void **slot;
3232 ref = (vn_reference_t) pool_alloc (info->references_pool);
3233 memcpy (ref, oref, sizeof (*ref));
3234 oref->operands = NULL;
3235 slot = htab_find_slot_with_hash (info->references, ref, ref->hashcode,
3236 INSERT);
3237 if (*slot)
3238 free_reference (*slot);
3239 *slot = ref;
3242 /* Process a strongly connected component in the SSA graph. */
3244 static void
3245 process_scc (VEC (tree, heap) *scc)
3247 tree var;
3248 unsigned int i;
3249 unsigned int iterations = 0;
3250 bool changed = true;
3251 htab_iterator hi;
3252 vn_nary_op_t nary;
3253 vn_phi_t phi;
3254 vn_reference_t ref;
3256 /* If the SCC has a single member, just visit it. */
3257 if (VEC_length (tree, scc) == 1)
3259 tree use = VEC_index (tree, scc, 0);
3260 if (VN_INFO (use)->use_processed)
3261 return;
3262 /* We need to make sure it doesn't form a cycle itself, which can
3263 happen for self-referential PHI nodes. In that case we would
3264 end up inserting an expression with VN_TOP operands into the
3265 valid table which makes us derive bogus equivalences later.
3266 The cheapest way to check this is to assume it for all PHI nodes. */
3267 if (gimple_code (SSA_NAME_DEF_STMT (use)) == GIMPLE_PHI)
3268 /* Fallthru to iteration. */ ;
3269 else
3271 visit_use (use);
3272 return;
3276 /* Iterate over the SCC with the optimistic table until it stops
3277 changing. */
3278 current_info = optimistic_info;
3279 while (changed)
3281 changed = false;
3282 iterations++;
3283 /* As we are value-numbering optimistically we have to
3284 clear the expression tables and the simplified expressions
3285 in each iteration until we converge. */
3286 htab_empty (optimistic_info->nary);
3287 htab_empty (optimistic_info->phis);
3288 htab_empty (optimistic_info->references);
3289 obstack_free (&optimistic_info->nary_obstack, NULL);
3290 gcc_obstack_init (&optimistic_info->nary_obstack);
3291 empty_alloc_pool (optimistic_info->phis_pool);
3292 empty_alloc_pool (optimistic_info->references_pool);
3293 FOR_EACH_VEC_ELT (tree, scc, i, var)
3294 VN_INFO (var)->expr = NULL_TREE;
3295 FOR_EACH_VEC_ELT (tree, scc, i, var)
3296 changed |= visit_use (var);
3299 statistics_histogram_event (cfun, "SCC iterations", iterations);
3301 /* Finally, copy the contents of the no longer used optimistic
3302 table to the valid table. */
3303 FOR_EACH_HTAB_ELEMENT (optimistic_info->nary, nary, vn_nary_op_t, hi)
3304 copy_nary (nary, valid_info);
3305 FOR_EACH_HTAB_ELEMENT (optimistic_info->phis, phi, vn_phi_t, hi)
3306 copy_phi (phi, valid_info);
3307 FOR_EACH_HTAB_ELEMENT (optimistic_info->references, ref, vn_reference_t, hi)
3308 copy_reference (ref, valid_info);
3310 current_info = valid_info;
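/* A small example of the optimistic iteration (a sketch): for the
   loop-carried cycle

     i_1 = PHI <0, i_2>
     i_2 = i_1 + 0;

   the first pass sees the back-edge argument i_2 as VN_TOP, so
   PHI <0, VN_TOP> optimistically gives i_1 the value 0; i_2 = 0 + 0
   then folds to 0 as well.  The second pass reproduces exactly the
   same values, so the SCC converges with both names value numbered
   to the constant 0, something a pessimistic single pass would
   miss.  */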
3313 DEF_VEC_O(ssa_op_iter);
3314 DEF_VEC_ALLOC_O(ssa_op_iter,heap);
3316 /* Pop the components of the found SCC for NAME off the SCC stack
3317 and process them. Returns true if all went well, false if
3318 we run into resource limits. */
3320 static bool
3321 extract_and_process_scc_for_name (tree name)
3323 VEC (tree, heap) *scc = NULL;
3324 tree x;
3326 /* Found an SCC, pop the components off the SCC stack and
3327 process them. */
3330 x = VEC_pop (tree, sccstack);
3332 VN_INFO (x)->on_sccstack = false;
3333 VEC_safe_push (tree, heap, scc, x);
3334 } while (x != name);
3336 /* Bail out of SCCVN in case an SCC turns out to be incredibly large. */
3337 if (VEC_length (tree, scc)
3338 > (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE))
3340 if (dump_file)
3341 fprintf (dump_file, "WARNING: Giving up with SCCVN due to "
3342 "SCC size %u exceeding %u\n", VEC_length (tree, scc),
3343 (unsigned)PARAM_VALUE (PARAM_SCCVN_MAX_SCC_SIZE));
3344 return false;
3347 if (VEC_length (tree, scc) > 1)
3348 sort_scc (scc);
3350 if (dump_file && (dump_flags & TDF_DETAILS))
3351 print_scc (dump_file, scc);
3353 process_scc (scc);
3355 VEC_free (tree, heap, scc);
3357 return true;
3360 /* Depth-first search on NAME to discover and process SCCs in the SSA
3361 graph.
3362 Execution of this algorithm relies on the fact that the SCCs are
3363 popped off the stack in topological order.
3364 Returns true if successful, false if we stopped processing SCCs due
3365 to resource constraints. */
3367 static bool
3368 DFS (tree name)
3370 VEC(ssa_op_iter, heap) *itervec = NULL;
3371 VEC(tree, heap) *namevec = NULL;
3372 use_operand_p usep = NULL;
3373 gimple defstmt;
3374 tree use;
3375 ssa_op_iter iter;
3377 start_over:
3378 /* SCC info */
3379 VN_INFO (name)->dfsnum = next_dfs_num++;
3380 VN_INFO (name)->visited = true;
3381 VN_INFO (name)->low = VN_INFO (name)->dfsnum;
3383 VEC_safe_push (tree, heap, sccstack, name);
3384 VN_INFO (name)->on_sccstack = true;
3385 defstmt = SSA_NAME_DEF_STMT (name);
3387 /* Recursively DFS on our operands, looking for SCC's. */
3388 if (!gimple_nop_p (defstmt))
3390 /* Push a new iterator. */
3391 if (gimple_code (defstmt) == GIMPLE_PHI)
3392 usep = op_iter_init_phiuse (&iter, defstmt, SSA_OP_ALL_USES);
3393 else
3394 usep = op_iter_init_use (&iter, defstmt, SSA_OP_ALL_USES);
3396 else
3397 clear_and_done_ssa_iter (&iter);
3399 while (1)
3401 /* If we are done processing uses of a name, go up the stack
3402 of iterators and process SCCs as we found them. */
3403 if (op_iter_done (&iter))
3405 /* See if we found an SCC. */
3406 if (VN_INFO (name)->low == VN_INFO (name)->dfsnum)
3407 if (!extract_and_process_scc_for_name (name))
3409 VEC_free (tree, heap, namevec);
3410 VEC_free (ssa_op_iter, heap, itervec);
3411 return false;
3414 /* Check if we are done. */
3415 if (VEC_empty (tree, namevec))
3417 VEC_free (tree, heap, namevec);
3418 VEC_free (ssa_op_iter, heap, itervec);
3419 return true;
3422 /* Restore the last use walker and continue walking there. */
3423 use = name;
3424 name = VEC_pop (tree, namevec);
3425 memcpy (&iter, VEC_last (ssa_op_iter, itervec),
3426 sizeof (ssa_op_iter));
3427 VEC_pop (ssa_op_iter, itervec);
3428 goto continue_walking;
3431 use = USE_FROM_PTR (usep);
3433 /* Since we handle phi nodes, we will sometimes get
3434 invariants in the use expression. */
3435 if (TREE_CODE (use) == SSA_NAME)
3437 if (! (VN_INFO (use)->visited))
3439 /* Recurse by pushing the current use walking state on
3440 the stack and starting over. */
3441 VEC_safe_push(ssa_op_iter, heap, itervec, &iter);
3442 VEC_safe_push(tree, heap, namevec, name);
3443 name = use;
3444 goto start_over;
3446 continue_walking:
3447 VN_INFO (name)->low = MIN (VN_INFO (name)->low,
3448 VN_INFO (use)->low);
3450 if (VN_INFO (use)->dfsnum < VN_INFO (name)->dfsnum
3451 && VN_INFO (use)->on_sccstack)
3453 VN_INFO (name)->low = MIN (VN_INFO (use)->dfsnum,
3454 VN_INFO (name)->low);
3458 usep = op_iter_next_use (&iter);
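/* The walk above is Tarjan's SCC algorithm made iterative by keeping
   the per-name use iterators in ITERVEC/NAMEVEC instead of on the C
   call stack.  E.g. (a sketch): for i_1 = PHI <0, i_2> and
   i_2 = i_1 + 1, the DFS from i_1 descends to i_2, whose use leads
   back to i_1, still on sccstack with a smaller dfsnum; the low
   values propagate back so that both names pop together as one SCC
   and are handed to extract_and_process_scc_for_name.  */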
3462 /* Allocate a value number table. */
3464 static void
3465 allocate_vn_table (vn_tables_t table)
3467 table->phis = htab_create (23, vn_phi_hash, vn_phi_eq, free_phi);
3468 table->nary = htab_create (23, vn_nary_op_hash, vn_nary_op_eq, NULL);
3469 table->references = htab_create (23, vn_reference_hash, vn_reference_eq,
3470 free_reference);
3472 gcc_obstack_init (&table->nary_obstack);
3473 table->phis_pool = create_alloc_pool ("VN phis",
3474 sizeof (struct vn_phi_s),
3475 30);
3476 table->references_pool = create_alloc_pool ("VN references",
3477 sizeof (struct vn_reference_s),
3478 30);
3481 /* Free a value number table. */
3483 static void
3484 free_vn_table (vn_tables_t table)
3486 htab_delete (table->phis);
3487 htab_delete (table->nary);
3488 htab_delete (table->references);
3489 obstack_free (&table->nary_obstack, NULL);
3490 free_alloc_pool (table->phis_pool);
3491 free_alloc_pool (table->references_pool);
3494 static void
3495 init_scc_vn (void)
3497 size_t i;
3498 int j;
3499 int *rpo_numbers_temp;
3501 calculate_dominance_info (CDI_DOMINATORS);
3502 sccstack = NULL;
3503 constant_to_value_id = htab_create (23, vn_constant_hash, vn_constant_eq,
3504 free);
3506 constant_value_ids = BITMAP_ALLOC (NULL);
3508 next_dfs_num = 1;
3509 next_value_id = 1;
3511 vn_ssa_aux_table = VEC_alloc (vn_ssa_aux_t, heap, num_ssa_names + 1);
3512 /* VEC_alloc doesn't actually grow it to the right size; it just
3513 preallocates the space to do so. */
3514 VEC_safe_grow_cleared (vn_ssa_aux_t, heap, vn_ssa_aux_table, num_ssa_names + 1);
3515 gcc_obstack_init (&vn_ssa_aux_obstack);
3517 shared_lookup_phiargs = NULL;
3518 shared_lookup_references = NULL;
3519 rpo_numbers = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3520 rpo_numbers_temp = XCNEWVEC (int, last_basic_block + NUM_FIXED_BLOCKS);
3521 pre_and_rev_post_order_compute (NULL, rpo_numbers_temp, false);
3523 /* rpo_numbers_temp is an array of the RPO ordering: rpo[i] = bb means
3524 that the i'th block in RPO order is bb. We want to map bbs to RPO
3525 numbers, so we need to invert this mapping. */
3526 for (j = 0; j < n_basic_blocks - NUM_FIXED_BLOCKS; j++)
3527 rpo_numbers[rpo_numbers_temp[j]] = j;
3529 XDELETE (rpo_numbers_temp);
3531 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
3533 /* Create the VN_INFO structures, and initialize value numbers to
3534 TOP. */
3535 for (i = 0; i < num_ssa_names; i++)
3537 tree name = ssa_name (i);
3538 if (name)
3540 VN_INFO_GET (name)->valnum = VN_TOP;
3541 VN_INFO (name)->expr = NULL_TREE;
3542 VN_INFO (name)->value_id = 0;
3546 renumber_gimple_stmt_uids ();
3548 /* Create the valid and optimistic value numbering tables. */
3549 valid_info = XCNEW (struct vn_tables_s);
3550 allocate_vn_table (valid_info);
3551 optimistic_info = XCNEW (struct vn_tables_s);
3552 allocate_vn_table (optimistic_info);
3555 void
3556 free_scc_vn (void)
3558 size_t i;
3560 htab_delete (constant_to_value_id);
3561 BITMAP_FREE (constant_value_ids);
3562 VEC_free (tree, heap, shared_lookup_phiargs);
3563 VEC_free (vn_reference_op_s, heap, shared_lookup_references);
3564 XDELETEVEC (rpo_numbers);
3566 for (i = 0; i < num_ssa_names; i++)
3568 tree name = ssa_name (i);
3569 if (name
3570 && VN_INFO (name)->needs_insertion)
3571 release_ssa_name (name);
3573 obstack_free (&vn_ssa_aux_obstack, NULL);
3574 VEC_free (vn_ssa_aux_t, heap, vn_ssa_aux_table);
3576 VEC_free (tree, heap, sccstack);
3577 free_vn_table (valid_info);
3578 XDELETE (valid_info);
3579 free_vn_table (optimistic_info);
3580 XDELETE (optimistic_info);
3583 /* Set *ID if we computed something useful in RESULT. */
3585 static void
3586 set_value_id_for_result (tree result, unsigned int *id)
3588 if (result)
3590 if (TREE_CODE (result) == SSA_NAME)
3591 *id = VN_INFO (result)->value_id;
3592 else if (is_gimple_min_invariant (result))
3593 *id = get_or_alloc_constant_value_id (result);
3597 /* Set the value ids in the valid hash tables. */
3599 static void
3600 set_hashtable_value_ids (void)
3602 htab_iterator hi;
3603 vn_nary_op_t vno;
3604 vn_reference_t vr;
3605 vn_phi_t vp;
3607 /* Now set the value ids of the things we had put in the hash
3608 table. */
3610 FOR_EACH_HTAB_ELEMENT (valid_info->nary,
3611 vno, vn_nary_op_t, hi)
3612 set_value_id_for_result (vno->result, &vno->value_id);
3614 FOR_EACH_HTAB_ELEMENT (valid_info->phis,
3615 vp, vn_phi_t, hi)
3616 set_value_id_for_result (vp->result, &vp->value_id);
3618 FOR_EACH_HTAB_ELEMENT (valid_info->references,
3619 vr, vn_reference_t, hi)
3620 set_value_id_for_result (vr->result, &vr->value_id);
3623 /* Do SCCVN. Returns true if it finished, false if we bailed out
3624 due to resource constraints. DEFAULT_VN_WALK_KIND_ specifies
3625 how we use the alias oracle walking during the VN process. */
3627 bool
3628 run_scc_vn (vn_lookup_kind default_vn_walk_kind_)
3630 size_t i;
3631 tree param;
3632 bool changed = true;
3634 default_vn_walk_kind = default_vn_walk_kind_;
3636 init_scc_vn ();
3637 current_info = valid_info;
3639 for (param = DECL_ARGUMENTS (current_function_decl);
3640 param;
3641 param = DECL_CHAIN (param))
3643 if (gimple_default_def (cfun, param) != NULL)
3645 tree def = gimple_default_def (cfun, param);
3646 VN_INFO (def)->valnum = def;
3650 for (i = 1; i < num_ssa_names; ++i)
3652 tree name = ssa_name (i);
3653 if (name
3654 && VN_INFO (name)->visited == false
3655 && !has_zero_uses (name))
3656 if (!DFS (name))
3658 free_scc_vn ();
3659 return false;
3663 /* Initialize the value ids. */
3665 for (i = 1; i < num_ssa_names; ++i)
3667 tree name = ssa_name (i);
3668 vn_ssa_aux_t info;
3669 if (!name)
3670 continue;
3671 info = VN_INFO (name);
3672 if (info->valnum == name
3673 || info->valnum == VN_TOP)
3674 info->value_id = get_next_value_id ();
3675 else if (is_gimple_min_invariant (info->valnum))
3676 info->value_id = get_or_alloc_constant_value_id (info->valnum);
3679 /* Propagate until they stop changing. */
3680 while (changed)
3682 changed = false;
3683 for (i = 1; i < num_ssa_names; ++i)
3685 tree name = ssa_name (i);
3686 vn_ssa_aux_t info;
3687 if (!name)
3688 continue;
3689 info = VN_INFO (name);
3690 if (TREE_CODE (info->valnum) == SSA_NAME
3691 && info->valnum != name
3692 && info->value_id != VN_INFO (info->valnum)->value_id)
3694 changed = true;
3695 info->value_id = VN_INFO (info->valnum)->value_id;
3700 set_hashtable_value_ids ();
3702 if (dump_file && (dump_flags & TDF_DETAILS))
3704 fprintf (dump_file, "Value numbers:\n");
3705 for (i = 0; i < num_ssa_names; i++)
3707 tree name = ssa_name (i);
3708 if (name
3709 && VN_INFO (name)->visited
3710 && SSA_VAL (name) != name)
3712 print_generic_expr (dump_file, name, 0);
3713 fprintf (dump_file, " = ");
3714 print_generic_expr (dump_file, SSA_VAL (name), 0);
3715 fprintf (dump_file, "\n");
3720 return true;
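/* A minimal caller sketch (illustrative; the real consumers such as
   FRE/PRE do additional setup around this):

     if (!run_scc_vn (VN_NOWALK))
       return 0;   <- SCC size limit hit, tables are already freed
     ... consult SSA_VAL (name) and VN_INFO (name) here ...
     free_scc_vn ();

   Note that the failure path has already called free_scc_vn, so the
   caller must not free the tables again.  VN_NOWALK disables the
   alias-oracle walking during reference lookups; the walking
   variants are the other vn_lookup_kind values.  */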
3723 /* Return the maximum value id we have ever seen. */
3725 unsigned int
3726 get_max_value_id (void)
3728 return next_value_id;
3731 /* Return the next unique value id. */
3733 unsigned int
3734 get_next_value_id (void)
3736 return next_value_id++;
3740 /* Compare two expressions E1 and E2 and return true if they are equal. */
3742 bool
3743 expressions_equal_p (tree e1, tree e2)
3745 /* The obvious case. */
3746 if (e1 == e2)
3747 return true;
3749 /* If only one of them is null, they cannot be equal. */
3750 if (!e1 || !e2)
3751 return false;
3753 /* Now perform the actual comparison. */
3754 if (TREE_CODE (e1) == TREE_CODE (e2)
3755 && operand_equal_p (e1, e2, OEP_PURE_SAME))
3756 return true;
3758 return false;
3762 /* Return true if the nary operation NARY may trap. This is a copy
3763 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
3765 bool
3766 vn_nary_may_trap (vn_nary_op_t nary)
3768 tree type;
3769 tree rhs2 = NULL_TREE;
3770 bool honor_nans = false;
3771 bool honor_snans = false;
3772 bool fp_operation = false;
3773 bool honor_trapv = false;
3774 bool handled, ret;
3775 unsigned i;
3777 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
3778 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
3779 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
3781 type = nary->type;
3782 fp_operation = FLOAT_TYPE_P (type);
3783 if (fp_operation)
3785 honor_nans = flag_trapping_math && !flag_finite_math_only;
3786 honor_snans = flag_signaling_nans != 0;
3788 else if (INTEGRAL_TYPE_P (type)
3789 && TYPE_OVERFLOW_TRAPS (type))
3790 honor_trapv = true;
3792 if (nary->length >= 2)
3793 rhs2 = nary->op[1];
3794 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
3795 honor_trapv,
3796 honor_nans, honor_snans, rhs2,
3797 &handled);
3798 if (handled
3799 && ret)
3800 return true;
3802 for (i = 0; i < nary->length; ++i)
3803 if (tree_could_trap_p (nary->op[i]))
3804 return true;
3806 return false;