gcc/tree-ssa-sccvn.c
/* SCC value numbering for trees
   Copyright (C) 2006-2021 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "splay-tree.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "ssa.h"
#include "expmed.h"
#include "insn-config.h"
#include "memmodel.h"
#include "emit-rtl.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "cfganal.h"
#include "tree-inline.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "flags.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "dumpfile.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "tree-cfg.h"
#include "domwalk.h"
#include "gimple-iterator.h"
#include "gimple-match.h"
#include "stringpool.h"
#include "attribs.h"
#include "tree-pass.h"
#include "statistics.h"
#include "langhooks.h"
#include "ipa-utils.h"
#include "dbgcnt.h"
#include "tree-cfgcleanup.h"
#include "tree-ssa-loop.h"
#include "tree-scalar-evolution.h"
#include "tree-ssa-loop-niter.h"
#include "builtins.h"
#include "tree-ssa-sccvn.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In straight line code,
   it is equivalent to a regular hash based value numbering that is
   performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming
   from *outside of that SCC*, so we do not need to do anything special
   to ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores to the same
   address of the same value from the same starting memory states are
   equivalent.

   TODO:

   1. We can iterate only the changing portions of the SCCs, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
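/* A minimal illustrative sketch (not part of this file; all names and
   types below are hypothetical): in straight-line code the scheme
   described above degenerates to plain hash-based value numbering,
   where an expression keyed by its opcode and the value numbers of its
   operands maps to a single value number.  A linear-scan table stands
   in for the real hash tables.  */

struct toy_expr
{
  int opcode;
  int vn_op1;
  int vn_op2;
};

static struct toy_expr toy_exprs[128];
static int toy_values[128];
static int toy_num_exprs;
static int toy_next_value;

/* Return the value number for (OPCODE, VN1, VN2), allocating a fresh
   one if the expression has not been seen before.  Two syntactically
   different statements computing the same (opcode, operand value
   number) tuple therefore receive the same value number.  */

static int
toy_vn_lookup_or_add (int opcode, int vn1, int vn2)
{
  for (int i = 0; i < toy_num_exprs; ++i)
    if (toy_exprs[i].opcode == opcode
	&& toy_exprs[i].vn_op1 == vn1
	&& toy_exprs[i].vn_op2 == vn2)
      return toy_values[i];

  toy_exprs[toy_num_exprs].opcode = opcode;
  toy_exprs[toy_num_exprs].vn_op1 = vn1;
  toy_exprs[toy_num_exprs].vn_op2 = vn2;
  toy_values[toy_num_exprs++] = toy_next_value;
  return toy_next_value++;
}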
132 /* There's no BB_EXECUTABLE but we can use BB_VISITED. */
133 #define BB_EXECUTABLE BB_VISITED
135 static vn_lookup_kind default_vn_walk_kind;
137 /* vn_nary_op hashtable helpers. */
139 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
141 typedef vn_nary_op_s *compare_type;
142 static inline hashval_t hash (const vn_nary_op_s *);
143 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
146 /* Return the computed hashcode for nary operation P1. */
148 inline hashval_t
149 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
151 return vno1->hashcode;
154 /* Compare nary operations P1 and P2 and return true if they are
155 equivalent. */
157 inline bool
158 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
160 return vno1 == vno2 || vn_nary_op_eq (vno1, vno2);
163 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
164 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
167 /* vn_phi hashtable helpers. */
169 static int
170 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
172 struct vn_phi_hasher : nofree_ptr_hash <vn_phi_s>
174 static inline hashval_t hash (const vn_phi_s *);
175 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
178 /* Return the computed hashcode for phi operation P1. */
180 inline hashval_t
181 vn_phi_hasher::hash (const vn_phi_s *vp1)
183 return vp1->hashcode;
186 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
188 inline bool
189 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
191 return vp1 == vp2 || vn_phi_eq (vp1, vp2);
194 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
195 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
198 /* Compare two reference operands P1 and P2 for equality. Return true if
199 they are equal, and false otherwise. */
201 static int
202 vn_reference_op_eq (const void *p1, const void *p2)
204 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
205 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
207 return (vro1->opcode == vro2->opcode
208 /* We do not care for differences in type qualification. */
209 && (vro1->type == vro2->type
210 || (vro1->type && vro2->type
211 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
212 TYPE_MAIN_VARIANT (vro2->type))))
213 && expressions_equal_p (vro1->op0, vro2->op0)
214 && expressions_equal_p (vro1->op1, vro2->op1)
215 && expressions_equal_p (vro1->op2, vro2->op2));
218 /* Free a reference operation structure VP. */
220 static inline void
221 free_reference (vn_reference_s *vr)
223 vr->operands.release ();
227 /* vn_reference hashtable helpers. */
229 struct vn_reference_hasher : nofree_ptr_hash <vn_reference_s>
231 static inline hashval_t hash (const vn_reference_s *);
232 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
235 /* Return the hashcode for a given reference operation P1. */
237 inline hashval_t
238 vn_reference_hasher::hash (const vn_reference_s *vr1)
240 return vr1->hashcode;
243 inline bool
244 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
246 return v == c || vn_reference_eq (v, c);
249 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
250 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
252 /* Pretty-print OPS to OUTFILE. */
254 void
255 print_vn_reference_ops (FILE *outfile, const vec<vn_reference_op_s> ops)
257 vn_reference_op_t vro;
258 unsigned int i;
259 fprintf (outfile, "{");
260 for (i = 0; ops.iterate (i, &vro); i++)
262 bool closebrace = false;
263 if (vro->opcode != SSA_NAME
264 && TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
266 fprintf (outfile, "%s", get_tree_code_name (vro->opcode));
267 if (vro->op0)
269 fprintf (outfile, "<");
270 closebrace = true;
273 if (vro->op0)
275 print_generic_expr (outfile, vro->op0);
276 if (vro->op1)
278 fprintf (outfile, ",");
279 print_generic_expr (outfile, vro->op1);
281 if (vro->op2)
283 fprintf (outfile, ",");
284 print_generic_expr (outfile, vro->op2);
287 if (closebrace)
288 fprintf (outfile, ">");
289 if (i != ops.length () - 1)
290 fprintf (outfile, ",");
292 fprintf (outfile, "}");
295 DEBUG_FUNCTION void
296 debug_vn_reference_ops (const vec<vn_reference_op_s> ops)
298 print_vn_reference_ops (stderr, ops);
299 fputc ('\n', stderr);
302 /* The set of VN hashtables. */
304 typedef struct vn_tables_s
306 vn_nary_op_table_type *nary;
307 vn_phi_table_type *phis;
308 vn_reference_table_type *references;
309 } *vn_tables_t;
312 /* vn_constant hashtable helpers. */
314 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
316 static inline hashval_t hash (const vn_constant_s *);
317 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
320 /* Hash table hash function for vn_constant_t. */
322 inline hashval_t
323 vn_constant_hasher::hash (const vn_constant_s *vc1)
325 return vc1->hashcode;
328 /* Hash table equality function for vn_constant_t. */
330 inline bool
331 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
333 if (vc1->hashcode != vc2->hashcode)
334 return false;
336 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
339 static hash_table<vn_constant_hasher> *constant_to_value_id;
342 /* Obstack we allocate the vn-tables elements from. */
343 static obstack vn_tables_obstack;
344 /* Special obstack we never unwind. */
345 static obstack vn_tables_insert_obstack;
347 static vn_reference_t last_inserted_ref;
348 static vn_phi_t last_inserted_phi;
349 static vn_nary_op_t last_inserted_nary;
350 static vn_ssa_aux_t last_pushed_avail;
352 /* Valid hashtables storing information we have proven to be
353 correct. */
354 static vn_tables_t valid_info;
357 /* Valueization hook for simplify_replace_tree. Valueize NAME if it is
358 an SSA name, otherwise just return it. */
359 tree (*vn_valueize) (tree);
360 static tree
361 vn_valueize_for_srt (tree t, void* context ATTRIBUTE_UNUSED)
363 basic_block saved_vn_context_bb = vn_context_bb;
 364   /* Look for something available at the definition block of the argument.
365 This avoids inconsistencies between availability there which
366 decides if the stmt can be removed and availability at the
367 use site. The SSA property ensures that things available
368 at the definition are also available at uses. */
369 if (!SSA_NAME_IS_DEFAULT_DEF (t))
370 vn_context_bb = gimple_bb (SSA_NAME_DEF_STMT (t));
371 tree res = vn_valueize (t);
372 vn_context_bb = saved_vn_context_bb;
373 return res;
377 /* This represents the top of the VN lattice, which is the universal
378 value. */
380 tree VN_TOP;
382 /* Unique counter for our value ids. */
384 static unsigned int next_value_id;
385 static int next_constant_value_id;
388 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
389 are allocated on an obstack for locality reasons, and to free them
390 without looping over the vec. */
392 struct vn_ssa_aux_hasher : typed_noop_remove <vn_ssa_aux_t>
394 typedef vn_ssa_aux_t value_type;
395 typedef tree compare_type;
396 static inline hashval_t hash (const value_type &);
397 static inline bool equal (const value_type &, const compare_type &);
398 static inline void mark_deleted (value_type &) {}
399 static const bool empty_zero_p = true;
400 static inline void mark_empty (value_type &e) { e = NULL; }
401 static inline bool is_deleted (value_type &) { return false; }
402 static inline bool is_empty (value_type &e) { return e == NULL; }
405 hashval_t
406 vn_ssa_aux_hasher::hash (const value_type &entry)
408 return SSA_NAME_VERSION (entry->name);
411 bool
412 vn_ssa_aux_hasher::equal (const value_type &entry, const compare_type &name)
414 return name == entry->name;
417 static hash_table<vn_ssa_aux_hasher> *vn_ssa_aux_hash;
418 typedef hash_table<vn_ssa_aux_hasher>::iterator vn_ssa_aux_iterator_type;
419 static struct obstack vn_ssa_aux_obstack;
421 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *, tree);
422 static unsigned int vn_nary_length_from_stmt (gimple *);
423 static vn_nary_op_t alloc_vn_nary_op_noinit (unsigned int, obstack *);
424 static vn_nary_op_t vn_nary_op_insert_into (vn_nary_op_t,
425 vn_nary_op_table_type *, bool);
426 static void init_vn_nary_op_from_stmt (vn_nary_op_t, gassign *);
427 static void init_vn_nary_op_from_pieces (vn_nary_op_t, unsigned int,
428 enum tree_code, tree, tree *);
429 static tree vn_lookup_simplify_result (gimple_match_op *);
430 static vn_reference_t vn_reference_lookup_or_insert_for_pieces
431 (tree, alias_set_type, alias_set_type, tree,
432 vec<vn_reference_op_s, va_heap>, tree);
434 /* Return whether there is value numbering information for a given SSA name. */
436 bool
437 has_VN_INFO (tree name)
439 return vn_ssa_aux_hash->find_with_hash (name, SSA_NAME_VERSION (name));
442 vn_ssa_aux_t
443 VN_INFO (tree name)
445 vn_ssa_aux_t *res
446 = vn_ssa_aux_hash->find_slot_with_hash (name, SSA_NAME_VERSION (name),
447 INSERT);
448 if (*res != NULL)
449 return *res;
451 vn_ssa_aux_t newinfo = *res = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
452 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
453 newinfo->name = name;
454 newinfo->valnum = VN_TOP;
455 /* We are using the visited flag to handle uses with defs not within the
456 region being value-numbered. */
457 newinfo->visited = false;
459 /* Given we create the VN_INFOs on-demand now we have to do initialization
460 different than VN_TOP here. */
461 if (SSA_NAME_IS_DEFAULT_DEF (name))
462 switch (TREE_CODE (SSA_NAME_VAR (name)))
464 case VAR_DECL:
465 /* All undefined vars are VARYING. */
466 newinfo->valnum = name;
467 newinfo->visited = true;
468 break;
470 case PARM_DECL:
471 /* Parameters are VARYING but we can record a condition
472 if we know it is a non-NULL pointer. */
473 newinfo->visited = true;
474 newinfo->valnum = name;
475 if (POINTER_TYPE_P (TREE_TYPE (name))
476 && nonnull_arg_p (SSA_NAME_VAR (name)))
478 tree ops[2];
479 ops[0] = name;
480 ops[1] = build_int_cst (TREE_TYPE (name), 0);
481 vn_nary_op_t nary;
482 /* Allocate from non-unwinding stack. */
483 nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
484 init_vn_nary_op_from_pieces (nary, 2, NE_EXPR,
485 boolean_type_node, ops);
486 nary->predicated_values = 0;
487 nary->u.result = boolean_true_node;
488 vn_nary_op_insert_into (nary, valid_info->nary, true);
489 gcc_assert (nary->unwind_to == NULL);
490 /* Also do not link it into the undo chain. */
491 last_inserted_nary = nary->next;
492 nary->next = (vn_nary_op_t)(void *)-1;
493 nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
494 init_vn_nary_op_from_pieces (nary, 2, EQ_EXPR,
495 boolean_type_node, ops);
496 nary->predicated_values = 0;
497 nary->u.result = boolean_false_node;
498 vn_nary_op_insert_into (nary, valid_info->nary, true);
499 gcc_assert (nary->unwind_to == NULL);
500 last_inserted_nary = nary->next;
501 nary->next = (vn_nary_op_t)(void *)-1;
502 if (dump_file && (dump_flags & TDF_DETAILS))
504 fprintf (dump_file, "Recording ");
505 print_generic_expr (dump_file, name, TDF_SLIM);
506 fprintf (dump_file, " != 0\n");
509 break;
511 case RESULT_DECL:
512 /* If the result is passed by invisible reference the default
513 def is initialized, otherwise it's uninitialized. Still
514 undefined is varying. */
515 newinfo->visited = true;
516 newinfo->valnum = name;
517 break;
519 default:
520 gcc_unreachable ();
522 return newinfo;
525 /* Return the SSA value of X. */
527 inline tree
528 SSA_VAL (tree x, bool *visited = NULL)
530 vn_ssa_aux_t tem = vn_ssa_aux_hash->find_with_hash (x, SSA_NAME_VERSION (x));
531 if (visited)
532 *visited = tem && tem->visited;
533 return tem && tem->visited ? tem->valnum : x;
536 /* Return the SSA value of the VUSE x, supporting released VDEFs
537 during elimination which will value-number the VDEF to the
538 associated VUSE (but not substitute in the whole lattice). */
540 static inline tree
541 vuse_ssa_val (tree x)
543 if (!x)
544 return NULL_TREE;
548 x = SSA_VAL (x);
549 gcc_assert (x != VN_TOP);
551 while (SSA_NAME_IN_FREE_LIST (x));
553 return x;
 556 /* Similar to the above but used as a callback for walk_non_aliased_vuses
 557    and thus should stop at an unvisited VUSE to not walk across region
558 boundaries. */
560 static tree
561 vuse_valueize (tree vuse)
565 bool visited;
566 vuse = SSA_VAL (vuse, &visited);
567 if (!visited)
568 return NULL_TREE;
569 gcc_assert (vuse != VN_TOP);
571 while (SSA_NAME_IN_FREE_LIST (vuse));
572 return vuse;
576 /* Return the vn_kind the expression computed by the stmt should be
577 associated with. */
579 enum vn_kind
580 vn_get_stmt_kind (gimple *stmt)
582 switch (gimple_code (stmt))
584 case GIMPLE_CALL:
585 return VN_REFERENCE;
586 case GIMPLE_PHI:
587 return VN_PHI;
588 case GIMPLE_ASSIGN:
590 enum tree_code code = gimple_assign_rhs_code (stmt);
591 tree rhs1 = gimple_assign_rhs1 (stmt);
592 switch (get_gimple_rhs_class (code))
594 case GIMPLE_UNARY_RHS:
595 case GIMPLE_BINARY_RHS:
596 case GIMPLE_TERNARY_RHS:
597 return VN_NARY;
598 case GIMPLE_SINGLE_RHS:
599 switch (TREE_CODE_CLASS (code))
601 case tcc_reference:
602 /* VOP-less references can go through unary case. */
603 if ((code == REALPART_EXPR
604 || code == IMAGPART_EXPR
605 || code == VIEW_CONVERT_EXPR
606 || code == BIT_FIELD_REF)
607 && (TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME
608 || is_gimple_min_invariant (TREE_OPERAND (rhs1, 0))))
609 return VN_NARY;
611 /* Fallthrough. */
612 case tcc_declaration:
613 return VN_REFERENCE;
615 case tcc_constant:
616 return VN_CONSTANT;
618 default:
619 if (code == ADDR_EXPR)
620 return (is_gimple_min_invariant (rhs1)
621 ? VN_CONSTANT : VN_REFERENCE);
622 else if (code == CONSTRUCTOR)
623 return VN_NARY;
624 return VN_NONE;
626 default:
627 return VN_NONE;
630 default:
631 return VN_NONE;
635 /* Lookup a value id for CONSTANT and return it. If it does not
 636    exist, return 0.  */
638 unsigned int
639 get_constant_value_id (tree constant)
641 vn_constant_s **slot;
642 struct vn_constant_s vc;
644 vc.hashcode = vn_hash_constant_with_type (constant);
645 vc.constant = constant;
646 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
647 if (slot)
648 return (*slot)->value_id;
649 return 0;
652 /* Lookup a value id for CONSTANT, and if it does not exist, create a
653 new one and return it. If it does exist, return it. */
655 unsigned int
656 get_or_alloc_constant_value_id (tree constant)
658 vn_constant_s **slot;
659 struct vn_constant_s vc;
660 vn_constant_t vcp;
662 /* If the hashtable isn't initialized we're not running from PRE and thus
663 do not need value-ids. */
664 if (!constant_to_value_id)
665 return 0;
667 vc.hashcode = vn_hash_constant_with_type (constant);
668 vc.constant = constant;
669 slot = constant_to_value_id->find_slot (&vc, INSERT);
670 if (*slot)
671 return (*slot)->value_id;
673 vcp = XNEW (struct vn_constant_s);
674 vcp->hashcode = vc.hashcode;
675 vcp->constant = constant;
676 vcp->value_id = get_next_constant_value_id ();
677 *slot = vcp;
678 return vcp->value_id;
681 /* Compute the hash for a reference operand VRO1. */
683 static void
684 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
686 hstate.add_int (vro1->opcode);
687 if (vro1->op0)
688 inchash::add_expr (vro1->op0, hstate);
689 if (vro1->op1)
690 inchash::add_expr (vro1->op1, hstate);
691 if (vro1->op2)
692 inchash::add_expr (vro1->op2, hstate);
695 /* Compute a hash for the reference operation VR1 and return it. */
697 static hashval_t
698 vn_reference_compute_hash (const vn_reference_t vr1)
700 inchash::hash hstate;
701 hashval_t result;
702 int i;
703 vn_reference_op_t vro;
704 poly_int64 off = -1;
705 bool deref = false;
707 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
709 if (vro->opcode == MEM_REF)
710 deref = true;
711 else if (vro->opcode != ADDR_EXPR)
712 deref = false;
713 if (maybe_ne (vro->off, -1))
715 if (known_eq (off, -1))
716 off = 0;
717 off += vro->off;
719 else
721 if (maybe_ne (off, -1)
722 && maybe_ne (off, 0))
723 hstate.add_poly_int (off);
724 off = -1;
725 if (deref
726 && vro->opcode == ADDR_EXPR)
728 if (vro->op0)
730 tree op = TREE_OPERAND (vro->op0, 0);
731 hstate.add_int (TREE_CODE (op));
732 inchash::add_expr (op, hstate);
735 else
736 vn_reference_op_compute_hash (vro, hstate);
739 result = hstate.end ();
740 /* ??? We would ICE later if we hash instead of adding that in. */
741 if (vr1->vuse)
742 result += SSA_NAME_VERSION (vr1->vuse);
744 return result;
747 /* Return true if reference operations VR1 and VR2 are equivalent. This
748 means they have the same set of operands and vuses. */
750 bool
751 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
753 unsigned i, j;
755 /* Early out if this is not a hash collision. */
756 if (vr1->hashcode != vr2->hashcode)
757 return false;
759 /* The VOP needs to be the same. */
760 if (vr1->vuse != vr2->vuse)
761 return false;
763 /* If the operands are the same we are done. */
764 if (vr1->operands == vr2->operands)
765 return true;
767 if (!vr1->type || !vr2->type)
769 if (vr1->type != vr2->type)
770 return false;
772 else if (COMPLETE_TYPE_P (vr1->type) != COMPLETE_TYPE_P (vr2->type)
773 || (COMPLETE_TYPE_P (vr1->type)
774 && !expressions_equal_p (TYPE_SIZE (vr1->type),
775 TYPE_SIZE (vr2->type))))
776 return false;
777 else if (INTEGRAL_TYPE_P (vr1->type)
778 && INTEGRAL_TYPE_P (vr2->type))
780 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
781 return false;
783 else if (INTEGRAL_TYPE_P (vr1->type)
784 && (TYPE_PRECISION (vr1->type)
785 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
786 return false;
787 else if (INTEGRAL_TYPE_P (vr2->type)
788 && (TYPE_PRECISION (vr2->type)
789 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
790 return false;
792 i = 0;
793 j = 0;
796 poly_int64 off1 = 0, off2 = 0;
797 vn_reference_op_t vro1, vro2;
798 vn_reference_op_s tem1, tem2;
799 bool deref1 = false, deref2 = false;
800 bool reverse1 = false, reverse2 = false;
801 for (; vr1->operands.iterate (i, &vro1); i++)
803 if (vro1->opcode == MEM_REF)
804 deref1 = true;
805 /* Do not look through a storage order barrier. */
806 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
807 return false;
808 reverse1 |= vro1->reverse;
809 if (known_eq (vro1->off, -1))
810 break;
811 off1 += vro1->off;
813 for (; vr2->operands.iterate (j, &vro2); j++)
815 if (vro2->opcode == MEM_REF)
816 deref2 = true;
817 /* Do not look through a storage order barrier. */
818 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
819 return false;
820 reverse2 |= vro2->reverse;
821 if (known_eq (vro2->off, -1))
822 break;
823 off2 += vro2->off;
825 if (maybe_ne (off1, off2) || reverse1 != reverse2)
826 return false;
827 if (deref1 && vro1->opcode == ADDR_EXPR)
829 memset (&tem1, 0, sizeof (tem1));
830 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
831 tem1.type = TREE_TYPE (tem1.op0);
832 tem1.opcode = TREE_CODE (tem1.op0);
833 vro1 = &tem1;
834 deref1 = false;
836 if (deref2 && vro2->opcode == ADDR_EXPR)
838 memset (&tem2, 0, sizeof (tem2));
839 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
840 tem2.type = TREE_TYPE (tem2.op0);
841 tem2.opcode = TREE_CODE (tem2.op0);
842 vro2 = &tem2;
843 deref2 = false;
845 if (deref1 != deref2)
846 return false;
847 if (!vn_reference_op_eq (vro1, vro2))
848 return false;
849 ++j;
850 ++i;
852 while (vr1->operands.length () != i
853 || vr2->operands.length () != j);
855 return true;
858 /* Copy the operations present in load/store REF into RESULT, a vector of
859 vn_reference_op_s's. */
861 static void
862 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
864 /* For non-calls, store the information that makes up the address. */
865 tree orig = ref;
866 while (ref)
868 vn_reference_op_s temp;
870 memset (&temp, 0, sizeof (temp));
871 temp.type = TREE_TYPE (ref);
872 temp.opcode = TREE_CODE (ref);
873 temp.off = -1;
875 switch (temp.opcode)
877 case MODIFY_EXPR:
878 temp.op0 = TREE_OPERAND (ref, 1);
879 break;
880 case WITH_SIZE_EXPR:
881 temp.op0 = TREE_OPERAND (ref, 1);
882 temp.off = 0;
883 break;
884 case MEM_REF:
885 /* The base address gets its own vn_reference_op_s structure. */
886 temp.op0 = TREE_OPERAND (ref, 1);
887 if (!mem_ref_offset (ref).to_shwi (&temp.off))
888 temp.off = -1;
889 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
890 temp.base = MR_DEPENDENCE_BASE (ref);
891 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
892 break;
893 case TARGET_MEM_REF:
894 /* The base address gets its own vn_reference_op_s structure. */
895 temp.op0 = TMR_INDEX (ref);
896 temp.op1 = TMR_STEP (ref);
897 temp.op2 = TMR_OFFSET (ref);
898 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
899 temp.base = MR_DEPENDENCE_BASE (ref);
900 result->safe_push (temp);
901 memset (&temp, 0, sizeof (temp));
902 temp.type = NULL_TREE;
903 temp.opcode = ERROR_MARK;
904 temp.op0 = TMR_INDEX2 (ref);
905 temp.off = -1;
906 break;
907 case BIT_FIELD_REF:
908 /* Record bits, position and storage order. */
909 temp.op0 = TREE_OPERAND (ref, 1);
910 temp.op1 = TREE_OPERAND (ref, 2);
911 if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
912 temp.off = -1;
913 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
914 break;
915 case COMPONENT_REF:
 916       /* The field decl is enough to unambiguously specify the field;
917 a matching type is not necessary and a mismatching type
918 is always a spurious difference. */
919 temp.type = NULL_TREE;
920 temp.op0 = TREE_OPERAND (ref, 1);
921 temp.op1 = TREE_OPERAND (ref, 2);
922 temp.reverse = (AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (ref, 0)))
923 && TYPE_REVERSE_STORAGE_ORDER
924 (TREE_TYPE (TREE_OPERAND (ref, 0))));
926 tree this_offset = component_ref_field_offset (ref);
927 if (this_offset
928 && poly_int_tree_p (this_offset))
930 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
931 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
933 poly_offset_int off
934 = (wi::to_poly_offset (this_offset)
935 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
 936           /* Prohibit value-numbering zero offset components
 937              of addresses the same before the pass folding
 938              __builtin_object_size had a chance to run.  */
939 if (TREE_CODE (orig) != ADDR_EXPR
940 || maybe_ne (off, 0)
941 || (cfun->curr_properties & PROP_objsz))
942 off.to_shwi (&temp.off);
946 break;
947 case ARRAY_RANGE_REF:
948 case ARRAY_REF:
950 tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
951 /* Record index as operand. */
952 temp.op0 = TREE_OPERAND (ref, 1);
953 /* Always record lower bounds and element size. */
954 temp.op1 = array_ref_low_bound (ref);
955 /* But record element size in units of the type alignment. */
956 temp.op2 = TREE_OPERAND (ref, 3);
957 temp.align = eltype->type_common.align;
958 if (! temp.op2)
959 temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
960 size_int (TYPE_ALIGN_UNIT (eltype)));
961 if (poly_int_tree_p (temp.op0)
962 && poly_int_tree_p (temp.op1)
963 && TREE_CODE (temp.op2) == INTEGER_CST)
965 poly_offset_int off = ((wi::to_poly_offset (temp.op0)
966 - wi::to_poly_offset (temp.op1))
967 * wi::to_offset (temp.op2)
968 * vn_ref_op_align_unit (&temp));
969 off.to_shwi (&temp.off);
971 temp.reverse = (AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (ref, 0)))
972 && TYPE_REVERSE_STORAGE_ORDER
973 (TREE_TYPE (TREE_OPERAND (ref, 0))));
975 break;
976 case VAR_DECL:
977 if (DECL_HARD_REGISTER (ref))
979 temp.op0 = ref;
980 break;
982 /* Fallthru. */
983 case PARM_DECL:
984 case CONST_DECL:
985 case RESULT_DECL:
986 /* Canonicalize decls to MEM[&decl] which is what we end up with
987 when valueizing MEM[ptr] with ptr = &decl. */
988 temp.opcode = MEM_REF;
989 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
990 temp.off = 0;
991 result->safe_push (temp);
992 temp.opcode = ADDR_EXPR;
993 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
994 temp.type = TREE_TYPE (temp.op0);
995 temp.off = -1;
996 break;
997 case STRING_CST:
998 case INTEGER_CST:
999 case POLY_INT_CST:
1000 case COMPLEX_CST:
1001 case VECTOR_CST:
1002 case REAL_CST:
1003 case FIXED_CST:
1004 case CONSTRUCTOR:
1005 case SSA_NAME:
1006 temp.op0 = ref;
1007 break;
1008 case ADDR_EXPR:
1009 if (is_gimple_min_invariant (ref))
1011 temp.op0 = ref;
1012 break;
1014 break;
1015 /* These are only interesting for their operands, their
1016 existence, and their type. They will never be the last
1017        ref in the chain of references (i.e. they require an
1018        operand), so we don't have to put anything
1019        for op* as it will be handled by the iteration.  */
1020 case REALPART_EXPR:
1021 temp.off = 0;
1022 break;
1023 case VIEW_CONVERT_EXPR:
1024 temp.off = 0;
1025 temp.reverse = storage_order_barrier_p (ref);
1026 break;
1027 case IMAGPART_EXPR:
1028 /* This is only interesting for its constant offset. */
1029 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
1030 break;
1031 default:
1032 gcc_unreachable ();
1034 result->safe_push (temp);
1036 if (REFERENCE_CLASS_P (ref)
1037 || TREE_CODE (ref) == MODIFY_EXPR
1038 || TREE_CODE (ref) == WITH_SIZE_EXPR
1039 || (TREE_CODE (ref) == ADDR_EXPR
1040 && !is_gimple_min_invariant (ref)))
1041 ref = TREE_OPERAND (ref, 0);
1042 else
1043 ref = NULL_TREE;
1047 /* Build an alias-oracle reference abstraction in *REF from the vn_reference
1048 operands in *OPS, the reference alias set SET and the reference type TYPE.
1049 Return true if something useful was produced. */
1051 bool
1052 ao_ref_init_from_vn_reference (ao_ref *ref,
1053 alias_set_type set, alias_set_type base_set,
1054 tree type, const vec<vn_reference_op_s> &ops)
1056 unsigned i;
1057 tree base = NULL_TREE;
1058 tree *op0_p = &base;
1059 poly_offset_int offset = 0;
1060 poly_offset_int max_size;
1061 poly_offset_int size = -1;
1062 tree size_tree = NULL_TREE;
1064 /* We don't handle calls. */
1065 if (!type)
1066 return false;
1068 machine_mode mode = TYPE_MODE (type);
1069 if (mode == BLKmode)
1070 size_tree = TYPE_SIZE (type);
1071 else
1072 size = GET_MODE_BITSIZE (mode);
1073 if (size_tree != NULL_TREE
1074 && poly_int_tree_p (size_tree))
1075 size = wi::to_poly_offset (size_tree);
1077 /* Lower the final access size from the outermost expression. */
1078 const_vn_reference_op_t cst_op = &ops[0];
1079 /* Cast away constness for the sake of the const-unsafe
1080 FOR_EACH_VEC_ELT(). */
1081 vn_reference_op_t op = const_cast<vn_reference_op_t>(cst_op);
1082 size_tree = NULL_TREE;
1083 if (op->opcode == COMPONENT_REF)
1084 size_tree = DECL_SIZE (op->op0);
1085 else if (op->opcode == BIT_FIELD_REF)
1086 size_tree = op->op0;
1087 if (size_tree != NULL_TREE
1088 && poly_int_tree_p (size_tree)
1089 && (!known_size_p (size)
1090 || known_lt (wi::to_poly_offset (size_tree), size)))
1091 size = wi::to_poly_offset (size_tree);
1093 /* Initially, maxsize is the same as the accessed element size.
1094 In the following it will only grow (or become -1). */
1095 max_size = size;
1097 /* Compute cumulative bit-offset for nested component-refs and array-refs,
1098 and find the ultimate containing object. */
1099 FOR_EACH_VEC_ELT (ops, i, op)
1101 switch (op->opcode)
1103 /* These may be in the reference ops, but we cannot do anything
1104 sensible with them here. */
1105 case ADDR_EXPR:
1106 /* Apart from ADDR_EXPR arguments to MEM_REF. */
1107 if (base != NULL_TREE
1108 && TREE_CODE (base) == MEM_REF
1109 && op->op0
1110 && DECL_P (TREE_OPERAND (op->op0, 0)))
1112 const_vn_reference_op_t pop = &ops[i-1];
1113 base = TREE_OPERAND (op->op0, 0);
1114 if (known_eq (pop->off, -1))
1116 max_size = -1;
1117 offset = 0;
1119 else
1120 offset += pop->off * BITS_PER_UNIT;
1121 op0_p = NULL;
1122 break;
1124 /* Fallthru. */
1125 case CALL_EXPR:
1126 return false;
1128 /* Record the base objects. */
1129 case MEM_REF:
1130 *op0_p = build2 (MEM_REF, op->type,
1131 NULL_TREE, op->op0);
1132 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
1133 MR_DEPENDENCE_BASE (*op0_p) = op->base;
1134 op0_p = &TREE_OPERAND (*op0_p, 0);
1135 break;
1137 case VAR_DECL:
1138 case PARM_DECL:
1139 case RESULT_DECL:
1140 case SSA_NAME:
1141 *op0_p = op->op0;
1142 op0_p = NULL;
1143 break;
1145 /* And now the usual component-reference style ops. */
1146 case BIT_FIELD_REF:
1147 offset += wi::to_poly_offset (op->op1);
1148 break;
1150 case COMPONENT_REF:
1152 tree field = op->op0;
1153 /* We do not have a complete COMPONENT_REF tree here so we
1154 cannot use component_ref_field_offset. Do the interesting
1155 parts manually. */
1156 tree this_offset = DECL_FIELD_OFFSET (field);
1158 if (op->op1 || !poly_int_tree_p (this_offset))
1159 max_size = -1;
1160 else
1162 poly_offset_int woffset = (wi::to_poly_offset (this_offset)
1163 << LOG2_BITS_PER_UNIT);
1164 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1165 offset += woffset;
1167 break;
1170 case ARRAY_RANGE_REF:
1171 case ARRAY_REF:
1172 /* We recorded the lower bound and the element size. */
1173 if (!poly_int_tree_p (op->op0)
1174 || !poly_int_tree_p (op->op1)
1175 || TREE_CODE (op->op2) != INTEGER_CST)
1176 max_size = -1;
1177 else
1179 poly_offset_int woffset
1180 = wi::sext (wi::to_poly_offset (op->op0)
1181 - wi::to_poly_offset (op->op1),
1182 TYPE_PRECISION (sizetype));
1183 woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1184 woffset <<= LOG2_BITS_PER_UNIT;
1185 offset += woffset;
1187 break;
1189 case REALPART_EXPR:
1190 break;
1192 case IMAGPART_EXPR:
1193 offset += size;
1194 break;
1196 case VIEW_CONVERT_EXPR:
1197 break;
1199 case STRING_CST:
1200 case INTEGER_CST:
1201 case COMPLEX_CST:
1202 case VECTOR_CST:
1203 case REAL_CST:
1204 case CONSTRUCTOR:
1205 case CONST_DECL:
1206 return false;
1208 default:
1209 return false;
1213 if (base == NULL_TREE)
1214 return false;
1216 ref->ref = NULL_TREE;
1217 ref->base = base;
1218 ref->ref_alias_set = set;
1219 ref->base_alias_set = base_set;
1220 /* We discount volatiles from value-numbering elsewhere. */
1221 ref->volatile_p = false;
1223 if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
1225 ref->offset = 0;
1226 ref->size = -1;
1227 ref->max_size = -1;
1228 return true;
1231 if (!offset.to_shwi (&ref->offset))
1233 ref->offset = 0;
1234 ref->max_size = -1;
1235 return true;
1238 if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
1239 ref->max_size = -1;
1241 return true;
1244 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1245 vn_reference_op_s's. */
1247 static void
1248 copy_reference_ops_from_call (gcall *call,
1249 vec<vn_reference_op_s> *result)
1251 vn_reference_op_s temp;
1252 unsigned i;
1253 tree lhs = gimple_call_lhs (call);
1254 int lr;
1256 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1257 different. By adding the lhs here in the vector, we ensure that the
1258 hashcode is different, guaranteeing a different value number. */
1259 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1261 memset (&temp, 0, sizeof (temp));
1262 temp.opcode = MODIFY_EXPR;
1263 temp.type = TREE_TYPE (lhs);
1264 temp.op0 = lhs;
1265 temp.off = -1;
1266 result->safe_push (temp);
1269 /* Copy the type, opcode, function, static chain and EH region, if any. */
1270 memset (&temp, 0, sizeof (temp));
1271 temp.type = gimple_call_fntype (call);
1272 temp.opcode = CALL_EXPR;
1273 temp.op0 = gimple_call_fn (call);
1274 temp.op1 = gimple_call_chain (call);
1275 if (stmt_could_throw_p (cfun, call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1276 temp.op2 = size_int (lr);
1277 temp.off = -1;
1278 result->safe_push (temp);
1280 /* Copy the call arguments. As they can be references as well,
1281 just chain them together. */
1282 for (i = 0; i < gimple_call_num_args (call); ++i)
1284 tree callarg = gimple_call_arg (call, i);
1285 copy_reference_ops_from_ref (callarg, result);
1289 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1290 *I_P to point to the last element of the replacement. */
1291 static bool
1292 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1293 unsigned int *i_p)
1295 unsigned int i = *i_p;
1296 vn_reference_op_t op = &(*ops)[i];
1297 vn_reference_op_t mem_op = &(*ops)[i - 1];
1298 tree addr_base;
1299 poly_int64 addr_offset = 0;
1301 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1302 from .foo.bar to the preceding MEM_REF offset and replace the
1303 address with &OBJ. */
1304 addr_base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (op->op0, 0),
1305 &addr_offset, vn_valueize);
1306 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1307 if (addr_base != TREE_OPERAND (op->op0, 0))
1309 poly_offset_int off
1310 = (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
1311 SIGNED)
1312 + addr_offset);
1313 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1314 op->op0 = build_fold_addr_expr (addr_base);
1315 if (tree_fits_shwi_p (mem_op->op0))
1316 mem_op->off = tree_to_shwi (mem_op->op0);
1317 else
1318 mem_op->off = -1;
1319 return true;
1321 return false;
1324 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1325 *I_P to point to the last element of the replacement. */
1326 static bool
1327 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1328 unsigned int *i_p)
1330 bool changed = false;
1331 vn_reference_op_t op;
1335 unsigned int i = *i_p;
1336 op = &(*ops)[i];
1337 vn_reference_op_t mem_op = &(*ops)[i - 1];
1338 gimple *def_stmt;
1339 enum tree_code code;
1340 poly_offset_int off;
1342 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1343 if (!is_gimple_assign (def_stmt))
1344 return changed;
1346 code = gimple_assign_rhs_code (def_stmt);
1347 if (code != ADDR_EXPR
1348 && code != POINTER_PLUS_EXPR)
1349 return changed;
1351 off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);
1353 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1354 from .foo.bar to the preceding MEM_REF offset and replace the
1355 address with &OBJ. */
1356 if (code == ADDR_EXPR)
1358 tree addr, addr_base;
1359 poly_int64 addr_offset;
1361 addr = gimple_assign_rhs1 (def_stmt);
1362 addr_base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (addr, 0),
1363 &addr_offset,
1364 vn_valueize);
1365       /* If that didn't work because the address isn't invariant, propagate
1366 the reference tree from the address operation in case the current
1367 dereference isn't offsetted. */
1368 if (!addr_base
1369 && *i_p == ops->length () - 1
1370 && known_eq (off, 0)
1371 /* This makes us disable this transform for PRE where the
1372 reference ops might be also used for code insertion which
1373 is invalid. */
1374 && default_vn_walk_kind == VN_WALKREWRITE)
1376 auto_vec<vn_reference_op_s, 32> tem;
1377 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1378 /* Make sure to preserve TBAA info. The only objects not
1379 wrapped in MEM_REFs that can have their address taken are
1380 STRING_CSTs. */
1381 if (tem.length () >= 2
1382 && tem[tem.length () - 2].opcode == MEM_REF)
1384 vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1385 new_mem_op->op0
1386 = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1387 wi::to_poly_wide (new_mem_op->op0));
1389 else
1390 gcc_assert (tem.last ().opcode == STRING_CST);
1391 ops->pop ();
1392 ops->pop ();
1393 ops->safe_splice (tem);
1394 --*i_p;
1395 return true;
1397 if (!addr_base
1398 || TREE_CODE (addr_base) != MEM_REF
1399 || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
1400 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base,
1401 0))))
1402 return changed;
1404 off += addr_offset;
1405 off += mem_ref_offset (addr_base);
1406 op->op0 = TREE_OPERAND (addr_base, 0);
1408 else
1410 tree ptr, ptroff;
1411 ptr = gimple_assign_rhs1 (def_stmt);
1412 ptroff = gimple_assign_rhs2 (def_stmt);
1413 if (TREE_CODE (ptr) != SSA_NAME
1414 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1415 /* Make sure to not endlessly recurse.
1416 See gcc.dg/tree-ssa/20040408-1.c for an example. Can easily
1417 happen when we value-number a PHI to its backedge value. */
1418 || SSA_VAL (ptr) == op->op0
1419 || !poly_int_tree_p (ptroff))
1420 return changed;
1422 off += wi::to_poly_offset (ptroff);
1423 op->op0 = ptr;
1426 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1427 if (tree_fits_shwi_p (mem_op->op0))
1428 mem_op->off = tree_to_shwi (mem_op->op0);
1429 else
1430 mem_op->off = -1;
1431 /* ??? Can end up with endless recursion here!?
1432 gcc.c-torture/execute/strcmp-1.c */
1433 if (TREE_CODE (op->op0) == SSA_NAME)
1434 op->op0 = SSA_VAL (op->op0);
1435 if (TREE_CODE (op->op0) != SSA_NAME)
1436 op->opcode = TREE_CODE (op->op0);
1438 changed = true;
1440 /* Tail-recurse. */
1441 while (TREE_CODE (op->op0) == SSA_NAME);
1443 /* Fold a remaining *&. */
1444 if (TREE_CODE (op->op0) == ADDR_EXPR)
1445 vn_reference_fold_indirect (ops, i_p);
1447 return changed;
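/* Hypothetical source-level example (not from the GCC testsuite) of the
   address forward propagation above: the pointer feeding the MEM_REF is
   defined by an ADDR_EXPR/POINTER_PLUS_EXPR, so the lookup folds the
   address into the reference ops and the load through p is value
   numbered like a direct load of s->f.  */

struct sketch_pair
{
  int a;
  int f;
};

static int
sketch_forwprop_address (struct sketch_pair *s)
{
  int *p = &s->f;	/* p_1 = &s_2(D)->f (or s_2(D) p+ offset).  */
  return *p;		/* Reference ops become those of s_2(D)->f.  */
}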
1450 /* Optimize the reference REF to a constant if possible or return
1451 NULL_TREE if not. */
1453 tree
1454 fully_constant_vn_reference_p (vn_reference_t ref)
1456 vec<vn_reference_op_s> operands = ref->operands;
1457 vn_reference_op_t op;
1459 /* Try to simplify the translated expression if it is
1460 a call to a builtin function with at most two arguments. */
1461 op = &operands[0];
1462 if (op->opcode == CALL_EXPR
1463 && TREE_CODE (op->op0) == ADDR_EXPR
1464 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1465 && fndecl_built_in_p (TREE_OPERAND (op->op0, 0))
1466 && operands.length () >= 2
1467 && operands.length () <= 3)
1469 vn_reference_op_t arg0, arg1 = NULL;
1470 bool anyconst = false;
1471 arg0 = &operands[1];
1472 if (operands.length () > 2)
1473 arg1 = &operands[2];
1474 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1475 || (arg0->opcode == ADDR_EXPR
1476 && is_gimple_min_invariant (arg0->op0)))
1477 anyconst = true;
1478 if (arg1
1479 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1480 || (arg1->opcode == ADDR_EXPR
1481 && is_gimple_min_invariant (arg1->op0))))
1482 anyconst = true;
1483 if (anyconst)
1485 tree folded = build_call_expr (TREE_OPERAND (op->op0, 0),
1486 arg1 ? 2 : 1,
1487 arg0->op0,
1488 arg1 ? arg1->op0 : NULL);
1489 if (folded
1490 && TREE_CODE (folded) == NOP_EXPR)
1491 folded = TREE_OPERAND (folded, 0);
1492 if (folded
1493 && is_gimple_min_invariant (folded))
1494 return folded;
1498 /* Simplify reads from constants or constant initializers. */
1499 else if (BITS_PER_UNIT == 8
1500 && ref->type
1501 && COMPLETE_TYPE_P (ref->type)
1502 && is_gimple_reg_type (ref->type))
1504 poly_int64 off = 0;
1505 HOST_WIDE_INT size;
1506 if (INTEGRAL_TYPE_P (ref->type))
1507 size = TYPE_PRECISION (ref->type);
1508 else if (tree_fits_shwi_p (TYPE_SIZE (ref->type)))
1509 size = tree_to_shwi (TYPE_SIZE (ref->type));
1510 else
1511 return NULL_TREE;
1512 if (size % BITS_PER_UNIT != 0
1513 || size > MAX_BITSIZE_MODE_ANY_MODE)
1514 return NULL_TREE;
1515 size /= BITS_PER_UNIT;
1516 unsigned i;
1517 for (i = 0; i < operands.length (); ++i)
1519 if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1521 ++i;
1522 break;
1524 if (known_eq (operands[i].off, -1))
1525 return NULL_TREE;
1526 off += operands[i].off;
1527 if (operands[i].opcode == MEM_REF)
1529 ++i;
1530 break;
1533 vn_reference_op_t base = &operands[--i];
1534 tree ctor = error_mark_node;
1535 tree decl = NULL_TREE;
1536 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1537 ctor = base->op0;
1538 else if (base->opcode == MEM_REF
1539 && base[1].opcode == ADDR_EXPR
1540 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1541 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
1542 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
1544 decl = TREE_OPERAND (base[1].op0, 0);
1545 if (TREE_CODE (decl) == STRING_CST)
1546 ctor = decl;
1547 else
1548 ctor = ctor_for_folding (decl);
1550 if (ctor == NULL_TREE)
1551 return build_zero_cst (ref->type);
1552 else if (ctor != error_mark_node)
1554 HOST_WIDE_INT const_off;
1555 if (decl)
1557 tree res = fold_ctor_reference (ref->type, ctor,
1558 off * BITS_PER_UNIT,
1559 size * BITS_PER_UNIT, decl);
1560 if (res)
1562 STRIP_USELESS_TYPE_CONVERSION (res);
1563 if (is_gimple_min_invariant (res))
1564 return res;
1567 else if (off.is_constant (&const_off))
1569 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1570 int len = native_encode_expr (ctor, buf, size, const_off);
1571 if (len > 0)
1572 return native_interpret_expr (ref->type, buf, len);
1577 return NULL_TREE;
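/* Hypothetical source-level example (not from the GCC testsuite) of the
   constant-initializer folding above: the load of sketch_c.x has a
   constant offset into a readonly aggregate whose constructor is
   available, so the reference folds to the constant 3.  */

static const struct { int w; int x; } sketch_c = { 2, 3 };

static int
sketch_read_constant_initializer (void)
{
  return sketch_c.x;	/* Value-numbered to 3.  */
}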
1580 /* Return true if OPS contain a storage order barrier. */
1582 static bool
1583 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1585 vn_reference_op_t op;
1586 unsigned i;
1588 FOR_EACH_VEC_ELT (ops, i, op)
1589 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1590 return true;
1592 return false;
1595 /* Return true if OPS represent an access with reverse storage order. */
1597 static bool
1598 reverse_storage_order_for_component_p (vec<vn_reference_op_s> ops)
1600 unsigned i = 0;
1601 if (ops[i].opcode == REALPART_EXPR || ops[i].opcode == IMAGPART_EXPR)
1602 ++i;
1603 switch (ops[i].opcode)
1605 case ARRAY_REF:
1606 case COMPONENT_REF:
1607 case BIT_FIELD_REF:
1608 case MEM_REF:
1609 return ops[i].reverse;
1610 default:
1611 return false;
1615 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1616 structures into their value numbers. This is done in-place, and
1617 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1618 whether any operands were valueized. */
1620 static void
1621 valueize_refs_1 (vec<vn_reference_op_s> *orig, bool *valueized_anything,
1622 bool with_avail = false)
1624 vn_reference_op_t vro;
1625 unsigned int i;
1627 *valueized_anything = false;
1629 FOR_EACH_VEC_ELT (*orig, i, vro)
1631 if (vro->opcode == SSA_NAME
1632 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1634 tree tem = with_avail ? vn_valueize (vro->op0) : SSA_VAL (vro->op0);
1635 if (tem != vro->op0)
1637 *valueized_anything = true;
1638 vro->op0 = tem;
1640 /* If it transforms from an SSA_NAME to a constant, update
1641 the opcode. */
1642 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1643 vro->opcode = TREE_CODE (vro->op0);
1645 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1647 tree tem = with_avail ? vn_valueize (vro->op1) : SSA_VAL (vro->op1);
1648 if (tem != vro->op1)
1650 *valueized_anything = true;
1651 vro->op1 = tem;
1654 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1656 tree tem = with_avail ? vn_valueize (vro->op2) : SSA_VAL (vro->op2);
1657 if (tem != vro->op2)
1659 *valueized_anything = true;
1660 vro->op2 = tem;
1663 /* If it transforms from an SSA_NAME to an address, fold with
1664 a preceding indirect reference. */
1665 if (i > 0
1666 && vro->op0
1667 && TREE_CODE (vro->op0) == ADDR_EXPR
1668 && (*orig)[i - 1].opcode == MEM_REF)
1670 if (vn_reference_fold_indirect (orig, &i))
1671 *valueized_anything = true;
1673 else if (i > 0
1674 && vro->opcode == SSA_NAME
1675 && (*orig)[i - 1].opcode == MEM_REF)
1677 if (vn_reference_maybe_forwprop_address (orig, &i))
1678 *valueized_anything = true;
1680 /* If it transforms a non-constant ARRAY_REF into a constant
1681 one, adjust the constant offset. */
1682 else if (vro->opcode == ARRAY_REF
1683 && known_eq (vro->off, -1)
1684 && poly_int_tree_p (vro->op0)
1685 && poly_int_tree_p (vro->op1)
1686 && TREE_CODE (vro->op2) == INTEGER_CST)
1688 poly_offset_int off = ((wi::to_poly_offset (vro->op0)
1689 - wi::to_poly_offset (vro->op1))
1690 * wi::to_offset (vro->op2)
1691 * vn_ref_op_align_unit (vro));
1692 off.to_shwi (&vro->off);
1697 static void
1698 valueize_refs (vec<vn_reference_op_s> *orig)
1700 bool tem;
1701 valueize_refs_1 (orig, &tem);
1704 static vec<vn_reference_op_s> shared_lookup_references;
1706 /* Create a vector of vn_reference_op_s structures from REF, a
1707 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1708 this function. *VALUEIZED_ANYTHING will specify whether any
1709 operands were valueized. */
1711 static vec<vn_reference_op_s>
1712 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1714 if (!ref)
1715 return vNULL;
1716 shared_lookup_references.truncate (0);
1717 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1718 valueize_refs_1 (&shared_lookup_references, valueized_anything);
1719 return shared_lookup_references;
1722 /* Create a vector of vn_reference_op_s structures from CALL, a
1723 call statement. The vector is shared among all callers of
1724 this function. */
1726 static vec<vn_reference_op_s>
1727 valueize_shared_reference_ops_from_call (gcall *call)
1729 if (!call)
1730 return vNULL;
1731 shared_lookup_references.truncate (0);
1732 copy_reference_ops_from_call (call, &shared_lookup_references);
1733 valueize_refs (&shared_lookup_references);
1734 return shared_lookup_references;
1737 /* Lookup a SCCVN reference operation VR in the current hash table.
1738 Returns the resulting value number if it exists in the hash table,
1739 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1740 vn_reference_t stored in the hashtable if something is found. */
1742 static tree
1743 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1745 vn_reference_s **slot;
1746 hashval_t hash;
1748 hash = vr->hashcode;
1749 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1750 if (slot)
1752 if (vnresult)
1753 *vnresult = (vn_reference_t)*slot;
1754 return ((vn_reference_t)*slot)->result;
1757 return NULL_TREE;
1761 /* Partial definition tracking support. */
1763 struct pd_range
1765 HOST_WIDE_INT offset;
1766 HOST_WIDE_INT size;
1769 struct pd_data
1771 tree rhs;
1772 HOST_WIDE_INT offset;
1773 HOST_WIDE_INT size;
1776 /* Context for alias walking. */
1778 struct vn_walk_cb_data
1780 vn_walk_cb_data (vn_reference_t vr_, tree orig_ref_, tree *last_vuse_ptr_,
1781 vn_lookup_kind vn_walk_kind_, bool tbaa_p_, tree mask_)
1782 : vr (vr_), last_vuse_ptr (last_vuse_ptr_), last_vuse (NULL_TREE),
1783 mask (mask_), masked_result (NULL_TREE), vn_walk_kind (vn_walk_kind_),
1784 tbaa_p (tbaa_p_), saved_operands (vNULL), first_set (-2),
1785 first_base_set (-2), known_ranges (NULL)
1787 if (!last_vuse_ptr)
1788 last_vuse_ptr = &last_vuse;
1789 ao_ref_init (&orig_ref, orig_ref_);
1790 if (mask)
1792 wide_int w = wi::to_wide (mask);
1793 unsigned int pos = 0, prec = w.get_precision ();
1794 pd_data pd;
1795 pd.rhs = build_constructor (NULL_TREE, NULL);
1796       /* When a bitwise AND with a constant is done on a memory load,
1797          we don't really need all the bits to be defined or defined
1798          to constants; we don't really care what is in the positions
1799          corresponding to 0 bits in the mask.
1800 So, push the ranges of those 0 bits in the mask as artificial
1801 zero stores and let the partial def handling code do the
1802 rest. */
1803 while (pos < prec)
1805 int tz = wi::ctz (w);
1806 if (pos + tz > prec)
1807 tz = prec - pos;
1808 if (tz)
1810 if (BYTES_BIG_ENDIAN)
1811 pd.offset = prec - pos - tz;
1812 else
1813 pd.offset = pos;
1814 pd.size = tz;
1815 void *r = push_partial_def (pd, 0, 0, 0, prec);
1816 gcc_assert (r == NULL_TREE);
1818 pos += tz;
1819 if (pos == prec)
1820 break;
1821 w = wi::lrshift (w, tz);
1822 tz = wi::ctz (wi::bit_not (w));
1823 if (pos + tz > prec)
1824 tz = prec - pos;
1825 pos += tz;
1826 w = wi::lrshift (w, tz);
1830 ~vn_walk_cb_data ();
1831 void *finish (alias_set_type, alias_set_type, tree);
1832 void *push_partial_def (pd_data pd,
1833 alias_set_type, alias_set_type, HOST_WIDE_INT,
1834 HOST_WIDE_INT);
1836 vn_reference_t vr;
1837 ao_ref orig_ref;
1838 tree *last_vuse_ptr;
1839 tree last_vuse;
1840 tree mask;
1841 tree masked_result;
1842 vn_lookup_kind vn_walk_kind;
1843 bool tbaa_p;
1844 vec<vn_reference_op_s> saved_operands;
1846 /* The VDEFs of partial defs we come along. */
1847 auto_vec<pd_data, 2> partial_defs;
1848 /* The first defs range to avoid splay tree setup in most cases. */
1849 pd_range first_range;
1850 alias_set_type first_set;
1851 alias_set_type first_base_set;
1852 splay_tree known_ranges;
1853 obstack ranges_obstack;
1856 vn_walk_cb_data::~vn_walk_cb_data ()
1858 if (known_ranges)
1860 splay_tree_delete (known_ranges);
1861 obstack_free (&ranges_obstack, NULL);
1863 saved_operands.release ();
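/* Illustrative sketch (not part of this file; all names hypothetical)
   of the mask handling in the vn_walk_cb_data constructor above,
   assuming the mask fits in a HOST_WIDE_INT: each maximal run of zero
   bits in the mask becomes one artificial zero partial definition
   covering [pos, pos + len).  The real code walks the wide_int with
   ctz and additionally flips the offset for BYTES_BIG_ENDIAN.  */

static void
sketch_mask_zero_runs (unsigned HOST_WIDE_INT w, unsigned int prec,
		       void (*record) (unsigned int pos, unsigned int len))
{
  unsigned int pos = 0;
  while (pos < prec)
    {
      if (((w >> pos) & 1) == 0)
	{
	  /* Measure the maximal run of zero bits starting at POS.  */
	  unsigned int len = 1;
	  while (pos + len < prec && ((w >> (pos + len)) & 1) == 0)
	    ++len;
	  record (pos, len);
	  pos += len;
	}
      else
	++pos;
    }
}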
1866 void *
1867 vn_walk_cb_data::finish (alias_set_type set, alias_set_type base_set, tree val)
1869 if (first_set != -2)
1871 set = first_set;
1872 base_set = first_base_set;
1874 if (mask)
1876 masked_result = val;
1877 return (void *) -1;
1879 vec<vn_reference_op_s> &operands
1880 = saved_operands.exists () ? saved_operands : vr->operands;
1881 return vn_reference_lookup_or_insert_for_pieces (last_vuse, set, base_set,
1882 vr->type, operands, val);
1885 /* pd_range splay-tree helpers. */
1887 static int
1888 pd_range_compare (splay_tree_key offset1p, splay_tree_key offset2p)
1890 HOST_WIDE_INT offset1 = *(HOST_WIDE_INT *)offset1p;
1891 HOST_WIDE_INT offset2 = *(HOST_WIDE_INT *)offset2p;
1892 if (offset1 < offset2)
1893 return -1;
1894 else if (offset1 > offset2)
1895 return 1;
1896 return 0;
1899 static void *
1900 pd_tree_alloc (int size, void *data_)
1902 vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
1903 return obstack_alloc (&data->ranges_obstack, size);
1906 static void
1907 pd_tree_dealloc (void *, void *)
1911 /* Push PD to the vector of partial definitions returning a
1912 value when we are ready to combine things with VUSE, SET and MAXSIZEI,
1913 NULL when we want to continue looking for partial defs or -1
1914 on failure. */
1916 void *
1917 vn_walk_cb_data::push_partial_def (pd_data pd,
1918 alias_set_type set, alias_set_type base_set,
1919 HOST_WIDE_INT offseti,
1920 HOST_WIDE_INT maxsizei)
1922 const HOST_WIDE_INT bufsize = 64;
1923 /* We're using a fixed buffer for encoding so fail early if the object
1924 we want to interpret is bigger. */
1925 if (maxsizei > bufsize * BITS_PER_UNIT
1926 || CHAR_BIT != 8
1927 || BITS_PER_UNIT != 8
1928 /* Not prepared to handle PDP endian. */
1929 || BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
1930 return (void *)-1;
1932 /* Turn too large constant stores into non-constant stores. */
1933 if (CONSTANT_CLASS_P (pd.rhs) && pd.size > bufsize * BITS_PER_UNIT)
1934 pd.rhs = error_mark_node;
1936 /* And for non-constant or CONSTRUCTOR stores shrink them to only keep at
1937 most a partial byte before and/or after the region. */
1938 if (!CONSTANT_CLASS_P (pd.rhs))
1940 if (pd.offset < offseti)
1942 HOST_WIDE_INT o = ROUND_DOWN (offseti - pd.offset, BITS_PER_UNIT);
1943 gcc_assert (pd.size > o);
1944 pd.size -= o;
1945 pd.offset += o;
1947 if (pd.size > maxsizei)
1948 pd.size = maxsizei + ((pd.size - maxsizei) % BITS_PER_UNIT);
1951 pd.offset -= offseti;
1953 bool pd_constant_p = (TREE_CODE (pd.rhs) == CONSTRUCTOR
1954 || CONSTANT_CLASS_P (pd.rhs));
1955 if (partial_defs.is_empty ())
1957 /* If we get a clobber upfront, fail. */
1958 if (TREE_CLOBBER_P (pd.rhs))
1959 return (void *)-1;
1960 if (!pd_constant_p)
1961 return (void *)-1;
1962 partial_defs.safe_push (pd);
1963 first_range.offset = pd.offset;
1964 first_range.size = pd.size;
1965 first_set = set;
1966 first_base_set = base_set;
1967 last_vuse_ptr = NULL;
1968 /* Continue looking for partial defs. */
1969 return NULL;
1972 if (!known_ranges)
1974 /* ??? Optimize the case where the 2nd partial def completes things. */
1975 gcc_obstack_init (&ranges_obstack);
1976 known_ranges = splay_tree_new_with_allocator (pd_range_compare, 0, 0,
1977 pd_tree_alloc,
1978 pd_tree_dealloc, this);
1979 splay_tree_insert (known_ranges,
1980 (splay_tree_key)&first_range.offset,
1981 (splay_tree_value)&first_range);
1984 pd_range newr = { pd.offset, pd.size };
1985 splay_tree_node n;
1986 pd_range *r;
1987 /* Lookup the predecessor of offset + 1 and see if we need to merge. */
1988 HOST_WIDE_INT loffset = newr.offset + 1;
1989 if ((n = splay_tree_predecessor (known_ranges, (splay_tree_key)&loffset))
1990 && ((r = (pd_range *)n->value), true)
1991 && ranges_known_overlap_p (r->offset, r->size + 1,
1992 newr.offset, newr.size))
1994 /* Ignore partial defs already covered.  Here we also drop clobbers
1995 shadowed by already recorded defs on the floor. */
1996 if (known_subrange_p (newr.offset, newr.size, r->offset, r->size))
1997 return NULL;
1998 r->size = MAX (r->offset + r->size, newr.offset + newr.size) - r->offset;
2000 else
2002 /* newr.offset wasn't covered yet, insert the range. */
2003 r = XOBNEW (&ranges_obstack, pd_range);
2004 *r = newr;
2005 splay_tree_insert (known_ranges, (splay_tree_key)&r->offset,
2006 (splay_tree_value)r);
2008 /* Merge r which now contains newr and is a member of the splay tree with
2009 adjacent overlapping ranges. */
2010 pd_range *rafter;
2011 while ((n = splay_tree_successor (known_ranges, (splay_tree_key)&r->offset))
2012 && ((rafter = (pd_range *)n->value), true)
2013 && ranges_known_overlap_p (r->offset, r->size + 1,
2014 rafter->offset, rafter->size))
2016 r->size = MAX (r->offset + r->size,
2017 rafter->offset + rafter->size) - r->offset;
2018 splay_tree_remove (known_ranges, (splay_tree_key)&rafter->offset);
2020 /* If we get a clobber, fail. */
2021 if (TREE_CLOBBER_P (pd.rhs))
2022 return (void *)-1;
2023 /* Non-constants are OK as long as they are shadowed by a constant. */
2024 if (!pd_constant_p)
2025 return (void *)-1;
2026 partial_defs.safe_push (pd);
2028 /* Now we have merged newr into the range tree. When we have covered
2029 the whole access, i.e. [0, maxsizei] after rebasing, the tree will contain
2030 exactly one node which has the desired properties and it will be 'r'. */
2031 if (!known_subrange_p (0, maxsizei, r->offset, r->size))
2032 /* Continue looking for partial defs. */
2033 return NULL;
2035 /* Now simply native encode all partial defs in reverse order. */
2036 unsigned ndefs = partial_defs.length ();
2037 /* We support up to 512-bit values (for V8DFmode). */
2038 unsigned char buffer[bufsize + 1];
2039 unsigned char this_buffer[bufsize + 1];
2040 int len;
2042 memset (buffer, 0, bufsize + 1);
2043 unsigned needed_len = ROUND_UP (maxsizei, BITS_PER_UNIT) / BITS_PER_UNIT;
2044 while (!partial_defs.is_empty ())
2046 pd_data pd = partial_defs.pop ();
2047 unsigned int amnt;
2048 if (TREE_CODE (pd.rhs) == CONSTRUCTOR)
2050 /* Empty CONSTRUCTOR. */
2051 if (pd.size >= needed_len * BITS_PER_UNIT)
2052 len = needed_len;
2053 else
2054 len = ROUND_UP (pd.size, BITS_PER_UNIT) / BITS_PER_UNIT;
2055 memset (this_buffer, 0, len);
2057 else
2059 len = native_encode_expr (pd.rhs, this_buffer, bufsize,
2060 MAX (0, -pd.offset) / BITS_PER_UNIT);
2061 if (len <= 0
2062 || len < (ROUND_UP (pd.size, BITS_PER_UNIT) / BITS_PER_UNIT
2063 - MAX (0, -pd.offset) / BITS_PER_UNIT))
2065 if (dump_file && (dump_flags & TDF_DETAILS))
2066 fprintf (dump_file, "Failed to encode %u "
2067 "partial definitions\n", ndefs);
2068 return (void *)-1;
2072 unsigned char *p = buffer;
2073 HOST_WIDE_INT size = pd.size;
2074 if (pd.offset < 0)
2075 size -= ROUND_DOWN (-pd.offset, BITS_PER_UNIT);
2076 this_buffer[len] = 0;
2077 if (BYTES_BIG_ENDIAN)
2079 /* LSB of this_buffer[len - 1] byte should be at
2080 pd.offset + pd.size - 1 bits in buffer. */
2081 amnt = ((unsigned HOST_WIDE_INT) pd.offset
2082 + pd.size) % BITS_PER_UNIT;
2083 if (amnt)
2084 shift_bytes_in_array_right (this_buffer, len + 1, amnt);
2085 unsigned char *q = this_buffer;
2086 unsigned int off = 0;
2087 if (pd.offset >= 0)
2089 unsigned int msk;
2090 off = pd.offset / BITS_PER_UNIT;
2091 gcc_assert (off < needed_len);
2092 p = buffer + off;
2093 if (size <= amnt)
2095 msk = ((1 << size) - 1) << (BITS_PER_UNIT - amnt);
2096 *p = (*p & ~msk) | (this_buffer[len] & msk);
2097 size = 0;
2099 else
2101 if (TREE_CODE (pd.rhs) != CONSTRUCTOR)
2102 q = (this_buffer + len
2103 - (ROUND_UP (size - amnt, BITS_PER_UNIT)
2104 / BITS_PER_UNIT));
2105 if (pd.offset % BITS_PER_UNIT)
2107 msk = -1U << (BITS_PER_UNIT
2108 - (pd.offset % BITS_PER_UNIT));
2109 *p = (*p & msk) | (*q & ~msk);
2110 p++;
2111 q++;
2112 off++;
2113 size -= BITS_PER_UNIT - (pd.offset % BITS_PER_UNIT);
2114 gcc_assert (size >= 0);
2118 else if (TREE_CODE (pd.rhs) != CONSTRUCTOR)
2120 q = (this_buffer + len
2121 - (ROUND_UP (size - amnt, BITS_PER_UNIT)
2122 / BITS_PER_UNIT));
2123 if (pd.offset % BITS_PER_UNIT)
2125 q++;
2126 size -= BITS_PER_UNIT - ((unsigned HOST_WIDE_INT) pd.offset
2127 % BITS_PER_UNIT);
2128 gcc_assert (size >= 0);
2131 if ((unsigned HOST_WIDE_INT) size / BITS_PER_UNIT + off
2132 > needed_len)
2133 size = (needed_len - off) * BITS_PER_UNIT;
2134 memcpy (p, q, size / BITS_PER_UNIT);
2135 if (size % BITS_PER_UNIT)
2137 unsigned int msk
2138 = -1U << (BITS_PER_UNIT - (size % BITS_PER_UNIT));
2139 p += size / BITS_PER_UNIT;
2140 q += size / BITS_PER_UNIT;
2141 *p = (*q & msk) | (*p & ~msk);
2144 else
2146 if (pd.offset >= 0)
2148 /* LSB of this_buffer[0] byte should be at pd.offset bits
2149 in buffer. */
2150 unsigned int msk;
2151 size = MIN (size, (HOST_WIDE_INT) needed_len * BITS_PER_UNIT);
2152 amnt = pd.offset % BITS_PER_UNIT;
2153 if (amnt)
2154 shift_bytes_in_array_left (this_buffer, len + 1, amnt);
2155 unsigned int off = pd.offset / BITS_PER_UNIT;
2156 gcc_assert (off < needed_len);
2157 size = MIN (size,
2158 (HOST_WIDE_INT) (needed_len - off) * BITS_PER_UNIT);
2159 p = buffer + off;
2160 if (amnt + size < BITS_PER_UNIT)
2162 /* Low amnt bits come from *p, then size bits
2163 from this_buffer[0] and the remaining again from
2164 *p. */
2165 msk = ((1 << size) - 1) << amnt;
2166 *p = (*p & ~msk) | (this_buffer[0] & msk);
2167 size = 0;
2169 else if (amnt)
2171 msk = -1U << amnt;
2172 *p = (*p & ~msk) | (this_buffer[0] & msk);
2173 p++;
2174 size -= (BITS_PER_UNIT - amnt);
2177 else
2179 amnt = (unsigned HOST_WIDE_INT) pd.offset % BITS_PER_UNIT;
2180 if (amnt)
2181 size -= BITS_PER_UNIT - amnt;
2182 size = MIN (size, (HOST_WIDE_INT) needed_len * BITS_PER_UNIT);
2183 if (amnt)
2184 shift_bytes_in_array_left (this_buffer, len + 1, amnt);
2186 memcpy (p, this_buffer + (amnt != 0), size / BITS_PER_UNIT);
2187 p += size / BITS_PER_UNIT;
2188 if (size % BITS_PER_UNIT)
2190 unsigned int msk = -1U << (size % BITS_PER_UNIT);
2191 *p = (this_buffer[(amnt != 0) + size / BITS_PER_UNIT]
2192 & ~msk) | (*p & msk);
2197 tree type = vr->type;
2198 /* Make sure to interpret in a type that has a range covering the whole
2199 access size. */
2200 if (INTEGRAL_TYPE_P (vr->type) && maxsizei != TYPE_PRECISION (vr->type))
2201 type = build_nonstandard_integer_type (maxsizei, TYPE_UNSIGNED (type));
2202 tree val;
2203 if (BYTES_BIG_ENDIAN)
2205 unsigned sz = needed_len;
2206 if (maxsizei % BITS_PER_UNIT)
2207 shift_bytes_in_array_right (buffer, needed_len,
2208 BITS_PER_UNIT
2209 - (maxsizei % BITS_PER_UNIT));
2210 if (INTEGRAL_TYPE_P (type))
2211 sz = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
2212 if (sz > needed_len)
2214 memcpy (this_buffer + (sz - needed_len), buffer, needed_len);
2215 val = native_interpret_expr (type, this_buffer, sz);
2217 else
2218 val = native_interpret_expr (type, buffer, needed_len);
2220 else
2221 val = native_interpret_expr (type, buffer, bufsize);
2222 /* If we chop off bits because the type's precision doesn't match the
2223 memory access size, this is OK when optimizing reads but not when
2224 called from the DSE code during elimination. */
2225 if (val && type != vr->type)
2227 if (! int_fits_type_p (val, vr->type))
2228 val = NULL_TREE;
2229 else
2230 val = fold_convert (vr->type, val);
2233 if (val)
2235 if (dump_file && (dump_flags & TDF_DETAILS))
2236 fprintf (dump_file,
2237 "Successfully combined %u partial definitions\n", ndefs);
2238 /* We are using the alias-set of the first store we encounter which
2239 should be appropriate here. */
2240 return finish (first_set, first_base_set, val);
2242 else
2244 if (dump_file && (dump_flags & TDF_DETAILS))
2245 fprintf (dump_file,
2246 "Failed to interpret %u encoded partial definitions\n", ndefs);
2247 return (void *)-1;
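/* A minimal standalone sketch (not GCC code) of the byte-buffer
   combination push_partial_def performs above, restricted to
   byte-aligned constant stores on a little-endian host: the partial
   defs are encoded into one buffer in program order (a later store is
   applied last and therefore wins on overlap) and the filled buffer is
   then interpreted as a single value, much like native_interpret_expr.  */

#include <assert.h>
#include <stdint.h>
#include <string.h>

struct sketch_partial_def
{
  int offset;                    /* byte offset into the object         */
  int size;                      /* number of bytes stored              */
  const unsigned char *bytes;    /* the constant bytes that were stored */
};

static uint32_t
sketch_combine_partial_defs (const struct sketch_partial_def *defs, int ndefs)
{
  unsigned char buffer[4] = { 0, 0, 0, 0 };
  for (int i = 0; i < ndefs; ++i)                     /* program order   */
    memcpy (buffer + defs[i].offset, defs[i].bytes, defs[i].size);
  uint32_t val;
  memcpy (&val, buffer, sizeof (val));                /* interpret bytes */
  return val;
}

static void
sketch_combine_demo (void)
{
  static const unsigned char lo[2] = { 0x34, 0x12 };  /* bytes 0 and 1 */
  static const unsigned char hi[2] = { 0x78, 0x56 };  /* bytes 2 and 3 */
  const struct sketch_partial_def defs[2] = { { 0, 2, lo }, { 2, 2, hi } };
  assert (sketch_combine_partial_defs (defs, 2) == 0x56781234u);
}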
2251 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
2252 with the current VUSE and performs the expression lookup. */
2254 static void *
2255 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *data_)
2257 vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
2258 vn_reference_t vr = data->vr;
2259 vn_reference_s **slot;
2260 hashval_t hash;
2262 /* If we have partial definitions recorded we have to go through
2263 vn_reference_lookup_3. */
2264 if (!data->partial_defs.is_empty ())
2265 return NULL;
2267 if (data->last_vuse_ptr)
2269 *data->last_vuse_ptr = vuse;
2270 data->last_vuse = vuse;
2273 /* Fixup vuse and hash. */
2274 if (vr->vuse)
2275 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
2276 vr->vuse = vuse_ssa_val (vuse);
2277 if (vr->vuse)
2278 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
2280 hash = vr->hashcode;
2281 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
2282 if (slot)
2284 if ((*slot)->result && data->saved_operands.exists ())
2285 return data->finish (vr->set, vr->base_set, (*slot)->result);
2286 return *slot;
2289 return NULL;
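/* A minimal standalone sketch (not GCC code) of the O(1) hash fixup
   vn_reference_lookup_2 performs above when it substitutes a new VUSE:
   because the VUSE contributes additively to the hash code, its old
   contribution can be subtracted and the new one added without
   rehashing the rest of the reference.  */

#include <assert.h>
#include <stdint.h>

struct sketch_ref
{
  uint32_t operands_hash;   /* hash over everything except the vuse     */
  uint32_t vuse;            /* stand-in for SSA_NAME_VERSION (vr->vuse) */
  uint32_t hashcode;        /* always operands_hash + vuse              */
};

static void
sketch_swap_vuse (struct sketch_ref *r, uint32_t new_vuse)
{
  r->hashcode = r->hashcode - r->vuse + new_vuse;   /* fix up, no rehash */
  r->vuse = new_vuse;
  assert (r->hashcode == r->operands_hash + r->vuse);
}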
2292 /* Look up an existing vn_reference entry in the value table for the
2293 VUSE, SET, BASE_SET, TYPE, OPERANDS reference, or insert a new one;
2294 it has the value VALUE, which is either a constant or an SSA name. */
2296 static vn_reference_t
2297 vn_reference_lookup_or_insert_for_pieces (tree vuse,
2298 alias_set_type set,
2299 alias_set_type base_set,
2300 tree type,
2301 vec<vn_reference_op_s,
2302 va_heap> operands,
2303 tree value)
2305 vn_reference_s vr1;
2306 vn_reference_t result;
2307 unsigned value_id;
2308 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2309 vr1.operands = operands;
2310 vr1.type = type;
2311 vr1.set = set;
2312 vr1.base_set = base_set;
2313 vr1.hashcode = vn_reference_compute_hash (&vr1);
2314 if (vn_reference_lookup_1 (&vr1, &result))
2315 return result;
2316 if (TREE_CODE (value) == SSA_NAME)
2317 value_id = VN_INFO (value)->value_id;
2318 else
2319 value_id = get_or_alloc_constant_value_id (value);
2320 return vn_reference_insert_pieces (vuse, set, base_set, type,
2321 operands.copy (), value, value_id);
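/* A minimal standalone sketch (not GCC code) of the lookup-or-insert
   pattern used above, with std::unordered_map standing in for the VN
   reference hash table: an existing entry for the key is returned
   unchanged and only a missing key triggers an insertion.  */

#include <string>
#include <unordered_map>

static std::unordered_map<std::string, int> sketch_ref_table;

static int
sketch_lookup_or_insert (const std::string &key, int value_if_new)
{
  auto it = sketch_ref_table.find (key);
  if (it != sketch_ref_table.end ())
    return it->second;                       /* reuse the recorded value */
  sketch_ref_table.emplace (key, value_if_new);
  return value_if_new;                       /* first occurrence: record */
}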
2324 /* Return a value-number for RCODE OPS... either by looking up an existing
2325 value-number for the simplified result or by inserting the operation if
2326 INSERT is true. */
2328 static tree
2329 vn_nary_build_or_lookup_1 (gimple_match_op *res_op, bool insert)
2331 tree result = NULL_TREE;
2332 /* We will be creating a value number for
2333 RCODE (OPS...).
2334 So first simplify and lookup this expression to see if it
2335 is already available. */
2336 /* For simplification valueize. */
2337 unsigned i;
2338 for (i = 0; i < res_op->num_ops; ++i)
2339 if (TREE_CODE (res_op->ops[i]) == SSA_NAME)
2341 tree tem = vn_valueize (res_op->ops[i]);
2342 if (!tem)
2343 break;
2344 res_op->ops[i] = tem;
2346 /* If valueization of an operand fails (it is not available), skip
2347 simplification. */
2348 bool res = false;
2349 if (i == res_op->num_ops)
2351 mprts_hook = vn_lookup_simplify_result;
2352 res = res_op->resimplify (NULL, vn_valueize);
2353 mprts_hook = NULL;
2355 gimple *new_stmt = NULL;
2356 if (res
2357 && gimple_simplified_result_is_gimple_val (res_op))
2359 /* The expression is already available. */
2360 result = res_op->ops[0];
2361 /* Valueize it, simplification returns something in AVAIL only. */
2362 if (TREE_CODE (result) == SSA_NAME)
2363 result = SSA_VAL (result);
2365 else
2367 tree val = vn_lookup_simplify_result (res_op);
2368 if (!val && insert)
2370 gimple_seq stmts = NULL;
2371 result = maybe_push_res_to_seq (res_op, &stmts);
2372 if (result)
2374 gcc_assert (gimple_seq_singleton_p (stmts));
2375 new_stmt = gimple_seq_first_stmt (stmts);
2378 else
2379 /* The expression is already available. */
2380 result = val;
2382 if (new_stmt)
2384 /* The expression is not yet available, value-number lhs to
2385 the new SSA_NAME we created. */
2386 /* Initialize value-number information properly. */
2387 vn_ssa_aux_t result_info = VN_INFO (result);
2388 result_info->valnum = result;
2389 result_info->value_id = get_next_value_id ();
2390 result_info->visited = 1;
2391 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
2392 new_stmt);
2393 result_info->needs_insertion = true;
2394 /* ??? PRE phi-translation inserts NARYs without corresponding
2395 SSA name result. Re-use those but set their result according
2396 to the stmt we just built. */
2397 vn_nary_op_t nary = NULL;
2398 vn_nary_op_lookup_stmt (new_stmt, &nary);
2399 if (nary)
2401 gcc_assert (! nary->predicated_values && nary->u.result == NULL_TREE);
2402 nary->u.result = gimple_assign_lhs (new_stmt);
2404 /* As all "inserted" statements are singleton SCCs, insert
2405 to the valid table. This is strictly needed to
2406 avoid re-generating new value SSA_NAMEs for the same
2407 expression during SCC iteration over and over (the
2408 optimistic table gets cleared after each iteration).
2409 We do not need to insert into the optimistic table, as
2410 lookups there will fall back to the valid table. */
2411 else
2413 unsigned int length = vn_nary_length_from_stmt (new_stmt);
2414 vn_nary_op_t vno1
2415 = alloc_vn_nary_op_noinit (length, &vn_tables_insert_obstack);
2416 vno1->value_id = result_info->value_id;
2417 vno1->length = length;
2418 vno1->predicated_values = 0;
2419 vno1->u.result = result;
2420 init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (new_stmt));
2421 vn_nary_op_insert_into (vno1, valid_info->nary, true);
2422 /* Also do not link it into the undo chain. */
2423 last_inserted_nary = vno1->next;
2424 vno1->next = (vn_nary_op_t)(void *)-1;
2426 if (dump_file && (dump_flags & TDF_DETAILS))
2428 fprintf (dump_file, "Inserting name ");
2429 print_generic_expr (dump_file, result);
2430 fprintf (dump_file, " for expression ");
2431 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
2432 fprintf (dump_file, "\n");
2435 return result;
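/* A minimal standalone sketch (not GCC code) of the core
   lookup-or-create step above: an expression is keyed on its opcode
   and the value numbers of its already-valueized operands; a hit
   returns the recorded value number, a miss allocates a fresh one, so
   the second occurrence of an equivalent expression maps to the same
   number as the first.  */

#include <map>
#include <tuple>

typedef std::tuple<int, int, int> sketch_nary_key;  /* opcode, vn(a), vn(b) */

static std::map<sketch_nary_key, int> sketch_nary_table;
static int sketch_next_value_id = 1;

static int
sketch_nary_lookup_or_create (int opcode, int vn_a, int vn_b)
{
  sketch_nary_key key (opcode, vn_a, vn_b);
  auto it = sketch_nary_table.find (key);
  if (it != sketch_nary_table.end ())
    return it->second;                    /* the expression is available */
  int id = sketch_next_value_id++;        /* otherwise insert the op     */
  sketch_nary_table[key] = id;
  return id;
}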
2438 /* Return a value-number for RCODE OPS... either by looking up an existing
2439 value-number for the simplified result or by inserting the operation. */
2441 static tree
2442 vn_nary_build_or_lookup (gimple_match_op *res_op)
2444 return vn_nary_build_or_lookup_1 (res_op, true);
2447 /* Try to simplify the n-ary expression NARY and return its value
2448 if present. */
2450 tree
2451 vn_nary_simplify (vn_nary_op_t nary)
2453 if (nary->length > gimple_match_op::MAX_NUM_OPS)
2454 return NULL_TREE;
2455 gimple_match_op op (gimple_match_cond::UNCOND, nary->opcode,
2456 nary->type, nary->length);
2457 memcpy (op.ops, nary->op, sizeof (tree) * nary->length);
2458 return vn_nary_build_or_lookup_1 (&op, false);
2461 /* Elimination engine. */
2463 class eliminate_dom_walker : public dom_walker
2465 public:
2466 eliminate_dom_walker (cdi_direction, bitmap);
2467 ~eliminate_dom_walker ();
2469 virtual edge before_dom_children (basic_block);
2470 virtual void after_dom_children (basic_block);
2472 virtual tree eliminate_avail (basic_block, tree op);
2473 virtual void eliminate_push_avail (basic_block, tree op);
2474 tree eliminate_insert (basic_block, gimple_stmt_iterator *gsi, tree val);
2476 void eliminate_stmt (basic_block, gimple_stmt_iterator *);
2478 unsigned eliminate_cleanup (bool region_p = false);
2480 bool do_pre;
2481 unsigned int el_todo;
2482 unsigned int eliminations;
2483 unsigned int insertions;
2485 /* SSA names that had their defs inserted by PRE if do_pre. */
2486 bitmap inserted_exprs;
2488 /* Blocks with statements that have had their EH properties changed. */
2489 bitmap need_eh_cleanup;
2491 /* Blocks with statements that have had their AB properties changed. */
2492 bitmap need_ab_cleanup;
2494 /* Local state for the eliminate domwalk. */
2495 auto_vec<gimple *> to_remove;
2496 auto_vec<gimple *> to_fixup;
2497 auto_vec<tree> avail;
2498 auto_vec<tree> avail_stack;
2501 /* Adaptor to the elimination engine using RPO availability. */
2503 class rpo_elim : public eliminate_dom_walker
2505 public:
2506 rpo_elim(basic_block entry_)
2507 : eliminate_dom_walker (CDI_DOMINATORS, NULL), entry (entry_),
2508 m_avail_freelist (NULL) {}
2510 virtual tree eliminate_avail (basic_block, tree op);
2512 virtual void eliminate_push_avail (basic_block, tree);
2514 basic_block entry;
2515 /* Freelist of avail entries which are allocated from the vn_ssa_aux
2516 obstack. */
2517 vn_avail *m_avail_freelist;
2520 /* Global RPO state for access from hooks. */
2521 static eliminate_dom_walker *rpo_avail;
2522 basic_block vn_context_bb;
2524 /* Return true if BASE1 and BASE2 can be adjusted so they have the
2525 same address and adjust *OFFSET1 and *OFFSET2 accordingly.
2526 Otherwise return false. */
2528 static bool
2529 adjust_offsets_for_equal_base_address (tree base1, poly_int64 *offset1,
2530 tree base2, poly_int64 *offset2)
2532 poly_int64 soff;
2533 if (TREE_CODE (base1) == MEM_REF
2534 && TREE_CODE (base2) == MEM_REF)
2536 if (mem_ref_offset (base1).to_shwi (&soff))
2538 base1 = TREE_OPERAND (base1, 0);
2539 *offset1 += soff * BITS_PER_UNIT;
2541 if (mem_ref_offset (base2).to_shwi (&soff))
2543 base2 = TREE_OPERAND (base2, 0);
2544 *offset2 += soff * BITS_PER_UNIT;
2546 return operand_equal_p (base1, base2, 0);
2548 return operand_equal_p (base1, base2, OEP_ADDRESS_OF);
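/* A minimal standalone sketch (not GCC code) of the normalization done
   above: two accesses described as (pointer, constant byte offset of a
   MEM_REF-like base, bit offset) are rebased onto the bare pointer by
   folding the byte offset, converted to bits, into the access offset,
   after which the bases can be compared directly.  */

#include <stdint.h>

struct sketch_access
{
  const void *pointer;      /* stand-in for the MEM_REF base pointer    */
  int64_t mem_byte_offset;  /* stand-in for the constant MEM_REF offset */
  int64_t bit_offset;       /* access offset relative to the MEM_REF    */
};

static bool
sketch_equal_base_p (struct sketch_access *a1, struct sketch_access *a2)
{
  a1->bit_offset += a1->mem_byte_offset * 8;   /* fold bytes into bits */
  a1->mem_byte_offset = 0;
  a2->bit_offset += a2->mem_byte_offset * 8;
  a2->mem_byte_offset = 0;
  return a1->pointer == a2->pointer;           /* bases must now match */
}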
2551 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
2552 from the statement defining VUSE and, if not successful, tries to
2553 translate *REF and *VR through an aggregate copy at the definition
2554 of VUSE. If *DISAMBIGUATE_ONLY requests disambiguation only then no
2555 translation of *REF and *VR is performed. If only disambiguation was
2556 performed then *DISAMBIGUATE_ONLY is updated accordingly. */
2558 static void *
2559 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
2560 translate_flags *disambiguate_only)
2562 vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
2563 vn_reference_t vr = data->vr;
2564 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
2565 tree base = ao_ref_base (ref);
2566 HOST_WIDE_INT offseti = 0, maxsizei, sizei = 0;
2567 static vec<vn_reference_op_s> lhs_ops;
2568 ao_ref lhs_ref;
2569 bool lhs_ref_ok = false;
2570 poly_int64 copy_size;
2572 /* First try to disambiguate after value-replacing in the definition's LHS. */
2573 if (is_gimple_assign (def_stmt))
2575 tree lhs = gimple_assign_lhs (def_stmt);
2576 bool valueized_anything = false;
2577 /* Avoid re-allocation overhead. */
2578 lhs_ops.truncate (0);
2579 basic_block saved_rpo_bb = vn_context_bb;
2580 vn_context_bb = gimple_bb (def_stmt);
2581 if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE)
2583 copy_reference_ops_from_ref (lhs, &lhs_ops);
2584 valueize_refs_1 (&lhs_ops, &valueized_anything, true);
2586 vn_context_bb = saved_rpo_bb;
2587 ao_ref_init (&lhs_ref, lhs);
2588 lhs_ref_ok = true;
2589 if (valueized_anything
2590 && ao_ref_init_from_vn_reference
2591 (&lhs_ref, ao_ref_alias_set (&lhs_ref),
2592 ao_ref_base_alias_set (&lhs_ref), TREE_TYPE (lhs), lhs_ops)
2593 && !refs_may_alias_p_1 (ref, &lhs_ref, data->tbaa_p))
2595 *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
2596 return NULL;
2599 /* Besides valueizing the LHS we can also use access-path based
2600 disambiguation on the original non-valueized ref. */
2601 if (!ref->ref
2602 && lhs_ref_ok
2603 && data->orig_ref.ref)
2605 /* We want to use the non-valueized LHS for this, but avoid redundant
2606 work. */
2607 ao_ref *lref = &lhs_ref;
2608 ao_ref lref_alt;
2609 if (valueized_anything)
2611 ao_ref_init (&lref_alt, lhs);
2612 lref = &lref_alt;
2614 if (!refs_may_alias_p_1 (&data->orig_ref, lref, data->tbaa_p))
2616 *disambiguate_only = (valueized_anything
2617 ? TR_VALUEIZE_AND_DISAMBIGUATE
2618 : TR_DISAMBIGUATE);
2619 return NULL;
2623 /* If we reach a clobbering statement try to skip it and see if
2624 we find a VN result with exactly the same value as the
2625 possible clobber. In this case we can ignore the clobber
2626 and return the found value. */
2627 if (is_gimple_reg_type (TREE_TYPE (lhs))
2628 && types_compatible_p (TREE_TYPE (lhs), vr->type)
2629 && (ref->ref || data->orig_ref.ref))
2631 tree *saved_last_vuse_ptr = data->last_vuse_ptr;
2632 /* Do not update last_vuse_ptr in vn_reference_lookup_2. */
2633 data->last_vuse_ptr = NULL;
2634 tree saved_vuse = vr->vuse;
2635 hashval_t saved_hashcode = vr->hashcode;
2636 void *res = vn_reference_lookup_2 (ref, gimple_vuse (def_stmt), data);
2637 /* Need to restore vr->vuse and vr->hashcode. */
2638 vr->vuse = saved_vuse;
2639 vr->hashcode = saved_hashcode;
2640 data->last_vuse_ptr = saved_last_vuse_ptr;
2641 if (res && res != (void *)-1)
2643 vn_reference_t vnresult = (vn_reference_t) res;
2644 tree rhs = gimple_assign_rhs1 (def_stmt);
2645 if (TREE_CODE (rhs) == SSA_NAME)
2646 rhs = SSA_VAL (rhs);
2647 if (vnresult->result
2648 && operand_equal_p (vnresult->result, rhs, 0)
2649 /* We have to honor our promise about union type punning
2650 and also support arbitrary overlaps with
2651 -fno-strict-aliasing. So simply resort to alignment to
2652 rule out overlaps. Do this check last because it is
2653 quite expensive compared to the hash-lookup above. */
2654 && multiple_p (get_object_alignment
2655 (ref->ref ? ref->ref : data->orig_ref.ref),
2656 ref->size)
2657 && multiple_p (get_object_alignment (lhs), ref->size))
2658 return res;
2662 else if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE
2663 && gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
2664 && gimple_call_num_args (def_stmt) <= 4)
2666 /* For builtin calls valueize their arguments and call the
2667 alias oracle again. Valueization may improve points-to
2668 info of pointers and constify size and position arguments.
2669 Originally this was motivated by PR61034 which has
2670 conditional calls to free falsely clobbering ref because
2671 of imprecise points-to info of the argument. */
2672 tree oldargs[4];
2673 bool valueized_anything = false;
2674 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2676 oldargs[i] = gimple_call_arg (def_stmt, i);
2677 tree val = vn_valueize (oldargs[i]);
2678 if (val != oldargs[i])
2680 gimple_call_set_arg (def_stmt, i, val);
2681 valueized_anything = true;
2684 if (valueized_anything)
2686 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
2687 ref, data->tbaa_p);
2688 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2689 gimple_call_set_arg (def_stmt, i, oldargs[i]);
2690 if (!res)
2692 *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
2693 return NULL;
2698 if (*disambiguate_only > TR_TRANSLATE)
2699 return (void *)-1;
2701 /* If we cannot constrain the size of the reference we cannot
2702 test if anything kills it. */
2703 if (!ref->max_size_known_p ())
2704 return (void *)-1;
2706 poly_int64 offset = ref->offset;
2707 poly_int64 maxsize = ref->max_size;
2709 /* def_stmt may-defs *ref. See if we can derive a value for *ref
2710 from that definition.
2711 1) Memset. */
2712 if (is_gimple_reg_type (vr->type)
2713 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
2714 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET_CHK))
2715 && (integer_zerop (gimple_call_arg (def_stmt, 1))
2716 || ((TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST
2717 || (INTEGRAL_TYPE_P (vr->type) && known_eq (ref->size, 8)))
2718 && CHAR_BIT == 8
2719 && BITS_PER_UNIT == 8
2720 && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
2721 && offset.is_constant (&offseti)
2722 && ref->size.is_constant (&sizei)
2723 && (offseti % BITS_PER_UNIT == 0
2724 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST)))
2725 && (poly_int_tree_p (gimple_call_arg (def_stmt, 2))
2726 || (TREE_CODE (gimple_call_arg (def_stmt, 2)) == SSA_NAME
2727 && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt, 2)))))
2728 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2729 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME))
2731 tree base2;
2732 poly_int64 offset2, size2, maxsize2;
2733 bool reverse;
2734 tree ref2 = gimple_call_arg (def_stmt, 0);
2735 if (TREE_CODE (ref2) == SSA_NAME)
2737 ref2 = SSA_VAL (ref2);
2738 if (TREE_CODE (ref2) == SSA_NAME
2739 && (TREE_CODE (base) != MEM_REF
2740 || TREE_OPERAND (base, 0) != ref2))
2742 gimple *def_stmt = SSA_NAME_DEF_STMT (ref2);
2743 if (gimple_assign_single_p (def_stmt)
2744 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2745 ref2 = gimple_assign_rhs1 (def_stmt);
2748 if (TREE_CODE (ref2) == ADDR_EXPR)
2750 ref2 = TREE_OPERAND (ref2, 0);
2751 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
2752 &reverse);
2753 if (!known_size_p (maxsize2)
2754 || !known_eq (maxsize2, size2)
2755 || !operand_equal_p (base, base2, OEP_ADDRESS_OF))
2756 return (void *)-1;
2758 else if (TREE_CODE (ref2) == SSA_NAME)
2760 poly_int64 soff;
2761 if (TREE_CODE (base) != MEM_REF
2762 || !(mem_ref_offset (base)
2763 << LOG2_BITS_PER_UNIT).to_shwi (&soff))
2764 return (void *)-1;
2765 offset += soff;
2766 offset2 = 0;
2767 if (TREE_OPERAND (base, 0) != ref2)
2769 gimple *def = SSA_NAME_DEF_STMT (ref2);
2770 if (is_gimple_assign (def)
2771 && gimple_assign_rhs_code (def) == POINTER_PLUS_EXPR
2772 && gimple_assign_rhs1 (def) == TREE_OPERAND (base, 0)
2773 && poly_int_tree_p (gimple_assign_rhs2 (def)))
2775 tree rhs2 = gimple_assign_rhs2 (def);
2776 if (!(poly_offset_int::from (wi::to_poly_wide (rhs2),
2777 SIGNED)
2778 << LOG2_BITS_PER_UNIT).to_shwi (&offset2))
2779 return (void *)-1;
2780 ref2 = gimple_assign_rhs1 (def);
2781 if (TREE_CODE (ref2) == SSA_NAME)
2782 ref2 = SSA_VAL (ref2);
2784 else
2785 return (void *)-1;
2788 else
2789 return (void *)-1;
2790 tree len = gimple_call_arg (def_stmt, 2);
2791 HOST_WIDE_INT leni, offset2i;
2792 if (TREE_CODE (len) == SSA_NAME)
2793 len = SSA_VAL (len);
2794 /* Sometimes the above trickery is smarter than alias analysis. Take
2795 advantage of that. */
2796 if (!ranges_maybe_overlap_p (offset, maxsize, offset2,
2797 (wi::to_poly_offset (len)
2798 << LOG2_BITS_PER_UNIT)))
2799 return NULL;
2800 if (data->partial_defs.is_empty ()
2801 && known_subrange_p (offset, maxsize, offset2,
2802 wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
2804 tree val;
2805 if (integer_zerop (gimple_call_arg (def_stmt, 1)))
2806 val = build_zero_cst (vr->type);
2807 else if (INTEGRAL_TYPE_P (vr->type)
2808 && known_eq (ref->size, 8)
2809 && offseti % BITS_PER_UNIT == 0)
2811 gimple_match_op res_op (gimple_match_cond::UNCOND, NOP_EXPR,
2812 vr->type, gimple_call_arg (def_stmt, 1));
2813 val = vn_nary_build_or_lookup (&res_op);
2814 if (!val
2815 || (TREE_CODE (val) == SSA_NAME
2816 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2817 return (void *)-1;
2819 else
2821 unsigned buflen = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr->type)) + 1;
2822 if (INTEGRAL_TYPE_P (vr->type))
2823 buflen = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (vr->type)) + 1;
2824 unsigned char *buf = XALLOCAVEC (unsigned char, buflen);
2825 memset (buf, TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 1)),
2826 buflen);
2827 if (BYTES_BIG_ENDIAN)
2829 unsigned int amnt
2830 = (((unsigned HOST_WIDE_INT) offseti + sizei)
2831 % BITS_PER_UNIT);
2832 if (amnt)
2834 shift_bytes_in_array_right (buf, buflen,
2835 BITS_PER_UNIT - amnt);
2836 buf++;
2837 buflen--;
2840 else if (offseti % BITS_PER_UNIT != 0)
2842 unsigned int amnt
2843 = BITS_PER_UNIT - ((unsigned HOST_WIDE_INT) offseti
2844 % BITS_PER_UNIT);
2845 shift_bytes_in_array_left (buf, buflen, amnt);
2846 buf++;
2847 buflen--;
2849 val = native_interpret_expr (vr->type, buf, buflen);
2850 if (!val)
2851 return (void *)-1;
2853 return data->finish (0, 0, val);
2855 /* For now handle clearing memory with partial defs. */
2856 else if (known_eq (ref->size, maxsize)
2857 && integer_zerop (gimple_call_arg (def_stmt, 1))
2858 && tree_fits_poly_int64_p (len)
2859 && tree_to_poly_int64 (len).is_constant (&leni)
2860 && leni <= INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT
2861 && offset.is_constant (&offseti)
2862 && offset2.is_constant (&offset2i)
2863 && maxsize.is_constant (&maxsizei)
2864 && ranges_known_overlap_p (offseti, maxsizei, offset2i,
2865 leni << LOG2_BITS_PER_UNIT))
2867 pd_data pd;
2868 pd.rhs = build_constructor (NULL_TREE, NULL);
2869 pd.offset = offset2i;
2870 pd.size = leni << LOG2_BITS_PER_UNIT;
2871 return data->push_partial_def (pd, 0, 0, offseti, maxsizei);
2875 /* 2) Assignment from an empty CONSTRUCTOR. */
2876 else if (is_gimple_reg_type (vr->type)
2877 && gimple_assign_single_p (def_stmt)
2878 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
2879 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
2881 tree base2;
2882 poly_int64 offset2, size2, maxsize2;
2883 HOST_WIDE_INT offset2i, size2i;
2884 gcc_assert (lhs_ref_ok);
2885 base2 = ao_ref_base (&lhs_ref);
2886 offset2 = lhs_ref.offset;
2887 size2 = lhs_ref.size;
2888 maxsize2 = lhs_ref.max_size;
2889 if (known_size_p (maxsize2)
2890 && known_eq (maxsize2, size2)
2891 && adjust_offsets_for_equal_base_address (base, &offset,
2892 base2, &offset2))
2894 if (data->partial_defs.is_empty ()
2895 && known_subrange_p (offset, maxsize, offset2, size2))
2897 /* While a full read from a clobber is technically undefined
2898 behavior, do not optimize it. */
2899 if (gimple_clobber_p (def_stmt))
2900 return (void *)-1;
2901 tree val = build_zero_cst (vr->type);
2902 return data->finish (ao_ref_alias_set (&lhs_ref),
2903 ao_ref_base_alias_set (&lhs_ref), val);
2905 else if (known_eq (ref->size, maxsize)
2906 && maxsize.is_constant (&maxsizei)
2907 && offset.is_constant (&offseti)
2908 && offset2.is_constant (&offset2i)
2909 && size2.is_constant (&size2i)
2910 && ranges_known_overlap_p (offseti, maxsizei,
2911 offset2i, size2i))
2913 /* Let clobbers be consumed by the partial-def tracker
2914 which can choose to ignore them if they are shadowed
2915 by a later def. */
2916 pd_data pd;
2917 pd.rhs = gimple_assign_rhs1 (def_stmt);
2918 pd.offset = offset2i;
2919 pd.size = size2i;
2920 return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
2921 ao_ref_base_alias_set (&lhs_ref),
2922 offseti, maxsizei);
2927 /* 3) Assignment from a constant. We can use fold's native encode/interpret
2928 routines to extract the assigned bits. */
2929 else if (known_eq (ref->size, maxsize)
2930 && is_gimple_reg_type (vr->type)
2931 && !reverse_storage_order_for_component_p (vr->operands)
2932 && !contains_storage_order_barrier_p (vr->operands)
2933 && gimple_assign_single_p (def_stmt)
2934 && CHAR_BIT == 8
2935 && BITS_PER_UNIT == 8
2936 && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
2937 /* native_encode and native_interpret operate on arrays of bytes
2938 and so fundamentally need a compile-time size and offset. */
2939 && maxsize.is_constant (&maxsizei)
2940 && offset.is_constant (&offseti)
2941 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
2942 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2943 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
2945 tree lhs = gimple_assign_lhs (def_stmt);
2946 tree base2;
2947 poly_int64 offset2, size2, maxsize2;
2948 HOST_WIDE_INT offset2i, size2i;
2949 bool reverse;
2950 gcc_assert (lhs_ref_ok);
2951 base2 = ao_ref_base (&lhs_ref);
2952 offset2 = lhs_ref.offset;
2953 size2 = lhs_ref.size;
2954 maxsize2 = lhs_ref.max_size;
2955 reverse = reverse_storage_order_for_component_p (lhs);
2956 if (base2
2957 && !reverse
2958 && !storage_order_barrier_p (lhs)
2959 && known_eq (maxsize2, size2)
2960 && adjust_offsets_for_equal_base_address (base, &offset,
2961 base2, &offset2)
2962 && offset.is_constant (&offseti)
2963 && offset2.is_constant (&offset2i)
2964 && size2.is_constant (&size2i))
2966 if (data->partial_defs.is_empty ()
2967 && known_subrange_p (offseti, maxsizei, offset2, size2))
2969 /* We support up to 512-bit values (for V8DFmode). */
2970 unsigned char buffer[65];
2971 int len;
2973 tree rhs = gimple_assign_rhs1 (def_stmt);
2974 if (TREE_CODE (rhs) == SSA_NAME)
2975 rhs = SSA_VAL (rhs);
2976 len = native_encode_expr (rhs,
2977 buffer, sizeof (buffer) - 1,
2978 (offseti - offset2i) / BITS_PER_UNIT);
2979 if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
2981 tree type = vr->type;
2982 unsigned char *buf = buffer;
2983 unsigned int amnt = 0;
2984 /* Make sure to interpret in a type that has a range
2985 covering the whole access size. */
2986 if (INTEGRAL_TYPE_P (vr->type)
2987 && maxsizei != TYPE_PRECISION (vr->type))
2988 type = build_nonstandard_integer_type (maxsizei,
2989 TYPE_UNSIGNED (type));
2990 if (BYTES_BIG_ENDIAN)
2992 /* For big-endian native_encode_expr stored the rhs
2993 such that the LSB of it is the LSB of buffer[len - 1].
2994 That bit is stored into memory at position
2995 offset2 + size2 - 1, i.e. in byte
2996 base + (offset2 + size2 - 1) / BITS_PER_UNIT.
2997 E.g. for offset2 1 and size2 14, rhs -1 and memory
2998 previously cleared that is:
3000 01111111|11111110
3001 Now, if we want to extract offset 2 and size 12 from
3002 it using native_interpret_expr (which actually works
3003 for integral bitfield types in terms of byte size of
3004 the mode), the native_encode_expr stored the value
3005 into buffer as
3006 XX111111|11111111
3007 and returned len 2 (the X bits are outside of
3008 precision).
3009 Let sz be maxsize / BITS_PER_UNIT if not extracting
3010 a bitfield, and GET_MODE_SIZE otherwise.
3011 We need to align the LSB of the value we want to
3012 extract as the LSB of buf[sz - 1].
3013 The LSB from memory we need to read is at position
3014 offset + maxsize - 1. */
3015 HOST_WIDE_INT sz = maxsizei / BITS_PER_UNIT;
3016 if (INTEGRAL_TYPE_P (type))
3017 sz = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
3018 amnt = ((unsigned HOST_WIDE_INT) offset2i + size2i
3019 - offseti - maxsizei) % BITS_PER_UNIT;
3020 if (amnt)
3021 shift_bytes_in_array_right (buffer, len, amnt);
3022 amnt = ((unsigned HOST_WIDE_INT) offset2i + size2i
3023 - offseti - maxsizei - amnt) / BITS_PER_UNIT;
3024 if ((unsigned HOST_WIDE_INT) sz + amnt > (unsigned) len)
3025 len = 0;
3026 else
3028 buf = buffer + len - sz - amnt;
3029 len -= (buf - buffer);
3032 else
3034 amnt = ((unsigned HOST_WIDE_INT) offset2i
3035 - offseti) % BITS_PER_UNIT;
3036 if (amnt)
3038 buffer[len] = 0;
3039 shift_bytes_in_array_left (buffer, len + 1, amnt);
3040 buf = buffer + 1;
3043 tree val = native_interpret_expr (type, buf, len);
3044 /* If we chop off bits because the type's precision doesn't
3045 match the memory access size, this is OK when optimizing
3046 reads but not when called from the DSE code during
3047 elimination. */
3048 if (val
3049 && type != vr->type)
3051 if (! int_fits_type_p (val, vr->type))
3052 val = NULL_TREE;
3053 else
3054 val = fold_convert (vr->type, val);
3057 if (val)
3058 return data->finish (ao_ref_alias_set (&lhs_ref),
3059 ao_ref_base_alias_set (&lhs_ref), val);
3062 else if (ranges_known_overlap_p (offseti, maxsizei, offset2i,
3063 size2i))
3065 pd_data pd;
3066 tree rhs = gimple_assign_rhs1 (def_stmt);
3067 if (TREE_CODE (rhs) == SSA_NAME)
3068 rhs = SSA_VAL (rhs);
3069 pd.rhs = rhs;
3070 pd.offset = offset2i;
3071 pd.size = size2i;
3072 return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
3073 ao_ref_base_alias_set (&lhs_ref),
3074 offseti, maxsizei);
3079 /* 4) Assignment from an SSA name whose definition we may be able
3080 to access pieces from or combine to a larger entity. */
3081 else if (known_eq (ref->size, maxsize)
3082 && is_gimple_reg_type (vr->type)
3083 && !reverse_storage_order_for_component_p (vr->operands)
3084 && !contains_storage_order_barrier_p (vr->operands)
3085 && gimple_assign_single_p (def_stmt)
3086 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
3088 tree lhs = gimple_assign_lhs (def_stmt);
3089 tree base2;
3090 poly_int64 offset2, size2, maxsize2;
3091 HOST_WIDE_INT offset2i, size2i, offseti;
3092 bool reverse;
3093 gcc_assert (lhs_ref_ok);
3094 base2 = ao_ref_base (&lhs_ref);
3095 offset2 = lhs_ref.offset;
3096 size2 = lhs_ref.size;
3097 maxsize2 = lhs_ref.max_size;
3098 reverse = reverse_storage_order_for_component_p (lhs);
3099 tree def_rhs = gimple_assign_rhs1 (def_stmt);
3100 if (!reverse
3101 && !storage_order_barrier_p (lhs)
3102 && known_size_p (maxsize2)
3103 && known_eq (maxsize2, size2)
3104 && adjust_offsets_for_equal_base_address (base, &offset,
3105 base2, &offset2))
3107 if (data->partial_defs.is_empty ()
3108 && known_subrange_p (offset, maxsize, offset2, size2)
3109 /* ??? We can't handle bitfield precision extracts without
3110 either using an alternate type for the BIT_FIELD_REF and
3111 then doing a conversion or possibly adjusting the offset
3112 according to endianness. */
3113 && (! INTEGRAL_TYPE_P (vr->type)
3114 || known_eq (ref->size, TYPE_PRECISION (vr->type)))
3115 && multiple_p (ref->size, BITS_PER_UNIT))
3117 tree val = NULL_TREE;
3118 if (! INTEGRAL_TYPE_P (TREE_TYPE (def_rhs))
3119 || type_has_mode_precision_p (TREE_TYPE (def_rhs)))
3121 gimple_match_op op (gimple_match_cond::UNCOND,
3122 BIT_FIELD_REF, vr->type,
3123 SSA_VAL (def_rhs),
3124 bitsize_int (ref->size),
3125 bitsize_int (offset - offset2));
3126 val = vn_nary_build_or_lookup (&op);
3128 else if (known_eq (ref->size, size2))
3130 gimple_match_op op (gimple_match_cond::UNCOND,
3131 VIEW_CONVERT_EXPR, vr->type,
3132 SSA_VAL (def_rhs));
3133 val = vn_nary_build_or_lookup (&op);
3135 if (val
3136 && (TREE_CODE (val) != SSA_NAME
3137 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
3138 return data->finish (ao_ref_alias_set (&lhs_ref),
3139 ao_ref_base_alias_set (&lhs_ref), val);
3141 else if (maxsize.is_constant (&maxsizei)
3142 && offset.is_constant (&offseti)
3143 && offset2.is_constant (&offset2i)
3144 && size2.is_constant (&size2i)
3145 && ranges_known_overlap_p (offset, maxsize, offset2, size2))
3147 pd_data pd;
3148 pd.rhs = SSA_VAL (def_rhs);
3149 pd.offset = offset2i;
3150 pd.size = size2i;
3151 return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
3152 ao_ref_base_alias_set (&lhs_ref),
3153 offseti, maxsizei);
3158 /* 5) For aggregate copies translate the reference through them if
3159 the copy kills ref. */
3160 else if (data->vn_walk_kind == VN_WALKREWRITE
3161 && gimple_assign_single_p (def_stmt)
3162 && (DECL_P (gimple_assign_rhs1 (def_stmt))
3163 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
3164 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
3166 tree base2;
3167 int i, j, k;
3168 auto_vec<vn_reference_op_s> rhs;
3169 vn_reference_op_t vro;
3170 ao_ref r;
3172 gcc_assert (lhs_ref_ok);
3174 /* See if the assignment kills REF. */
3175 base2 = ao_ref_base (&lhs_ref);
3176 if (!lhs_ref.max_size_known_p ()
3177 || (base != base2
3178 && (TREE_CODE (base) != MEM_REF
3179 || TREE_CODE (base2) != MEM_REF
3180 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
3181 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
3182 TREE_OPERAND (base2, 1))))
3183 || !stmt_kills_ref_p (def_stmt, ref))
3184 return (void *)-1;
3186 /* Find the common base of ref and the lhs. lhs_ops already
3187 contains valueized operands for the lhs. */
3188 i = vr->operands.length () - 1;
3189 j = lhs_ops.length () - 1;
3190 while (j >= 0 && i >= 0
3191 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
3193 i--;
3194 j--;
3197 /* ??? The innermost op should always be a MEM_REF and we already
3198 checked that the assignment to the lhs kills vr. Thus for
3199 aggregate copies using char[] types the vn_reference_op_eq
3200 may fail when comparing types for compatibility. But we really
3201 don't care here - further lookups with the rewritten operands
3202 will simply fail if we messed up types too badly. */
3203 poly_int64 extra_off = 0;
3204 if (j == 0 && i >= 0
3205 && lhs_ops[0].opcode == MEM_REF
3206 && maybe_ne (lhs_ops[0].off, -1))
3208 if (known_eq (lhs_ops[0].off, vr->operands[i].off))
3209 i--, j--;
3210 else if (vr->operands[i].opcode == MEM_REF
3211 && maybe_ne (vr->operands[i].off, -1))
3213 extra_off = vr->operands[i].off - lhs_ops[0].off;
3214 i--, j--;
3218 /* i now points to the first additional op.
3219 ??? LHS may not be completely contained in VR, one or more
3220 VIEW_CONVERT_EXPRs could be in its way. We could at least
3221 try handling outermost VIEW_CONVERT_EXPRs. */
3222 if (j != -1)
3223 return (void *)-1;
3225 /* Punt if the additional ops contain a storage order barrier. */
3226 for (k = i; k >= 0; k--)
3228 vro = &vr->operands[k];
3229 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
3230 return (void *)-1;
3233 /* Now re-write REF to be based on the rhs of the assignment. */
3234 tree rhs1 = gimple_assign_rhs1 (def_stmt);
3235 copy_reference_ops_from_ref (rhs1, &rhs);
3237 /* Apply an extra offset to the inner MEM_REF of the RHS. */
3238 if (maybe_ne (extra_off, 0))
3240 if (rhs.length () < 2)
3241 return (void *)-1;
3242 int ix = rhs.length () - 2;
3243 if (rhs[ix].opcode != MEM_REF
3244 || known_eq (rhs[ix].off, -1))
3245 return (void *)-1;
3246 rhs[ix].off += extra_off;
3247 rhs[ix].op0 = int_const_binop (PLUS_EXPR, rhs[ix].op0,
3248 build_int_cst (TREE_TYPE (rhs[ix].op0),
3249 extra_off));
3252 /* Save the operands since we need to use the original ones for
3253 the hash entry we use. */
3254 if (!data->saved_operands.exists ())
3255 data->saved_operands = vr->operands.copy ();
3257 /* We need to pre-pend vr->operands[0..i] to rhs. */
3258 vec<vn_reference_op_s> old = vr->operands;
3259 if (i + 1 + rhs.length () > vr->operands.length ())
3260 vr->operands.safe_grow (i + 1 + rhs.length (), true);
3261 else
3262 vr->operands.truncate (i + 1 + rhs.length ());
3263 FOR_EACH_VEC_ELT (rhs, j, vro)
3264 vr->operands[i + 1 + j] = *vro;
3265 valueize_refs (&vr->operands);
3266 if (old == shared_lookup_references)
3267 shared_lookup_references = vr->operands;
3268 vr->hashcode = vn_reference_compute_hash (vr);
3270 /* Try folding the new reference to a constant. */
3271 tree val = fully_constant_vn_reference_p (vr);
3272 if (val)
3274 if (data->partial_defs.is_empty ())
3275 return data->finish (ao_ref_alias_set (&lhs_ref),
3276 ao_ref_base_alias_set (&lhs_ref), val);
3277 /* This is the only interesting case for partial-def handling
3278 coming from targets that like to gimplify init-ctors as
3279 aggregate copies from constant data like aarch64 for
3280 PR83518. */
3281 if (maxsize.is_constant (&maxsizei) && known_eq (ref->size, maxsize))
3283 pd_data pd;
3284 pd.rhs = val;
3285 pd.offset = 0;
3286 pd.size = maxsizei;
3287 return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
3288 ao_ref_base_alias_set (&lhs_ref),
3289 0, maxsizei);
3293 /* Continuing with partial defs isn't easily possible here; we would
3294 have to find a full def from further lookups from here. Probably
3295 not worth the special-casing everywhere. */
3296 if (!data->partial_defs.is_empty ())
3297 return (void *)-1;
3299 /* Adjust *ref from the new operands. */
3300 ao_ref rhs1_ref;
3301 ao_ref_init (&rhs1_ref, rhs1);
3302 if (!ao_ref_init_from_vn_reference (&r, ao_ref_alias_set (&rhs1_ref),
3303 ao_ref_base_alias_set (&rhs1_ref),
3304 vr->type, vr->operands))
3305 return (void *)-1;
3306 /* This can happen with bitfields. */
3307 if (maybe_ne (ref->size, r.size))
3309 /* If the access lacks some subsetting simply apply that by
3310 shortening it. That in the end can only be successful
3311 if we can pun the lookup result which in turn requires
3312 exact offsets. */
3313 if (known_eq (r.size, r.max_size)
3314 && known_lt (ref->size, r.size))
3315 r.size = r.max_size = ref->size;
3316 else
3317 return (void *)-1;
3319 *ref = r;
3321 /* Do not update last seen VUSE after translating. */
3322 data->last_vuse_ptr = NULL;
3323 /* Invalidate the original access path since it now contains
3324 the wrong base. */
3325 data->orig_ref.ref = NULL_TREE;
3326 /* Use the alias-set of this LHS for recording an eventual result. */
3327 if (data->first_set == -2)
3329 data->first_set = ao_ref_alias_set (&lhs_ref);
3330 data->first_base_set = ao_ref_base_alias_set (&lhs_ref);
3333 /* Keep looking for the adjusted *REF / VR pair. */
3334 return NULL;
3337 /* 6) For memcpy copies translate the reference through them if the copy
3338 kills ref. But we cannot (easily) do this translation if the memcpy is
3339 a storage order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that
3340 can modify the storage order of objects (see storage_order_barrier_p). */
3341 else if (data->vn_walk_kind == VN_WALKREWRITE
3342 && is_gimple_reg_type (vr->type)
3343 /* ??? Handle BCOPY as well. */
3344 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
3345 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY_CHK)
3346 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
3347 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY_CHK)
3348 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE)
3349 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE_CHK))
3350 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
3351 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
3352 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
3353 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
3354 && (poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size)
3355 || (TREE_CODE (gimple_call_arg (def_stmt, 2)) == SSA_NAME
3356 && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt, 2)),
3357 &copy_size)))
3358 /* Handling this is more complicated, give up for now. */
3359 && data->partial_defs.is_empty ())
3361 tree lhs, rhs;
3362 ao_ref r;
3363 poly_int64 rhs_offset, lhs_offset;
3364 vn_reference_op_s op;
3365 poly_uint64 mem_offset;
3366 poly_int64 at, byte_maxsize;
3368 /* Only handle non-variable, addressable refs. */
3369 if (maybe_ne (ref->size, maxsize)
3370 || !multiple_p (offset, BITS_PER_UNIT, &at)
3371 || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
3372 return (void *)-1;
3374 /* Extract a pointer base and an offset for the destination. */
3375 lhs = gimple_call_arg (def_stmt, 0);
3376 lhs_offset = 0;
3377 if (TREE_CODE (lhs) == SSA_NAME)
3379 lhs = vn_valueize (lhs);
3380 if (TREE_CODE (lhs) == SSA_NAME)
3382 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
3383 if (gimple_assign_single_p (def_stmt)
3384 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
3385 lhs = gimple_assign_rhs1 (def_stmt);
3388 if (TREE_CODE (lhs) == ADDR_EXPR)
3390 if (AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (lhs)))
3391 && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_TYPE (lhs))))
3392 return (void *)-1;
3393 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
3394 &lhs_offset);
3395 if (!tem)
3396 return (void *)-1;
3397 if (TREE_CODE (tem) == MEM_REF
3398 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
3400 lhs = TREE_OPERAND (tem, 0);
3401 if (TREE_CODE (lhs) == SSA_NAME)
3402 lhs = vn_valueize (lhs);
3403 lhs_offset += mem_offset;
3405 else if (DECL_P (tem))
3406 lhs = build_fold_addr_expr (tem);
3407 else
3408 return (void *)-1;
3410 if (TREE_CODE (lhs) != SSA_NAME
3411 && TREE_CODE (lhs) != ADDR_EXPR)
3412 return (void *)-1;
3414 /* Extract a pointer base and an offset for the source. */
3415 rhs = gimple_call_arg (def_stmt, 1);
3416 rhs_offset = 0;
3417 if (TREE_CODE (rhs) == SSA_NAME)
3418 rhs = vn_valueize (rhs);
3419 if (TREE_CODE (rhs) == ADDR_EXPR)
3421 if (AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (rhs)))
3422 && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_TYPE (rhs))))
3423 return (void *)-1;
3424 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
3425 &rhs_offset);
3426 if (!tem)
3427 return (void *)-1;
3428 if (TREE_CODE (tem) == MEM_REF
3429 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
3431 rhs = TREE_OPERAND (tem, 0);
3432 rhs_offset += mem_offset;
3434 else if (DECL_P (tem)
3435 || TREE_CODE (tem) == STRING_CST)
3436 rhs = build_fold_addr_expr (tem);
3437 else
3438 return (void *)-1;
3440 if (TREE_CODE (rhs) == SSA_NAME)
3441 rhs = SSA_VAL (rhs);
3442 else if (TREE_CODE (rhs) != ADDR_EXPR)
3443 return (void *)-1;
3445 /* The bases of the destination and the references have to agree. */
3446 if (TREE_CODE (base) == MEM_REF)
3448 if (TREE_OPERAND (base, 0) != lhs
3449 || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
3450 return (void *) -1;
3451 at += mem_offset;
3453 else if (!DECL_P (base)
3454 || TREE_CODE (lhs) != ADDR_EXPR
3455 || TREE_OPERAND (lhs, 0) != base)
3456 return (void *)-1;
3458 /* If the access is completely outside of the memcpy destination
3459 area there is no aliasing. */
3460 if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
3461 return NULL;
3462 /* And the access has to be contained within the memcpy destination. */
3463 if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
3464 return (void *)-1;
3466 /* Save the operands since we need to use the original ones for
3467 the hash entry we use. */
3468 if (!data->saved_operands.exists ())
3469 data->saved_operands = vr->operands.copy ();
3471 /* Make room for 2 operands in the new reference. */
3472 if (vr->operands.length () < 2)
3474 vec<vn_reference_op_s> old = vr->operands;
3475 vr->operands.safe_grow_cleared (2, true);
3476 if (old == shared_lookup_references)
3477 shared_lookup_references = vr->operands;
3479 else
3480 vr->operands.truncate (2);
3482 /* The looked-through reference is a simple MEM_REF. */
3483 memset (&op, 0, sizeof (op));
3484 op.type = vr->type;
3485 op.opcode = MEM_REF;
3486 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
3487 op.off = at - lhs_offset + rhs_offset;
3488 vr->operands[0] = op;
3489 op.type = TREE_TYPE (rhs);
3490 op.opcode = TREE_CODE (rhs);
3491 op.op0 = rhs;
3492 op.off = -1;
3493 vr->operands[1] = op;
3494 vr->hashcode = vn_reference_compute_hash (vr);
3496 /* Try folding the new reference to a constant. */
3497 tree val = fully_constant_vn_reference_p (vr);
3498 if (val)
3499 return data->finish (0, 0, val);
3501 /* Adjust *ref from the new operands. */
3502 if (!ao_ref_init_from_vn_reference (&r, 0, 0, vr->type, vr->operands))
3503 return (void *)-1;
3504 /* This can happen with bitfields. */
3505 if (maybe_ne (ref->size, r.size))
3506 return (void *)-1;
3507 *ref = r;
3509 /* Do not update last seen VUSE after translating. */
3510 data->last_vuse_ptr = NULL;
3511 /* Invalidate the original access path since it now contains
3512 the wrong base. */
3513 data->orig_ref.ref = NULL_TREE;
3514 /* Use the alias-set of this stmt for recording an eventual result. */
3515 if (data->first_set == -2)
3517 data->first_set = 0;
3518 data->first_base_set = 0;
3521 /* Keep looking for the adjusted *REF / VR pair. */
3522 return NULL;
3525 /* Bail out and stop walking. */
3526 return (void *)-1;
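/* A minimal standalone sketch (not GCC code) of case 1 above: a load
   whose bytes are fully covered by memset (p, c, n) can be replaced by
   the constant obtained by filling a buffer of the load's size with the
   byte c and interpreting it in the load's type (uint32_t here; the
   result is endian-independent because every byte is the same).  */

#include <assert.h>
#include <stdint.h>
#include <string.h>

static uint32_t
sketch_value_from_memset (unsigned char c)
{
  unsigned char buf[sizeof (uint32_t)];
  memset (buf, c, sizeof (buf));        /* what the covering store wrote */
  uint32_t val;
  memcpy (&val, buf, sizeof (val));     /* "native interpret" the bytes  */
  return val;
}

static void
sketch_memset_demo (void)
{
  /* For: int32_t x[4]; memset (x, 0x2a, sizeof (x));  a load of x[1]
     can be valued as the constant below without reading memory.  */
  assert (sketch_value_from_memset (0x2a) == 0x2a2a2a2au);
}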
3529 /* Return a reference op vector from OP that can be used for
3530 vn_reference_lookup_pieces. The caller is responsible for releasing
3531 the vector. */
3533 vec<vn_reference_op_s>
3534 vn_reference_operands_for_lookup (tree op)
3536 bool valueized;
3537 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
3540 /* Look up a reference operation by its parts, in the current hash table.
3541 Returns the resulting value number if it exists in the hash table,
3542 NULL_TREE otherwise. VNRESULT will be filled in with the actual
3543 vn_reference_t stored in the hashtable if something is found. */
3545 tree
3546 vn_reference_lookup_pieces (tree vuse, alias_set_type set,
3547 alias_set_type base_set, tree type,
3548 vec<vn_reference_op_s> operands,
3549 vn_reference_t *vnresult, vn_lookup_kind kind)
3551 struct vn_reference_s vr1;
3552 vn_reference_t tmp;
3553 tree cst;
3555 if (!vnresult)
3556 vnresult = &tmp;
3557 *vnresult = NULL;
3559 vr1.vuse = vuse_ssa_val (vuse);
3560 shared_lookup_references.truncate (0);
3561 shared_lookup_references.safe_grow (operands.length (), true);
3562 memcpy (shared_lookup_references.address (),
3563 operands.address (),
3564 sizeof (vn_reference_op_s)
3565 * operands.length ());
3566 bool valueized_p;
3567 valueize_refs_1 (&shared_lookup_references, &valueized_p);
3568 vr1.operands = shared_lookup_references;
3569 vr1.type = type;
3570 vr1.set = set;
3571 vr1.base_set = base_set;
3572 vr1.hashcode = vn_reference_compute_hash (&vr1);
3573 if ((cst = fully_constant_vn_reference_p (&vr1)))
3574 return cst;
3576 vn_reference_lookup_1 (&vr1, vnresult);
3577 if (!*vnresult
3578 && kind != VN_NOWALK
3579 && vr1.vuse)
3581 ao_ref r;
3582 unsigned limit = param_sccvn_max_alias_queries_per_access;
3583 vn_walk_cb_data data (&vr1, NULL_TREE, NULL, kind, true, NULL_TREE);
3584 vec<vn_reference_op_s> ops_for_ref;
3585 if (!valueized_p)
3586 ops_for_ref = vr1.operands;
3587 else
3589 /* For ao_ref_from_mem we have to ensure only available SSA names
3590 end up in base and the only convenient way to make this work
3591 for PRE is to re-valueize with that in mind. */
3592 ops_for_ref.create (operands.length ());
3593 ops_for_ref.quick_grow (operands.length ());
3594 memcpy (ops_for_ref.address (),
3595 operands.address (),
3596 sizeof (vn_reference_op_s)
3597 * operands.length ());
3598 valueize_refs_1 (&ops_for_ref, &valueized_p, true);
3600 if (ao_ref_init_from_vn_reference (&r, set, base_set, type,
3601 ops_for_ref))
3602 *vnresult
3603 = ((vn_reference_t)
3604 walk_non_aliased_vuses (&r, vr1.vuse, true, vn_reference_lookup_2,
3605 vn_reference_lookup_3, vuse_valueize,
3606 limit, &data));
3607 if (ops_for_ref != shared_lookup_references)
3608 ops_for_ref.release ();
3609 gcc_checking_assert (vr1.operands == shared_lookup_references);
3612 if (*vnresult)
3613 return (*vnresult)->result;
3615 return NULL_TREE;
3618 /* Look up OP in the current hash table, and return the resulting value
3619 number if it exists in the hash table. Return NULL_TREE if it does
3620 not exist in the hash table or if the result field of the structure
3621 was NULL. VNRESULT will be filled in with the vn_reference_t
3622 stored in the hashtable if one exists. When TBAA_P is false assume
3623 we are looking up a store and treat it as having alias-set zero.
3624 *LAST_VUSE_PTR will be updated with the VUSE at which the value
3625 lookup succeeded. MASK is either NULL_TREE, or can be an INTEGER_CST
3626 if the result of the load is bitwise ANDed with MASK, in which case we
3627 only care about that subset of the bits and can ignore whether the other
3628 bits are uninitialized or not initialized with constants (see the sketch after this function). */
3630 tree
3631 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
3632 vn_reference_t *vnresult, bool tbaa_p,
3633 tree *last_vuse_ptr, tree mask)
3635 vec<vn_reference_op_s> operands;
3636 struct vn_reference_s vr1;
3637 bool valueized_anything;
3639 if (vnresult)
3640 *vnresult = NULL;
3642 vr1.vuse = vuse_ssa_val (vuse);
3643 vr1.operands = operands
3644 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
3645 vr1.type = TREE_TYPE (op);
3646 ao_ref op_ref;
3647 ao_ref_init (&op_ref, op);
3648 vr1.set = ao_ref_alias_set (&op_ref);
3649 vr1.base_set = ao_ref_base_alias_set (&op_ref);
3650 vr1.hashcode = vn_reference_compute_hash (&vr1);
3651 if (mask == NULL_TREE)
3652 if (tree cst = fully_constant_vn_reference_p (&vr1))
3653 return cst;
3655 if (kind != VN_NOWALK && vr1.vuse)
3657 vn_reference_t wvnresult;
3658 ao_ref r;
3659 unsigned limit = param_sccvn_max_alias_queries_per_access;
3660 auto_vec<vn_reference_op_s> ops_for_ref;
3661 if (valueized_anything)
3663 copy_reference_ops_from_ref (op, &ops_for_ref);
3664 bool tem;
3665 valueize_refs_1 (&ops_for_ref, &tem, true);
3667 /* Make sure to use a valueized reference if we valueized anything.
3668 Otherwise preserve the full reference for advanced TBAA. */
3669 if (!valueized_anything
3670 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.base_set,
3671 vr1.type, ops_for_ref))
3672 ao_ref_init (&r, op);
3673 vn_walk_cb_data data (&vr1, r.ref ? NULL_TREE : op,
3674 last_vuse_ptr, kind, tbaa_p, mask);
3676 wvnresult
3677 = ((vn_reference_t)
3678 walk_non_aliased_vuses (&r, vr1.vuse, tbaa_p, vn_reference_lookup_2,
3679 vn_reference_lookup_3, vuse_valueize, limit,
3680 &data));
3681 gcc_checking_assert (vr1.operands == shared_lookup_references);
3682 if (wvnresult)
3684 gcc_assert (mask == NULL_TREE);
3685 if (vnresult)
3686 *vnresult = wvnresult;
3687 return wvnresult->result;
3689 else if (mask)
3690 return data.masked_result;
3692 return NULL_TREE;
3695 if (last_vuse_ptr)
3696 *last_vuse_ptr = vr1.vuse;
3697 if (mask)
3698 return NULL_TREE;
3699 return vn_reference_lookup_1 (&vr1, vnresult);
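/* A minimal standalone sketch (not GCC code) of the MASK rationale
   described in the comment before vn_reference_lookup: when the
   consumer only uses (load & mask), bits outside the mask may be
   uninitialized or unknown without changing the result, so a lookup
   that determines just the masked bits is still a valid answer.  */

#include <assert.h>
#include <stdint.h>

static void
sketch_masked_lookup_demo (void)
{
  uint32_t mask = 0x0000ffffu;          /* consumer computes load & mask */
  uint32_t known_bits = 0x00001234u;    /* bits the lookup determined    */
  uint32_t unknown_a = 0xaaaa0000u;     /* two arbitrary fillings of the */
  uint32_t unknown_b = 0x55550000u;     /* undetermined high bits        */
  /* Whatever the unknown bits turn out to be, the masked result agrees.  */
  assert (((known_bits | unknown_a) & mask)
          == ((known_bits | unknown_b) & mask));
}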
3702 /* Look up CALL in the current hash table and return the entry in
3703 *VNRESULT if found. Populates *VR for the hashtable lookup. */
3705 void
3706 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
3707 vn_reference_t vr)
3709 if (vnresult)
3710 *vnresult = NULL;
3712 tree vuse = gimple_vuse (call);
3714 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
3715 vr->operands = valueize_shared_reference_ops_from_call (call);
3716 tree lhs = gimple_call_lhs (call);
3717 /* For non-SSA return values the reference ops contain the LHS. */
3718 vr->type = ((lhs && TREE_CODE (lhs) == SSA_NAME)
3719 ? TREE_TYPE (lhs) : NULL_TREE);
3720 vr->punned = false;
3721 vr->set = 0;
3722 vr->base_set = 0;
3723 vr->hashcode = vn_reference_compute_hash (vr);
3724 vn_reference_lookup_1 (vr, vnresult);
3727 /* Insert OP into the current hash table with a value number of RESULT. VUSE is the virtual operand the reference is keyed on and VDEF, if non-NULL, is recorded as the associated virtual definition. */
3729 static void
3730 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
3732 vn_reference_s **slot;
3733 vn_reference_t vr1;
3734 bool tem;
3736 vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
3737 if (TREE_CODE (result) == SSA_NAME)
3738 vr1->value_id = VN_INFO (result)->value_id;
3739 else
3740 vr1->value_id = get_or_alloc_constant_value_id (result);
3741 vr1->vuse = vuse_ssa_val (vuse);
3742 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
3743 vr1->type = TREE_TYPE (op);
3744 vr1->punned = false;
3745 ao_ref op_ref;
3746 ao_ref_init (&op_ref, op);
3747 vr1->set = ao_ref_alias_set (&op_ref);
3748 vr1->base_set = ao_ref_base_alias_set (&op_ref);
3749 vr1->hashcode = vn_reference_compute_hash (vr1);
3750 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
3751 vr1->result_vdef = vdef;
3753 slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
3754 INSERT);
3756 /* Because IL walking on reference lookup can end up visiting
3757 a def that is only to be visited later in iteration order
3758 when we are about to make an irreducible region reducible,
3759 the def can be effectively processed and its ref already
3760 inserted by vn_reference_lookup_3. So we cannot assert (!*slot)
3761 but we save a lookup if we deal with already inserted refs here. */
3762 if (*slot)
3764 /* We cannot assert that we have the same value either because
3765 when disentangling an irreducible region we may end up visiting
3766 a use before the corresponding def. That's a missed optimization
3767 only though. See gcc.dg/tree-ssa/pr87126.c for example. */
3768 if (dump_file && (dump_flags & TDF_DETAILS)
3769 && !operand_equal_p ((*slot)->result, vr1->result, 0))
3771 fprintf (dump_file, "Keeping old value ");
3772 print_generic_expr (dump_file, (*slot)->result);
3773 fprintf (dump_file, " because of collision\n");
3775 free_reference (vr1);
3776 obstack_free (&vn_tables_obstack, vr1);
3777 return;
3780 *slot = vr1;
3781 vr1->next = last_inserted_ref;
3782 last_inserted_ref = vr1;
3785 /* Insert a reference by its pieces into the current hash table with
3786 a value number of RESULT. Return the resulting reference
3787 structure we created. */
3789 vn_reference_t
3790 vn_reference_insert_pieces (tree vuse, alias_set_type set,
3791 alias_set_type base_set, tree type,
3792 vec<vn_reference_op_s> operands,
3793 tree result, unsigned int value_id)
3796 vn_reference_s **slot;
3797 vn_reference_t vr1;
3799 vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
3800 vr1->value_id = value_id;
3801 vr1->vuse = vuse_ssa_val (vuse);
3802 vr1->operands = operands;
3803 valueize_refs (&vr1->operands);
3804 vr1->type = type;
3805 vr1->punned = false;
3806 vr1->set = set;
3807 vr1->base_set = base_set;
3808 vr1->hashcode = vn_reference_compute_hash (vr1);
3809 if (result && TREE_CODE (result) == SSA_NAME)
3810 result = SSA_VAL (result);
3811 vr1->result = result;
3813 slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
3814 INSERT);
3816 /* At this point we should have all the things inserted that we have
3817 seen before, and we should never try inserting something that
3818 already exists. */
3819 gcc_assert (!*slot);
3821 *slot = vr1;
3822 vr1->next = last_inserted_ref;
3823 last_inserted_ref = vr1;
3824 return vr1;
3827 /* Compute and return the hash value for nary operation VBO1. */
3829 static hashval_t
3830 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
3832 inchash::hash hstate;
3833 unsigned i;
3835 for (i = 0; i < vno1->length; ++i)
3836 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
3837 vno1->op[i] = SSA_VAL (vno1->op[i]);
3839 if (((vno1->length == 2
3840 && commutative_tree_code (vno1->opcode))
3841 || (vno1->length == 3
3842 && commutative_ternary_tree_code (vno1->opcode)))
3843 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
3844 std::swap (vno1->op[0], vno1->op[1]);
3845 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
3846 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
3848 std::swap (vno1->op[0], vno1->op[1]);
3849 vno1->opcode = swap_tree_comparison (vno1->opcode);
3852 hstate.add_int (vno1->opcode);
3853 for (i = 0; i < vno1->length; ++i)
3854 inchash::add_expr (vno1->op[i], hstate);
3856 return hstate.end ();
3859 /* Compare nary operations VNO1 and VNO2 and return true if they are
3860 equivalent. */
3862 bool
3863 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
3865 unsigned i;
3867 if (vno1->hashcode != vno2->hashcode)
3868 return false;
3870 if (vno1->length != vno2->length)
3871 return false;
3873 if (vno1->opcode != vno2->opcode
3874 || !types_compatible_p (vno1->type, vno2->type))
3875 return false;
3877 for (i = 0; i < vno1->length; ++i)
3878 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
3879 return false;
3881 /* BIT_INSERT_EXPR has an implicit operand as the type precision
3882 of op1. Need to check to make sure they are the same. */
3883 if (vno1->opcode == BIT_INSERT_EXPR
3884 && TREE_CODE (vno1->op[1]) == INTEGER_CST
3885 && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
3886 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
3887 return false;
3889 return true;
3892 /* Initialize VNO from the pieces provided. */
3894 static void
3895 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
3896 enum tree_code code, tree type, tree *ops)
3898 vno->opcode = code;
3899 vno->length = length;
3900 vno->type = type;
3901 memcpy (&vno->op[0], ops, sizeof (tree) * length);
3904 /* Return the number of operands for a vn_nary ops structure from STMT. */
3906 static unsigned int
3907 vn_nary_length_from_stmt (gimple *stmt)
3909 switch (gimple_assign_rhs_code (stmt))
3911 case REALPART_EXPR:
3912 case IMAGPART_EXPR:
3913 case VIEW_CONVERT_EXPR:
3914 return 1;
3916 case BIT_FIELD_REF:
3917 return 3;
3919 case CONSTRUCTOR:
3920 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
3922 default:
3923 return gimple_num_ops (stmt) - 1;
3927 /* Initialize VNO from STMT. */
3929 static void
3930 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gassign *stmt)
3932 unsigned i;
3934 vno->opcode = gimple_assign_rhs_code (stmt);
3935 vno->type = TREE_TYPE (gimple_assign_lhs (stmt));
3936 switch (vno->opcode)
3938 case REALPART_EXPR:
3939 case IMAGPART_EXPR:
3940 case VIEW_CONVERT_EXPR:
3941 vno->length = 1;
3942 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
3943 break;
3945 case BIT_FIELD_REF:
3946 vno->length = 3;
3947 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
3948 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
3949 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
3950 break;
3952 case CONSTRUCTOR:
3953 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
3954 for (i = 0; i < vno->length; ++i)
3955 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
3956 break;
3958 default:
3959 gcc_checking_assert (!gimple_assign_single_p (stmt));
3960 vno->length = gimple_num_ops (stmt) - 1;
3961 for (i = 0; i < vno->length; ++i)
3962 vno->op[i] = gimple_op (stmt, i + 1);
3966 /* Compute the hashcode for VNO and look for it in the hash table;
3967 return the resulting value number if it exists in the hash table.
3968 Return NULL_TREE if it does not exist in the hash table or if the
3969 result field of the operation is NULL. VNRESULT will contain the
3970 vn_nary_op_t from the hashtable if it exists. */
3972 static tree
3973 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
3975 vn_nary_op_s **slot;
3977 if (vnresult)
3978 *vnresult = NULL;
3980 vno->hashcode = vn_nary_op_compute_hash (vno);
3981 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
3982 if (!slot)
3983 return NULL_TREE;
3984 if (vnresult)
3985 *vnresult = *slot;
3986 return (*slot)->predicated_values ? NULL_TREE : (*slot)->u.result;
3989 /* Lookup a n-ary operation by its pieces and return the resulting value
3990 number if it exists in the hash table. Return NULL_TREE if it does
3991 not exist in the hash table or if the result field of the operation
3992 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
3993 if it exists. */
3995 tree
3996 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
3997 tree type, tree *ops, vn_nary_op_t *vnresult)
3999 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
4000 sizeof_vn_nary_op (length));
4001 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
4002 return vn_nary_op_lookup_1 (vno1, vnresult);
4005 /* Lookup the rhs of STMT in the current hash table, and return the resulting
4006 value number if it exists in the hash table. Return NULL_TREE if
4007 it does not exist in the hash table. VNRESULT will contain the
4008 vn_nary_op_t from the hashtable if it exists. */
4010 tree
4011 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
4013 vn_nary_op_t vno1
4014 = XALLOCAVAR (struct vn_nary_op_s,
4015 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
4016 init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (stmt));
4017 return vn_nary_op_lookup_1 (vno1, vnresult);
4020 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
4022 static vn_nary_op_t
4023 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
4025 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
4028 /* Allocate and initialize a vn_nary_op_t on the
4029 vn_tables_obstack. */
4031 static vn_nary_op_t
4032 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
4034 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length, &vn_tables_obstack);
4036 vno1->value_id = value_id;
4037 vno1->length = length;
4038 vno1->predicated_values = 0;
4039 vno1->u.result = result;
4041 return vno1;
4044 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
4045 VNO->HASHCODE first. */
4047 static vn_nary_op_t
4048 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
4049 bool compute_hash)
4051 vn_nary_op_s **slot;
4053 if (compute_hash)
4055 vno->hashcode = vn_nary_op_compute_hash (vno);
4056 gcc_assert (! vno->predicated_values
4057 || (! vno->u.values->next
4058 && vno->u.values->n == 1));
4061 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
4062 vno->unwind_to = *slot;
4063 if (*slot)
4065 /* Prefer non-predicated values.
4066 ??? Only if those are constant, otherwise, with constant predicated
4067 value, turn them into predicated values with entry-block validity
4068 (??? but we always find the first valid result currently). */
4069 if ((*slot)->predicated_values
4070 && ! vno->predicated_values)
4072 /* ??? We cannot remove *slot from the unwind stack list.
4073 For the moment we deal with this by skipping not found
4074 entries but this isn't ideal ... */
4075 *slot = vno;
4076 /* ??? Maintain a stack of states we can unwind in
4077 vn_nary_op_s? But how far do we unwind? In reality
4078 we need to push change records somewhere... Or not
4079 unwind vn_nary_op_s and linking them but instead
4080 unwind the results "list", linking that, which also
4081 doesn't move on hashtable resize. */
4082 /* We can also have a ->unwind_to recording *slot there.
4083 That way we can make u.values a fixed size array with
4084 recording the number of entries but of course we then
4085 have always N copies for each unwind_to-state. Or we
4086 make sure to only ever append and each unwinding will
4087 pop off one entry (but how to deal with predicated
4088 replaced with non-predicated here?) */
4089 vno->next = last_inserted_nary;
4090 last_inserted_nary = vno;
4091 return vno;
4093 else if (vno->predicated_values
4094 && ! (*slot)->predicated_values)
4095 return *slot;
4096 else if (vno->predicated_values
4097 && (*slot)->predicated_values)
4099 /* ??? Factor this all into an insert_single_predicated_value
4100 routine. */
4101 gcc_assert (!vno->u.values->next && vno->u.values->n == 1);
4102 basic_block vno_bb
4103 = BASIC_BLOCK_FOR_FN (cfun, vno->u.values->valid_dominated_by_p[0]);
4104 vn_pval *nval = vno->u.values;
4105 vn_pval **next = &vno->u.values;
4106 bool found = false;
4107 for (vn_pval *val = (*slot)->u.values; val; val = val->next)
4109 if (expressions_equal_p (val->result, vno->u.values->result))
4111 found = true;
4112 for (unsigned i = 0; i < val->n; ++i)
4114 basic_block val_bb
4115 = BASIC_BLOCK_FOR_FN (cfun,
4116 val->valid_dominated_by_p[i]);
4117 if (dominated_by_p (CDI_DOMINATORS, vno_bb, val_bb))
4118 /* Value registered with more generic predicate. */
4119 return *slot;
4120 else if (dominated_by_p (CDI_DOMINATORS, val_bb, vno_bb))
4121 /* Shouldn't happen, we insert in RPO order. */
4122 gcc_unreachable ();
4124 /* Append value. */
4125 *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
4126 sizeof (vn_pval)
4127 + val->n * sizeof (int));
4128 (*next)->next = NULL;
4129 (*next)->result = val->result;
4130 (*next)->n = val->n + 1;
4131 memcpy ((*next)->valid_dominated_by_p,
4132 val->valid_dominated_by_p,
4133 val->n * sizeof (int));
4134 (*next)->valid_dominated_by_p[val->n] = vno_bb->index;
4135 next = &(*next)->next;
4136 if (dump_file && (dump_flags & TDF_DETAILS))
4137 fprintf (dump_file, "Appending predicate to value.\n");
4138 continue;
4140 /* Copy other predicated values. */
4141 *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
4142 sizeof (vn_pval)
4143 + (val->n-1) * sizeof (int));
4144 memcpy (*next, val, sizeof (vn_pval) + (val->n-1) * sizeof (int));
4145 (*next)->next = NULL;
4146 next = &(*next)->next;
4148 if (!found)
4149 *next = nval;
4151 *slot = vno;
4152 vno->next = last_inserted_nary;
4153 last_inserted_nary = vno;
4154 return vno;
4157 /* While we do not want to insert things twice it's awkward to
4158 avoid it in the case where visit_nary_op pattern-matches stuff
4159 and ends up simplifying the replacement to itself. We then
4160 get two inserts, one from visit_nary_op and one from
4161 vn_nary_build_or_lookup.
4162 So allow inserts with the same value number. */
4163 if ((*slot)->u.result == vno->u.result)
4164 return *slot;
4167 /* ??? There's also optimistic vs. previous committed state merging
4168 that is problematic for the case of unwinding. */
4170 /* ??? We should return NULL if we do not use 'vno' and have the
4171 caller release it. */
4172 gcc_assert (!*slot);
4174 *slot = vno;
4175 vno->next = last_inserted_nary;
4176 last_inserted_nary = vno;
4177 return vno;
4180 /* Insert an n-ary operation into the current hash table using its
4181 pieces. Return the vn_nary_op_t structure we created and put in
4182 the hashtable. */
4184 vn_nary_op_t
4185 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
4186 tree type, tree *ops,
4187 tree result, unsigned int value_id)
4189 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
4190 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
4191 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
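/* Insert the n-ary operation given by CODE, TYPE and the LENGTH operands
   OPS into the current hash table with RESULT as a value predicated on
   edge PRED_E, i.e. valid only in blocks dominated by the destination of
   PRED_E. Return the created vn_nary_op_t, or NULL if the predicate
   cannot be usefully recorded, for example for backedges or when the
   destination of PRED_E has more than one non-backedge predecessor. */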
4194 static vn_nary_op_t
4195 vn_nary_op_insert_pieces_predicated (unsigned int length, enum tree_code code,
4196 tree type, tree *ops,
4197 tree result, unsigned int value_id,
4198 edge pred_e)
4200 /* ??? Currently tracking BBs. */
4201 if (! single_pred_p (pred_e->dest))
4203 /* Never record for backedges. */
4204 if (pred_e->flags & EDGE_DFS_BACK)
4205 return NULL;
4206 edge_iterator ei;
4207 edge e;
4208 int cnt = 0;
4209 /* Ignore backedges. */
4210 FOR_EACH_EDGE (e, ei, pred_e->dest->preds)
4211 if (! dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
4212 cnt++;
4213 if (cnt != 1)
4214 return NULL;
4216 if (dump_file && (dump_flags & TDF_DETAILS)
4217 /* ??? Fix dumping, but currently we only get comparisons. */
4218 && TREE_CODE_CLASS (code) == tcc_comparison)
4220 fprintf (dump_file, "Recording on edge %d->%d ", pred_e->src->index,
4221 pred_e->dest->index);
4222 print_generic_expr (dump_file, ops[0], TDF_SLIM);
4223 fprintf (dump_file, " %s ", get_tree_code_name (code));
4224 print_generic_expr (dump_file, ops[1], TDF_SLIM);
4225 fprintf (dump_file, " == %s\n",
4226 integer_zerop (result) ? "false" : "true");
4228 vn_nary_op_t vno1 = alloc_vn_nary_op (length, NULL_TREE, value_id);
4229 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
4230 vno1->predicated_values = 1;
4231 vno1->u.values = (vn_pval *) obstack_alloc (&vn_tables_obstack,
4232 sizeof (vn_pval));
4233 vno1->u.values->next = NULL;
4234 vno1->u.values->result = result;
4235 vno1->u.values->n = 1;
4236 vno1->u.values->valid_dominated_by_p[0] = pred_e->dest->index;
4237 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
4240 static bool
4241 dominated_by_p_w_unex (basic_block bb1, basic_block bb2, bool);
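/* Return the value of VNO that is valid in basic block BB: either its
   unconditional result or a predicated value recorded for a block that
   dominates BB (taking edge executability into account). Return
   NULL_TREE if no recorded value is valid in BB. */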
4243 static tree
4244 vn_nary_op_get_predicated_value (vn_nary_op_t vno, basic_block bb)
4246 if (! vno->predicated_values)
4247 return vno->u.result;
4248 for (vn_pval *val = vno->u.values; val; val = val->next)
4249 for (unsigned i = 0; i < val->n; ++i)
4250 /* Do not handle backedge executability optimistically since
4251 when figuring out whether to iterate we do not consider
4252 changed predication. */
4253 if (dominated_by_p_w_unex
4254 (bb, BASIC_BLOCK_FOR_FN (cfun, val->valid_dominated_by_p[i]),
4255 false))
4256 return val->result;
4257 return NULL_TREE;
4260 /* Insert the rhs of STMT into the current hash table with a value number of
4261 RESULT. */
4263 static vn_nary_op_t
4264 vn_nary_op_insert_stmt (gimple *stmt, tree result)
4266 vn_nary_op_t vno1
4267 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
4268 result, VN_INFO (result)->value_id);
4269 init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (stmt));
4270 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
4273 /* Compute a hashcode for PHI operation VP1 and return it. */
4275 static inline hashval_t
4276 vn_phi_compute_hash (vn_phi_t vp1)
4278 inchash::hash hstate;
4279 tree phi1op;
4280 tree type;
4281 edge e;
4282 edge_iterator ei;
4284 hstate.add_int (EDGE_COUNT (vp1->block->preds));
4285 switch (EDGE_COUNT (vp1->block->preds))
4287 case 1:
4288 break;
4289 case 2:
4290 if (vp1->block->loop_father->header == vp1->block)
4292 else
4293 break;
4294 /* Fallthru. */
4295 default:
4296 hstate.add_int (vp1->block->index);
4299 /* If all PHI arguments are constants we need to distinguish
4300 the PHI node via its type. */
4301 type = vp1->type;
4302 hstate.merge_hash (vn_hash_type (type));
4304 FOR_EACH_EDGE (e, ei, vp1->block->preds)
4306 /* Don't hash backedge values; they need to be handled as VN_TOP
4307 for optimistic value-numbering. */
4308 if (e->flags & EDGE_DFS_BACK)
4309 continue;
4311 phi1op = vp1->phiargs[e->dest_idx];
4312 if (phi1op == VN_TOP)
4313 continue;
4314 inchash::add_expr (phi1op, hstate);
4317 return hstate.end ();
4321 /* Return true if COND1 and COND2 represent the same condition, set
4322 *INVERTED_P if one needs to be inverted to make it the same as
4323 the other. */
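/* For example, a_1 < b_2 and b_2 > a_1 represent the same condition,
   while a_1 < b_2 and a_1 >= b_2 (when NaNs need not be honored) match
   with *INVERTED_P set to true. */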
4325 static bool
4326 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
4327 gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
4329 enum tree_code code1 = gimple_cond_code (cond1);
4330 enum tree_code code2 = gimple_cond_code (cond2);
4332 *inverted_p = false;
4333 if (code1 == code2)
4335 else if (code1 == swap_tree_comparison (code2))
4336 std::swap (lhs2, rhs2);
4337 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
4338 *inverted_p = true;
4339 else if (code1 == invert_tree_comparison
4340 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
4342 std::swap (lhs2, rhs2);
4343 *inverted_p = true;
4345 else
4346 return false;
4348 return ((expressions_equal_p (lhs1, lhs2)
4349 && expressions_equal_p (rhs1, rhs2))
4350 || (commutative_tree_code (code1)
4351 && expressions_equal_p (lhs1, rhs2)
4352 && expressions_equal_p (rhs1, lhs2)));
4355 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
4357 static int
4358 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
4360 if (vp1->hashcode != vp2->hashcode)
4361 return false;
4363 if (vp1->block != vp2->block)
4365 if (EDGE_COUNT (vp1->block->preds) != EDGE_COUNT (vp2->block->preds))
4366 return false;
4368 switch (EDGE_COUNT (vp1->block->preds))
4370 case 1:
4371 /* Single-arg PHIs are just copies. */
4372 break;
4374 case 2:
4376 /* Rule out backedges into the PHI. */
4377 if (vp1->block->loop_father->header == vp1->block
4378 || vp2->block->loop_father->header == vp2->block)
4379 return false;
4381 /* If the PHI nodes do not have compatible types
4382 they are not the same. */
4383 if (!types_compatible_p (vp1->type, vp2->type))
4384 return false;
4386 basic_block idom1
4387 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
4388 basic_block idom2
4389 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
4390 /* If the immediate dominators end in switch stmts, multiple
4391 values may end up in the same PHI arg via intermediate
4392 CFG merges. */
4393 if (EDGE_COUNT (idom1->succs) != 2
4394 || EDGE_COUNT (idom2->succs) != 2)
4395 return false;
4397 /* Verify the controlling stmt is the same. */
4398 gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
4399 gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
4400 if (! last1 || ! last2)
4401 return false;
4402 bool inverted_p;
4403 if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
4404 last2, vp2->cclhs, vp2->ccrhs,
4405 &inverted_p))
4406 return false;
4408 /* Get at true/false controlled edges into the PHI. */
4409 edge te1, te2, fe1, fe2;
4410 if (! extract_true_false_controlled_edges (idom1, vp1->block,
4411 &te1, &fe1)
4412 || ! extract_true_false_controlled_edges (idom2, vp2->block,
4413 &te2, &fe2))
4414 return false;
4416 /* Swap edges if the second condition is the inverse of the
4417 first. */
4418 if (inverted_p)
4419 std::swap (te2, fe2);
4421 /* ??? Handle VN_TOP specially. */
4422 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
4423 vp2->phiargs[te2->dest_idx])
4424 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
4425 vp2->phiargs[fe2->dest_idx]))
4426 return false;
4428 return true;
4431 default:
4432 return false;
4436 /* If the PHI nodes do not have compatible types
4437 they are not the same. */
4438 if (!types_compatible_p (vp1->type, vp2->type))
4439 return false;
4441 /* Any phi in the same block will have its arguments in the
4442 same edge order, because of how we store phi nodes. */
4443 unsigned nargs = EDGE_COUNT (vp1->block->preds);
4444 for (unsigned i = 0; i < nargs; ++i)
4446 tree phi1op = vp1->phiargs[i];
4447 tree phi2op = vp2->phiargs[i];
4448 if (phi1op == phi2op)
4449 continue;
4450 if (!expressions_equal_p (phi1op, phi2op))
4451 return false;
4454 return true;
4457 /* Lookup PHI in the current hash table, and return the resulting
4458 value number if it exists in the hash table. Return NULL_TREE if
4459 it does not exist in the hash table. */
4461 static tree
4462 vn_phi_lookup (gimple *phi, bool backedges_varying_p)
4464 vn_phi_s **slot;
4465 struct vn_phi_s *vp1;
4466 edge e;
4467 edge_iterator ei;
4469 vp1 = XALLOCAVAR (struct vn_phi_s,
4470 sizeof (struct vn_phi_s)
4471 + (gimple_phi_num_args (phi) - 1) * sizeof (tree));
4473 /* Canonicalize the SSA_NAMEs to their value number. */
4474 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
4476 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4477 if (TREE_CODE (def) == SSA_NAME
4478 && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
4479 def = SSA_VAL (def);
4480 vp1->phiargs[e->dest_idx] = def;
4482 vp1->type = TREE_TYPE (gimple_phi_result (phi));
4483 vp1->block = gimple_bb (phi);
4484 /* Extract values of the controlling condition. */
4485 vp1->cclhs = NULL_TREE;
4486 vp1->ccrhs = NULL_TREE;
4487 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
4488 if (EDGE_COUNT (idom1->succs) == 2)
4489 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
4491 /* ??? We want to use SSA_VAL here. But possibly not
4492 allow VN_TOP. */
4493 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
4494 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
4496 vp1->hashcode = vn_phi_compute_hash (vp1);
4497 slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, NO_INSERT);
4498 if (!slot)
4499 return NULL_TREE;
4500 return (*slot)->result;
4503 /* Insert PHI into the current hash table with a value number of
4504 RESULT. */
4506 static vn_phi_t
4507 vn_phi_insert (gimple *phi, tree result, bool backedges_varying_p)
4509 vn_phi_s **slot;
4510 vn_phi_t vp1 = (vn_phi_t) obstack_alloc (&vn_tables_obstack,
4511 sizeof (vn_phi_s)
4512 + ((gimple_phi_num_args (phi) - 1)
4513 * sizeof (tree)));
4514 edge e;
4515 edge_iterator ei;
4517 /* Canonicalize the SSA_NAMEs to their value number. */
4518 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
4520 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4521 if (TREE_CODE (def) == SSA_NAME
4522 && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
4523 def = SSA_VAL (def);
4524 vp1->phiargs[e->dest_idx] = def;
4526 vp1->value_id = VN_INFO (result)->value_id;
4527 vp1->type = TREE_TYPE (gimple_phi_result (phi));
4528 vp1->block = gimple_bb (phi);
4529 /* Extract values of the controlling condition. */
4530 vp1->cclhs = NULL_TREE;
4531 vp1->ccrhs = NULL_TREE;
4532 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
4533 if (EDGE_COUNT (idom1->succs) == 2)
4534 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
4536 /* ??? We want to use SSA_VAL here. But possibly not
4537 allow VN_TOP. */
4538 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
4539 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
4541 vp1->result = result;
4542 vp1->hashcode = vn_phi_compute_hash (vp1);
4544 slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
4545 gcc_assert (!*slot);
4547 *slot = vp1;
4548 vp1->next = last_inserted_phi;
4549 last_inserted_phi = vp1;
4550 return vp1;
4554 /* Return true if BB1 is dominated by BB2 taking into account edges
4555 that are not executable. When ALLOW_BACK is false treat
4556 not-executable backedges as executable. */
4558 static bool
4559 dominated_by_p_w_unex (basic_block bb1, basic_block bb2, bool allow_back)
4561 edge_iterator ei;
4562 edge e;
4564 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4565 return true;
4567 /* Before iterating we'd like to know if there exists an
4568 (executable) path from bb2 to bb1 at all; if not we can
4569 directly return false. For now simply iterate once. */
4571 /* Iterate to the single executable bb1 predecessor. */
4572 if (EDGE_COUNT (bb1->preds) > 1)
4574 edge prede = NULL;
4575 FOR_EACH_EDGE (e, ei, bb1->preds)
4576 if ((e->flags & EDGE_EXECUTABLE)
4577 || (!allow_back && (e->flags & EDGE_DFS_BACK)))
4579 if (prede)
4581 prede = NULL;
4582 break;
4584 prede = e;
4586 if (prede)
4588 bb1 = prede->src;
4590 /* Re-do the dominance check with changed bb1. */
4591 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4592 return true;
4596 /* Iterate to the single executable bb2 successor. */
4597 edge succe = NULL;
4598 FOR_EACH_EDGE (e, ei, bb2->succs)
4599 if ((e->flags & EDGE_EXECUTABLE)
4600 || (!allow_back && (e->flags & EDGE_DFS_BACK)))
4602 if (succe)
4604 succe = NULL;
4605 break;
4607 succe = e;
4609 if (succe)
4611 /* Verify the reached block is only reached through succe.
4612 If there is only one edge we can spare us the dominator
4613 check and iterate directly. */
4614 if (EDGE_COUNT (succe->dest->preds) > 1)
4616 FOR_EACH_EDGE (e, ei, succe->dest->preds)
4617 if (e != succe
4618 && ((e->flags & EDGE_EXECUTABLE)
4619 || (!allow_back && (e->flags & EDGE_DFS_BACK))))
4621 succe = NULL;
4622 break;
4625 if (succe)
4627 bb2 = succe->dest;
4629 /* Re-do the dominance check with changed bb2. */
4630 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4631 return true;
4635 /* We could now iterate updating bb1 / bb2. */
4636 return false;
4639 /* Set the value number of FROM to TO, return true if it has changed
4640 as a result. */
4642 static inline bool
4643 set_ssa_val_to (tree from, tree to)
4645 vn_ssa_aux_t from_info = VN_INFO (from);
4646 tree currval = from_info->valnum; // SSA_VAL (from)
4647 poly_int64 toff, coff;
4648 bool curr_undefined = false;
4649 bool curr_invariant = false;
4651 /* The only thing we allow as value numbers are ssa_names
4652 and invariants. So assert that here. We don't allow VN_TOP
4653 as visiting a stmt should produce a value-number other than
4654 that.
4655 ??? Still VN_TOP can happen for unreachable code, so force
4656 it to varying in that case. Not all code is prepared to
4657 get VN_TOP on valueization. */
4658 if (to == VN_TOP)
4660 /* ??? When iterating and visiting PHI <undef, backedge-value>
4661 for the first time we rightfully get VN_TOP and we need to
4662 preserve that to optimize for example gcc.dg/tree-ssa/ssa-sccvn-2.c.
4663 With SCCVN we were simply lucky we iterated the other PHI
4664 cycles first and thus visited the backedge-value DEF. */
4665 if (currval == VN_TOP)
4666 goto set_and_exit;
4667 if (dump_file && (dump_flags & TDF_DETAILS))
4668 fprintf (dump_file, "Forcing value number to varying on "
4669 "receiving VN_TOP\n");
4670 to = from;
4673 gcc_checking_assert (to != NULL_TREE
4674 && ((TREE_CODE (to) == SSA_NAME
4675 && (to == from || SSA_VAL (to) == to))
4676 || is_gimple_min_invariant (to)));
4678 if (from != to)
4680 if (currval == from)
4682 if (dump_file && (dump_flags & TDF_DETAILS))
4684 fprintf (dump_file, "Not changing value number of ");
4685 print_generic_expr (dump_file, from);
4686 fprintf (dump_file, " from VARYING to ");
4687 print_generic_expr (dump_file, to);
4688 fprintf (dump_file, "\n");
4690 return false;
4692 curr_invariant = is_gimple_min_invariant (currval);
4693 curr_undefined = (TREE_CODE (currval) == SSA_NAME
4694 && ssa_undefined_value_p (currval, false));
4695 if (currval != VN_TOP
4696 && !curr_invariant
4697 && !curr_undefined
4698 && is_gimple_min_invariant (to))
4700 if (dump_file && (dump_flags & TDF_DETAILS))
4702 fprintf (dump_file, "Forcing VARYING instead of changing "
4703 "value number of ");
4704 print_generic_expr (dump_file, from);
4705 fprintf (dump_file, " from ");
4706 print_generic_expr (dump_file, currval);
4707 fprintf (dump_file, " (non-constant) to ");
4708 print_generic_expr (dump_file, to);
4709 fprintf (dump_file, " (constant)\n");
4711 to = from;
4713 else if (currval != VN_TOP
4714 && !curr_undefined
4715 && TREE_CODE (to) == SSA_NAME
4716 && ssa_undefined_value_p (to, false))
4718 if (dump_file && (dump_flags & TDF_DETAILS))
4720 fprintf (dump_file, "Forcing VARYING instead of changing "
4721 "value number of ");
4722 print_generic_expr (dump_file, from);
4723 fprintf (dump_file, " from ");
4724 print_generic_expr (dump_file, currval);
4725 fprintf (dump_file, " (non-undefined) to ");
4726 print_generic_expr (dump_file, to);
4727 fprintf (dump_file, " (undefined)\n");
4729 to = from;
4731 else if (TREE_CODE (to) == SSA_NAME
4732 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
4733 to = from;
4736 set_and_exit:
4737 if (dump_file && (dump_flags & TDF_DETAILS))
4739 fprintf (dump_file, "Setting value number of ");
4740 print_generic_expr (dump_file, from);
4741 fprintf (dump_file, " to ");
4742 print_generic_expr (dump_file, to);
4745 if (currval != to
4746 && !operand_equal_p (currval, to, 0)
4747 /* Different undefined SSA names are not actually different. See
4748 PR82320 for a testcase where we'd otherwise not terminate iteration. */
4749 && !(curr_undefined
4750 && TREE_CODE (to) == SSA_NAME
4751 && ssa_undefined_value_p (to, false))
4752 /* ??? For addresses involving volatile objects or types operand_equal_p
4753 does not reliably detect ADDR_EXPRs as equal. We know we are only
4754 getting invariant gimple addresses here, so can use
4755 get_addr_base_and_unit_offset to do this comparison. */
4756 && !(TREE_CODE (currval) == ADDR_EXPR
4757 && TREE_CODE (to) == ADDR_EXPR
4758 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
4759 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
4760 && known_eq (coff, toff)))
4762 if (to != from
4763 && currval != VN_TOP
4764 && !curr_undefined
4765 /* We do not want to allow lattice transitions from one value
4766 to another since that may lead to not terminating iteration
4767 (see PR95049). Since there's no convenient way to check
4768 for the allowed transition of VAL -> PHI (loop entry value,
4769 same on two PHIs, to same PHI result) we restrict the check
4770 to invariants. */
4771 && curr_invariant
4772 && is_gimple_min_invariant (to))
4774 if (dump_file && (dump_flags & TDF_DETAILS))
4775 fprintf (dump_file, " forced VARYING");
4776 to = from;
4778 if (dump_file && (dump_flags & TDF_DETAILS))
4779 fprintf (dump_file, " (changed)\n");
4780 from_info->valnum = to;
4781 return true;
4783 if (dump_file && (dump_flags & TDF_DETAILS))
4784 fprintf (dump_file, "\n");
4785 return false;
4788 /* Value number all definitions in STMT to themselves.
4789 Return true if a value number changed. */
4791 static bool
4792 defs_to_varying (gimple *stmt)
4794 bool changed = false;
4795 ssa_op_iter iter;
4796 def_operand_p defp;
4798 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
4800 tree def = DEF_FROM_PTR (defp);
4801 changed |= set_ssa_val_to (def, def);
4803 return changed;
4806 /* Visit a copy between LHS and RHS, return true if the value number
4807 changed. */
4809 static bool
4810 visit_copy (tree lhs, tree rhs)
4812 /* Valueize. */
4813 rhs = SSA_VAL (rhs);
4815 return set_ssa_val_to (lhs, rhs);
4818 /* Lookup a value for OP in type WIDE_TYPE where the value in the type of
4819 OP is the same. When ALLOW_TRUNCATE is true also accept OP being the truncation of a value available in WIDE_TYPE. */
4821 static tree
4822 valueized_wider_op (tree wide_type, tree op, bool allow_truncate)
4824 if (TREE_CODE (op) == SSA_NAME)
4825 op = vn_valueize (op);
4827 /* Either we have the op widened available. */
4828 tree ops[3] = {};
4829 ops[0] = op;
4830 tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
4831 wide_type, ops, NULL);
4832 if (tem)
4833 return tem;
4835 /* Or the op is truncated from some existing value. */
4836 if (allow_truncate && TREE_CODE (op) == SSA_NAME)
4838 gimple *def = SSA_NAME_DEF_STMT (op);
4839 if (is_gimple_assign (def)
4840 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
4842 tem = gimple_assign_rhs1 (def);
4843 if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
4845 if (TREE_CODE (tem) == SSA_NAME)
4846 tem = vn_valueize (tem);
4847 return tem;
4852 /* For constants simply extend it. */
4853 if (TREE_CODE (op) == INTEGER_CST)
4854 return wide_int_to_tree (wide_type, wi::to_wide (op));
4856 return NULL_TREE;
4859 /* Visit a nary operator RHS, value number it, and return true if the
4860 value number of LHS has changed as a result. */
4862 static bool
4863 visit_nary_op (tree lhs, gassign *stmt)
4865 vn_nary_op_t vnresult;
4866 tree result = vn_nary_op_lookup_stmt (stmt, &vnresult);
4867 if (! result && vnresult)
4868 result = vn_nary_op_get_predicated_value (vnresult, gimple_bb (stmt));
4869 if (result)
4870 return set_ssa_val_to (lhs, result);
4872 /* Do some special pattern matching for redundancies of operations
4873 in different types. */
4874 enum tree_code code = gimple_assign_rhs_code (stmt);
4875 tree type = TREE_TYPE (lhs);
4876 tree rhs1 = gimple_assign_rhs1 (stmt);
4877 switch (code)
4879 CASE_CONVERT:
4880 /* Match arithmetic done in a different type where we can easily
4881 substitute the result from some earlier sign-changed or widened
4882 operation. */
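/* For example, with unsigned int a_1 and b_2,
     tem_3 = a_1 + b_2;
     lhs_4 = (unsigned long) tem_3;
   can be expressed in terms of an already value-numbered unsigned long
   addition of (unsigned long) a_1 and (unsigned long) b_2, masked with
   0xffffffff to model the zero-extension. */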
4883 if (INTEGRAL_TYPE_P (type)
4884 && TREE_CODE (rhs1) == SSA_NAME
4885 /* We only handle sign-changes, zero-extension -> & mask or
4886 sign-extension if we know the inner operation doesn't
4887 overflow. */
4888 && (((TYPE_UNSIGNED (TREE_TYPE (rhs1))
4889 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
4890 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
4891 && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
4892 || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
4894 gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
4895 if (def
4896 && (gimple_assign_rhs_code (def) == PLUS_EXPR
4897 || gimple_assign_rhs_code (def) == MINUS_EXPR
4898 || gimple_assign_rhs_code (def) == MULT_EXPR))
4900 tree ops[3] = {};
4901 /* When requiring a sign-extension we cannot model a
4902 previous truncation with a single op so don't bother. */
4903 bool allow_truncate = TYPE_UNSIGNED (TREE_TYPE (rhs1));
4904 /* Either we have the op widened available. */
4905 ops[0] = valueized_wider_op (type, gimple_assign_rhs1 (def),
4906 allow_truncate);
4907 if (ops[0])
4908 ops[1] = valueized_wider_op (type, gimple_assign_rhs2 (def),
4909 allow_truncate);
4910 if (ops[0] && ops[1])
4912 ops[0] = vn_nary_op_lookup_pieces
4913 (2, gimple_assign_rhs_code (def), type, ops, NULL);
4914 /* We have wider operation available. */
4915 if (ops[0]
4916 /* If the leader is a wrapping operation we can
4917 insert it for code hoisting w/o introducing
4918 undefined overflow. If it is not it has to
4919 be available. See PR86554. */
4920 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (ops[0]))
4921 || (rpo_avail && vn_context_bb
4922 && rpo_avail->eliminate_avail (vn_context_bb,
4923 ops[0]))))
4925 unsigned lhs_prec = TYPE_PRECISION (type);
4926 unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
4927 if (lhs_prec == rhs_prec
4928 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
4929 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
4931 gimple_match_op match_op (gimple_match_cond::UNCOND,
4932 NOP_EXPR, type, ops[0]);
4933 result = vn_nary_build_or_lookup (&match_op);
4934 if (result)
4936 bool changed = set_ssa_val_to (lhs, result);
4937 vn_nary_op_insert_stmt (stmt, result);
4938 return changed;
4941 else
4943 tree mask = wide_int_to_tree
4944 (type, wi::mask (rhs_prec, false, lhs_prec));
4945 gimple_match_op match_op (gimple_match_cond::UNCOND,
4946 BIT_AND_EXPR,
4947 TREE_TYPE (lhs),
4948 ops[0], mask);
4949 result = vn_nary_build_or_lookup (&match_op);
4950 if (result)
4952 bool changed = set_ssa_val_to (lhs, result);
4953 vn_nary_op_insert_stmt (stmt, result);
4954 return changed;
4961 break;
4962 case BIT_AND_EXPR:
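/* For a load whose result is masked with a constant, e.g.
     x_1 = MEM[...];
     lhs_2 = x_1 & CST;
   try a masked reference lookup of the load so the value can still be
   determined when the bits outside of CST are uninitialized or not
   constant. */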
4963 if (INTEGRAL_TYPE_P (type)
4964 && TREE_CODE (rhs1) == SSA_NAME
4965 && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST
4966 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)
4967 && default_vn_walk_kind != VN_NOWALK
4968 && CHAR_BIT == 8
4969 && BITS_PER_UNIT == 8
4970 && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
4971 && !integer_all_onesp (gimple_assign_rhs2 (stmt))
4972 && !integer_zerop (gimple_assign_rhs2 (stmt)))
4974 gassign *ass = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
4975 if (ass
4976 && !gimple_has_volatile_ops (ass)
4977 && vn_get_stmt_kind (ass) == VN_REFERENCE)
4979 tree last_vuse = gimple_vuse (ass);
4980 tree op = gimple_assign_rhs1 (ass);
4981 tree result = vn_reference_lookup (op, gimple_vuse (ass),
4982 default_vn_walk_kind,
4983 NULL, true, &last_vuse,
4984 gimple_assign_rhs2 (stmt));
4985 if (result
4986 && useless_type_conversion_p (TREE_TYPE (result),
4987 TREE_TYPE (op)))
4988 return set_ssa_val_to (lhs, result);
4991 break;
4992 case TRUNC_DIV_EXPR:
4993 if (TYPE_UNSIGNED (type))
4994 break;
4995 /* Fallthru. */
4996 case RDIV_EXPR:
4997 case MULT_EXPR:
4998 /* Match up ([-]a){/,*}([-])b with v=a{/,*}b, replacing it with -v. */
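/* For example, if v_1 = a_2 * b_3 is already available then
   lhs_4 = c_5 * b_3 with c_5 = -a_2 is value-numbered as -v_1. */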
4999 if (! HONOR_SIGN_DEPENDENT_ROUNDING (type))
5001 tree rhs[2];
5002 rhs[0] = rhs1;
5003 rhs[1] = gimple_assign_rhs2 (stmt);
5004 for (unsigned i = 0; i <= 1; ++i)
5006 unsigned j = i == 0 ? 1 : 0;
5007 tree ops[2];
5008 gimple_match_op match_op (gimple_match_cond::UNCOND,
5009 NEGATE_EXPR, type, rhs[i]);
5010 ops[i] = vn_nary_build_or_lookup_1 (&match_op, false);
5011 ops[j] = rhs[j];
5012 if (ops[i]
5013 && (ops[0] = vn_nary_op_lookup_pieces (2, code,
5014 type, ops, NULL)))
5016 gimple_match_op match_op (gimple_match_cond::UNCOND,
5017 NEGATE_EXPR, type, ops[0]);
5018 result = vn_nary_build_or_lookup (&match_op);
5019 if (result)
5021 bool changed = set_ssa_val_to (lhs, result);
5022 vn_nary_op_insert_stmt (stmt, result);
5023 return changed;
5028 break;
5029 default:
5030 break;
5033 bool changed = set_ssa_val_to (lhs, lhs);
5034 vn_nary_op_insert_stmt (stmt, lhs);
5035 return changed;
5038 /* Visit a call STMT storing into LHS. Return true if the value number
5039 of the LHS has changed as a result. */
5041 static bool
5042 visit_reference_op_call (tree lhs, gcall *stmt)
5044 bool changed = false;
5045 struct vn_reference_s vr1;
5046 vn_reference_t vnresult = NULL;
5047 tree vdef = gimple_vdef (stmt);
5049 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
5050 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5051 lhs = NULL_TREE;
5053 vn_reference_lookup_call (stmt, &vnresult, &vr1);
5054 if (vnresult)
5056 if (vnresult->result_vdef && vdef)
5057 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
5058 else if (vdef)
5059 /* If the call was discovered to be pure or const reflect
5060 that as far as possible. */
5061 changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
5063 if (!vnresult->result && lhs)
5064 vnresult->result = lhs;
5066 if (vnresult->result && lhs)
5067 changed |= set_ssa_val_to (lhs, vnresult->result);
5069 else
5071 vn_reference_t vr2;
5072 vn_reference_s **slot;
5073 tree vdef_val = vdef;
5074 if (vdef)
5076 /* If we value numbered the function of an indirect call to
5077 one not clobbering memory, value number its VDEF to its
5078 VUSE. */
5079 tree fn = gimple_call_fn (stmt);
5080 if (fn && TREE_CODE (fn) == SSA_NAME)
5082 fn = SSA_VAL (fn);
5083 if (TREE_CODE (fn) == ADDR_EXPR
5084 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5085 && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
5086 & (ECF_CONST | ECF_PURE)))
5087 vdef_val = vuse_ssa_val (gimple_vuse (stmt));
5089 changed |= set_ssa_val_to (vdef, vdef_val);
5091 if (lhs)
5092 changed |= set_ssa_val_to (lhs, lhs);
5093 vr2 = XOBNEW (&vn_tables_obstack, vn_reference_s);
5094 vr2->vuse = vr1.vuse;
5095 /* As we are not walking the virtual operand chain we know the
5096 shared_lookup_references are still original so we can re-use
5097 them here. */
5098 vr2->operands = vr1.operands.copy ();
5099 vr2->type = vr1.type;
5100 vr2->punned = vr1.punned;
5101 vr2->set = vr1.set;
5102 vr2->base_set = vr1.base_set;
5103 vr2->hashcode = vr1.hashcode;
5104 vr2->result = lhs;
5105 vr2->result_vdef = vdef_val;
5106 vr2->value_id = 0;
5107 slot = valid_info->references->find_slot_with_hash (vr2, vr2->hashcode,
5108 INSERT);
5109 gcc_assert (!*slot);
5110 *slot = vr2;
5111 vr2->next = last_inserted_ref;
5112 last_inserted_ref = vr2;
5115 return changed;
5118 /* Visit a load from a reference operator RHS, part of STMT, value number it,
5119 and return true if the value number of the LHS has changed as a result. */
5121 static bool
5122 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
5124 bool changed = false;
5125 tree last_vuse;
5126 tree result;
5127 vn_reference_t res;
5129 last_vuse = gimple_vuse (stmt);
5130 result = vn_reference_lookup (op, gimple_vuse (stmt),
5131 default_vn_walk_kind, &res, true, &last_vuse);
5133 /* We handle type-punning through unions by value-numbering based
5134 on offset and size of the access. Be prepared to handle a
5135 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
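/* For example, for
     union { int i; float f; } u;
     u.i = i_1;
     f_2 = u.f;
   the lookup of u.f returns the int value i_1 and the type mismatch is
   fixed up by value numbering f_2 to VIEW_CONVERT_EXPR <float> (i_1). */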
5136 if (result
5137 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
5139 /* Avoid the type punning in case the result mode has padding where
5140 the op we lookup has not. */
5141 if (maybe_lt (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (result))),
5142 GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (op)))))
5143 result = NULL_TREE;
5144 else
5146 /* We will be setting the value number of lhs to the value number
5147 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
5148 So first simplify and lookup this expression to see if it
5149 is already available. */
5150 gimple_match_op res_op (gimple_match_cond::UNCOND,
5151 VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
5152 result = vn_nary_build_or_lookup (&res_op);
5153 if (result
5154 && TREE_CODE (result) == SSA_NAME
5155 && VN_INFO (result)->needs_insertion)
5156 /* Track whether this is the canonical expression for different
5157 typed loads. We use that as a stopgap measure for code
5158 hoisting when dealing with floating point loads. */
5159 res->punned = true;
5162 /* When building the conversion fails avoid inserting the reference
5163 again. */
5164 if (!result)
5165 return set_ssa_val_to (lhs, lhs);
5168 if (result)
5169 changed = set_ssa_val_to (lhs, result);
5170 else
5172 changed = set_ssa_val_to (lhs, lhs);
5173 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
5176 return changed;
5180 /* Visit a store to a reference operator LHS, part of STMT, value number it,
5181 and return true if the value number of the LHS has changed as a result. */
5183 static bool
5184 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
5186 bool changed = false;
5187 vn_reference_t vnresult = NULL;
5188 tree assign;
5189 bool resultsame = false;
5190 tree vuse = gimple_vuse (stmt);
5191 tree vdef = gimple_vdef (stmt);
5193 if (TREE_CODE (op) == SSA_NAME)
5194 op = SSA_VAL (op);
5196 /* First we want to lookup using the *vuses* from the store and see
5197 if the last store to this location with the same address
5198 had the same value.
5200 The vuses represent the memory state before the store. If the
5201 memory state, address, and value of the store is the same as the
5202 last store to this location, then this store will produce the
5203 same memory state as that store.
5205 In this case the vdef versions for this store are value numbered to those
5206 vuse versions, since they represent the same memory state after
5207 this store.
5209 Otherwise, the vdefs for the store are used when inserting into
5210 the table, since the store generates a new memory state. */
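/* For example, given
     # .MEM_3 = VDEF <.MEM_2>
     *p_1 = x_4;
     ...
     # .MEM_6 = VDEF <.MEM_5>
     *p_1 = x_4;
   when the memory state did not change in between (.MEM_5 has the value
   of .MEM_3) and the TBAA states are compatible, the second store is
   redundant and .MEM_6 is value numbered to .MEM_5. */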
5212 vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
5213 if (vnresult
5214 && vnresult->result)
5216 tree result = vnresult->result;
5217 gcc_checking_assert (TREE_CODE (result) != SSA_NAME
5218 || result == SSA_VAL (result));
5219 resultsame = expressions_equal_p (result, op);
5220 if (resultsame)
5222 /* If the TBAA state isn't compatible for downstream reads
5223 we cannot value-number the VDEFs the same. */
5224 ao_ref lhs_ref;
5225 ao_ref_init (&lhs_ref, lhs);
5226 alias_set_type set = ao_ref_alias_set (&lhs_ref);
5227 alias_set_type base_set = ao_ref_base_alias_set (&lhs_ref);
5228 if ((vnresult->set != set
5229 && ! alias_set_subset_of (set, vnresult->set))
5230 || (vnresult->base_set != base_set
5231 && ! alias_set_subset_of (base_set, vnresult->base_set)))
5232 resultsame = false;
5236 if (!resultsame)
5238 /* Only perform the following when being called from PRE
5239 which embeds tail merging. */
5240 if (default_vn_walk_kind == VN_WALK)
5242 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
5243 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
5244 if (vnresult)
5246 VN_INFO (vdef)->visited = true;
5247 return set_ssa_val_to (vdef, vnresult->result_vdef);
5251 if (dump_file && (dump_flags & TDF_DETAILS))
5253 fprintf (dump_file, "No store match\n");
5254 fprintf (dump_file, "Value numbering store ");
5255 print_generic_expr (dump_file, lhs);
5256 fprintf (dump_file, " to ");
5257 print_generic_expr (dump_file, op);
5258 fprintf (dump_file, "\n");
5260 /* Have to set value numbers before insert, since insert is
5261 going to valueize the references in-place. */
5262 if (vdef)
5263 changed |= set_ssa_val_to (vdef, vdef);
5265 /* Do not insert structure copies into the tables. */
5266 if (is_gimple_min_invariant (op)
5267 || is_gimple_reg (op))
5268 vn_reference_insert (lhs, op, vdef, NULL);
5270 /* Only perform the following when being called from PRE
5271 which embeds tail merging. */
5272 if (default_vn_walk_kind == VN_WALK)
5274 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
5275 vn_reference_insert (assign, lhs, vuse, vdef);
5278 else
5280 /* We had a match, so value number the vdef to have the value
5281 number of the vuse it came from. */
5283 if (dump_file && (dump_flags & TDF_DETAILS))
5284 fprintf (dump_file, "Store matched earlier value, "
5285 "value numbering store vdefs to matching vuses.\n");
5287 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
5290 return changed;
5293 /* Visit and value number PHI, return true if the value number
5294 changed. When BACKEDGES_VARYING_P is true then assume all
5295 backedge values are varying. When INSERTED is not NULL then
5296 this is just an ahead query for a possible iteration; set INSERTED
5297 to true if we'd insert into the hashtable. */
5299 static bool
5300 visit_phi (gimple *phi, bool *inserted, bool backedges_varying_p)
5302 tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
5303 tree backedge_val = NULL_TREE;
5304 bool seen_non_backedge = false;
5305 tree sameval_base = NULL_TREE;
5306 poly_int64 soff, doff;
5307 unsigned n_executable = 0;
5308 edge_iterator ei;
5309 edge e;
5311 /* TODO: We could check for this in initialization, and replace this
5312 with a gcc_assert. */
5313 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
5314 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
5316 /* We track whether a PHI was CSEd to avoid excessive iterations
5317 that would be necessary only because the PHI changed arguments
5318 but not value. */
5319 if (!inserted)
5320 gimple_set_plf (phi, GF_PLF_1, false);
5322 /* See if all non-TOP arguments have the same value. TOP is
5323 equivalent to everything, so we can ignore it. */
5324 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
5325 if (e->flags & EDGE_EXECUTABLE)
5327 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5329 if (def == PHI_RESULT (phi))
5330 continue;
5331 ++n_executable;
5332 if (TREE_CODE (def) == SSA_NAME)
5334 if (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK))
5335 def = SSA_VAL (def);
5336 if (e->flags & EDGE_DFS_BACK)
5337 backedge_val = def;
5339 if (!(e->flags & EDGE_DFS_BACK))
5340 seen_non_backedge = true;
5341 if (def == VN_TOP)
5343 /* Ignore undefined defs for sameval but record one. */
5344 else if (TREE_CODE (def) == SSA_NAME
5345 && ! virtual_operand_p (def)
5346 && ssa_undefined_value_p (def, false))
5347 seen_undef = def;
5348 else if (sameval == VN_TOP)
5349 sameval = def;
5350 else if (!expressions_equal_p (def, sameval))
5352 /* We know we're arriving only with invariant addresses here,
5353 try harder comparing them. We can do some caching here
5354 which we cannot do in expressions_equal_p. */
5355 if (TREE_CODE (def) == ADDR_EXPR
5356 && TREE_CODE (sameval) == ADDR_EXPR
5357 && sameval_base != (void *)-1)
5359 if (!sameval_base)
5360 sameval_base = get_addr_base_and_unit_offset
5361 (TREE_OPERAND (sameval, 0), &soff);
5362 if (!sameval_base)
5363 sameval_base = (tree)(void *)-1;
5364 else if ((get_addr_base_and_unit_offset
5365 (TREE_OPERAND (def, 0), &doff) == sameval_base)
5366 && known_eq (soff, doff))
5367 continue;
5369 sameval = NULL_TREE;
5370 break;
5374 /* If the value we want to use is flowing over the backedge and we
5375 should take it as VARYING but it has a non-VARYING value, drop to
5376 VARYING.
5377 If we value-number a virtual operand never value-number to the
5378 value from the backedge as that confuses the alias-walking code.
5379 See gcc.dg/torture/pr87176.c. If the value is the same on a
5380 non-backedge everything is OK though. */
5381 bool visited_p;
5382 if ((backedge_val
5383 && !seen_non_backedge
5384 && TREE_CODE (backedge_val) == SSA_NAME
5385 && sameval == backedge_val
5386 && (SSA_NAME_IS_VIRTUAL_OPERAND (backedge_val)
5387 || SSA_VAL (backedge_val) != backedge_val))
5388 /* Do not value-number a virtual operand to sth not visited though
5389 given that allows us to escape a region in alias walking. */
5390 || (sameval
5391 && TREE_CODE (sameval) == SSA_NAME
5392 && !SSA_NAME_IS_DEFAULT_DEF (sameval)
5393 && SSA_NAME_IS_VIRTUAL_OPERAND (sameval)
5394 && (SSA_VAL (sameval, &visited_p), !visited_p)))
5395 /* Note this just drops to VARYING without inserting the PHI into
5396 the hashes. */
5397 result = PHI_RESULT (phi);
5398 /* If none of the edges was executable keep the value-number at VN_TOP;
5399 if only a single edge is executable use its value. */
5400 else if (n_executable <= 1)
5401 result = seen_undef ? seen_undef : sameval;
5402 /* If we saw only undefined values and VN_TOP use one of the
5403 undefined values. */
5404 else if (sameval == VN_TOP)
5405 result = seen_undef ? seen_undef : sameval;
5406 /* First see if it is equivalent to a phi node in this block. We prefer
5407 this as it allows IV elimination - see PRs 66502 and 67167. */
5408 else if ((result = vn_phi_lookup (phi, backedges_varying_p)))
5410 if (!inserted
5411 && TREE_CODE (result) == SSA_NAME
5412 && gimple_code (SSA_NAME_DEF_STMT (result)) == GIMPLE_PHI)
5414 gimple_set_plf (SSA_NAME_DEF_STMT (result), GF_PLF_1, true);
5415 if (dump_file && (dump_flags & TDF_DETAILS))
5417 fprintf (dump_file, "Marking CSEd to PHI node ");
5418 print_gimple_expr (dump_file, SSA_NAME_DEF_STMT (result),
5419 0, TDF_SLIM);
5420 fprintf (dump_file, "\n");
5424 /* If all values are the same use that, unless we've seen undefined
5425 values as well and the value isn't constant.
5426 CCP/copyprop have the same restriction to not remove uninit warnings. */
5427 else if (sameval
5428 && (! seen_undef || is_gimple_min_invariant (sameval)))
5429 result = sameval;
5430 else
5432 result = PHI_RESULT (phi);
5433 /* Only insert PHIs that are varying, for constant value numbers
5434 we mess up equivalences otherwise as we are only comparing
5435 the immediate controlling predicates. */
5436 vn_phi_insert (phi, result, backedges_varying_p);
5437 if (inserted)
5438 *inserted = true;
5441 return set_ssa_val_to (PHI_RESULT (phi), result);
5444 /* Try to simplify RHS using equivalences and constant folding. */
5446 static tree
5447 try_to_simplify (gassign *stmt)
5449 enum tree_code code = gimple_assign_rhs_code (stmt);
5450 tree tem;
5452 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
5453 in this case, there is no point in doing extra work. */
5454 if (code == SSA_NAME)
5455 return NULL_TREE;
5457 /* First try constant folding based on our current lattice. */
5458 mprts_hook = vn_lookup_simplify_result;
5459 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
5460 mprts_hook = NULL;
5461 if (tem
5462 && (TREE_CODE (tem) == SSA_NAME
5463 || is_gimple_min_invariant (tem)))
5464 return tem;
5466 return NULL_TREE;
5469 /* Visit and value number STMT, return true if the value number
5470 changed. */
5472 static bool
5473 visit_stmt (gimple *stmt, bool backedges_varying_p = false)
5475 bool changed = false;
5477 if (dump_file && (dump_flags & TDF_DETAILS))
5479 fprintf (dump_file, "Value numbering stmt = ");
5480 print_gimple_stmt (dump_file, stmt, 0);
5483 if (gimple_code (stmt) == GIMPLE_PHI)
5484 changed = visit_phi (stmt, NULL, backedges_varying_p);
5485 else if (gimple_has_volatile_ops (stmt))
5486 changed = defs_to_varying (stmt);
5487 else if (gassign *ass = dyn_cast <gassign *> (stmt))
5489 enum tree_code code = gimple_assign_rhs_code (ass);
5490 tree lhs = gimple_assign_lhs (ass);
5491 tree rhs1 = gimple_assign_rhs1 (ass);
5492 tree simplified;
5494 /* Shortcut for copies. Simplifying copies is pointless,
5495 since we copy the expression and value they represent. */
5496 if (code == SSA_NAME
5497 && TREE_CODE (lhs) == SSA_NAME)
5499 changed = visit_copy (lhs, rhs1);
5500 goto done;
5502 simplified = try_to_simplify (ass);
5503 if (simplified)
5505 if (dump_file && (dump_flags & TDF_DETAILS))
5507 fprintf (dump_file, "RHS ");
5508 print_gimple_expr (dump_file, ass, 0);
5509 fprintf (dump_file, " simplified to ");
5510 print_generic_expr (dump_file, simplified);
5511 fprintf (dump_file, "\n");
5514 /* Setting value numbers to constants will occasionally
5515 screw up phi congruence because constants are not
5516 uniquely associated with a single ssa name that can be
5517 looked up. */
5518 if (simplified
5519 && is_gimple_min_invariant (simplified)
5520 && TREE_CODE (lhs) == SSA_NAME)
5522 changed = set_ssa_val_to (lhs, simplified);
5523 goto done;
5525 else if (simplified
5526 && TREE_CODE (simplified) == SSA_NAME
5527 && TREE_CODE (lhs) == SSA_NAME)
5529 changed = visit_copy (lhs, simplified);
5530 goto done;
5533 if ((TREE_CODE (lhs) == SSA_NAME
5534 /* We can substitute SSA_NAMEs that are live over
5535 abnormal edges with their constant value. */
5536 && !(gimple_assign_copy_p (ass)
5537 && is_gimple_min_invariant (rhs1))
5538 && !(simplified
5539 && is_gimple_min_invariant (simplified))
5540 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
5541 /* Stores or copies from SSA_NAMEs that are live over
5542 abnormal edges are a problem. */
5543 || (code == SSA_NAME
5544 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
5545 changed = defs_to_varying (ass);
5546 else if (REFERENCE_CLASS_P (lhs)
5547 || DECL_P (lhs))
5548 changed = visit_reference_op_store (lhs, rhs1, ass);
5549 else if (TREE_CODE (lhs) == SSA_NAME)
5551 if ((gimple_assign_copy_p (ass)
5552 && is_gimple_min_invariant (rhs1))
5553 || (simplified
5554 && is_gimple_min_invariant (simplified)))
5556 if (simplified)
5557 changed = set_ssa_val_to (lhs, simplified);
5558 else
5559 changed = set_ssa_val_to (lhs, rhs1);
5561 else
5563 /* Visit the original statement. */
5564 switch (vn_get_stmt_kind (ass))
5566 case VN_NARY:
5567 changed = visit_nary_op (lhs, ass);
5568 break;
5569 case VN_REFERENCE:
5570 changed = visit_reference_op_load (lhs, rhs1, ass);
5571 break;
5572 default:
5573 changed = defs_to_varying (ass);
5574 break;
5578 else
5579 changed = defs_to_varying (ass);
5581 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
5583 tree lhs = gimple_call_lhs (call_stmt);
5584 if (lhs && TREE_CODE (lhs) == SSA_NAME)
5586 /* Try constant folding based on our current lattice. */
5587 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
5588 vn_valueize);
5589 if (simplified)
5591 if (dump_file && (dump_flags & TDF_DETAILS))
5593 fprintf (dump_file, "call ");
5594 print_gimple_expr (dump_file, call_stmt, 0);
5595 fprintf (dump_file, " simplified to ");
5596 print_generic_expr (dump_file, simplified);
5597 fprintf (dump_file, "\n");
5600 /* Setting value numbers to constants will occasionally
5601 screw up phi congruence because constants are not
5602 uniquely associated with a single ssa name that can be
5603 looked up. */
5604 if (simplified
5605 && is_gimple_min_invariant (simplified))
5607 changed = set_ssa_val_to (lhs, simplified);
5608 if (gimple_vdef (call_stmt))
5609 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
5610 SSA_VAL (gimple_vuse (call_stmt)));
5611 goto done;
5613 else if (simplified
5614 && TREE_CODE (simplified) == SSA_NAME)
5616 changed = visit_copy (lhs, simplified);
5617 if (gimple_vdef (call_stmt))
5618 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
5619 SSA_VAL (gimple_vuse (call_stmt)));
5620 goto done;
5622 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
5624 changed = defs_to_varying (call_stmt);
5625 goto done;
5629 /* Pick up flags from a devirtualization target. */
5630 tree fn = gimple_call_fn (stmt);
5631 int extra_fnflags = 0;
5632 if (fn && TREE_CODE (fn) == SSA_NAME)
5634 fn = SSA_VAL (fn);
5635 if (TREE_CODE (fn) == ADDR_EXPR
5636 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
5637 extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
5639 if (!gimple_call_internal_p (call_stmt)
5640 && (/* Calls to the same function with the same vuse
5641 and the same operands do not necessarily return the same
5642 value, unless they're pure or const. */
5643 ((gimple_call_flags (call_stmt) | extra_fnflags)
5644 & (ECF_PURE | ECF_CONST))
5645 /* If calls have a vdef, subsequent calls won't have
5646 the same incoming vuse. So, if 2 calls with vdef have the
5647 same vuse, we know they're not subsequent.
5648 We can value number 2 calls to the same function with the
5649 same vuse and the same operands which are not subsequent
5650 the same, because there is no code in the program that can
5651 compare the 2 values... */
5652 || (gimple_vdef (call_stmt)
5653 /* ... unless the call returns a pointer which does
5654 not alias with anything else, in which case the
5655 information that the values are distinct is encoded
5656 in the IL. */
5657 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
5658 /* Only perform the following when being called from PRE
5659 which embeds tail merging. */
5660 && default_vn_walk_kind == VN_WALK)))
5661 changed = visit_reference_op_call (lhs, call_stmt);
5662 else
5663 changed = defs_to_varying (call_stmt);
5665 else
5666 changed = defs_to_varying (stmt);
5667 done:
5668 return changed;
5672 /* Allocate a value number table. */
5674 static void
5675 allocate_vn_table (vn_tables_t table, unsigned size)
5677 table->phis = new vn_phi_table_type (size);
5678 table->nary = new vn_nary_op_table_type (size);
5679 table->references = new vn_reference_table_type (size);
5682 /* Free a value number table. */
5684 static void
5685 free_vn_table (vn_tables_t table)
5687 /* Walk over elements and release vectors. */
5688 vn_reference_iterator_type hir;
5689 vn_reference_t vr;
5690 FOR_EACH_HASH_TABLE_ELEMENT (*table->references, vr, vn_reference_t, hir)
5691 vr->operands.release ();
5692 delete table->phis;
5693 table->phis = NULL;
5694 delete table->nary;
5695 table->nary = NULL;
5696 delete table->references;
5697 table->references = NULL;
5700 /* Set *ID according to RESULT. */
5702 static void
5703 set_value_id_for_result (tree result, unsigned int *id)
5705 if (result && TREE_CODE (result) == SSA_NAME)
5706 *id = VN_INFO (result)->value_id;
5707 else if (result && is_gimple_min_invariant (result))
5708 *id = get_or_alloc_constant_value_id (result);
5709 else
5710 *id = get_next_value_id ();
5713 /* Set the value ids in the valid hash tables. */
5715 static void
5716 set_hashtable_value_ids (void)
5718 vn_nary_op_iterator_type hin;
5719 vn_phi_iterator_type hip;
5720 vn_reference_iterator_type hir;
5721 vn_nary_op_t vno;
5722 vn_reference_t vr;
5723 vn_phi_t vp;
5725 /* Now set the value ids of the things we had put in the hash
5726 table. */
5728 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
5729 if (! vno->predicated_values)
5730 set_value_id_for_result (vno->u.result, &vno->value_id);
5732 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
5733 set_value_id_for_result (vp->result, &vp->value_id);
5735 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
5736 hir)
5737 set_value_id_for_result (vr->result, &vr->value_id);
5740 /* Return the maximum value id we have ever seen. */
5742 unsigned int
5743 get_max_value_id (void)
5745 return next_value_id;
5748 /* Return the maximum constant value id we have ever seen. */
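/* Constant value ids are negative and handed out in decreasing order
(see get_next_constant_value_id), hence the negation here. */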
5750 unsigned int
5751 get_max_constant_value_id (void)
5753 return -next_constant_value_id;
5756 /* Return the next unique value id. */
5758 unsigned int
5759 get_next_value_id (void)
5761 gcc_checking_assert ((int)next_value_id > 0);
5762 return next_value_id++;
5765 /* Return the next unique value id for constants. */
5767 unsigned int
5768 get_next_constant_value_id (void)
5770 gcc_checking_assert (next_constant_value_id < 0);
5771 return next_constant_value_id--;
5775 /* Compare two expressions E1 and E2 and return true if they are equal. */
5777 bool
5778 expressions_equal_p (tree e1, tree e2)
5780 /* The obvious case. */
5781 if (e1 == e2)
5782 return true;
5784 /* If either one is VN_TOP consider them equal. */
5785 if (e1 == VN_TOP || e2 == VN_TOP)
5786 return true;
5788 /* SSA_NAMEs compare pointer equal. */
5789 if (TREE_CODE (e1) == SSA_NAME || TREE_CODE (e2) == SSA_NAME)
5790 return false;
5792 /* Now perform the actual comparison. */
5793 if (TREE_CODE (e1) == TREE_CODE (e2)
5794 && operand_equal_p (e1, e2, OEP_PURE_SAME))
5795 return true;
5797 return false;
5801 /* Return true if the nary operation NARY may trap. This is a copy
5802 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
5804 bool
5805 vn_nary_may_trap (vn_nary_op_t nary)
5807 tree type;
5808 tree rhs2 = NULL_TREE;
5809 bool honor_nans = false;
5810 bool honor_snans = false;
5811 bool fp_operation = false;
5812 bool honor_trapv = false;
5813 bool handled, ret;
5814 unsigned i;
5816 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
5817 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
5818 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
5820 type = nary->type;
5821 fp_operation = FLOAT_TYPE_P (type);
5822 if (fp_operation)
5824 honor_nans = flag_trapping_math && !flag_finite_math_only;
5825 honor_snans = flag_signaling_nans != 0;
5827 else if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_TRAPS (type))
5828 honor_trapv = true;
5830 if (nary->length >= 2)
5831 rhs2 = nary->op[1];
5832 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
5833 honor_trapv, honor_nans, honor_snans,
5834 rhs2, &handled);
5835 if (handled && ret)
5836 return true;
5838 for (i = 0; i < nary->length; ++i)
5839 if (tree_could_trap_p (nary->op[i]))
5840 return true;
5842 return false;
5845 /* Return true if the reference operation REF may trap. */
5847 bool
5848 vn_reference_may_trap (vn_reference_t ref)
5850 switch (ref->operands[0].opcode)
5852 case MODIFY_EXPR:
5853 case CALL_EXPR:
5854 /* We do not handle calls. */
5855 case ADDR_EXPR:
5856 /* And toplevel address computations never trap. */
5857 return false;
5858 default:;
5861 vn_reference_op_t op;
5862 unsigned i;
5863 FOR_EACH_VEC_ELT (ref->operands, i, op)
5865 switch (op->opcode)
5867 case WITH_SIZE_EXPR:
5868 case TARGET_MEM_REF:
5869 /* Always variable. */
5870 return true;
5871 case COMPONENT_REF:
5872 if (op->op1 && TREE_CODE (op->op1) == SSA_NAME)
5873 return true;
5874 break;
5875 case ARRAY_RANGE_REF:
5876 case ARRAY_REF:
5877 if (TREE_CODE (op->op0) == SSA_NAME)
5878 return true;
5879 break;
5880 case MEM_REF:
5881 /* Nothing interesting in itself, the base is separate. */
5882 break;
5883 /* The following are the address bases. */
5884 case SSA_NAME:
5885 return true;
5886 case ADDR_EXPR:
5887 if (op->op0)
5888 return tree_could_trap_p (TREE_OPERAND (op->op0, 0));
5889 return false;
5890 default:;
5893 return false;
5896 eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
5897 bitmap inserted_exprs_)
5898 : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
5899 el_todo (0), eliminations (0), insertions (0),
5900 inserted_exprs (inserted_exprs_)
5902 need_eh_cleanup = BITMAP_ALLOC (NULL);
5903 need_ab_cleanup = BITMAP_ALLOC (NULL);
5906 eliminate_dom_walker::~eliminate_dom_walker ()
5908 BITMAP_FREE (need_eh_cleanup);
5909 BITMAP_FREE (need_ab_cleanup);
5912 /* Return a leader for OP that is available at the current point of the
5913 eliminate domwalk. */
5915 tree
5916 eliminate_dom_walker::eliminate_avail (basic_block, tree op)
5918 tree valnum = VN_INFO (op)->valnum;
5919 if (TREE_CODE (valnum) == SSA_NAME)
5921 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
5922 return valnum;
5923 if (avail.length () > SSA_NAME_VERSION (valnum))
5924 return avail[SSA_NAME_VERSION (valnum)];
5926 else if (is_gimple_min_invariant (valnum))
5927 return valnum;
5928 return NULL_TREE;
5931 /* At the current point of the eliminate domwalk make OP available. */
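/* Besides recording OP in the avail array this pushes the previous
leader for OPs value (or OP itself if there was none) onto
avail_stack so after_dom_children can undo the change when leaving
the dominator sub-tree. */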
5933 void
5934 eliminate_dom_walker::eliminate_push_avail (basic_block, tree op)
5936 tree valnum = VN_INFO (op)->valnum;
5937 if (TREE_CODE (valnum) == SSA_NAME)
5939 if (avail.length () <= SSA_NAME_VERSION (valnum))
5940 avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1, true);
5941 tree pushop = op;
5942 if (avail[SSA_NAME_VERSION (valnum)])
5943 pushop = avail[SSA_NAME_VERSION (valnum)];
5944 avail_stack.safe_push (pushop);
5945 avail[SSA_NAME_VERSION (valnum)] = op;
5949 /* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
5950 the leader for the expression if insertion was successful. */
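/* Only a single-stmt expression of a simple form is handled:
a conversion, VIEW_CONVERT_EXPR, NEGATE_EXPR, BIT_FIELD_REF or
a BIT_AND_EXPR with a constant second operand. */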
5952 tree
5953 eliminate_dom_walker::eliminate_insert (basic_block bb,
5954 gimple_stmt_iterator *gsi, tree val)
5956 /* We can insert a sequence with a single assignment only. */
5957 gimple_seq stmts = VN_INFO (val)->expr;
5958 if (!gimple_seq_singleton_p (stmts))
5959 return NULL_TREE;
5960 gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
5961 if (!stmt
5962 || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
5963 && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
5964 && gimple_assign_rhs_code (stmt) != NEGATE_EXPR
5965 && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
5966 && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
5967 || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
5968 return NULL_TREE;
5970 tree op = gimple_assign_rhs1 (stmt);
5971 if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
5972 || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5973 op = TREE_OPERAND (op, 0);
5974 tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (bb, op) : op;
5975 if (!leader)
5976 return NULL_TREE;
5978 tree res;
5979 stmts = NULL;
5980 if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
5981 res = gimple_build (&stmts, BIT_FIELD_REF,
5982 TREE_TYPE (val), leader,
5983 TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
5984 TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
5985 else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
5986 res = gimple_build (&stmts, BIT_AND_EXPR,
5987 TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
5988 else
5989 res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
5990 TREE_TYPE (val), leader);
5991 if (TREE_CODE (res) != SSA_NAME
5992 || SSA_NAME_IS_DEFAULT_DEF (res)
5993 || gimple_bb (SSA_NAME_DEF_STMT (res)))
5995 gimple_seq_discard (stmts);
5997 /* During propagation we have to treat SSA info conservatively
5998 and thus we can end up simplifying the inserted expression
5999 at elimination time to sth not defined in stmts. */
6000 /* But then this is a redundancy we failed to detect, which means
6001 res now has two values. That doesn't play well with how
6002 we track availability here, so give up. */
6003 if (dump_file && (dump_flags & TDF_DETAILS))
6005 if (TREE_CODE (res) == SSA_NAME)
6006 res = eliminate_avail (bb, res);
6007 if (res)
6009 fprintf (dump_file, "Failed to insert expression for value ");
6010 print_generic_expr (dump_file, val);
6011 fprintf (dump_file, " which is really fully redundant to ");
6012 print_generic_expr (dump_file, res);
6013 fprintf (dump_file, "\n");
6017 return NULL_TREE;
6019 else
6021 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
6022 vn_ssa_aux_t vn_info = VN_INFO (res);
6023 vn_info->valnum = val;
6024 vn_info->visited = true;
6027 insertions++;
6028 if (dump_file && (dump_flags & TDF_DETAILS))
6030 fprintf (dump_file, "Inserted ");
6031 print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
6034 return res;
6037 void
6038 eliminate_dom_walker::eliminate_stmt (basic_block b, gimple_stmt_iterator *gsi)
6040 tree sprime = NULL_TREE;
6041 gimple *stmt = gsi_stmt (*gsi);
6042 tree lhs = gimple_get_lhs (stmt);
6043 if (lhs && TREE_CODE (lhs) == SSA_NAME
6044 && !gimple_has_volatile_ops (stmt)
6045 /* See PR43491. Do not replace a global register variable when
6046 it is the RHS of an assignment. Do replace local register
6047 variables since gcc does not guarantee a local variable will
6048 be allocated in a register.
6049 ??? The fix isn't effective here. This should instead
6050 be ensured by not value-numbering them the same but treating
6051 them like volatiles? */
6052 && !(gimple_assign_single_p (stmt)
6053 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
6054 && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
6055 && is_global_var (gimple_assign_rhs1 (stmt)))))
6057 sprime = eliminate_avail (b, lhs);
6058 if (!sprime)
6060 /* If there is no existing usable leader but SCCVN thinks
6061 it has an expression it wants to use as replacement,
6062 insert that. */
6063 tree val = VN_INFO (lhs)->valnum;
6064 vn_ssa_aux_t vn_info;
6065 if (val != VN_TOP
6066 && TREE_CODE (val) == SSA_NAME
6067 && (vn_info = VN_INFO (val), true)
6068 && vn_info->needs_insertion
6069 && vn_info->expr != NULL
6070 && (sprime = eliminate_insert (b, gsi, val)) != NULL_TREE)
6071 eliminate_push_avail (b, sprime);
6074 /* If this now constitutes a copy, duplicate points-to
6075 and range info appropriately. This is especially
6076 important for inserted code. See tree-ssa-copy.c
6077 for similar code. */
6078 if (sprime
6079 && TREE_CODE (sprime) == SSA_NAME)
6081 basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
6082 if (POINTER_TYPE_P (TREE_TYPE (lhs))
6083 && SSA_NAME_PTR_INFO (lhs)
6084 && ! SSA_NAME_PTR_INFO (sprime))
6086 duplicate_ssa_name_ptr_info (sprime,
6087 SSA_NAME_PTR_INFO (lhs));
6088 if (b != sprime_b)
6089 reset_flow_sensitive_info (sprime);
6091 else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
6092 && SSA_NAME_RANGE_INFO (lhs)
6093 && ! SSA_NAME_RANGE_INFO (sprime)
6094 && b == sprime_b)
6095 duplicate_ssa_name_range_info (sprime,
6096 SSA_NAME_RANGE_TYPE (lhs),
6097 SSA_NAME_RANGE_INFO (lhs));
6100 /* Inhibit the use of an inserted PHI on a loop header when
6101 the address of the memory reference is a simple induction
6102 variable. In other cases the vectorizer won't do anything
6103 anyway (either it's loop invariant or a complicated
6104 expression). */
6105 if (sprime
6106 && TREE_CODE (sprime) == SSA_NAME
6107 && do_pre
6108 && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
6109 && loop_outer (b->loop_father)
6110 && has_zero_uses (sprime)
6111 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
6112 && gimple_assign_load_p (stmt))
6114 gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
6115 basic_block def_bb = gimple_bb (def_stmt);
6116 if (gimple_code (def_stmt) == GIMPLE_PHI
6117 && def_bb->loop_father->header == def_bb)
6119 loop_p loop = def_bb->loop_father;
6120 ssa_op_iter iter;
6121 tree op;
6122 bool found = false;
6123 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
6125 affine_iv iv;
6126 def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
6127 if (def_bb
6128 && flow_bb_inside_loop_p (loop, def_bb)
6129 && simple_iv (loop, loop, op, &iv, true))
6131 found = true;
6132 break;
6135 if (found)
6137 if (dump_file && (dump_flags & TDF_DETAILS))
6139 fprintf (dump_file, "Not replacing ");
6140 print_gimple_expr (dump_file, stmt, 0);
6141 fprintf (dump_file, " with ");
6142 print_generic_expr (dump_file, sprime);
6143 fprintf (dump_file, " which would add a loop"
6144 " carried dependence to loop %d\n",
6145 loop->num);
6147 /* Don't keep sprime available. */
6148 sprime = NULL_TREE;
6153 if (sprime)
6155 /* If we can propagate the value computed for LHS into
6156 all uses don't bother doing anything with this stmt. */
6157 if (may_propagate_copy (lhs, sprime))
6159 /* Mark it for removal. */
6160 to_remove.safe_push (stmt);
6162 /* ??? Don't count copy/constant propagations. */
6163 if (gimple_assign_single_p (stmt)
6164 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
6165 || gimple_assign_rhs1 (stmt) == sprime))
6166 return;
6168 if (dump_file && (dump_flags & TDF_DETAILS))
6170 fprintf (dump_file, "Replaced ");
6171 print_gimple_expr (dump_file, stmt, 0);
6172 fprintf (dump_file, " with ");
6173 print_generic_expr (dump_file, sprime);
6174 fprintf (dump_file, " in all uses of ");
6175 print_gimple_stmt (dump_file, stmt, 0);
6178 eliminations++;
6179 return;
6182 /* If this is an assignment from our leader (which
6183 happens in the case the value-number is a constant)
6184 then there is nothing to do. Likewise if we run into
6185 inserted code that needed a conversion because of
6186 our type-agnostic value-numbering of loads. */
6187 if ((gimple_assign_single_p (stmt)
6188 || (is_gimple_assign (stmt)
6189 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
6190 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)))
6191 && sprime == gimple_assign_rhs1 (stmt))
6192 return;
6194 /* Else replace its RHS. */
6195 if (dump_file && (dump_flags & TDF_DETAILS))
6197 fprintf (dump_file, "Replaced ");
6198 print_gimple_expr (dump_file, stmt, 0);
6199 fprintf (dump_file, " with ");
6200 print_generic_expr (dump_file, sprime);
6201 fprintf (dump_file, " in ");
6202 print_gimple_stmt (dump_file, stmt, 0);
6204 eliminations++;
6206 bool can_make_abnormal_goto = (is_gimple_call (stmt)
6207 && stmt_can_make_abnormal_goto (stmt));
6208 gimple *orig_stmt = stmt;
6209 if (!useless_type_conversion_p (TREE_TYPE (lhs),
6210 TREE_TYPE (sprime)))
6212 /* We preserve conversions to but not from function or method
6213 types. This asymmetry makes it necessary to re-instantiate
6214 conversions here. */
6215 if (POINTER_TYPE_P (TREE_TYPE (lhs))
6216 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (lhs))))
6217 sprime = fold_convert (TREE_TYPE (lhs), sprime);
6218 else
6219 gcc_unreachable ();
6221 tree vdef = gimple_vdef (stmt);
6222 tree vuse = gimple_vuse (stmt);
6223 propagate_tree_value_into_stmt (gsi, sprime);
6224 stmt = gsi_stmt (*gsi);
6225 update_stmt (stmt);
6226 /* In case the VDEF on the original stmt was released, value-number
6227 it to the VUSE. This is to make vuse_ssa_val able to skip
6228 released virtual operands. */
6229 if (vdef != gimple_vdef (stmt))
6231 gcc_assert (SSA_NAME_IN_FREE_LIST (vdef));
6232 VN_INFO (vdef)->valnum = vuse;
6235 /* If we removed EH side-effects from the statement, clean
6236 its EH information. */
6237 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
6239 bitmap_set_bit (need_eh_cleanup,
6240 gimple_bb (stmt)->index);
6241 if (dump_file && (dump_flags & TDF_DETAILS))
6242 fprintf (dump_file, " Removed EH side-effects.\n");
6245 /* Likewise for AB side-effects. */
6246 if (can_make_abnormal_goto
6247 && !stmt_can_make_abnormal_goto (stmt))
6249 bitmap_set_bit (need_ab_cleanup,
6250 gimple_bb (stmt)->index);
6251 if (dump_file && (dump_flags & TDF_DETAILS))
6252 fprintf (dump_file, " Removed AB side-effects.\n");
6255 return;
6259 /* If the statement is a scalar store, see if the expression
6260 has the same value number as its rhs. If so, the store is
6261 dead. */
6262 if (gimple_assign_single_p (stmt)
6263 && !gimple_has_volatile_ops (stmt)
6264 && !is_gimple_reg (gimple_assign_lhs (stmt))
6265 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
6266 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
6268 tree rhs = gimple_assign_rhs1 (stmt);
6269 vn_reference_t vnresult;
6270 /* ??? gcc.dg/torture/pr91445.c shows that we look up a boolean
6271 typed load of a byte known to be 0x11 as 1, so a store of
6272 a boolean 1 is detected as redundant. Because of this we
6273 have to make sure to look up with a ref whose size
6274 matches the precision. */
6275 tree lookup_lhs = lhs;
6276 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
6277 && (TREE_CODE (lhs) != COMPONENT_REF
6278 || !DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
6279 && !type_has_mode_precision_p (TREE_TYPE (lhs)))
6281 if (TREE_CODE (lhs) == COMPONENT_REF
6282 || TREE_CODE (lhs) == MEM_REF)
6284 tree ltype = build_nonstandard_integer_type
6285 (TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (lhs))),
6286 TYPE_UNSIGNED (TREE_TYPE (lhs)));
6287 if (TREE_CODE (lhs) == COMPONENT_REF)
6289 tree foff = component_ref_field_offset (lhs);
6290 tree f = TREE_OPERAND (lhs, 1);
6291 if (!poly_int_tree_p (foff))
6292 lookup_lhs = NULL_TREE;
6293 else
6294 lookup_lhs = build3 (BIT_FIELD_REF, ltype,
6295 TREE_OPERAND (lhs, 0),
6296 TYPE_SIZE (TREE_TYPE (lhs)),
6297 bit_from_pos
6298 (foff, DECL_FIELD_BIT_OFFSET (f)));
6300 else
6301 lookup_lhs = build2 (MEM_REF, ltype,
6302 TREE_OPERAND (lhs, 0),
6303 TREE_OPERAND (lhs, 1));
6305 else
6306 lookup_lhs = NULL_TREE;
6308 tree val = NULL_TREE;
6309 if (lookup_lhs)
6310 val = vn_reference_lookup (lookup_lhs, gimple_vuse (stmt),
6311 VN_WALKREWRITE, &vnresult, false);
6312 if (TREE_CODE (rhs) == SSA_NAME)
6313 rhs = VN_INFO (rhs)->valnum;
6314 if (val
6315 && (operand_equal_p (val, rhs, 0)
6316 /* Due to the bitfield lookups above we can get bit
6317 interpretations of the same RHS as values here. Those
6318 are redundant as well. */
6319 || (TREE_CODE (val) == SSA_NAME
6320 && gimple_assign_single_p (SSA_NAME_DEF_STMT (val))
6321 && (val = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (val)))
6322 && TREE_CODE (val) == VIEW_CONVERT_EXPR
6323 && TREE_OPERAND (val, 0) == rhs)))
6325 /* We can only remove the later store if the former aliases
6326 at least all accesses the later one does or if the store
6327 was to readonly memory storing the same value. */
6328 ao_ref lhs_ref;
6329 ao_ref_init (&lhs_ref, lhs);
6330 alias_set_type set = ao_ref_alias_set (&lhs_ref);
6331 alias_set_type base_set = ao_ref_base_alias_set (&lhs_ref);
6332 if (! vnresult
6333 || ((vnresult->set == set
6334 || alias_set_subset_of (set, vnresult->set))
6335 && (vnresult->base_set == base_set
6336 || alias_set_subset_of (base_set, vnresult->base_set))))
6338 if (dump_file && (dump_flags & TDF_DETAILS))
6340 fprintf (dump_file, "Deleted redundant store ");
6341 print_gimple_stmt (dump_file, stmt, 0);
6344 /* Queue stmt for removal. */
6345 to_remove.safe_push (stmt);
6346 return;
6351 /* If this is a control statement on which value numbering left
6352 edges unexecuted, force the condition in a way consistent with
6353 that. */
6354 if (gcond *cond = dyn_cast <gcond *> (stmt))
6356 if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
6357 ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
6359 if (dump_file && (dump_flags & TDF_DETAILS))
6361 fprintf (dump_file, "Removing unexecutable edge from ");
6362 print_gimple_stmt (dump_file, stmt, 0);
6364 if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
6365 == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
6366 gimple_cond_make_true (cond);
6367 else
6368 gimple_cond_make_false (cond);
6369 update_stmt (cond);
6370 el_todo |= TODO_cleanup_cfg;
6371 return;
6375 bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
6376 bool was_noreturn = (is_gimple_call (stmt)
6377 && gimple_call_noreturn_p (stmt));
6378 tree vdef = gimple_vdef (stmt);
6379 tree vuse = gimple_vuse (stmt);
6381 /* If we didn't replace the whole stmt (or propagate the result
6382 into all uses), replace all uses on this stmt with their
6383 leaders. */
6384 bool modified = false;
6385 use_operand_p use_p;
6386 ssa_op_iter iter;
6387 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
6389 tree use = USE_FROM_PTR (use_p);
6390 /* ??? The call code above leaves stmt operands un-updated. */
6391 if (TREE_CODE (use) != SSA_NAME)
6392 continue;
6393 tree sprime;
6394 if (SSA_NAME_IS_DEFAULT_DEF (use))
6395 /* ??? For default defs BB shouldn't matter, but we have to
6396 solve the inconsistency between rpo eliminate and
6397 dom eliminate avail valueization first. */
6398 sprime = eliminate_avail (b, use);
6399 else
6400 /* Look for sth available at the definition block of the argument.
6401 This avoids inconsistencies between availability there which
6402 decides if the stmt can be removed and availability at the
6403 use site. The SSA property ensures that things available
6404 at the definition are also available at uses. */
6405 sprime = eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (use)), use);
6406 if (sprime && sprime != use
6407 && may_propagate_copy (use, sprime)
6408 /* We substitute into debug stmts to avoid excessive
6409 debug temporaries created by removed stmts, but we need
6410 to avoid doing so for inserted sprimes as we never want
6411 to create debug temporaries for them. */
6412 && (!inserted_exprs
6413 || TREE_CODE (sprime) != SSA_NAME
6414 || !is_gimple_debug (stmt)
6415 || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
6417 propagate_value (use_p, sprime);
6418 modified = true;
6422 /* Fold the stmt if modified; this canonicalizes MEM_REFs we propagated
6423 into, which is a requirement for the IPA devirt machinery. */
6424 gimple *old_stmt = stmt;
6425 if (modified)
6427 /* If a formerly non-invariant ADDR_EXPR is turned into an
6428 invariant one it was on a separate stmt. */
6429 if (gimple_assign_single_p (stmt)
6430 && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
6431 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
6432 gimple_stmt_iterator prev = *gsi;
6433 gsi_prev (&prev);
6434 if (fold_stmt (gsi, follow_all_ssa_edges))
6436 /* fold_stmt may have created new stmts in between
6437 the previous stmt and the folded stmt. Mark
6438 all defs created there as varying to not confuse
6439 the SCCVN machinery as we're using that even during
6440 elimination. */
6441 if (gsi_end_p (prev))
6442 prev = gsi_start_bb (b);
6443 else
6444 gsi_next (&prev);
6445 if (gsi_stmt (prev) != gsi_stmt (*gsi))
6448 tree def;
6449 ssa_op_iter dit;
6450 FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
6451 dit, SSA_OP_ALL_DEFS)
6452 /* As existing DEFs may move between stmts
6453 only process new ones. */
6454 if (! has_VN_INFO (def))
6456 vn_ssa_aux_t vn_info = VN_INFO (def);
6457 vn_info->valnum = def;
6458 vn_info->visited = true;
6460 if (gsi_stmt (prev) == gsi_stmt (*gsi))
6461 break;
6462 gsi_next (&prev);
6464 while (1);
6466 stmt = gsi_stmt (*gsi);
6467 /* In case we folded the stmt away schedule the NOP for removal. */
6468 if (gimple_nop_p (stmt))
6469 to_remove.safe_push (stmt);
6472 /* Visit indirect calls and turn them into direct calls if
6473 possible using the devirtualization machinery. Do this before
6474 checking for required EH/abnormal/noreturn cleanup as devirt
6475 may expose more of those. */
6476 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
6478 tree fn = gimple_call_fn (call_stmt);
6479 if (fn
6480 && flag_devirtualize
6481 && virtual_method_call_p (fn))
6483 tree otr_type = obj_type_ref_class (fn);
6484 unsigned HOST_WIDE_INT otr_tok
6485 = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
6486 tree instance;
6487 ipa_polymorphic_call_context context (current_function_decl,
6488 fn, stmt, &instance);
6489 context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
6490 otr_type, stmt, NULL);
6491 bool final;
6492 vec <cgraph_node *> targets
6493 = possible_polymorphic_call_targets (obj_type_ref_class (fn),
6494 otr_tok, context, &final);
6495 if (dump_file)
6496 dump_possible_polymorphic_call_targets (dump_file,
6497 obj_type_ref_class (fn),
6498 otr_tok, context);
6499 if (final && targets.length () <= 1 && dbg_cnt (devirt))
6501 tree fn;
6502 if (targets.length () == 1)
6503 fn = targets[0]->decl;
6504 else
6505 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
6506 if (dump_enabled_p ())
6508 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
6509 "converting indirect call to "
6510 "function %s\n",
6511 lang_hooks.decl_printable_name (fn, 2));
6513 gimple_call_set_fndecl (call_stmt, fn);
6514 /* If changing the call to __builtin_unreachable
6515 or similar noreturn function, adjust gimple_call_fntype
6516 too. */
6517 if (gimple_call_noreturn_p (call_stmt)
6518 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
6519 && TYPE_ARG_TYPES (TREE_TYPE (fn))
6520 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
6521 == void_type_node))
6522 gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
6523 maybe_remove_unused_call_args (cfun, call_stmt);
6524 modified = true;
6529 if (modified)
6531 /* When changing a call into a noreturn call, cfg cleanup
6532 is needed to fix up the noreturn call. */
6533 if (!was_noreturn
6534 && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
6535 to_fixup.safe_push (stmt);
6536 /* When changing a condition or switch into one we know what
6537 edge will be executed, schedule a cfg cleanup. */
6538 if ((gimple_code (stmt) == GIMPLE_COND
6539 && (gimple_cond_true_p (as_a <gcond *> (stmt))
6540 || gimple_cond_false_p (as_a <gcond *> (stmt))))
6541 || (gimple_code (stmt) == GIMPLE_SWITCH
6542 && TREE_CODE (gimple_switch_index
6543 (as_a <gswitch *> (stmt))) == INTEGER_CST))
6544 el_todo |= TODO_cleanup_cfg;
6545 /* If we removed EH side-effects from the statement, clean
6546 its EH information. */
6547 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
6549 bitmap_set_bit (need_eh_cleanup,
6550 gimple_bb (stmt)->index);
6551 if (dump_file && (dump_flags & TDF_DETAILS))
6552 fprintf (dump_file, " Removed EH side-effects.\n");
6554 /* Likewise for AB side-effects. */
6555 if (can_make_abnormal_goto
6556 && !stmt_can_make_abnormal_goto (stmt))
6558 bitmap_set_bit (need_ab_cleanup,
6559 gimple_bb (stmt)->index);
6560 if (dump_file && (dump_flags & TDF_DETAILS))
6561 fprintf (dump_file, " Removed AB side-effects.\n");
6563 update_stmt (stmt);
6564 /* In case the VDEF on the original stmt was released, value-number
6565 it to the VUSE. This is to make vuse_ssa_val able to skip
6566 released virtual operands. */
6567 if (vdef && SSA_NAME_IN_FREE_LIST (vdef))
6568 VN_INFO (vdef)->valnum = vuse;
6571 /* Make new values available - for fully redundant LHS we
6572 continue with the next stmt above and skip this. */
6573 def_operand_p defp;
6574 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
6575 eliminate_push_avail (b, DEF_FROM_PTR (defp));
6578 /* Perform elimination for the basic-block B during the domwalk. */
6580 edge
6581 eliminate_dom_walker::before_dom_children (basic_block b)
6583 /* Mark new bb. */
6584 avail_stack.safe_push (NULL_TREE);
6586 /* Skip unreachable blocks marked unreachable during the SCCVN domwalk. */
6587 if (!(b->flags & BB_EXECUTABLE))
6588 return NULL;
6590 vn_context_bb = b;
6592 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
6594 gphi *phi = gsi.phi ();
6595 tree res = PHI_RESULT (phi);
6597 if (virtual_operand_p (res))
6599 gsi_next (&gsi);
6600 continue;
6603 tree sprime = eliminate_avail (b, res);
6604 if (sprime
6605 && sprime != res)
6607 if (dump_file && (dump_flags & TDF_DETAILS))
6609 fprintf (dump_file, "Replaced redundant PHI node defining ");
6610 print_generic_expr (dump_file, res);
6611 fprintf (dump_file, " with ");
6612 print_generic_expr (dump_file, sprime);
6613 fprintf (dump_file, "\n");
6616 /* If we inserted this PHI node ourself, it's not an elimination. */
6617 if (! inserted_exprs
6618 || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
6619 eliminations++;
6621 /* If we will propagate into all uses don't bother to do
6622 anything. */
6623 if (may_propagate_copy (res, sprime))
6625 /* Mark the PHI for removal. */
6626 to_remove.safe_push (phi);
6627 gsi_next (&gsi);
6628 continue;
6631 remove_phi_node (&gsi, false);
6633 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
6634 sprime = fold_convert (TREE_TYPE (res), sprime);
6635 gimple *stmt = gimple_build_assign (res, sprime);
6636 gimple_stmt_iterator gsi2 = gsi_after_labels (b);
6637 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
6638 continue;
6641 eliminate_push_avail (b, res);
6642 gsi_next (&gsi);
6645 for (gimple_stmt_iterator gsi = gsi_start_bb (b);
6646 !gsi_end_p (gsi);
6647 gsi_next (&gsi))
6648 eliminate_stmt (b, &gsi);
6650 /* Replace destination PHI arguments. */
6651 edge_iterator ei;
6652 edge e;
6653 FOR_EACH_EDGE (e, ei, b->succs)
6654 if (e->flags & EDGE_EXECUTABLE)
6655 for (gphi_iterator gsi = gsi_start_phis (e->dest);
6656 !gsi_end_p (gsi);
6657 gsi_next (&gsi))
6659 gphi *phi = gsi.phi ();
6660 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
6661 tree arg = USE_FROM_PTR (use_p);
6662 if (TREE_CODE (arg) != SSA_NAME
6663 || virtual_operand_p (arg))
6664 continue;
6665 tree sprime = eliminate_avail (b, arg);
6666 if (sprime && may_propagate_copy (arg, sprime))
6667 propagate_value (use_p, sprime);
6670 vn_context_bb = NULL;
6672 return NULL;
6675 /* Make no longer available leaders no longer available. */
6677 void
6678 eliminate_dom_walker::after_dom_children (basic_block)
6680 tree entry;
6681 while ((entry = avail_stack.pop ()) != NULL_TREE)
6683 tree valnum = VN_INFO (entry)->valnum;
6684 tree old = avail[SSA_NAME_VERSION (valnum)];
6685 if (old == entry)
6686 avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
6687 else
6688 avail[SSA_NAME_VERSION (valnum)] = entry;
6692 /* Remove queued stmts and perform delayed cleanups. */
6694 unsigned
6695 eliminate_dom_walker::eliminate_cleanup (bool region_p)
6697 statistics_counter_event (cfun, "Eliminated", eliminations);
6698 statistics_counter_event (cfun, "Insertions", insertions);
6700 /* We cannot remove stmts during BB walk, especially not release SSA
6701 names there as this confuses the VN machinery. The stmts ending
6702 up in to_remove are either stores or simple copies.
6703 Remove stmts in reverse order to make debug stmt creation possible. */
6704 while (!to_remove.is_empty ())
6706 bool do_release_defs = true;
6707 gimple *stmt = to_remove.pop ();
6709 /* When we are value-numbering a region we do not require exit PHIs to
6710 be present so we have to make sure to deal with uses outside of the
6711 region of stmts that we thought are eliminated.
6712 ??? Note we may be confused by uses in dead regions we didn't run
6713 elimination on. Rather than checking individual uses we accept
6714 dead copies to be generated here (gcc.c-torture/execute/20060905-1.c
6715 contains such an example). */
6716 if (region_p)
6718 if (gphi *phi = dyn_cast <gphi *> (stmt))
6720 tree lhs = gimple_phi_result (phi);
6721 if (!has_zero_uses (lhs))
6723 if (dump_file && (dump_flags & TDF_DETAILS))
6724 fprintf (dump_file, "Keeping eliminated stmt live "
6725 "as copy because of out-of-region uses\n");
6726 tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
6727 gimple *copy = gimple_build_assign (lhs, sprime);
6728 gimple_stmt_iterator gsi
6729 = gsi_after_labels (gimple_bb (stmt));
6730 gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
6731 do_release_defs = false;
6734 else if (tree lhs = gimple_get_lhs (stmt))
6735 if (TREE_CODE (lhs) == SSA_NAME
6736 && !has_zero_uses (lhs))
6738 if (dump_file && (dump_flags & TDF_DETAILS))
6739 fprintf (dump_file, "Keeping eliminated stmt live "
6740 "as copy because of out-of-region uses\n");
6741 tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
6742 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
6743 if (is_gimple_assign (stmt))
6745 gimple_assign_set_rhs_from_tree (&gsi, sprime);
6746 stmt = gsi_stmt (gsi);
6747 update_stmt (stmt);
6748 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
6749 bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
6750 continue;
6752 else
6754 gimple *copy = gimple_build_assign (lhs, sprime);
6755 gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
6756 do_release_defs = false;
6761 if (dump_file && (dump_flags & TDF_DETAILS))
6763 fprintf (dump_file, "Removing dead stmt ");
6764 print_gimple_stmt (dump_file, stmt, 0, TDF_NONE);
6767 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
6768 if (gimple_code (stmt) == GIMPLE_PHI)
6769 remove_phi_node (&gsi, do_release_defs);
6770 else
6772 basic_block bb = gimple_bb (stmt);
6773 unlink_stmt_vdef (stmt);
6774 if (gsi_remove (&gsi, true))
6775 bitmap_set_bit (need_eh_cleanup, bb->index);
6776 if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
6777 bitmap_set_bit (need_ab_cleanup, bb->index);
6778 if (do_release_defs)
6779 release_defs (stmt);
6782 /* Removing a stmt may expose a forwarder block. */
6783 el_todo |= TODO_cleanup_cfg;
6786 /* Fixup stmts that became noreturn calls. This may require splitting
6787 blocks and thus isn't possible during the dominator walk. Do this
6788 in reverse order so we don't inadvertently remove a stmt we want to
6789 fix up by visiting a dominating now-noreturn call first. */
6790 while (!to_fixup.is_empty ())
6792 gimple *stmt = to_fixup.pop ();
6794 if (dump_file && (dump_flags & TDF_DETAILS))
6796 fprintf (dump_file, "Fixing up noreturn call ");
6797 print_gimple_stmt (dump_file, stmt, 0);
6800 if (fixup_noreturn_call (stmt))
6801 el_todo |= TODO_cleanup_cfg;
6804 bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
6805 bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);
6807 if (do_eh_cleanup)
6808 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
6810 if (do_ab_cleanup)
6811 gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
6813 if (do_eh_cleanup || do_ab_cleanup)
6814 el_todo |= TODO_cleanup_cfg;
6816 return el_todo;
6819 /* Eliminate fully redundant computations. */
6821 unsigned
6822 eliminate_with_rpo_vn (bitmap inserted_exprs)
6824 eliminate_dom_walker walker (CDI_DOMINATORS, inserted_exprs);
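/* The shared valueization helpers dispatch availability queries
through the global rpo_avail, so point it at this walker for the
duration of the elimination walk and restore it afterwards. */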
6826 eliminate_dom_walker *saved_rpo_avail = rpo_avail;
6827 rpo_avail = &walker;
6828 walker.walk (cfun->cfg->x_entry_block_ptr);
6829 rpo_avail = saved_rpo_avail;
6831 return walker.eliminate_cleanup ();
6834 static unsigned
6835 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
6836 bool iterate, bool eliminate);
6838 void
6839 run_rpo_vn (vn_lookup_kind kind)
6841 default_vn_walk_kind = kind;
6842 do_rpo_vn (cfun, NULL, NULL, true, false);
6844 /* ??? Prune requirement of these. */
6845 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
6847 /* Initialize the value ids and prune out remaining VN_TOPs
6848 from dead code. */
6849 tree name;
6850 unsigned i;
6851 FOR_EACH_SSA_NAME (i, name, cfun)
6853 vn_ssa_aux_t info = VN_INFO (name);
6854 if (!info->visited
6855 || info->valnum == VN_TOP)
6856 info->valnum = name;
6857 if (info->valnum == name)
6858 info->value_id = get_next_value_id ();
6859 else if (is_gimple_min_invariant (info->valnum))
6860 info->value_id = get_or_alloc_constant_value_id (info->valnum);
6863 /* Propagate. */
6864 FOR_EACH_SSA_NAME (i, name, cfun)
6866 vn_ssa_aux_t info = VN_INFO (name);
6867 if (TREE_CODE (info->valnum) == SSA_NAME
6868 && info->valnum != name
6869 && info->value_id != VN_INFO (info->valnum)->value_id)
6870 info->value_id = VN_INFO (info->valnum)->value_id;
6873 set_hashtable_value_ids ();
6875 if (dump_file && (dump_flags & TDF_DETAILS))
6877 fprintf (dump_file, "Value numbers:\n");
6878 FOR_EACH_SSA_NAME (i, name, cfun)
6880 if (VN_INFO (name)->visited
6881 && SSA_VAL (name) != name)
6883 print_generic_expr (dump_file, name);
6884 fprintf (dump_file, " = ");
6885 print_generic_expr (dump_file, SSA_VAL (name));
6886 fprintf (dump_file, " (%04d)\n", VN_INFO (name)->value_id);
6892 /* Free VN associated data structures. */
6894 void
6895 free_rpo_vn (void)
6897 free_vn_table (valid_info);
6898 XDELETE (valid_info);
6899 obstack_free (&vn_tables_obstack, NULL);
6900 obstack_free (&vn_tables_insert_obstack, NULL);
6902 vn_ssa_aux_iterator_type it;
6903 vn_ssa_aux_t info;
6904 FOR_EACH_HASH_TABLE_ELEMENT (*vn_ssa_aux_hash, info, vn_ssa_aux_t, it)
6905 if (info->needs_insertion)
6906 release_ssa_name (info->name);
6907 obstack_free (&vn_ssa_aux_obstack, NULL);
6908 delete vn_ssa_aux_hash;
6910 delete constant_to_value_id;
6911 constant_to_value_id = NULL;
6914 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
6916 static tree
6917 vn_lookup_simplify_result (gimple_match_op *res_op)
6919 if (!res_op->code.is_tree_code ())
6920 return NULL_TREE;
6921 tree *ops = res_op->ops;
6922 unsigned int length = res_op->num_ops;
6923 if (res_op->code == CONSTRUCTOR
6924 /* ??? We're arriving here with SCCVNs view, decomposed CONSTRUCTOR
6925 and GIMPLEs / match-and-simplifies, CONSTRUCTOR as GENERIC tree. */
6926 && TREE_CODE (res_op->ops[0]) == CONSTRUCTOR)
6928 length = CONSTRUCTOR_NELTS (res_op->ops[0]);
6929 ops = XALLOCAVEC (tree, length);
6930 for (unsigned i = 0; i < length; ++i)
6931 ops[i] = CONSTRUCTOR_ELT (res_op->ops[0], i)->value;
6933 vn_nary_op_t vnresult = NULL;
6934 tree res = vn_nary_op_lookup_pieces (length, (tree_code) res_op->code,
6935 res_op->type, ops, &vnresult);
6936 /* If this is used from expression simplification make sure to
6937 return an available expression. */
6938 if (res && TREE_CODE (res) == SSA_NAME && mprts_hook && rpo_avail)
6939 res = rpo_avail->eliminate_avail (vn_context_bb, res);
6940 return res;
6943 /* Return a leader for OPs value that is valid at BB. */
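/* This walks the chain of availability records on OPs value and
returns the first leader recorded in a block that dominates BB,
using dominated_by_p_w_unex to also account for not-executable
regions. */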
6945 tree
6946 rpo_elim::eliminate_avail (basic_block bb, tree op)
6948 bool visited;
6949 tree valnum = SSA_VAL (op, &visited);
6950 /* If we didn't visit OP then it must be defined outside of the
6951 region we process and also dominate it. So it is available. */
6952 if (!visited)
6953 return op;
6954 if (TREE_CODE (valnum) == SSA_NAME)
6956 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
6957 return valnum;
6958 vn_avail *av = VN_INFO (valnum)->avail;
6959 if (!av)
6960 return NULL_TREE;
6961 if (av->location == bb->index)
6962 /* On tramp3d 90% of the cases are here. */
6963 return ssa_name (av->leader);
6966 basic_block abb = BASIC_BLOCK_FOR_FN (cfun, av->location);
6967 /* ??? During elimination we have to use availability at the
6968 definition site of a use we try to replace. This
6969 is required to not run into inconsistencies because
6970 of dominated_by_p_w_unex behavior and removing a definition
6971 while not replacing all uses.
6972 ??? We could try to consistently walk dominators
6973 ignoring non-executable regions. The nearest common
6974 dominator of bb and abb is where we can stop walking. We
6975 may also be able to "pre-compute" (bits of) the next immediate
6976 (non-)dominator during the RPO walk when marking edges as
6977 executable. */
6978 if (dominated_by_p_w_unex (bb, abb, true))
6980 tree leader = ssa_name (av->leader);
6981 /* Prevent eliminations that break loop-closed SSA. */
6982 if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
6983 && ! SSA_NAME_IS_DEFAULT_DEF (leader)
6984 && ! flow_bb_inside_loop_p (gimple_bb (SSA_NAME_DEF_STMT
6985 (leader))->loop_father,
6986 bb))
6987 return NULL_TREE;
6988 if (dump_file && (dump_flags & TDF_DETAILS))
6990 print_generic_expr (dump_file, leader);
6991 fprintf (dump_file, " is available for ");
6992 print_generic_expr (dump_file, valnum);
6993 fprintf (dump_file, "\n");
6995 /* On tramp3d 99% of the _remaining_ cases succeed at
6996 the first entry. */
6997 return leader;
6999 /* ??? Can we somehow skip to the immediate dominator
7000 RPO index (bb_to_rpo)? Again, maybe not worth it, on
7001 tramp3d the worst number of elements in the vector is 9. */
7002 av = av->next;
7004 while (av);
7006 else if (valnum != VN_TOP)
7007 /* valnum is is_gimple_min_invariant. */
7008 return valnum;
7009 return NULL_TREE;
7012 /* Make LEADER a leader for its value at BB. */
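/* The new record is chained onto the values avail list and also
linked into the global undo chain via next_undo/last_pushed_avail
so the insertion can be undone later. */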
7014 void
7015 rpo_elim::eliminate_push_avail (basic_block bb, tree leader)
7017 tree valnum = VN_INFO (leader)->valnum;
7018 if (valnum == VN_TOP
7019 || is_gimple_min_invariant (valnum))
7020 return;
7021 if (dump_file && (dump_flags & TDF_DETAILS))
7023 fprintf (dump_file, "Making available beyond BB%d ", bb->index);
7024 print_generic_expr (dump_file, leader);
7025 fprintf (dump_file, " for value ");
7026 print_generic_expr (dump_file, valnum);
7027 fprintf (dump_file, "\n");
7029 vn_ssa_aux_t value = VN_INFO (valnum);
7030 vn_avail *av;
7031 if (m_avail_freelist)
7033 av = m_avail_freelist;
7034 m_avail_freelist = m_avail_freelist->next;
7036 else
7037 av = XOBNEW (&vn_ssa_aux_obstack, vn_avail);
7038 av->location = bb->index;
7039 av->leader = SSA_NAME_VERSION (leader);
7040 av->next = value->avail;
7041 av->next_undo = last_pushed_avail;
7042 last_pushed_avail = value;
7043 value->avail = av;
7046 /* Valueization hook for RPO VN plus required state. */
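/* Returns NAME's value when that is a constant, an available leader
for the value at vn_context_bb when there is one, and NAME itself
otherwise. */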
7048 tree
7049 rpo_vn_valueize (tree name)
7051 if (TREE_CODE (name) == SSA_NAME)
7053 vn_ssa_aux_t val = VN_INFO (name);
7054 if (val)
7056 tree tem = val->valnum;
7057 if (tem != VN_TOP && tem != name)
7059 if (TREE_CODE (tem) != SSA_NAME)
7060 return tem;
7061 /* For all values we only valueize to an available leader
7062 which means we can use SSA name info without restriction. */
7063 tem = rpo_avail->eliminate_avail (vn_context_bb, tem);
7064 if (tem)
7065 return tem;
7069 return name;
7072 /* Insert on PRED_E predicates derived from CODE OPS being true besides the
7073 inverted condition. */
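/* For example, from a < b being true on PRED_E we also record
a != b and a <= b as true there and a > b and a == b as false. */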
7075 static void
7076 insert_related_predicates_on_edge (enum tree_code code, tree *ops, edge pred_e)
7078 switch (code)
7080 case LT_EXPR:
7081 /* a < b -> a {!,<}= b */
7082 vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
7083 ops, boolean_true_node, 0, pred_e);
7084 vn_nary_op_insert_pieces_predicated (2, LE_EXPR, boolean_type_node,
7085 ops, boolean_true_node, 0, pred_e);
7086 /* a < b -> ! a {>,=} b */
7087 vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
7088 ops, boolean_false_node, 0, pred_e);
7089 vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
7090 ops, boolean_false_node, 0, pred_e);
7091 break;
7092 case GT_EXPR:
7093 /* a > b -> a {!,>}= b */
7094 vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
7095 ops, boolean_true_node, 0, pred_e);
7096 vn_nary_op_insert_pieces_predicated (2, GE_EXPR, boolean_type_node,
7097 ops, boolean_true_node, 0, pred_e);
7098 /* a > b -> ! a {<,=} b */
7099 vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
7100 ops, boolean_false_node, 0, pred_e);
7101 vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
7102 ops, boolean_false_node, 0, pred_e);
7103 break;
7104 case EQ_EXPR:
7105 /* a == b -> ! a {<,>} b */
7106 vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
7107 ops, boolean_false_node, 0, pred_e);
7108 vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
7109 ops, boolean_false_node, 0, pred_e);
7110 break;
7111 case LE_EXPR:
7112 case GE_EXPR:
7113 case NE_EXPR:
7114 /* Nothing besides inverted condition. */
7115 break;
7116 default:;
7120 /* Main stmt worker for RPO VN, process BB. */
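/* BB_VISITED says whether defs in BB were already initialized by an
earlier visit, ITERATE_PHIS whether backedge PHI arguments participate
(otherwise they are forced to varying), ELIMINATE together with
!ITERATE enables on-the-fly elimination, DO_REGION/EXIT_BBS restrict
processing to a region with the given exits and SKIP_PHIS suppresses
PHI processing. */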
7122 static unsigned
7123 process_bb (rpo_elim &avail, basic_block bb,
7124 bool bb_visited, bool iterate_phis, bool iterate, bool eliminate,
7125 bool do_region, bitmap exit_bbs, bool skip_phis)
7127 unsigned todo = 0;
7128 edge_iterator ei;
7129 edge e;
7131 vn_context_bb = bb;
7133 /* If we are in loop-closed SSA preserve this state. This is
7134 relevant when called on regions from outside of FRE/PRE. */
7135 bool lc_phi_nodes = false;
7136 if (!skip_phis
7137 && loops_state_satisfies_p (LOOP_CLOSED_SSA))
7138 FOR_EACH_EDGE (e, ei, bb->preds)
7139 if (e->src->loop_father != e->dest->loop_father
7140 && flow_loop_nested_p (e->dest->loop_father,
7141 e->src->loop_father))
7143 lc_phi_nodes = true;
7144 break;
7147 /* When we visit a loop header substitute into loop info. */
7148 if (!iterate && eliminate && bb->loop_father->header == bb)
7150 /* Keep fields in sync with substitute_in_loop_info. */
7151 if (bb->loop_father->nb_iterations)
7152 bb->loop_father->nb_iterations
7153 = simplify_replace_tree (bb->loop_father->nb_iterations,
7154 NULL_TREE, NULL_TREE, &vn_valueize_for_srt);
7157 /* Value-number all defs in the basic-block. */
7158 if (!skip_phis)
7159 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
7160 gsi_next (&gsi))
7162 gphi *phi = gsi.phi ();
7163 tree res = PHI_RESULT (phi);
7164 vn_ssa_aux_t res_info = VN_INFO (res);
7165 if (!bb_visited)
7167 gcc_assert (!res_info->visited);
7168 res_info->valnum = VN_TOP;
7169 res_info->visited = true;
7172 /* When not iterating force backedge values to varying. */
7173 visit_stmt (phi, !iterate_phis);
7174 if (virtual_operand_p (res))
7175 continue;
7177 /* Eliminate */
7178 /* The interesting case for correctness of how we handle backedges
7179 and availability is gcc.dg/tree-ssa/pr22230.c, and
7180 gcc.dg/tree-ssa/ssa-sccvn-2.c for optimization. */
7181 tree val = res_info->valnum;
7182 if (res != val && !iterate && eliminate)
7184 if (tree leader = avail.eliminate_avail (bb, res))
7186 if (leader != res
7187 /* Preserve loop-closed SSA form. */
7188 && (! lc_phi_nodes
7189 || is_gimple_min_invariant (leader)))
7191 if (dump_file && (dump_flags & TDF_DETAILS))
7193 fprintf (dump_file, "Replaced redundant PHI node "
7194 "defining ");
7195 print_generic_expr (dump_file, res);
7196 fprintf (dump_file, " with ");
7197 print_generic_expr (dump_file, leader);
7198 fprintf (dump_file, "\n");
7200 avail.eliminations++;
7202 if (may_propagate_copy (res, leader))
7204 /* Schedule for removal. */
7205 avail.to_remove.safe_push (phi);
7206 continue;
7208 /* ??? Else generate a copy stmt. */
7212 /* Only make defs available that are not already. But make
7213 sure loop-closed SSA PHI node defs are picked up for
7214 downstream uses. */
7215 if (lc_phi_nodes
7216 || res == val
7217 || ! avail.eliminate_avail (bb, res))
7218 avail.eliminate_push_avail (bb, res);
7221 /* For empty BBs mark outgoing edges executable. For non-empty BBs
7222 we do this when processing the last stmt as we have to do this
7223 before elimination which otherwise forces GIMPLE_CONDs to
7224 if (1 != 0) style when seeing non-executable edges. */
7225 if (gsi_end_p (gsi_start_bb (bb)))
7227 FOR_EACH_EDGE (e, ei, bb->succs)
7229 if (!(e->flags & EDGE_EXECUTABLE))
7231 if (dump_file && (dump_flags & TDF_DETAILS))
7232 fprintf (dump_file,
7233 "marking outgoing edge %d -> %d executable\n",
7234 e->src->index, e->dest->index);
7235 e->flags |= EDGE_EXECUTABLE;
7236 e->dest->flags |= BB_EXECUTABLE;
7238 else if (!(e->dest->flags & BB_EXECUTABLE))
7240 if (dump_file && (dump_flags & TDF_DETAILS))
7241 fprintf (dump_file,
7242 "marking destination block %d reachable\n",
7243 e->dest->index);
7244 e->dest->flags |= BB_EXECUTABLE;
7248 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
7249 !gsi_end_p (gsi); gsi_next (&gsi))
7251 ssa_op_iter i;
7252 tree op;
7253 if (!bb_visited)
7255 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
7257 vn_ssa_aux_t op_info = VN_INFO (op);
7258 gcc_assert (!op_info->visited);
7259 op_info->valnum = VN_TOP;
7260 op_info->visited = true;
7263 /* We somehow have to deal with uses that are not defined
7264 in the processed region. Forcing unvisited uses to
7265 varying here doesn't play well with def-use following during
7266 expression simplification, so we deal with this by checking
7267 the visited flag in SSA_VAL. */
7270 visit_stmt (gsi_stmt (gsi));
7272 gimple *last = gsi_stmt (gsi);
7273 e = NULL;
7274 switch (gimple_code (last))
7276 case GIMPLE_SWITCH:
7277 e = find_taken_edge (bb, vn_valueize (gimple_switch_index
7278 (as_a <gswitch *> (last))));
7279 break;
7280 case GIMPLE_COND:
7282 tree lhs = vn_valueize (gimple_cond_lhs (last));
7283 tree rhs = vn_valueize (gimple_cond_rhs (last));
7284 tree val = gimple_simplify (gimple_cond_code (last),
7285 boolean_type_node, lhs, rhs,
7286 NULL, vn_valueize);
7287 /* If the condition didn't simplify, see if we have recorded
7288 an expression from edges taken so far. */
7289 if (! val || TREE_CODE (val) != INTEGER_CST)
7291 vn_nary_op_t vnresult;
7292 tree ops[2];
7293 ops[0] = lhs;
7294 ops[1] = rhs;
7295 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (last),
7296 boolean_type_node, ops,
7297 &vnresult);
7298 /* Did we get a predicated value? */
7299 if (! val && vnresult && vnresult->predicated_values)
7301 val = vn_nary_op_get_predicated_value (vnresult, bb);
7302 if (val && dump_file && (dump_flags & TDF_DETAILS))
7304 fprintf (dump_file, "Got predicated value ");
7305 print_generic_expr (dump_file, val, TDF_NONE);
7306 fprintf (dump_file, " for ");
7307 print_gimple_stmt (dump_file, last, TDF_SLIM);
7311 if (val)
7312 e = find_taken_edge (bb, val);
7313 if (! e)
7315 /* If we didn't manage to compute the taken edge then
7316 push predicated expressions for the condition itself
7317 and related conditions to the hashtables. This allows
7318 simplification of redundant conditions which is
7319 important as early cleanup. */
7320 edge true_e, false_e;
7321 extract_true_false_edges_from_block (bb, &true_e, &false_e);
7322 enum tree_code code = gimple_cond_code (last);
7323 enum tree_code icode
7324 = invert_tree_comparison (code, HONOR_NANS (lhs));
7325 tree ops[2];
7326 ops[0] = lhs;
7327 ops[1] = rhs;
7328 if (do_region
7329 && bitmap_bit_p (exit_bbs, true_e->dest->index))
7330 true_e = NULL;
7331 if (do_region
7332 && bitmap_bit_p (exit_bbs, false_e->dest->index))
7333 false_e = NULL;
7334 if (true_e)
7335 vn_nary_op_insert_pieces_predicated
7336 (2, code, boolean_type_node, ops,
7337 boolean_true_node, 0, true_e);
7338 if (false_e)
7339 vn_nary_op_insert_pieces_predicated
7340 (2, code, boolean_type_node, ops,
7341 boolean_false_node, 0, false_e);
7342 if (icode != ERROR_MARK)
7344 if (true_e)
7345 vn_nary_op_insert_pieces_predicated
7346 (2, icode, boolean_type_node, ops,
7347 boolean_false_node, 0, true_e);
7348 if (false_e)
7349 vn_nary_op_insert_pieces_predicated
7350 (2, icode, boolean_type_node, ops,
7351 boolean_true_node, 0, false_e);
7353 /* Relax for non-integers, inverted condition handled
7354 above. */
7355 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
7357 if (true_e)
7358 insert_related_predicates_on_edge (code, ops, true_e);
7359 if (false_e)
7360 insert_related_predicates_on_edge (icode, ops, false_e);
7363 break;
7365 case GIMPLE_GOTO:
7366 e = find_taken_edge (bb, vn_valueize (gimple_goto_dest (last)));
7367 break;
7368 default:
7369 e = NULL;
7371 if (e)
7373 todo = TODO_cleanup_cfg;
7374 if (!(e->flags & EDGE_EXECUTABLE))
7376 if (dump_file && (dump_flags & TDF_DETAILS))
7377 fprintf (dump_file,
7378 "marking known outgoing %sedge %d -> %d executable\n",
7379 e->flags & EDGE_DFS_BACK ? "back-" : "",
7380 e->src->index, e->dest->index);
7381 e->flags |= EDGE_EXECUTABLE;
7382 e->dest->flags |= BB_EXECUTABLE;
7384 else if (!(e->dest->flags & BB_EXECUTABLE))
7386 if (dump_file && (dump_flags & TDF_DETAILS))
7387 fprintf (dump_file,
7388 "marking destination block %d reachable\n",
7389 e->dest->index);
7390 e->dest->flags |= BB_EXECUTABLE;
7393 else if (gsi_one_before_end_p (gsi))
7395 FOR_EACH_EDGE (e, ei, bb->succs)
7397 if (!(e->flags & EDGE_EXECUTABLE))
7399 if (dump_file && (dump_flags & TDF_DETAILS))
7400 fprintf (dump_file,
7401 "marking outgoing edge %d -> %d executable\n",
7402 e->src->index, e->dest->index);
7403 e->flags |= EDGE_EXECUTABLE;
7404 e->dest->flags |= BB_EXECUTABLE;
7406 else if (!(e->dest->flags & BB_EXECUTABLE))
7408 if (dump_file && (dump_flags & TDF_DETAILS))
7409 fprintf (dump_file,
7410 "marking destination block %d reachable\n",
7411 e->dest->index);
7412 e->dest->flags |= BB_EXECUTABLE;
7417 /* Eliminate. That also pushes to avail. */
7418 if (eliminate && ! iterate)
7419 avail.eliminate_stmt (bb, &gsi);
7420 else
7421 /* If not eliminating, make all not already available defs
7422 available. */
7423 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_DEF)
7424 if (! avail.eliminate_avail (bb, op))
7425 avail.eliminate_push_avail (bb, op);
7428 /* Eliminate in destination PHI arguments. Always substitute in dest
7429 PHIs, even for non-executable edges. This handles region
7430 exits PHIs. */
7431 if (!iterate && eliminate)
7432 FOR_EACH_EDGE (e, ei, bb->succs)
7433 for (gphi_iterator gsi = gsi_start_phis (e->dest);
7434 !gsi_end_p (gsi); gsi_next (&gsi))
7436 gphi *phi = gsi.phi ();
7437 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
7438 tree arg = USE_FROM_PTR (use_p);
7439 if (TREE_CODE (arg) != SSA_NAME
7440 || virtual_operand_p (arg))
7441 continue;
7442 tree sprime;
7443 if (SSA_NAME_IS_DEFAULT_DEF (arg))
7445 sprime = SSA_VAL (arg);
7446 gcc_assert (TREE_CODE (sprime) != SSA_NAME
7447 || SSA_NAME_IS_DEFAULT_DEF (sprime));
7449 else
7450 /* Look for something available at the definition block of the
7451 argument. This avoids inconsistencies between the availability
7452 there, which decides whether the stmt can be removed, and the
7453 availability at the use site. The SSA property ensures that things
7454 available at the definition are also available at uses. */
7455 sprime = avail.eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (arg)),
7456 arg);
7457 if (sprime
7458 && sprime != arg
7459 && may_propagate_copy (arg, sprime))
7460 propagate_value (use_p, sprime);
7463 vn_context_bb = NULL;
7464 return todo;
7467 /* Unwind state per basic-block. */
7469 struct unwind_state
7471 /* Times this block has been visited. */
7472 unsigned visited;
7473 /* Whether to handle this as iteration point or whether to treat
7474 incoming backedge PHI values as varying. */
7475 bool iterate;
7476 /* Maximum RPO index this block is reachable from. */
7477 int max_rpo;
7478 /* Unwind state. */
7479 void *ob_top;
7480 vn_reference_t ref_top;
7481 vn_phi_t phi_top;
7482 vn_nary_op_t nary_top;
7483 vn_avail *avail_top;
7486 /* Unwind the RPO VN state for iteration. */
7488 static void
7489 do_unwind (unwind_state *to, rpo_elim &avail)
7491 gcc_assert (to->iterate);
7492 for (; last_inserted_nary != to->nary_top;
7493 last_inserted_nary = last_inserted_nary->next)
7495 vn_nary_op_t *slot;
7496 slot = valid_info->nary->find_slot_with_hash
7497 (last_inserted_nary, last_inserted_nary->hashcode, NO_INSERT);
7498 /* Predication causes the need to restore previous state. */
7499 if ((*slot)->unwind_to)
7500 *slot = (*slot)->unwind_to;
7501 else
7502 valid_info->nary->clear_slot (slot);
7504 for (; last_inserted_phi != to->phi_top;
7505 last_inserted_phi = last_inserted_phi->next)
7507 vn_phi_t *slot;
7508 slot = valid_info->phis->find_slot_with_hash
7509 (last_inserted_phi, last_inserted_phi->hashcode, NO_INSERT);
7510 valid_info->phis->clear_slot (slot);
7512 for (; last_inserted_ref != to->ref_top;
7513 last_inserted_ref = last_inserted_ref->next)
7515 vn_reference_t *slot;
7516 slot = valid_info->references->find_slot_with_hash
7517 (last_inserted_ref, last_inserted_ref->hashcode, NO_INSERT);
7518 (*slot)->operands.release ();
7519 valid_info->references->clear_slot (slot);
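     /* Finally release everything allocated on the VN tables obstack
        since the unwind point. */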
7521 obstack_free (&vn_tables_obstack, to->ob_top);
7523 /* Prune all availability entries pushed after the unwind point from avail. */
7524 for (; last_pushed_avail && last_pushed_avail->avail != to->avail_top;)
7526 vn_ssa_aux_t val = last_pushed_avail;
7527 vn_avail *av = val->avail;
7528 val->avail = av->next;
7529 last_pushed_avail = av->next_undo;
7530 av->next = avail.m_avail_freelist;
7531 avail.m_avail_freelist = av;
7535 /* Do VN on a SEME region specified by ENTRY and EXIT_BBS in FN.
7536 If ITERATE is true then treat backedges optimistically as not
7537 executed and iterate. If ELIMINATE is true then perform
7538 elimination, otherwise leave that to the caller. */
7540 static unsigned
7541 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
7542 bool iterate, bool eliminate)
7544 unsigned todo = 0;
7546 /* We currently do not support region-based iteration when
7547 elimination is requested. */
7548 gcc_assert (!entry || !iterate || !eliminate);
7549 /* When iterating we need loop info up-to-date. */
7550 gcc_assert (!iterate || !loops_state_satisfies_p (LOOPS_NEED_FIXUP));
7552 bool do_region = entry != NULL;
7553 if (!do_region)
7555 entry = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fn));
7556 exit_bbs = BITMAP_ALLOC (NULL);
7557 bitmap_set_bit (exit_bbs, EXIT_BLOCK);
7560 /* Clear EDGE_DFS_BACK on "all" entry edges; the RPO order compute will
7561 re-mark those that are contained in the region. */
7562 edge_iterator ei;
7563 edge e;
7564 FOR_EACH_EDGE (e, ei, entry->dest->preds)
7565 e->flags &= ~EDGE_DFS_BACK;
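     /* Compute the RPO order of the SEME region and mark DFS back edges;
        when not iterating, also record the extents of toplevel SCCs. */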
7567 int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS);
7568 auto_vec<std::pair<int, int> > toplevel_scc_extents;
7569 int n = rev_post_order_and_mark_dfs_back_seme
7570 (fn, entry, exit_bbs, true, rpo, !iterate ? &toplevel_scc_extents : NULL);
7572 if (!do_region)
7573 BITMAP_FREE (exit_bbs);
7575 /* If there are any non-DFS_BACK edges into entry->dest skip
7576 processing PHI nodes for that block. This supports
7577 value-numbering loop bodies without the actual loop. */
7578 FOR_EACH_EDGE (e, ei, entry->dest->preds)
7579 if (e != entry
7580 && !(e->flags & EDGE_DFS_BACK))
7581 break;
7582 bool skip_entry_phis = e != NULL;
7583 if (skip_entry_phis && dump_file && (dump_flags & TDF_DETAILS))
7584 fprintf (dump_file, "Region does not contain all edges into "
7585 "the entry block, skipping its PHIs.\n");
7587 int *bb_to_rpo = XNEWVEC (int, last_basic_block_for_fn (fn));
7588 for (int i = 0; i < n; ++i)
7589 bb_to_rpo[rpo[i]] = i;
7591 unwind_state *rpo_state = XNEWVEC (unwind_state, n);
7593 rpo_elim avail (entry->dest);
7594 rpo_avail = &avail;
7596 /* Verify we have no extra entries into the region. */
7597 if (flag_checking && do_region)
7599 auto_bb_flag bb_in_region (fn);
7600 for (int i = 0; i < n; ++i)
7602 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7603 bb->flags |= bb_in_region;
7605 /* We can't merge the first two loops because we cannot rely
7606 on EDGE_DFS_BACK for edges not within the region. But if
7607 we decide to always have the bb_in_region flag we can
7608 do the checking during the RPO walk itself (but then it's
7609 also easy to handle MEME conservatively). */
7610 for (int i = 0; i < n; ++i)
7612 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7613 edge e;
7614 edge_iterator ei;
7615 FOR_EACH_EDGE (e, ei, bb->preds)
7616 gcc_assert (e == entry
7617 || (skip_entry_phis && bb == entry->dest)
7618 || (e->src->flags & bb_in_region));
7620 for (int i = 0; i < n; ++i)
7622 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7623 bb->flags &= ~bb_in_region;
7627 /* Create the VN state. For the initial size of the various hashtables
7628 use a heuristic based on region size and number of SSA names. */
7629 unsigned region_size = (((unsigned HOST_WIDE_INT)n * num_ssa_names)
7630 / (n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS));
7631 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
7632 next_value_id = 1;
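     /* Constants get value ids from a separate, negative id space. */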
7633 next_constant_value_id = -1;
7635 vn_ssa_aux_hash = new hash_table <vn_ssa_aux_hasher> (region_size * 2);
7636 gcc_obstack_init (&vn_ssa_aux_obstack);
7638 gcc_obstack_init (&vn_tables_obstack);
7639 gcc_obstack_init (&vn_tables_insert_obstack);
7640 valid_info = XCNEW (struct vn_tables_s);
7641 allocate_vn_table (valid_info, region_size);
7642 last_inserted_ref = NULL;
7643 last_inserted_phi = NULL;
7644 last_inserted_nary = NULL;
7645 last_pushed_avail = NULL;
7647 vn_valueize = rpo_vn_valueize;
7649 /* Initialize the unwind state and edge/BB executable state. */
7650 unsigned curr_scc = 0;
7651 for (int i = 0; i < n; ++i)
7653 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7654 rpo_state[i].visited = 0;
7655 rpo_state[i].max_rpo = i;
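     /* When not iterating, a block inside a toplevel SCC can be reached
        from any block up to the end of that SCC; record this in max_rpo. */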
7656 if (!iterate && curr_scc < toplevel_scc_extents.length ())
7658 if (i >= toplevel_scc_extents[curr_scc].first
7659 && i <= toplevel_scc_extents[curr_scc].second)
7660 rpo_state[i].max_rpo = toplevel_scc_extents[curr_scc].second;
7661 if (i == toplevel_scc_extents[curr_scc].second)
7662 curr_scc++;
7664 bb->flags &= ~BB_EXECUTABLE;
7665 bool has_backedges = false;
7666 edge e;
7667 edge_iterator ei;
7668 FOR_EACH_EDGE (e, ei, bb->preds)
7670 if (e->flags & EDGE_DFS_BACK)
7671 has_backedges = true;
7672 e->flags &= ~EDGE_EXECUTABLE;
7673 if (iterate || e == entry || (skip_entry_phis && bb == entry->dest))
7674 continue;
7676 rpo_state[i].iterate = iterate && has_backedges;
7678 entry->flags |= EDGE_EXECUTABLE;
7679 entry->dest->flags |= BB_EXECUTABLE;
7681 /* As a heuristic to improve compile time we handle only the N innermost
7682 loops and the outermost one optimistically. */
7683 if (iterate)
7685 unsigned max_depth = param_rpo_vn_max_loop_depth;
7686 for (auto loop : loops_list (cfun, LI_ONLY_INNERMOST))
7687 if (loop_depth (loop) > max_depth)
7688 for (unsigned i = 2;
7689 i < loop_depth (loop) - max_depth; ++i)
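     /* The outer loops of a too deeply nested loop are not iterated; mark
        their latch backedges executable so the header PHIs merge backedge
        values conservatively. */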
7691 basic_block header = superloop_at_depth (loop, i)->header;
7692 bool non_latch_backedge = false;
7693 edge e;
7694 edge_iterator ei;
7695 FOR_EACH_EDGE (e, ei, header->preds)
7696 if (e->flags & EDGE_DFS_BACK)
7698 /* There can be a non-latch backedge into the header
7699 which is part of an outer irreducible region. We
7700 cannot avoid iterating this block then. */
7701 if (!dominated_by_p (CDI_DOMINATORS,
7702 e->src, e->dest))
7704 if (dump_file && (dump_flags & TDF_DETAILS))
7705 fprintf (dump_file, "non-latch backedge %d -> %d "
7706 "forces iteration of loop %d\n",
7707 e->src->index, e->dest->index, loop->num);
7708 non_latch_backedge = true;
7710 else
7711 e->flags |= EDGE_EXECUTABLE;
7713 rpo_state[bb_to_rpo[header->index]].iterate = non_latch_backedge;
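     /* Now walk the blocks; NBLK counts the number of block visits
        performed, including re-visits when iterating. */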
7717 uint64_t nblk = 0;
7718 int idx = 0;
7719 if (iterate)
7720 /* Go and process all blocks, iterating as necessary. */
7723 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
7725 /* If the block has incoming backedges remember unwind state. This
7726 is required even for non-executable blocks since in irreducible
7727 regions we might reach them via the backedge and re-start iterating
7728 from there.
7729 Note we can individually mark blocks with incoming backedges to
7730 not iterate where we then handle PHIs conservatively. We do that
7731 heuristically to reduce compile-time for degenerate cases. */
7732 if (rpo_state[idx].iterate)
7734 rpo_state[idx].ob_top = obstack_alloc (&vn_tables_obstack, 0);
7735 rpo_state[idx].ref_top = last_inserted_ref;
7736 rpo_state[idx].phi_top = last_inserted_phi;
7737 rpo_state[idx].nary_top = last_inserted_nary;
7738 rpo_state[idx].avail_top
7739 = last_pushed_avail ? last_pushed_avail->avail : NULL;
7742 if (!(bb->flags & BB_EXECUTABLE))
7744 if (dump_file && (dump_flags & TDF_DETAILS))
7745 fprintf (dump_file, "Block %d: BB%d found not executable\n",
7746 idx, bb->index);
7747 idx++;
7748 continue;
7751 if (dump_file && (dump_flags & TDF_DETAILS))
7752 fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
7753 nblk++;
7754 todo |= process_bb (avail, bb,
7755 rpo_state[idx].visited != 0,
7756 rpo_state[idx].iterate,
7757 iterate, eliminate, do_region, exit_bbs, false);
7758 rpo_state[idx].visited++;
7760 /* Verify whether changed values flow over executable outgoing backedges
7761 and change destination PHI values (that's the thing we
7762 can easily verify). Reduce over all such edges to the destination
7763 farthest away in the RPO order. */
7764 int iterate_to = -1;
7765 edge_iterator ei;
7766 edge e;
7767 FOR_EACH_EDGE (e, ei, bb->succs)
7768 if ((e->flags & (EDGE_DFS_BACK|EDGE_EXECUTABLE))
7769 == (EDGE_DFS_BACK|EDGE_EXECUTABLE)
7770 && rpo_state[bb_to_rpo[e->dest->index]].iterate)
7772 int destidx = bb_to_rpo[e->dest->index];
7773 if (!rpo_state[destidx].visited)
7775 if (dump_file && (dump_flags & TDF_DETAILS))
7776 fprintf (dump_file, "Unvisited destination %d\n",
7777 e->dest->index);
7778 if (iterate_to == -1 || destidx < iterate_to)
7779 iterate_to = destidx;
7780 continue;
7782 if (dump_file && (dump_flags & TDF_DETAILS))
7783 fprintf (dump_file, "Looking for changed values of backedge"
7784 " %d->%d destination PHIs\n",
7785 e->src->index, e->dest->index);
7786 vn_context_bb = e->dest;
7787 gphi_iterator gsi;
7788 for (gsi = gsi_start_phis (e->dest);
7789 !gsi_end_p (gsi); gsi_next (&gsi))
7791 bool inserted = false;
7792 /* While we'd ideally just iterate on value changes,
7793 we CSE PHIs and do that even across basic-block
7794 boundaries. So even hashtable state changes can
7795 be important (which is roughly equivalent to
7796 PHI argument value changes). To avoid iterating
7797 excessively because of that we track whether a PHI
7798 was CSEd to with GF_PLF_1. */
7799 bool phival_changed;
7800 if ((phival_changed = visit_phi (gsi.phi (),
7801 &inserted, false))
7802 || (inserted && gimple_plf (gsi.phi (), GF_PLF_1)))
7804 if (!phival_changed
7805 && dump_file && (dump_flags & TDF_DETAILS))
7806 fprintf (dump_file, "PHI was CSEd and hashtable "
7807 "state (changed)\n");
7808 if (iterate_to == -1 || destidx < iterate_to)
7809 iterate_to = destidx;
7810 break;
7813 vn_context_bb = NULL;
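     /* If any destination PHI changed, unwind the VN state to the affected
        destination farthest back in the RPO order and restart from there. */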
7815 if (iterate_to != -1)
7817 do_unwind (&rpo_state[iterate_to], avail);
7818 idx = iterate_to;
7819 if (dump_file && (dump_flags & TDF_DETAILS))
7820 fprintf (dump_file, "Iterating to %d BB%d\n",
7821 iterate_to, rpo[iterate_to]);
7822 continue;
7825 idx++;
7827 while (idx < n);
7829 else /* !iterate */
7831 /* Process all blocks greedily with a worklist that enforces RPO
7832 processing of reachable blocks. */
7833 auto_bitmap worklist;
7834 bitmap_set_bit (worklist, 0);
7835 while (!bitmap_empty_p (worklist))
7837 int idx = bitmap_first_set_bit (worklist);
7838 bitmap_clear_bit (worklist, idx);
7839 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
7840 gcc_assert ((bb->flags & BB_EXECUTABLE)
7841 && !rpo_state[idx].visited);
7843 if (dump_file && (dump_flags & TDF_DETAILS))
7844 fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
7846 /* When we run into predecessor edges where we cannot trust their
7847 executable state, mark them executable so PHI processing will
7848 be conservative.
7849 ??? Do we need to force arguments flowing over that edge
7850 to be varying or will they even always be? */
7851 edge_iterator ei;
7852 edge e;
7853 FOR_EACH_EDGE (e, ei, bb->preds)
7854 if (!(e->flags & EDGE_EXECUTABLE)
7855 && (bb == entry->dest
7856 || (!rpo_state[bb_to_rpo[e->src->index]].visited
7857 && (rpo_state[bb_to_rpo[e->src->index]].max_rpo
7858 >= (int)idx))))
7860 if (dump_file && (dump_flags & TDF_DETAILS))
7861 fprintf (dump_file, "Cannot trust state of predecessor "
7862 "edge %d -> %d, marking executable\n",
7863 e->src->index, e->dest->index);
7864 e->flags |= EDGE_EXECUTABLE;
7867 nblk++;
7868 todo |= process_bb (avail, bb, false, false, false, eliminate,
7869 do_region, exit_bbs,
7870 skip_entry_phis && bb == entry->dest);
7871 rpo_state[idx].visited++;
7873 FOR_EACH_EDGE (e, ei, bb->succs)
7874 if ((e->flags & EDGE_EXECUTABLE)
7875 && e->dest->index != EXIT_BLOCK
7876 && (!do_region || !bitmap_bit_p (exit_bbs, e->dest->index))
7877 && !rpo_state[bb_to_rpo[e->dest->index]].visited)
7878 bitmap_set_bit (worklist, bb_to_rpo[e->dest->index]);
7882 /* Gather statistics and report them via the statistics machinery and the dump file. */
7883 int nex = 0;
7884 unsigned max_visited = 1;
7885 for (int i = 0; i < n; ++i)
7887 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7888 if (bb->flags & BB_EXECUTABLE)
7889 nex++;
7890 statistics_histogram_event (cfun, "RPO block visited times",
7891 rpo_state[i].visited);
7892 if (rpo_state[i].visited > max_visited)
7893 max_visited = rpo_state[i].visited;
7895 unsigned nvalues = 0, navail = 0;
7896 for (hash_table<vn_ssa_aux_hasher>::iterator i = vn_ssa_aux_hash->begin ();
7897 i != vn_ssa_aux_hash->end (); ++i)
7899 nvalues++;
7900 vn_avail *av = (*i)->avail;
7901 while (av)
7903 navail++;
7904 av = av->next;
7907 statistics_counter_event (cfun, "RPO blocks", n);
7908 statistics_counter_event (cfun, "RPO blocks visited", nblk);
7909 statistics_counter_event (cfun, "RPO blocks executable", nex);
7910 statistics_histogram_event (cfun, "RPO iterations", 10*nblk / nex);
7911 statistics_histogram_event (cfun, "RPO num values", nvalues);
7912 statistics_histogram_event (cfun, "RPO num avail", navail);
7913 statistics_histogram_event (cfun, "RPO num lattice",
7914 vn_ssa_aux_hash->elements ());
7915 if (dump_file && (dump_flags & (TDF_DETAILS|TDF_STATS)))
7917 fprintf (dump_file, "RPO iteration over %d blocks visited %" PRIu64
7918 " blocks in total discovering %d executable blocks iterating "
7919 "%d.%d times, a block was visited max. %u times\n",
7920 n, nblk, nex,
7921 (int)((10*nblk / nex)/10), (int)((10*nblk / nex)%10),
7922 max_visited);
7923 fprintf (dump_file, "RPO tracked %d values available at %d locations "
7924 "and %" PRIu64 " lattice elements\n",
7925 nvalues, navail, (uint64_t) vn_ssa_aux_hash->elements ());
7928 if (eliminate)
7930 /* When !iterate we already performed elimination during the RPO
7931 walk. */
7932 if (iterate)
7934 /* Elimination for region-based VN needs to be done within the
7935 RPO walk. */
7936 gcc_assert (! do_region);
7937 /* Note we can't use avail.walk here because that gets confused
7938 by the existing availability and it will be less efficient
7939 as well. */
7940 todo |= eliminate_with_rpo_vn (NULL);
7942 else
7943 todo |= avail.eliminate_cleanup (do_region);
7946 vn_valueize = NULL;
7947 rpo_avail = NULL;
7949 XDELETEVEC (bb_to_rpo);
7950 XDELETEVEC (rpo);
7951 XDELETEVEC (rpo_state);
7953 return todo;
7956 /* Region-based entry for RPO VN. Performs value-numbering and elimination
7957 on the SEME region specified by ENTRY and EXIT_BBS. If ENTRY is not
7958 the only edge into the region at ENTRY->dest PHI nodes in ENTRY->dest
7959 are not considered. */
7961 unsigned
7962 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs)
7964 default_vn_walk_kind = VN_WALKREWRITE;
7965 unsigned todo = do_rpo_vn (fn, entry, exit_bbs, false, true);
7966 free_rpo_vn ();
7967 return todo;
7971 namespace {
7973 const pass_data pass_data_fre =
7975 GIMPLE_PASS, /* type */
7976 "fre", /* name */
7977 OPTGROUP_NONE, /* optinfo_flags */
7978 TV_TREE_FRE, /* tv_id */
7979 ( PROP_cfg | PROP_ssa ), /* properties_required */
7980 0, /* properties_provided */
7981 0, /* properties_destroyed */
7982 0, /* todo_flags_start */
7983 0, /* todo_flags_finish */
7986 class pass_fre : public gimple_opt_pass
7988 public:
7989 pass_fre (gcc::context *ctxt)
7990 : gimple_opt_pass (pass_data_fre, ctxt), may_iterate (true)
7993 /* opt_pass methods: */
7994 opt_pass * clone () { return new pass_fre (m_ctxt); }
7995 void set_pass_param (unsigned int n, bool param)
7997 gcc_assert (n == 0);
7998 may_iterate = param;
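     /* The non-iterating instances (late FRE) are only enabled at -O2
        and above. */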
8000 virtual bool gate (function *)
8002 return flag_tree_fre != 0 && (may_iterate || optimize > 1);
8004 virtual unsigned int execute (function *);
8006 private:
8007 bool may_iterate;
8008 }; // class pass_fre
8010 unsigned int
8011 pass_fre::execute (function *fun)
8013 unsigned todo = 0;
8015 /* At -O[1g] use the cheap non-iterating mode. */
8016 bool iterate_p = may_iterate && (optimize > 1);
8017 calculate_dominance_info (CDI_DOMINATORS);
8018 if (iterate_p)
8019 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
8021 default_vn_walk_kind = VN_WALKREWRITE;
8022 todo = do_rpo_vn (fun, NULL, NULL, iterate_p, true);
8023 free_rpo_vn ();
8025 if (iterate_p)
8026 loop_optimizer_finalize ();
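     /* Cached SCEV results may refer to SSA names released by elimination;
        drop the SCEV hash table. */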
8028 if (scev_initialized_p ())
8029 scev_reset_htab ();
8031 /* For late FRE after IVOPTs and unrolling, see if we can
8032 clear TREE_ADDRESSABLE on some variables and rewrite them into SSA form. */
8033 if (!may_iterate)
8034 todo |= TODO_update_address_taken;
8036 return todo;
8039 } // anon namespace
8041 gimple_opt_pass *
8042 make_pass_fre (gcc::context *ctxt)
8044 return new pass_fre (ctxt);
8047 #undef BB_EXECUTABLE