/* SCC value numbering for trees
   Copyright (C) 2006-2021 Free Software Foundation, Inc.
   Contributed by Daniel Berlin <dan@dberlin.org>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "splay-tree.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "ssa.h"
#include "expmed.h"
#include "insn-config.h"
#include "memmodel.h"
#include "emit-rtl.h"
#include "cgraph.h"
#include "gimple-pretty-print.h"
#include "alias.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "cfganal.h"
#include "tree-inline.h"
#include "internal-fn.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimplify.h"
#include "flags.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tree-dfa.h"
#include "tree-ssa.h"
#include "dumpfile.h"
#include "cfgloop.h"
#include "tree-ssa-propagate.h"
#include "tree-cfg.h"
#include "domwalk.h"
#include "gimple-iterator.h"
#include "gimple-match.h"
#include "stringpool.h"
#include "attribs.h"
#include "tree-pass.h"
#include "statistics.h"
#include "langhooks.h"
#include "ipa-utils.h"
#include "dbgcnt.h"
#include "tree-cfgcleanup.h"
#include "tree-ssa-loop.h"
#include "tree-scalar-evolution.h"
#include "tree-ssa-loop-niter.h"
#include "builtins.h"
#include "fold-const-call.h"
#include "tree-ssa-sccvn.h"
/* This algorithm is based on the SCC algorithm presented by Keith
   Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
   (http://citeseer.ist.psu.edu/41805.html).  In
   straight-line code, it is equivalent to a regular hash-based value
   numbering that is performed in reverse postorder.

   For code with cycles, there are two alternatives, both of which
   require keeping the hashtables separate from the actual list of
   value numbers for SSA names.

   1. Iterate value numbering in an RPO walk of the blocks, removing
   all the entries from the hashtable after each iteration (but
   keeping the SSA name->value number mapping between iterations).
   Iterate until it does not change.

   2. Perform value numbering as part of an SCC walk on the SSA graph,
   iterating only the cycles in the SSA graph until they do not change
   (using a separate, optimistic hashtable for value numbering the SCC
   operands).

   The second is not just faster in practice (because most SSA graph
   cycles do not involve all the variables in the graph), it also has
   some nice properties.

   One of these nice properties is that when we pop an SCC off the
   stack, we are guaranteed to have processed all the operands coming
   from *outside of that SCC*, so we do not need to do anything special
   to ensure they have value numbers.

   Another nice property is that the SCC walk is done as part of a DFS
   of the SSA graph, which makes it easy to perform combining and
   simplifying operations at the same time.

   The code below is deliberately written in a way that makes it easy
   to separate the SCC walk from the other work it does.

   In order to propagate constants through the code, we track which
   expressions contain constants, and use those while folding.  In
   theory, we could also track expressions whose value numbers are
   replaced, in case we end up folding based on expression
   identities.

   In order to value number memory, we assign value numbers to vuses.
   This enables us to note that, for example, stores of the same value
   to the same address from the same starting memory state are
   equivalent.
   TODO:

   1. We can iterate only the changing portions of the SCCs, but
   I have not seen an SCC big enough for this to be a win.
   2. If you differentiate between phi nodes for loops and phi nodes
   for if-then-else, you can properly consider phi nodes in different
   blocks for equivalence.
   3. We could value number vuses in more cases, particularly, whole
   structure copies.  */
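
/* Illustrative sketch (added for exposition, not from the original
   sources): in a loop like

     # i_1 = PHI <0(2), i_3(3)>
     i_3 = i_1 + 1;

   i_1 and i_3 reference each other and form an SSA cycle, i.e. an SCC.
   Alternative 2 above iterates just this SCC with an optimistic table
   until the value numbers of i_1 and i_3 stabilize, while operands
   defined outside the SCC already have final value numbers when the
   SCC is popped.  */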

/* There's no BB_EXECUTABLE but we can use BB_VISITED.  */
#define BB_EXECUTABLE BB_VISITED

static vn_lookup_kind default_vn_walk_kind;

/* vn_nary_op hashtable helpers.  */

struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
{
  typedef vn_nary_op_s *compare_type;
  static inline hashval_t hash (const vn_nary_op_s *);
  static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
};

/* Return the computed hashcode for nary operation P1.  */

inline hashval_t
vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
{
  return vno1->hashcode;
}

/* Compare nary operations P1 and P2 and return true if they are
   equivalent.  */

inline bool
vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
{
  return vno1 == vno2 || vn_nary_op_eq (vno1, vno2);
}

typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;


/* vn_phi hashtable helpers.  */

static int
vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);

struct vn_phi_hasher : nofree_ptr_hash <vn_phi_s>
{
  static inline hashval_t hash (const vn_phi_s *);
  static inline bool equal (const vn_phi_s *, const vn_phi_s *);
};

/* Return the computed hashcode for phi operation P1.  */

inline hashval_t
vn_phi_hasher::hash (const vn_phi_s *vp1)
{
  return vp1->hashcode;
}

/* Compare two phi entries for equality, ignoring VN_TOP arguments.  */

inline bool
vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
{
  return vp1 == vp2 || vn_phi_eq (vp1, vp2);
}

typedef hash_table<vn_phi_hasher> vn_phi_table_type;
typedef vn_phi_table_type::iterator vn_phi_iterator_type;


/* Compare two reference operands P1 and P2 for equality.  Return true if
   they are equal, and false otherwise.  */

static int
vn_reference_op_eq (const void *p1, const void *p2)
{
  const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
  const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;

  return (vro1->opcode == vro2->opcode
	  /* We do not care for differences in type qualification.  */
	  && (vro1->type == vro2->type
	      || (vro1->type && vro2->type
		  && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
					 TYPE_MAIN_VARIANT (vro2->type))))
	  && expressions_equal_p (vro1->op0, vro2->op0)
	  && expressions_equal_p (vro1->op1, vro2->op1)
	  && expressions_equal_p (vro1->op2, vro2->op2)
	  && (vro1->opcode != CALL_EXPR || vro1->clique == vro2->clique));
}

/* Free a reference operation structure VP.  */

static inline void
free_reference (vn_reference_s *vr)
{
  vr->operands.release ();
}


/* vn_reference hashtable helpers.  */

struct vn_reference_hasher : nofree_ptr_hash <vn_reference_s>
{
  static inline hashval_t hash (const vn_reference_s *);
  static inline bool equal (const vn_reference_s *, const vn_reference_s *);
};

/* Return the hashcode for a given reference operation P1.  */

inline hashval_t
vn_reference_hasher::hash (const vn_reference_s *vr1)
{
  return vr1->hashcode;
}

inline bool
vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
{
  return v == c || vn_reference_eq (v, c);
}

typedef hash_table<vn_reference_hasher> vn_reference_table_type;
typedef vn_reference_table_type::iterator vn_reference_iterator_type;

/* Pretty-print OPS to OUTFILE.  */

void
print_vn_reference_ops (FILE *outfile, const vec<vn_reference_op_s> ops)
{
  vn_reference_op_t vro;
  unsigned int i;
  fprintf (outfile, "{");
  for (i = 0; ops.iterate (i, &vro); i++)
    {
      bool closebrace = false;
      if (vro->opcode != SSA_NAME
	  && TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
	{
	  fprintf (outfile, "%s", get_tree_code_name (vro->opcode));
	  if (vro->op0 || vro->opcode == CALL_EXPR)
	    {
	      fprintf (outfile, "<");
	      closebrace = true;
	    }
	}
      if (vro->op0 || vro->opcode == CALL_EXPR)
	{
	  if (!vro->op0)
	    fprintf (outfile, internal_fn_name ((internal_fn)vro->clique));
	  else
	    print_generic_expr (outfile, vro->op0);
	  if (vro->op1)
	    {
	      fprintf (outfile, ",");
	      print_generic_expr (outfile, vro->op1);
	    }
	  if (vro->op2)
	    {
	      fprintf (outfile, ",");
	      print_generic_expr (outfile, vro->op2);
	    }
	}
      if (closebrace)
	fprintf (outfile, ">");
      if (i != ops.length () - 1)
	fprintf (outfile, ",");
    }
  fprintf (outfile, "}");
}

DEBUG_FUNCTION void
debug_vn_reference_ops (const vec<vn_reference_op_s> ops)
{
  print_vn_reference_ops (stderr, ops);
  fputc ('\n', stderr);
}

/* The set of VN hashtables.  */

typedef struct vn_tables_s
{
  vn_nary_op_table_type *nary;
  vn_phi_table_type *phis;
  vn_reference_table_type *references;
} *vn_tables_t;


/* vn_constant hashtable helpers.  */

struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
{
  static inline hashval_t hash (const vn_constant_s *);
  static inline bool equal (const vn_constant_s *, const vn_constant_s *);
};

/* Hash table hash function for vn_constant_t.  */

inline hashval_t
vn_constant_hasher::hash (const vn_constant_s *vc1)
{
  return vc1->hashcode;
}

/* Hash table equality function for vn_constant_t.  */

inline bool
vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
{
  if (vc1->hashcode != vc2->hashcode)
    return false;

  return vn_constant_eq_with_type (vc1->constant, vc2->constant);
}

static hash_table<vn_constant_hasher> *constant_to_value_id;


/* Obstack we allocate the vn-tables elements from.  */
static obstack vn_tables_obstack;
/* Special obstack we never unwind.  */
static obstack vn_tables_insert_obstack;

static vn_reference_t last_inserted_ref;
static vn_phi_t last_inserted_phi;
static vn_nary_op_t last_inserted_nary;
static vn_ssa_aux_t last_pushed_avail;

/* Valid hashtables storing information we have proven to be
   correct.  */
static vn_tables_t valid_info;


/* Valueization hook for simplify_replace_tree.  Valueize NAME if it is
   an SSA name, otherwise just return it.  */
tree (*vn_valueize) (tree);
static tree
vn_valueize_for_srt (tree t, void* context ATTRIBUTE_UNUSED)
{
  basic_block saved_vn_context_bb = vn_context_bb;
  /* Look for sth available at the definition block of the argument.
     This avoids inconsistencies between availability there which
     decides if the stmt can be removed and availability at the
     use site.  The SSA property ensures that things available
     at the definition are also available at uses.  */
  if (!SSA_NAME_IS_DEFAULT_DEF (t))
    vn_context_bb = gimple_bb (SSA_NAME_DEF_STMT (t));
  tree res = vn_valueize (t);
  vn_context_bb = saved_vn_context_bb;
  return res;
}

/* This represents the top of the VN lattice, which is the universal
   value.  */

tree VN_TOP;

/* Unique counter for our value ids.  */

static unsigned int next_value_id;
static int next_constant_value_id;


/* Table of vn_ssa_aux_t's, one per ssa_name.  The vn_ssa_aux_t objects
   are allocated on an obstack for locality reasons, and to free them
   without looping over the vec.  */

struct vn_ssa_aux_hasher : typed_noop_remove <vn_ssa_aux_t>
{
  typedef vn_ssa_aux_t value_type;
  typedef tree compare_type;
  static inline hashval_t hash (const value_type &);
  static inline bool equal (const value_type &, const compare_type &);
  static inline void mark_deleted (value_type &) {}
  static const bool empty_zero_p = true;
  static inline void mark_empty (value_type &e) { e = NULL; }
  static inline bool is_deleted (value_type &) { return false; }
  static inline bool is_empty (value_type &e) { return e == NULL; }
};

hashval_t
vn_ssa_aux_hasher::hash (const value_type &entry)
{
  return SSA_NAME_VERSION (entry->name);
}

bool
vn_ssa_aux_hasher::equal (const value_type &entry, const compare_type &name)
{
  return name == entry->name;
}

static hash_table<vn_ssa_aux_hasher> *vn_ssa_aux_hash;
typedef hash_table<vn_ssa_aux_hasher>::iterator vn_ssa_aux_iterator_type;
static struct obstack vn_ssa_aux_obstack;

static vn_nary_op_t vn_nary_op_insert_stmt (gimple *, tree);
static unsigned int vn_nary_length_from_stmt (gimple *);
static vn_nary_op_t alloc_vn_nary_op_noinit (unsigned int, obstack *);
static vn_nary_op_t vn_nary_op_insert_into (vn_nary_op_t,
					    vn_nary_op_table_type *, bool);
static void init_vn_nary_op_from_stmt (vn_nary_op_t, gassign *);
static void init_vn_nary_op_from_pieces (vn_nary_op_t, unsigned int,
					 enum tree_code, tree, tree *);
static tree vn_lookup_simplify_result (gimple_match_op *);
static vn_reference_t vn_reference_lookup_or_insert_for_pieces
	  (tree, alias_set_type, alias_set_type, tree,
	   vec<vn_reference_op_s, va_heap>, tree);

/* Return whether there is value numbering information for a given SSA name.  */

bool
has_VN_INFO (tree name)
{
  return vn_ssa_aux_hash->find_with_hash (name, SSA_NAME_VERSION (name));
}

vn_ssa_aux_t
VN_INFO (tree name)
{
  vn_ssa_aux_t *res
    = vn_ssa_aux_hash->find_slot_with_hash (name, SSA_NAME_VERSION (name),
					    INSERT);
  if (*res != NULL)
    return *res;

  vn_ssa_aux_t newinfo = *res = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
  memset (newinfo, 0, sizeof (struct vn_ssa_aux));
  newinfo->name = name;
  newinfo->valnum = VN_TOP;
  /* We are using the visited flag to handle uses with defs not within the
     region being value-numbered.  */
  newinfo->visited = false;

  /* Given we create the VN_INFOs on-demand now we have to do initialization
     different than VN_TOP here.  */
  if (SSA_NAME_IS_DEFAULT_DEF (name))
    switch (TREE_CODE (SSA_NAME_VAR (name)))
      {
      case VAR_DECL:
	/* All undefined vars are VARYING.  */
	newinfo->valnum = name;
	newinfo->visited = true;
	break;

      case PARM_DECL:
	/* Parameters are VARYING but we can record a condition
	   if we know it is a non-NULL pointer.  */
	newinfo->visited = true;
	newinfo->valnum = name;
	if (POINTER_TYPE_P (TREE_TYPE (name))
	    && nonnull_arg_p (SSA_NAME_VAR (name)))
	  {
	    tree ops[2];
	    ops[0] = name;
	    ops[1] = build_int_cst (TREE_TYPE (name), 0);
	    vn_nary_op_t nary;
	    /* Allocate from non-unwinding stack.  */
	    nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
	    init_vn_nary_op_from_pieces (nary, 2, NE_EXPR,
					 boolean_type_node, ops);
	    nary->predicated_values = 0;
	    nary->u.result = boolean_true_node;
	    vn_nary_op_insert_into (nary, valid_info->nary, true);
	    gcc_assert (nary->unwind_to == NULL);
	    /* Also do not link it into the undo chain.  */
	    last_inserted_nary = nary->next;
	    nary->next = (vn_nary_op_t)(void *)-1;
	    nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
	    init_vn_nary_op_from_pieces (nary, 2, EQ_EXPR,
					 boolean_type_node, ops);
	    nary->predicated_values = 0;
	    nary->u.result = boolean_false_node;
	    vn_nary_op_insert_into (nary, valid_info->nary, true);
	    gcc_assert (nary->unwind_to == NULL);
	    last_inserted_nary = nary->next;
	    nary->next = (vn_nary_op_t)(void *)-1;
	    if (dump_file && (dump_flags & TDF_DETAILS))
	      {
		fprintf (dump_file, "Recording ");
		print_generic_expr (dump_file, name, TDF_SLIM);
		fprintf (dump_file, " != 0\n");
	      }
	  }
	break;

      case RESULT_DECL:
	/* If the result is passed by invisible reference the default
	   def is initialized, otherwise it's uninitialized.  Still
	   undefined is varying.  */
	newinfo->visited = true;
	newinfo->valnum = name;
	break;

      default:
	gcc_unreachable ();
      }
  return newinfo;
}
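
/* Illustrative note (added for exposition, hypothetical GIMPLE): given
   void foo (int *p) with p known nonnull, the recording above enters
   p_1(D) != 0 -> true and p_1(D) == 0 -> false into the nary table, so
   a later guard like if (p_1(D) == 0) can be folded to false during
   value numbering.  */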

/* Return the SSA value of X.  */

inline tree
SSA_VAL (tree x, bool *visited = NULL)
{
  vn_ssa_aux_t tem = vn_ssa_aux_hash->find_with_hash (x, SSA_NAME_VERSION (x));
  if (visited)
    *visited = tem && tem->visited;
  return tem && tem->visited ? tem->valnum : x;
}

/* Return the SSA value of the VUSE x, supporting released VDEFs
   during elimination which will value-number the VDEF to the
   associated VUSE (but not substitute in the whole lattice).  */

static inline tree
vuse_ssa_val (tree x)
{
  if (!x)
    return NULL_TREE;

  do
    {
      x = SSA_VAL (x);
      gcc_assert (x != VN_TOP);
    }
  while (SSA_NAME_IN_FREE_LIST (x));

  return x;
}

/* Similar to the above but used as callback for walk_non_aliased_vuses
   and thus should stop at unvisited VUSE to not walk across region
   boundaries.  */

static tree
vuse_valueize (tree vuse)
{
  do
    {
      bool visited;
      vuse = SSA_VAL (vuse, &visited);
      if (!visited)
	return NULL_TREE;
      gcc_assert (vuse != VN_TOP);
    }
  while (SSA_NAME_IN_FREE_LIST (vuse));
  return vuse;
}


/* Return the vn_kind the expression computed by the stmt should be
   associated with.  */

enum vn_kind
vn_get_stmt_kind (gimple *stmt)
{
  switch (gimple_code (stmt))
    {
    case GIMPLE_CALL:
      return VN_REFERENCE;
    case GIMPLE_PHI:
      return VN_PHI;
    case GIMPLE_ASSIGN:
      {
	enum tree_code code = gimple_assign_rhs_code (stmt);
	tree rhs1 = gimple_assign_rhs1 (stmt);
	switch (get_gimple_rhs_class (code))
	  {
	  case GIMPLE_UNARY_RHS:
	  case GIMPLE_BINARY_RHS:
	  case GIMPLE_TERNARY_RHS:
	    return VN_NARY;
	  case GIMPLE_SINGLE_RHS:
	    switch (TREE_CODE_CLASS (code))
	      {
	      case tcc_reference:
		/* VOP-less references can go through unary case.  */
		if ((code == REALPART_EXPR
		     || code == IMAGPART_EXPR
		     || code == VIEW_CONVERT_EXPR
		     || code == BIT_FIELD_REF)
		    && (TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME
			|| is_gimple_min_invariant (TREE_OPERAND (rhs1, 0))))
		  return VN_NARY;

		/* Fallthrough.  */
	      case tcc_declaration:
		return VN_REFERENCE;

	      case tcc_constant:
		return VN_CONSTANT;

	      default:
		if (code == ADDR_EXPR)
		  return (is_gimple_min_invariant (rhs1)
			  ? VN_CONSTANT : VN_REFERENCE);
		else if (code == CONSTRUCTOR)
		  return VN_NARY;
		return VN_NONE;
	      }
	  default:
	    return VN_NONE;
	  }
      }
    default:
      return VN_NONE;
    }
}

/* Lookup a value id for CONSTANT and return it.  If it does not
   exist returns 0.  */

unsigned int
get_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
  if (slot)
    return (*slot)->value_id;
  return 0;
}

/* Lookup a value id for CONSTANT, and if it does not exist, create a
   new one and return it.  If it does exist, return it.  */

unsigned int
get_or_alloc_constant_value_id (tree constant)
{
  vn_constant_s **slot;
  struct vn_constant_s vc;
  vn_constant_t vcp;

  /* If the hashtable isn't initialized we're not running from PRE and thus
     do not need value-ids.  */
  if (!constant_to_value_id)
    return 0;

  vc.hashcode = vn_hash_constant_with_type (constant);
  vc.constant = constant;
  slot = constant_to_value_id->find_slot (&vc, INSERT);
  if (*slot)
    return (*slot)->value_id;

  vcp = XNEW (struct vn_constant_s);
  vcp->hashcode = vc.hashcode;
  vcp->constant = constant;
  vcp->value_id = get_next_constant_value_id ();
  *slot = vcp;
  return vcp->value_id;
}

/* Compute the hash for a reference operand VRO1.  */

static void
vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
{
  hstate.add_int (vro1->opcode);
  if (vro1->opcode == CALL_EXPR && !vro1->op0)
    hstate.add_int (vro1->clique);
  if (vro1->op0)
    inchash::add_expr (vro1->op0, hstate);
  if (vro1->op1)
    inchash::add_expr (vro1->op1, hstate);
  if (vro1->op2)
    inchash::add_expr (vro1->op2, hstate);
}

/* Compute a hash for the reference operation VR1 and return it.  */

static hashval_t
vn_reference_compute_hash (const vn_reference_t vr1)
{
  inchash::hash hstate;
  hashval_t result;
  int i;
  vn_reference_op_t vro;
  poly_int64 off = -1;
  bool deref = false;

  FOR_EACH_VEC_ELT (vr1->operands, i, vro)
    {
      if (vro->opcode == MEM_REF)
	deref = true;
      else if (vro->opcode != ADDR_EXPR)
	deref = false;
      if (maybe_ne (vro->off, -1))
	{
	  if (known_eq (off, -1))
	    off = 0;
	  off += vro->off;
	}
      else
	{
	  if (maybe_ne (off, -1)
	      && maybe_ne (off, 0))
	    hstate.add_poly_int (off);
	  off = -1;
	  if (deref
	      && vro->opcode == ADDR_EXPR)
	    {
	      if (vro->op0)
		{
		  tree op = TREE_OPERAND (vro->op0, 0);
		  hstate.add_int (TREE_CODE (op));
		  inchash::add_expr (op, hstate);
		}
	    }
	  else
	    vn_reference_op_compute_hash (vro, hstate);
	}
    }
  result = hstate.end ();
  /* ??? We would ICE later if we hash instead of adding that in.  */
  if (vr1->vuse)
    result += SSA_NAME_VERSION (vr1->vuse);

  return result;
}
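
/* Illustrative note on the offset accumulation above (added for
   exposition): runs of operands with known constant offsets, e.g. a
   COMPONENT_REF contributing 4 bytes followed by an ARRAY_REF
   contributing 8, are hashed as their sum 12 rather than per operand,
   so differently structured access paths reaching the same byte
   offset of the same base can hash alike.  */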

/* Return true if reference operations VR1 and VR2 are equivalent.  This
   means they have the same set of operands and vuses.  */

bool
vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
{
  unsigned i, j;

  /* Early out if this is not a hash collision.  */
  if (vr1->hashcode != vr2->hashcode)
    return false;

  /* The VOP needs to be the same.  */
  if (vr1->vuse != vr2->vuse)
    return false;

  /* If the operands are the same we are done.  */
  if (vr1->operands == vr2->operands)
    return true;

  if (!vr1->type || !vr2->type)
    {
      if (vr1->type != vr2->type)
	return false;
    }
  else if (vr1->type == vr2->type)
    ;
  else if (COMPLETE_TYPE_P (vr1->type) != COMPLETE_TYPE_P (vr2->type)
	   || (COMPLETE_TYPE_P (vr1->type)
	       && !expressions_equal_p (TYPE_SIZE (vr1->type),
					TYPE_SIZE (vr2->type))))
    return false;
  else if (vr1->operands[0].opcode == CALL_EXPR
	   && !types_compatible_p (vr1->type, vr2->type))
    return false;
  else if (INTEGRAL_TYPE_P (vr1->type)
	   && INTEGRAL_TYPE_P (vr2->type))
    {
      if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
	return false;
    }
  else if (INTEGRAL_TYPE_P (vr1->type)
	   && (TYPE_PRECISION (vr1->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
    return false;
  else if (INTEGRAL_TYPE_P (vr2->type)
	   && (TYPE_PRECISION (vr2->type)
	       != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
    return false;

  i = 0;
  j = 0;
  do
    {
      poly_int64 off1 = 0, off2 = 0;
      vn_reference_op_t vro1, vro2;
      vn_reference_op_s tem1, tem2;
      bool deref1 = false, deref2 = false;
      bool reverse1 = false, reverse2 = false;
      for (; vr1->operands.iterate (i, &vro1); i++)
	{
	  if (vro1->opcode == MEM_REF)
	    deref1 = true;
	  /* Do not look through a storage order barrier.  */
	  else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
	    return false;
	  reverse1 |= vro1->reverse;
	  if (known_eq (vro1->off, -1))
	    break;
	  off1 += vro1->off;
	}
      for (; vr2->operands.iterate (j, &vro2); j++)
	{
	  if (vro2->opcode == MEM_REF)
	    deref2 = true;
	  /* Do not look through a storage order barrier.  */
	  else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
	    return false;
	  reverse2 |= vro2->reverse;
	  if (known_eq (vro2->off, -1))
	    break;
	  off2 += vro2->off;
	}
      if (maybe_ne (off1, off2) || reverse1 != reverse2)
	return false;
      if (deref1 && vro1->opcode == ADDR_EXPR)
	{
	  memset (&tem1, 0, sizeof (tem1));
	  tem1.op0 = TREE_OPERAND (vro1->op0, 0);
	  tem1.type = TREE_TYPE (tem1.op0);
	  tem1.opcode = TREE_CODE (tem1.op0);
	  vro1 = &tem1;
	  deref1 = false;
	}
      if (deref2 && vro2->opcode == ADDR_EXPR)
	{
	  memset (&tem2, 0, sizeof (tem2));
	  tem2.op0 = TREE_OPERAND (vro2->op0, 0);
	  tem2.type = TREE_TYPE (tem2.op0);
	  tem2.opcode = TREE_CODE (tem2.op0);
	  vro2 = &tem2;
	  deref2 = false;
	}
      if (deref1 != deref2)
	return false;
      if (!vn_reference_op_eq (vro1, vro2))
	return false;
      ++j;
      ++i;
    }
  while (vr1->operands.length () != i
	 || vr2->operands.length () != j);

  return true;
}

/* Copy the operations present in load/store REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
{
  /* For non-calls, store the information that makes up the address.  */
  tree orig = ref;
  while (ref)
    {
      vn_reference_op_s temp;

      memset (&temp, 0, sizeof (temp));
      temp.type = TREE_TYPE (ref);
      temp.opcode = TREE_CODE (ref);
      temp.off = -1;

      switch (temp.opcode)
	{
	case MODIFY_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  break;
	case WITH_SIZE_EXPR:
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.off = 0;
	  break;
	case MEM_REF:
	  /* The base address gets its own vn_reference_op_s structure.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  if (!mem_ref_offset (ref).to_shwi (&temp.off))
	    temp.off = -1;
	  temp.clique = MR_DEPENDENCE_CLIQUE (ref);
	  temp.base = MR_DEPENDENCE_BASE (ref);
	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
	  break;
	case TARGET_MEM_REF:
	  /* The base address gets its own vn_reference_op_s structure.  */
	  temp.op0 = TMR_INDEX (ref);
	  temp.op1 = TMR_STEP (ref);
	  temp.op2 = TMR_OFFSET (ref);
	  temp.clique = MR_DEPENDENCE_CLIQUE (ref);
	  temp.base = MR_DEPENDENCE_BASE (ref);
	  result->safe_push (temp);
	  memset (&temp, 0, sizeof (temp));
	  temp.type = NULL_TREE;
	  temp.opcode = ERROR_MARK;
	  temp.op0 = TMR_INDEX2 (ref);
	  temp.off = -1;
	  break;
	case BIT_FIELD_REF:
	  /* Record bits, position and storage order.  */
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
	    temp.off = -1;
	  temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
	  break;
	case COMPONENT_REF:
	  /* The field decl is enough to unambiguously specify the field,
	     so use its type here.  */
	  temp.type = TREE_TYPE (TREE_OPERAND (ref, 1));
	  temp.op0 = TREE_OPERAND (ref, 1);
	  temp.op1 = TREE_OPERAND (ref, 2);
	  temp.reverse = (AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (ref, 0)))
			  && TYPE_REVERSE_STORAGE_ORDER
			       (TREE_TYPE (TREE_OPERAND (ref, 0))));
	  {
	    tree this_offset = component_ref_field_offset (ref);
	    if (this_offset
		&& poly_int_tree_p (this_offset))
	      {
		tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
		if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
		  {
		    poly_offset_int off
		      = (wi::to_poly_offset (this_offset)
			 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
		    /* Prohibit value-numbering zero offset components
		       of addresses the same before the pass folding
		       __builtin_object_size had a chance to run.  */
		    if (TREE_CODE (orig) != ADDR_EXPR
			|| maybe_ne (off, 0)
			|| (cfun->curr_properties & PROP_objsz))
		      off.to_shwi (&temp.off);
		  }
	      }
	  }
	  break;
	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  {
	    tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
	    /* Record index as operand.  */
	    temp.op0 = TREE_OPERAND (ref, 1);
	    /* Always record lower bounds and element size.  */
	    temp.op1 = array_ref_low_bound (ref);
	    /* But record element size in units of the type alignment.  */
	    temp.op2 = TREE_OPERAND (ref, 3);
	    temp.align = eltype->type_common.align;
	    if (! temp.op2)
	      temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
				     size_int (TYPE_ALIGN_UNIT (eltype)));
	    if (poly_int_tree_p (temp.op0)
		&& poly_int_tree_p (temp.op1)
		&& TREE_CODE (temp.op2) == INTEGER_CST)
	      {
		poly_offset_int off = ((wi::to_poly_offset (temp.op0)
					- wi::to_poly_offset (temp.op1))
				       * wi::to_offset (temp.op2)
				       * vn_ref_op_align_unit (&temp));
		off.to_shwi (&temp.off);
	      }
	    temp.reverse = (AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (ref, 0)))
			    && TYPE_REVERSE_STORAGE_ORDER
				 (TREE_TYPE (TREE_OPERAND (ref, 0))));
	  }
	  break;
	case VAR_DECL:
	  if (DECL_HARD_REGISTER (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  /* Fallthru.  */
	case PARM_DECL:
	case CONST_DECL:
	case RESULT_DECL:
	  /* Canonicalize decls to MEM[&decl] which is what we end up with
	     when valueizing MEM[ptr] with ptr = &decl.  */
	  temp.opcode = MEM_REF;
	  temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
	  temp.off = 0;
	  result->safe_push (temp);
	  temp.opcode = ADDR_EXPR;
	  temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
	  temp.type = TREE_TYPE (temp.op0);
	  temp.off = -1;
	  break;
	case STRING_CST:
	case INTEGER_CST:
	case POLY_INT_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case FIXED_CST:
	case CONSTRUCTOR:
	case SSA_NAME:
	  temp.op0 = ref;
	  break;
	case ADDR_EXPR:
	  if (is_gimple_min_invariant (ref))
	    {
	      temp.op0 = ref;
	      break;
	    }
	  break;
	  /* These are only interesting for their operands, their
	     existence, and their type.  They will never be the last
	     ref in the chain of references (i.e. they require an
	     operand), so we don't have to put anything
	     for op* as it will be handled by the iteration.  */
	case REALPART_EXPR:
	  temp.off = 0;
	  break;
	case VIEW_CONVERT_EXPR:
	  temp.off = 0;
	  temp.reverse = storage_order_barrier_p (ref);
	  break;
	case IMAGPART_EXPR:
	  /* This is only interesting for its constant offset.  */
	  temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
	  break;
	default:
	  gcc_unreachable ();
	}
      result->safe_push (temp);

      if (REFERENCE_CLASS_P (ref)
	  || TREE_CODE (ref) == MODIFY_EXPR
	  || TREE_CODE (ref) == WITH_SIZE_EXPR
	  || (TREE_CODE (ref) == ADDR_EXPR
	      && !is_gimple_min_invariant (ref)))
	ref = TREE_OPERAND (ref, 0);
      else
	ref = NULL_TREE;
    }
}
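
/* Illustrative example (added for exposition, not from the original
   sources): for a load from s.a[i_1], with s a global struct, the ops
   vector becomes roughly

     { ARRAY_REF<i_1>, COMPONENT_REF<a>, MEM_REF<0>, ADDR_EXPR<&s> }

   i.e. the access path outermost-first, with the decl s canonicalized
   to the MEM[&s] form as described in the VAR_DECL case above.  */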

/* Build an alias-oracle reference abstraction in *REF from the vn_reference
   operands in *OPS, the reference alias set SET and the reference type TYPE.
   Return true if something useful was produced.  */

bool
ao_ref_init_from_vn_reference (ao_ref *ref,
			       alias_set_type set, alias_set_type base_set,
			       tree type, const vec<vn_reference_op_s> &ops)
{
  unsigned i;
  tree base = NULL_TREE;
  tree *op0_p = &base;
  poly_offset_int offset = 0;
  poly_offset_int max_size;
  poly_offset_int size = -1;
  tree size_tree = NULL_TREE;

  /* We don't handle calls.  */
  if (!type)
    return false;

  machine_mode mode = TYPE_MODE (type);
  if (mode == BLKmode)
    size_tree = TYPE_SIZE (type);
  else
    size = GET_MODE_BITSIZE (mode);
  if (size_tree != NULL_TREE
      && poly_int_tree_p (size_tree))
    size = wi::to_poly_offset (size_tree);

  /* Lower the final access size from the outermost expression.  */
  const_vn_reference_op_t cst_op = &ops[0];
  /* Cast away constness for the sake of the const-unsafe
     FOR_EACH_VEC_ELT().  */
  vn_reference_op_t op = const_cast<vn_reference_op_t>(cst_op);
  size_tree = NULL_TREE;
  if (op->opcode == COMPONENT_REF)
    size_tree = DECL_SIZE (op->op0);
  else if (op->opcode == BIT_FIELD_REF)
    size_tree = op->op0;
  if (size_tree != NULL_TREE
      && poly_int_tree_p (size_tree)
      && (!known_size_p (size)
	  || known_lt (wi::to_poly_offset (size_tree), size)))
    size = wi::to_poly_offset (size_tree);

  /* Initially, maxsize is the same as the accessed element size.
     In the following it will only grow (or become -1).  */
  max_size = size;

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  FOR_EACH_VEC_ELT (ops, i, op)
    {
      switch (op->opcode)
	{
	/* These may be in the reference ops, but we cannot do anything
	   sensible with them here.  */
	case ADDR_EXPR:
	  /* Apart from ADDR_EXPR arguments to MEM_REF.  */
	  if (base != NULL_TREE
	      && TREE_CODE (base) == MEM_REF
	      && op->op0
	      && DECL_P (TREE_OPERAND (op->op0, 0)))
	    {
	      const_vn_reference_op_t pop = &ops[i-1];
	      base = TREE_OPERAND (op->op0, 0);
	      if (known_eq (pop->off, -1))
		{
		  max_size = -1;
		  offset = 0;
		}
	      else
		offset += pop->off * BITS_PER_UNIT;
	      op0_p = NULL;
	      break;
	    }
	  /* Fallthru.  */
	case CALL_EXPR:
	  return false;

	/* Record the base objects.  */
	case MEM_REF:
	  *op0_p = build2 (MEM_REF, op->type,
			   NULL_TREE, op->op0);
	  MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
	  MR_DEPENDENCE_BASE (*op0_p) = op->base;
	  op0_p = &TREE_OPERAND (*op0_p, 0);
	  break;

	case VAR_DECL:
	case PARM_DECL:
	case RESULT_DECL:
	case SSA_NAME:
	  *op0_p = op->op0;
	  op0_p = NULL;
	  break;

	/* And now the usual component-reference style ops.  */
	case BIT_FIELD_REF:
	  offset += wi::to_poly_offset (op->op1);
	  break;

	case COMPONENT_REF:
	  {
	    tree field = op->op0;
	    /* We do not have a complete COMPONENT_REF tree here so we
	       cannot use component_ref_field_offset.  Do the interesting
	       parts manually.  */
	    tree this_offset = DECL_FIELD_OFFSET (field);

	    if (op->op1 || !poly_int_tree_p (this_offset))
	      max_size = -1;
	    else
	      {
		poly_offset_int woffset = (wi::to_poly_offset (this_offset)
					   << LOG2_BITS_PER_UNIT);
		woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
		offset += woffset;
	      }
	    break;
	  }

	case ARRAY_RANGE_REF:
	case ARRAY_REF:
	  /* We recorded the lower bound and the element size.  */
	  if (!poly_int_tree_p (op->op0)
	      || !poly_int_tree_p (op->op1)
	      || TREE_CODE (op->op2) != INTEGER_CST)
	    max_size = -1;
	  else
	    {
	      poly_offset_int woffset
		= wi::sext (wi::to_poly_offset (op->op0)
			    - wi::to_poly_offset (op->op1),
			    TYPE_PRECISION (sizetype));
	      woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
	      woffset <<= LOG2_BITS_PER_UNIT;
	      offset += woffset;
	    }
	  break;

	case REALPART_EXPR:
	  break;

	case IMAGPART_EXPR:
	  offset += size;
	  break;

	case VIEW_CONVERT_EXPR:
	  break;

	case STRING_CST:
	case INTEGER_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case REAL_CST:
	case CONSTRUCTOR:
	case CONST_DECL:
	  return false;

	default:
	  return false;
	}
    }

  if (base == NULL_TREE)
    return false;

  ref->ref = NULL_TREE;
  ref->base = base;
  ref->ref_alias_set = set;
  ref->base_alias_set = base_set;
  /* We discount volatiles from value-numbering elsewhere.  */
  ref->volatile_p = false;

  if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
    {
      ref->offset = 0;
      ref->size = -1;
      ref->max_size = -1;
      return true;
    }

  if (!offset.to_shwi (&ref->offset))
    {
      ref->offset = 0;
      ref->max_size = -1;
      return true;
    }

  if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
    ref->max_size = -1;

  return true;
}

/* Copy the operations present in load/store/call REF into RESULT, a vector of
   vn_reference_op_s's.  */

static void
copy_reference_ops_from_call (gcall *call,
			      vec<vn_reference_op_s> *result)
{
  vn_reference_op_s temp;
  unsigned i;
  tree lhs = gimple_call_lhs (call);
  int lr;

  /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
     different.  By adding the lhs here in the vector, we ensure that the
     hashcode is different, guaranteeing a different value number.  */
  if (lhs && TREE_CODE (lhs) != SSA_NAME)
    {
      memset (&temp, 0, sizeof (temp));
      temp.opcode = MODIFY_EXPR;
      temp.type = TREE_TYPE (lhs);
      temp.op0 = lhs;
      temp.off = -1;
      result->safe_push (temp);
    }

  /* Copy the type, opcode, function, static chain and EH region, if any.  */
  memset (&temp, 0, sizeof (temp));
  temp.type = gimple_call_fntype (call);
  temp.opcode = CALL_EXPR;
  temp.op0 = gimple_call_fn (call);
  if (gimple_call_internal_p (call))
    temp.clique = gimple_call_internal_fn (call);
  temp.op1 = gimple_call_chain (call);
  if (stmt_could_throw_p (cfun, call) && (lr = lookup_stmt_eh_lp (call)) > 0)
    temp.op2 = size_int (lr);
  temp.off = -1;
  result->safe_push (temp);

  /* Copy the call arguments.  As they can be references as well,
     just chain them together.  */
  for (i = 0; i < gimple_call_num_args (call); ++i)
    {
      tree callarg = gimple_call_arg (call, i);
      copy_reference_ops_from_ref (callarg, result);
    }
}

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static bool
vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
			    unsigned int *i_p)
{
  unsigned int i = *i_p;
  vn_reference_op_t op = &(*ops)[i];
  vn_reference_op_t mem_op = &(*ops)[i - 1];
  tree addr_base;
  poly_int64 addr_offset = 0;

  /* The only thing we have to do is from &OBJ.foo.bar add the offset
     from .foo.bar to the preceding MEM_REF offset and replace the
     address with &OBJ.  */
  addr_base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (op->op0, 0),
					       &addr_offset, vn_valueize);
  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
  if (addr_base != TREE_OPERAND (op->op0, 0))
    {
      poly_offset_int off
	= (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
				  SIGNED)
	   + addr_offset);
      mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
      op->op0 = build_fold_addr_expr (addr_base);
      if (tree_fits_shwi_p (mem_op->op0))
	mem_op->off = tree_to_shwi (mem_op->op0);
      else
	mem_op->off = -1;
      return true;
    }
  return false;
}
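
/* Illustrative example (added for exposition): with ops ending in
   { MEM_REF<4>, ADDR_EXPR<&s.b> } and assuming field b sits at byte
   offset 8 in s, the folding above rewrites them to
   { MEM_REF<12>, ADDR_EXPR<&s> }, moving the component offset into
   the MEM_REF and keeping the outermost object &s as the base.  */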

/* Fold *& at position *I_P in a vn_reference_op_s vector *OPS.  Updates
   *I_P to point to the last element of the replacement.  */
static bool
vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
				     unsigned int *i_p)
{
  bool changed = false;
  vn_reference_op_t op;

  do
    {
      unsigned int i = *i_p;
      op = &(*ops)[i];
      vn_reference_op_t mem_op = &(*ops)[i - 1];
      gimple *def_stmt;
      enum tree_code code;
      poly_offset_int off;

      def_stmt = SSA_NAME_DEF_STMT (op->op0);
      if (!is_gimple_assign (def_stmt))
	return changed;

      code = gimple_assign_rhs_code (def_stmt);
      if (code != ADDR_EXPR
	  && code != POINTER_PLUS_EXPR)
	return changed;

      off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);

      /* The only thing we have to do is from &OBJ.foo.bar add the offset
	 from .foo.bar to the preceding MEM_REF offset and replace the
	 address with &OBJ.  */
      if (code == ADDR_EXPR)
	{
	  tree addr, addr_base;
	  poly_int64 addr_offset;

	  addr = gimple_assign_rhs1 (def_stmt);
	  addr_base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (addr, 0),
						       &addr_offset,
						       vn_valueize);
	  /* If that didn't work because the address isn't invariant propagate
	     the reference tree from the address operation in case the current
	     dereference isn't offsetted.  */
	  if (!addr_base
	      && *i_p == ops->length () - 1
	      && known_eq (off, 0)
	      /* This makes us disable this transform for PRE where the
		 reference ops might be also used for code insertion which
		 is invalid.  */
	      && default_vn_walk_kind == VN_WALKREWRITE)
	    {
	      auto_vec<vn_reference_op_s, 32> tem;
	      copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
	      /* Make sure to preserve TBAA info.  The only objects not
		 wrapped in MEM_REFs that can have their address taken are
		 STRING_CSTs.  */
	      if (tem.length () >= 2
		  && tem[tem.length () - 2].opcode == MEM_REF)
		{
		  vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
		  new_mem_op->op0
		    = wide_int_to_tree (TREE_TYPE (mem_op->op0),
					wi::to_poly_wide (new_mem_op->op0));
		}
	      else
		gcc_assert (tem.last ().opcode == STRING_CST);
	      ops->pop ();
	      ops->pop ();
	      ops->safe_splice (tem);
	      --*i_p;
	      return true;
	    }
	  if (!addr_base
	      || TREE_CODE (addr_base) != MEM_REF
	      || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
		  && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base,
								    0))))
	    return changed;

	  off += addr_offset;
	  off += mem_ref_offset (addr_base);
	  op->op0 = TREE_OPERAND (addr_base, 0);
	}
      else
	{
	  tree ptr, ptroff;
	  ptr = gimple_assign_rhs1 (def_stmt);
	  ptroff = gimple_assign_rhs2 (def_stmt);
	  if (TREE_CODE (ptr) != SSA_NAME
	      || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
	      /* Make sure to not endlessly recurse.
		 See gcc.dg/tree-ssa/20040408-1.c for an example.  Can easily
		 happen when we value-number a PHI to its backedge value.  */
	      || SSA_VAL (ptr) == op->op0
	      || !poly_int_tree_p (ptroff))
	    return changed;

	  off += wi::to_poly_offset (ptroff);
	  op->op0 = ptr;
	}

      mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
      if (tree_fits_shwi_p (mem_op->op0))
	mem_op->off = tree_to_shwi (mem_op->op0);
      else
	mem_op->off = -1;
      /* ??? Can end up with endless recursion here!?
	 gcc.c-torture/execute/strcmp-1.c  */
      if (TREE_CODE (op->op0) == SSA_NAME)
	op->op0 = SSA_VAL (op->op0);
      if (TREE_CODE (op->op0) != SSA_NAME)
	op->opcode = TREE_CODE (op->op0);

      changed = true;
    }
  /* Tail-recurse.  */
  while (TREE_CODE (op->op0) == SSA_NAME);

  /* Fold a remaining *&.  */
  if (TREE_CODE (op->op0) == ADDR_EXPR)
    vn_reference_fold_indirect (ops, i_p);

  return changed;
}
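
/* Illustrative example (added for exposition): given p_2 = q_1 + 4 and
   ops ending in { MEM_REF<8>, SSA_NAME<p_2> }, the POINTER_PLUS_EXPR
   arm above rewrites them to { MEM_REF<12>, SSA_NAME<q_1> }, folding
   the pointer arithmetic into the MEM_REF offset; the do-while then
   keeps walking q_1's definition in turn.  */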

/* Optimize the reference REF to a constant if possible or return
   NULL_TREE if not.  */

tree
fully_constant_vn_reference_p (vn_reference_t ref)
{
  vec<vn_reference_op_s> operands = ref->operands;
  vn_reference_op_t op;

  /* Try to simplify the translated expression if it is
     a call to a builtin function with at most two arguments.  */
  op = &operands[0];
  if (op->opcode == CALL_EXPR
      && (!op->op0
	  || (TREE_CODE (op->op0) == ADDR_EXPR
	      && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
	      && fndecl_built_in_p (TREE_OPERAND (op->op0, 0),
				    BUILT_IN_NORMAL)))
      && operands.length () >= 2
      && operands.length () <= 3)
    {
      vn_reference_op_t arg0, arg1 = NULL;
      bool anyconst = false;
      arg0 = &operands[1];
      if (operands.length () > 2)
	arg1 = &operands[2];
      if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
	  || (arg0->opcode == ADDR_EXPR
	      && is_gimple_min_invariant (arg0->op0)))
	anyconst = true;
      if (arg1
	  && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
	      || (arg1->opcode == ADDR_EXPR
		  && is_gimple_min_invariant (arg1->op0))))
	anyconst = true;
      if (anyconst)
	{
	  combined_fn fn;
	  if (op->op0)
	    fn = as_combined_fn (DECL_FUNCTION_CODE
					(TREE_OPERAND (op->op0, 0)));
	  else
	    fn = as_combined_fn ((internal_fn) op->clique);
	  tree folded;
	  if (arg1)
	    folded = fold_const_call (fn, ref->type, arg0->op0, arg1->op0);
	  else
	    folded = fold_const_call (fn, ref->type, arg0->op0);
	  if (folded
	      && is_gimple_min_invariant (folded))
	    return folded;
	}
    }

  /* Simplify reads from constants or constant initializers.  */
  else if (BITS_PER_UNIT == 8
	   && ref->type
	   && COMPLETE_TYPE_P (ref->type)
	   && is_gimple_reg_type (ref->type))
    {
      poly_int64 off = 0;
      HOST_WIDE_INT size;
      if (INTEGRAL_TYPE_P (ref->type))
	size = TYPE_PRECISION (ref->type);
      else if (tree_fits_shwi_p (TYPE_SIZE (ref->type)))
	size = tree_to_shwi (TYPE_SIZE (ref->type));
      else
	return NULL_TREE;
      if (size % BITS_PER_UNIT != 0
	  || size > MAX_BITSIZE_MODE_ANY_MODE)
	return NULL_TREE;
      size /= BITS_PER_UNIT;
      unsigned i;
      for (i = 0; i < operands.length (); ++i)
	{
	  if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
	    {
	      ++i;
	      break;
	    }
	  if (known_eq (operands[i].off, -1))
	    return NULL_TREE;
	  off += operands[i].off;
	  if (operands[i].opcode == MEM_REF)
	    {
	      ++i;
	      break;
	    }
	}
      vn_reference_op_t base = &operands[--i];
      tree ctor = error_mark_node;
      tree decl = NULL_TREE;
      if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
	ctor = base->op0;
      else if (base->opcode == MEM_REF
	       && base[1].opcode == ADDR_EXPR
	       && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
		   || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
	{
	  decl = TREE_OPERAND (base[1].op0, 0);
	  if (TREE_CODE (decl) == STRING_CST)
	    ctor = decl;
	  else
	    ctor = ctor_for_folding (decl);
	}
      if (ctor == NULL_TREE)
	return build_zero_cst (ref->type);
      else if (ctor != error_mark_node)
	{
	  HOST_WIDE_INT const_off;
	  if (decl)
	    {
	      tree res = fold_ctor_reference (ref->type, ctor,
					      off * BITS_PER_UNIT,
					      size * BITS_PER_UNIT, decl);
	      if (res)
		{
		  STRIP_USELESS_TYPE_CONVERSION (res);
		  if (is_gimple_min_invariant (res))
		    return res;
		}
	    }
	  else if (off.is_constant (&const_off))
	    {
	      unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
	      int len = native_encode_expr (ctor, buf, size, const_off);
	      if (len > 0)
		return native_interpret_expr (ref->type, buf, len);
	    }
	}
    }

  return NULL_TREE;
}
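
/* Illustrative example (added for exposition, hypothetical): for

     static const int tab[4] = { 1, 2, 3, 4 };

   a load of tab[2] arrives here as ops over MEM[&tab] with byte
   offset 8 and, assuming a 4-byte int, is folded via
   fold_ctor_reference to the constant 3.  */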

/* Return true if OPS contain a storage order barrier.  */

static bool
contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
{
  vn_reference_op_t op;
  unsigned i;

  FOR_EACH_VEC_ELT (ops, i, op)
    if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
      return true;

  return false;
}

/* Return true if OPS represent an access with reverse storage order.  */

static bool
reverse_storage_order_for_component_p (vec<vn_reference_op_s> ops)
{
  unsigned i = 0;
  if (ops[i].opcode == REALPART_EXPR || ops[i].opcode == IMAGPART_EXPR)
    ++i;
  switch (ops[i].opcode)
    {
    case ARRAY_REF:
    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case MEM_REF:
      return ops[i].reverse;
    default:
      return false;
    }
}

/* Transform any SSA_NAME's in a vector of vn_reference_op_s
   structures into their value numbers.  This is done in-place, and
   the vector passed in is returned.  *VALUEIZED_ANYTHING will specify
   whether any operands were valueized.  */

static void
valueize_refs_1 (vec<vn_reference_op_s> *orig, bool *valueized_anything,
		 bool with_avail = false)
{
  vn_reference_op_t vro;
  unsigned int i;

  *valueized_anything = false;

  FOR_EACH_VEC_ELT (*orig, i, vro)
    {
      if (vro->opcode == SSA_NAME
	  || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
	{
	  tree tem = with_avail ? vn_valueize (vro->op0) : SSA_VAL (vro->op0);
	  if (tem != vro->op0)
	    {
	      *valueized_anything = true;
	      vro->op0 = tem;
	    }
	  /* If it transforms from an SSA_NAME to a constant, update
	     the opcode.  */
	  if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
	    vro->opcode = TREE_CODE (vro->op0);
	}
      if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
	{
	  tree tem = with_avail ? vn_valueize (vro->op1) : SSA_VAL (vro->op1);
	  if (tem != vro->op1)
	    {
	      *valueized_anything = true;
	      vro->op1 = tem;
	    }
	}
      if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
	{
	  tree tem = with_avail ? vn_valueize (vro->op2) : SSA_VAL (vro->op2);
	  if (tem != vro->op2)
	    {
	      *valueized_anything = true;
	      vro->op2 = tem;
	    }
	}
      /* If it transforms from an SSA_NAME to an address, fold with
	 a preceding indirect reference.  */
      if (i > 0
	  && vro->op0
	  && TREE_CODE (vro->op0) == ADDR_EXPR
	  && (*orig)[i - 1].opcode == MEM_REF)
	{
	  if (vn_reference_fold_indirect (orig, &i))
	    *valueized_anything = true;
	}
      else if (i > 0
	       && vro->opcode == SSA_NAME
	       && (*orig)[i - 1].opcode == MEM_REF)
	{
	  if (vn_reference_maybe_forwprop_address (orig, &i))
	    *valueized_anything = true;
	}
      /* If it transforms a non-constant ARRAY_REF into a constant
	 one, adjust the constant offset.  */
      else if (vro->opcode == ARRAY_REF
	       && known_eq (vro->off, -1)
	       && poly_int_tree_p (vro->op0)
	       && poly_int_tree_p (vro->op1)
	       && TREE_CODE (vro->op2) == INTEGER_CST)
	{
	  poly_offset_int off = ((wi::to_poly_offset (vro->op0)
				  - wi::to_poly_offset (vro->op1))
				 * wi::to_offset (vro->op2)
				 * vn_ref_op_align_unit (vro));
	  off.to_shwi (&vro->off);
	}
    }
}

static void
valueize_refs (vec<vn_reference_op_s> *orig)
{
  bool tem;
  valueize_refs_1 (orig, &tem);
}

static vec<vn_reference_op_s> shared_lookup_references;

/* Create a vector of vn_reference_op_s structures from REF, a
   REFERENCE_CLASS_P tree.  The vector is shared among all callers of
   this function.  *VALUEIZED_ANYTHING will specify whether any
   operands were valueized.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
{
  if (!ref)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_ref (ref, &shared_lookup_references);
  valueize_refs_1 (&shared_lookup_references, valueized_anything);
  return shared_lookup_references;
}

/* Create a vector of vn_reference_op_s structures from CALL, a
   call statement.  The vector is shared among all callers of
   this function.  */

static vec<vn_reference_op_s>
valueize_shared_reference_ops_from_call (gcall *call)
{
  if (!call)
    return vNULL;
  shared_lookup_references.truncate (0);
  copy_reference_ops_from_call (call, &shared_lookup_references);
  valueize_refs (&shared_lookup_references);
  return shared_lookup_references;
}

/* Lookup a SCCVN reference operation VR in the current hash table.
   Returns the resulting value number if it exists in the hash table,
   NULL_TREE otherwise.  VNRESULT will be filled in with the actual
   vn_reference_t stored in the hashtable if something is found.  */

static tree
vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
{
  vn_reference_s **slot;
  hashval_t hash;

  hash = vr->hashcode;
  slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
  if (slot)
    {
      if (vnresult)
	*vnresult = (vn_reference_t)*slot;
      return ((vn_reference_t)*slot)->result;
    }

  return NULL_TREE;
}


/* Partial definition tracking support.  */

struct pd_range
{
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
};

struct pd_data
{
  tree rhs;
  HOST_WIDE_INT offset;
  HOST_WIDE_INT size;
};

/* Context for alias walking.  */

struct vn_walk_cb_data
{
  vn_walk_cb_data (vn_reference_t vr_, tree orig_ref_, tree *last_vuse_ptr_,
		   vn_lookup_kind vn_walk_kind_, bool tbaa_p_, tree mask_)
    : vr (vr_), last_vuse_ptr (last_vuse_ptr_), last_vuse (NULL_TREE),
      mask (mask_), masked_result (NULL_TREE), vn_walk_kind (vn_walk_kind_),
      tbaa_p (tbaa_p_), saved_operands (vNULL), first_set (-2),
      first_base_set (-2), known_ranges (NULL)
  {
    if (!last_vuse_ptr)
      last_vuse_ptr = &last_vuse;
    ao_ref_init (&orig_ref, orig_ref_);
    if (mask)
      {
	wide_int w = wi::to_wide (mask);
	unsigned int pos = 0, prec = w.get_precision ();
	pd_data pd;
	pd.rhs = build_constructor (NULL_TREE, NULL);
	/* When bitwise and with a constant is done on a memory load,
	   we don't really need all the bits to be defined or defined
	   to constants, we don't really care what is in the position
	   corresponding to 0 bits in the mask.
	   So, push the ranges of those 0 bits in the mask as artificial
	   zero stores and let the partial def handling code do the
	   rest.  */
	while (pos < prec)
	  {
	    int tz = wi::ctz (w);
	    if (pos + tz > prec)
	      tz = prec - pos;
	    if (tz)
	      {
		if (BYTES_BIG_ENDIAN)
		  pd.offset = prec - pos - tz;
		else
		  pd.offset = pos;
		pd.size = tz;
		void *r = push_partial_def (pd, 0, 0, 0, prec);
		gcc_assert (r == NULL_TREE);
	      }
	    pos += tz;
	    if (pos == prec)
	      break;
	    w = wi::lrshift (w, tz);
	    tz = wi::ctz (wi::bit_not (w));
	    if (pos + tz > prec)
	      tz = prec - pos;
	    pos += tz;
	    w = wi::lrshift (w, tz);
	  }
      }
  }
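
  /* Worked example for the loop above (added for exposition): for mask
     0xff00 with 32-bit precision on a little-endian target, the loop
     pushes artificial zero stores for the bit ranges [0, 8) and
     [16, 32), so only the bits actually kept by the mask need to be
     provided by real partial definitions.  */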
  ~vn_walk_cb_data ();
  void *finish (alias_set_type, alias_set_type, tree);
  void *push_partial_def (pd_data pd,
			  alias_set_type, alias_set_type, HOST_WIDE_INT,
			  HOST_WIDE_INT);

  vn_reference_t vr;
  ao_ref orig_ref;
  tree *last_vuse_ptr;
  tree last_vuse;
  tree mask;
  tree masked_result;
  vn_lookup_kind vn_walk_kind;
  bool tbaa_p;
  vec<vn_reference_op_s> saved_operands;

  /* The VDEFs of partial defs we come along.  */
  auto_vec<pd_data, 2> partial_defs;
  /* The first defs range to avoid splay tree setup in most cases.  */
  pd_range first_range;
  alias_set_type first_set;
  alias_set_type first_base_set;
  splay_tree known_ranges;
  obstack ranges_obstack;
};

vn_walk_cb_data::~vn_walk_cb_data ()
{
  if (known_ranges)
    {
      splay_tree_delete (known_ranges);
      obstack_free (&ranges_obstack, NULL);
    }
  saved_operands.release ();
}

void *
vn_walk_cb_data::finish (alias_set_type set, alias_set_type base_set, tree val)
{
  if (first_set != -2)
    {
      set = first_set;
      base_set = first_base_set;
    }
  if (mask)
    {
      masked_result = val;
      return (void *) -1;
    }
  vec<vn_reference_op_s> &operands
    = saved_operands.exists () ? saved_operands : vr->operands;
  return vn_reference_lookup_or_insert_for_pieces (last_vuse, set, base_set,
						   vr->type, operands, val);
}

/* pd_range splay-tree helpers.  */

static int
pd_range_compare (splay_tree_key offset1p, splay_tree_key offset2p)
{
  HOST_WIDE_INT offset1 = *(HOST_WIDE_INT *)offset1p;
  HOST_WIDE_INT offset2 = *(HOST_WIDE_INT *)offset2p;
  if (offset1 < offset2)
    return -1;
  else if (offset1 > offset2)
    return 1;
  return 0;
}

static void *
pd_tree_alloc (int size, void *data_)
{
  vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
  return obstack_alloc (&data->ranges_obstack, size);
}

static void
pd_tree_dealloc (void *, void *)
{
}
1930 /* Push PD to the vector of partial definitions returning a
1931 value when we are ready to combine things with VUSE, SET and MAXSIZEI,
1932 NULL when we want to continue looking for partial defs or -1
1933 on failure. */
1935 void *
1936 vn_walk_cb_data::push_partial_def (pd_data pd,
1937 alias_set_type set, alias_set_type base_set,
1938 HOST_WIDE_INT offseti,
1939 HOST_WIDE_INT maxsizei)
1941 const HOST_WIDE_INT bufsize = 64;
1942 /* We're using a fixed buffer for encoding so fail early if the object
1943 we want to interpret is bigger. */
1944 if (maxsizei > bufsize * BITS_PER_UNIT
1945 || CHAR_BIT != 8
1946 || BITS_PER_UNIT != 8
1947 /* Not prepared to handle PDP endian. */
1948 || BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
1949 return (void *)-1;
1951 /* Turn too large constant stores into non-constant stores. */
1952 if (CONSTANT_CLASS_P (pd.rhs) && pd.size > bufsize * BITS_PER_UNIT)
1953 pd.rhs = error_mark_node;
1955 /* And for non-constant or CONSTRUCTOR stores shrink them to only keep at
1956 most a partial byte before and/or after the region. */
1957 if (!CONSTANT_CLASS_P (pd.rhs))
1959 if (pd.offset < offseti)
1961 HOST_WIDE_INT o = ROUND_DOWN (offseti - pd.offset, BITS_PER_UNIT);
1962 gcc_assert (pd.size > o);
1963 pd.size -= o;
1964 pd.offset += o;
1966 if (pd.size > maxsizei)
1967 pd.size = maxsizei + ((pd.size - maxsizei) % BITS_PER_UNIT);
1970 pd.offset -= offseti;
1972 bool pd_constant_p = (TREE_CODE (pd.rhs) == CONSTRUCTOR
1973 || CONSTANT_CLASS_P (pd.rhs));
1974 if (partial_defs.is_empty ())
1976 /* If we get a clobber upfront, fail. */
1977 if (TREE_CLOBBER_P (pd.rhs))
1978 return (void *)-1;
1979 if (!pd_constant_p)
1980 return (void *)-1;
1981 partial_defs.safe_push (pd);
1982 first_range.offset = pd.offset;
1983 first_range.size = pd.size;
1984 first_set = set;
1985 first_base_set = base_set;
1986 last_vuse_ptr = NULL;
1987 /* Continue looking for partial defs. */
1988 return NULL;
1991 if (!known_ranges)
1993 /* ??? Optimize the case where the 2nd partial def completes things. */
1994 gcc_obstack_init (&ranges_obstack);
1995 known_ranges = splay_tree_new_with_allocator (pd_range_compare, 0, 0,
1996 pd_tree_alloc,
1997 pd_tree_dealloc, this);
1998 splay_tree_insert (known_ranges,
1999 (splay_tree_key)&first_range.offset,
2000 (splay_tree_value)&first_range);
2003 pd_range newr = { pd.offset, pd.size };
2004 splay_tree_node n;
2005 pd_range *r;
2006 /* Lookup the predecessor of offset + 1 and see if we need to merge. */
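  /* Note the +1 applied to both the lookup offset and the recorded
     size below: it makes touching ranges count as overlapping, so
     e.g. an existing range at offset 0 with size 8 and a new range at
     offset 8 with size 8 merge into a single node covering [0, 16)
     instead of staying separate.  */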
2007 HOST_WIDE_INT loffset = newr.offset + 1;
2008 if ((n = splay_tree_predecessor (known_ranges, (splay_tree_key)&loffset))
2009 && ((r = (pd_range *)n->value), true)
2010 && ranges_known_overlap_p (r->offset, r->size + 1,
2011 newr.offset, newr.size))
2013 /* Ignore partial defs already covered. This also silently drops
2014    clobbers that arrive fully shadowed by defs closer to the use. */
2015 if (known_subrange_p (newr.offset, newr.size, r->offset, r->size))
2016 return NULL;
2017 r->size = MAX (r->offset + r->size, newr.offset + newr.size) - r->offset;
2019 else
2021 /* newr.offset wasn't covered yet, insert the range. */
2022 r = XOBNEW (&ranges_obstack, pd_range);
2023 *r = newr;
2024 splay_tree_insert (known_ranges, (splay_tree_key)&r->offset,
2025 (splay_tree_value)r);
2027 /* Merge r, which now contains newr and is a member of the splay tree,
2028    with adjacent overlapping ranges. */
2029 pd_range *rafter;
2030 while ((n = splay_tree_successor (known_ranges, (splay_tree_key)&r->offset))
2031 && ((rafter = (pd_range *)n->value), true)
2032 && ranges_known_overlap_p (r->offset, r->size + 1,
2033 rafter->offset, rafter->size))
2035 r->size = MAX (r->offset + r->size,
2036 rafter->offset + rafter->size) - r->offset;
2037 splay_tree_remove (known_ranges, (splay_tree_key)&rafter->offset);
2039 /* If we get a clobber, fail. */
2040 if (TREE_CLOBBER_P (pd.rhs))
2041 return (void *)-1;
2042 /* Non-constants are OK as long as they are shadowed by a constant. */
2043 if (!pd_constant_p)
2044 return (void *)-1;
2045 partial_defs.safe_push (pd);
2047 /* Now we have merged newr into the range tree. When we have covered
2048    the whole access [0, maxsizei] then the tree will contain exactly one
2049    node which has the desired properties and it will be 'r'. */
2050 if (!known_subrange_p (0, maxsizei, r->offset, r->size))
2051 /* Continue looking for partial defs. */
2052 return NULL;
2054 /* Now simply native encode all partial defs in reverse order. */
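  /* Popping visits the defs furthest from the use first, i.e. in
     program order, so bytes from stores closer to the use are encoded
     last and overwrite those of earlier, shadowed stores.  */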
2055 unsigned ndefs = partial_defs.length ();
2056 /* We support up to 512-bit values (for V8DFmode). */
2057 unsigned char buffer[bufsize + 1];
2058 unsigned char this_buffer[bufsize + 1];
2059 int len;
2061 memset (buffer, 0, bufsize + 1);
2062 unsigned needed_len = ROUND_UP (maxsizei, BITS_PER_UNIT) / BITS_PER_UNIT;
2063 while (!partial_defs.is_empty ())
2065 pd_data pd = partial_defs.pop ();
2066 unsigned int amnt;
2067 if (TREE_CODE (pd.rhs) == CONSTRUCTOR)
2069 /* Empty CONSTRUCTOR. */
2070 if (pd.size >= needed_len * BITS_PER_UNIT)
2071 len = needed_len;
2072 else
2073 len = ROUND_UP (pd.size, BITS_PER_UNIT) / BITS_PER_UNIT;
2074 memset (this_buffer, 0, len);
2076 else
2078 len = native_encode_expr (pd.rhs, this_buffer, bufsize,
2079 MAX (0, -pd.offset) / BITS_PER_UNIT);
2080 if (len <= 0
2081 || len < (ROUND_UP (pd.size, BITS_PER_UNIT) / BITS_PER_UNIT
2082 - MAX (0, -pd.offset) / BITS_PER_UNIT))
2084 if (dump_file && (dump_flags & TDF_DETAILS))
2085 fprintf (dump_file, "Failed to encode %u "
2086 "partial definitions\n", ndefs);
2087 return (void *)-1;
2091 unsigned char *p = buffer;
2092 HOST_WIDE_INT size = pd.size;
2093 if (pd.offset < 0)
2094 size -= ROUND_DOWN (-pd.offset, BITS_PER_UNIT);
2095 this_buffer[len] = 0;
2096 if (BYTES_BIG_ENDIAN)
2098 /* LSB of this_buffer[len - 1] byte should be at
2099 pd.offset + pd.size - 1 bits in buffer. */
2100 amnt = ((unsigned HOST_WIDE_INT) pd.offset
2101 + pd.size) % BITS_PER_UNIT;
2102 if (amnt)
2103 shift_bytes_in_array_right (this_buffer, len + 1, amnt);
2104 unsigned char *q = this_buffer;
2105 unsigned int off = 0;
2106 if (pd.offset >= 0)
2108 unsigned int msk;
2109 off = pd.offset / BITS_PER_UNIT;
2110 gcc_assert (off < needed_len);
2111 p = buffer + off;
2112 if (size <= amnt)
2114 msk = ((1 << size) - 1) << (BITS_PER_UNIT - amnt);
2115 *p = (*p & ~msk) | (this_buffer[len] & msk);
2116 size = 0;
2118 else
2120 if (TREE_CODE (pd.rhs) != CONSTRUCTOR)
2121 q = (this_buffer + len
2122 - (ROUND_UP (size - amnt, BITS_PER_UNIT)
2123 / BITS_PER_UNIT));
2124 if (pd.offset % BITS_PER_UNIT)
2126 msk = -1U << (BITS_PER_UNIT
2127 - (pd.offset % BITS_PER_UNIT));
2128 *p = (*p & msk) | (*q & ~msk);
2129 p++;
2130 q++;
2131 off++;
2132 size -= BITS_PER_UNIT - (pd.offset % BITS_PER_UNIT);
2133 gcc_assert (size >= 0);
2137 else if (TREE_CODE (pd.rhs) != CONSTRUCTOR)
2139 q = (this_buffer + len
2140 - (ROUND_UP (size - amnt, BITS_PER_UNIT)
2141 / BITS_PER_UNIT));
2142 if (pd.offset % BITS_PER_UNIT)
2144 q++;
2145 size -= BITS_PER_UNIT - ((unsigned HOST_WIDE_INT) pd.offset
2146 % BITS_PER_UNIT);
2147 gcc_assert (size >= 0);
2150 if ((unsigned HOST_WIDE_INT) size / BITS_PER_UNIT + off
2151 > needed_len)
2152 size = (needed_len - off) * BITS_PER_UNIT;
2153 memcpy (p, q, size / BITS_PER_UNIT);
2154 if (size % BITS_PER_UNIT)
2156 unsigned int msk
2157 = -1U << (BITS_PER_UNIT - (size % BITS_PER_UNIT));
2158 p += size / BITS_PER_UNIT;
2159 q += size / BITS_PER_UNIT;
2160 *p = (*q & msk) | (*p & ~msk);
2163 else
2165 if (pd.offset >= 0)
2167 /* LSB of this_buffer[0] byte should be at pd.offset bits
2168 in buffer. */
2169 unsigned int msk;
2170 size = MIN (size, (HOST_WIDE_INT) needed_len * BITS_PER_UNIT);
2171 amnt = pd.offset % BITS_PER_UNIT;
2172 if (amnt)
2173 shift_bytes_in_array_left (this_buffer, len + 1, amnt);
2174 unsigned int off = pd.offset / BITS_PER_UNIT;
2175 gcc_assert (off < needed_len);
2176 size = MIN (size,
2177 (HOST_WIDE_INT) (needed_len - off) * BITS_PER_UNIT);
2178 p = buffer + off;
2179 if (amnt + size < BITS_PER_UNIT)
2181 /* Low amnt bits come from *p, then size bits
2182 from this_buffer[0] and the remaining again from
2183 *p. */
2184 msk = ((1 << size) - 1) << amnt;
2185 *p = (*p & ~msk) | (this_buffer[0] & msk);
2186 size = 0;
2188 else if (amnt)
2190 msk = -1U << amnt;
2191 *p = (*p & ~msk) | (this_buffer[0] & msk);
2192 p++;
2193 size -= (BITS_PER_UNIT - amnt);
2196 else
2198 amnt = (unsigned HOST_WIDE_INT) pd.offset % BITS_PER_UNIT;
2199 if (amnt)
2200 size -= BITS_PER_UNIT - amnt;
2201 size = MIN (size, (HOST_WIDE_INT) needed_len * BITS_PER_UNIT);
2202 if (amnt)
2203 shift_bytes_in_array_left (this_buffer, len + 1, amnt);
2205 memcpy (p, this_buffer + (amnt != 0), size / BITS_PER_UNIT);
2206 p += size / BITS_PER_UNIT;
2207 if (size % BITS_PER_UNIT)
2209 unsigned int msk = -1U << (size % BITS_PER_UNIT);
2210 *p = (this_buffer[(amnt != 0) + size / BITS_PER_UNIT]
2211 & ~msk) | (*p & msk);
2216 tree type = vr->type;
2217 /* Make sure to interpret in a type that has a range covering the whole
2218 access size. */
2219 if (INTEGRAL_TYPE_P (vr->type) && maxsizei != TYPE_PRECISION (vr->type))
2220 type = build_nonstandard_integer_type (maxsizei, TYPE_UNSIGNED (type));
2221 tree val;
2222 if (BYTES_BIG_ENDIAN)
2224 unsigned sz = needed_len;
2225 if (maxsizei % BITS_PER_UNIT)
2226 shift_bytes_in_array_right (buffer, needed_len,
2227 BITS_PER_UNIT
2228 - (maxsizei % BITS_PER_UNIT));
2229 if (INTEGRAL_TYPE_P (type))
2230 sz = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
2231 if (sz > needed_len)
2233 memcpy (this_buffer + (sz - needed_len), buffer, needed_len);
2234 val = native_interpret_expr (type, this_buffer, sz);
2236 else
2237 val = native_interpret_expr (type, buffer, needed_len);
2239 else
2240 val = native_interpret_expr (type, buffer, bufsize);
2241 /* If we chop off bits because the type's precision doesn't match the memory
2242    access size, this is ok when optimizing reads but not when called from
2243    the DSE code during elimination. */
2244 if (val && type != vr->type)
2246 if (! int_fits_type_p (val, vr->type))
2247 val = NULL_TREE;
2248 else
2249 val = fold_convert (vr->type, val);
2252 if (val)
2254 if (dump_file && (dump_flags & TDF_DETAILS))
2255 fprintf (dump_file,
2256 "Successfully combined %u partial definitions\n", ndefs);
2257 /* We are using the alias-set of the first store we encounter, which
2258    should be appropriate here. */
2259 return finish (first_set, first_base_set, val);
2261 else
2263 if (dump_file && (dump_flags & TDF_DETAILS))
2264 fprintf (dump_file,
2265 "Failed to interpret %u encoded partial definitions\n", ndefs);
2266 return (void *)-1;
2270 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
2271 with the current VUSE and performs the expression lookup. */
2273 static void *
2274 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *data_)
2276 vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
2277 vn_reference_t vr = data->vr;
2278 vn_reference_s **slot;
2279 hashval_t hash;
2281 /* If we have partial definitions recorded we have to go through
2282 vn_reference_lookup_3. */
2283 if (!data->partial_defs.is_empty ())
2284 return NULL;
2286 if (data->last_vuse_ptr)
2288 *data->last_vuse_ptr = vuse;
2289 data->last_vuse = vuse;
2292 /* Fixup vuse and hash. */
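  /* The vuse enters the hash only via SSA_NAME_VERSION, so the hash
     can be re-targeted to the new vuse by subtracting the old version
     and adding the new one instead of recomputing it.  */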
2293 if (vr->vuse)
2294 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
2295 vr->vuse = vuse_ssa_val (vuse);
2296 if (vr->vuse)
2297 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
2299 hash = vr->hashcode;
2300 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
2301 if (slot)
2303 if ((*slot)->result && data->saved_operands.exists ())
2304 return data->finish (vr->set, vr->base_set, (*slot)->result);
2305 return *slot;
2308 return NULL;
2311 /* Lookup an existing or insert a new vn_reference entry into the
2312    value table for the VUSE, SET, BASE_SET, TYPE, OPERANDS reference
2313    whose value is VALUE, either a constant or an SSA name. */
2315 static vn_reference_t
2316 vn_reference_lookup_or_insert_for_pieces (tree vuse,
2317 alias_set_type set,
2318 alias_set_type base_set,
2319 tree type,
2320 vec<vn_reference_op_s,
2321 va_heap> operands,
2322 tree value)
2324 vn_reference_s vr1;
2325 vn_reference_t result;
2326 unsigned value_id;
2327 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2328 vr1.operands = operands;
2329 vr1.type = type;
2330 vr1.set = set;
2331 vr1.base_set = base_set;
2332 vr1.hashcode = vn_reference_compute_hash (&vr1);
2333 if (vn_reference_lookup_1 (&vr1, &result))
2334 return result;
2335 if (TREE_CODE (value) == SSA_NAME)
2336 value_id = VN_INFO (value)->value_id;
2337 else
2338 value_id = get_or_alloc_constant_value_id (value);
2339 return vn_reference_insert_pieces (vuse, set, base_set, type,
2340 operands.copy (), value, value_id);
2343 /* Return a value-number for RCODE OPS... either by looking up an existing
2344 value-number for the possibly simplified result or by inserting the
2345 operation if INSERT is true. If SIMPLIFY is false, return a value
2346 number for the unsimplified expression. */
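/* For example (hypothetical input), for PLUS_EXPR with operands
   {x_1, 0} the match-and-simplify machinery folds the expression to
   x_1 and its valueized value is returned without any insertion;
   for an expression that does not simplify and is not yet in the
   tables, a new SSA name is created and value-numbered to itself
   when INSERT is true.  */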
2348 static tree
2349 vn_nary_build_or_lookup_1 (gimple_match_op *res_op, bool insert,
2350 bool simplify)
2352 tree result = NULL_TREE;
2353 /* We will be creating a value number for
2354 RCODE (OPS...).
2355 So first simplify and lookup this expression to see if it
2356 is already available. */
2357 /* For simplification valueize. */
2358 unsigned i = 0;
2359 if (simplify)
2360 for (i = 0; i < res_op->num_ops; ++i)
2361 if (TREE_CODE (res_op->ops[i]) == SSA_NAME)
2363 tree tem = vn_valueize (res_op->ops[i]);
2364 if (!tem)
2365 break;
2366 res_op->ops[i] = tem;
2368 /* If valueization of an operand fails (it is not available), skip
2369 simplification. */
2370 bool res = false;
2371 if (i == res_op->num_ops)
2373 mprts_hook = vn_lookup_simplify_result;
2374 res = res_op->resimplify (NULL, vn_valueize);
2375 mprts_hook = NULL;
2377 gimple *new_stmt = NULL;
2378 if (res
2379 && gimple_simplified_result_is_gimple_val (res_op))
2381 /* The expression is already available. */
2382 result = res_op->ops[0];
2383 /* Valueize it, simplification returns something in AVAIL only. */
2384 if (TREE_CODE (result) == SSA_NAME)
2385 result = SSA_VAL (result);
2387 else
2389 tree val = vn_lookup_simplify_result (res_op);
2390 if (!val && insert)
2392 gimple_seq stmts = NULL;
2393 result = maybe_push_res_to_seq (res_op, &stmts);
2394 if (result)
2396 gcc_assert (gimple_seq_singleton_p (stmts));
2397 new_stmt = gimple_seq_first_stmt (stmts);
2400 else
2401 /* The expression is already available. */
2402 result = val;
2404 if (new_stmt)
2406 /* The expression is not yet available, value-number lhs to
2407 the new SSA_NAME we created. */
2408 /* Initialize value-number information properly. */
2409 vn_ssa_aux_t result_info = VN_INFO (result);
2410 result_info->valnum = result;
2411 result_info->value_id = get_next_value_id ();
2412 result_info->visited = 1;
2413 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
2414 new_stmt);
2415 result_info->needs_insertion = true;
2416 /* ??? PRE phi-translation inserts NARYs without corresponding
2417 SSA name result. Re-use those but set their result according
2418 to the stmt we just built. */
2419 vn_nary_op_t nary = NULL;
2420 vn_nary_op_lookup_stmt (new_stmt, &nary);
2421 if (nary)
2423 gcc_assert (! nary->predicated_values && nary->u.result == NULL_TREE);
2424 nary->u.result = gimple_assign_lhs (new_stmt);
2426 /* As all "inserted" statements are singleton SCCs, insert
2427 to the valid table. This is strictly needed to
2428 avoid re-generating new value SSA_NAMEs for the same
2429 expression during SCC iteration over and over (the
2430 optimistic table gets cleared after each iteration).
2431 We do not need to insert into the optimistic table, as
2432 lookups there will fall back to the valid table. */
2433 else
2435 unsigned int length = vn_nary_length_from_stmt (new_stmt);
2436 vn_nary_op_t vno1
2437 = alloc_vn_nary_op_noinit (length, &vn_tables_insert_obstack);
2438 vno1->value_id = result_info->value_id;
2439 vno1->length = length;
2440 vno1->predicated_values = 0;
2441 vno1->u.result = result;
2442 init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (new_stmt));
2443 vn_nary_op_insert_into (vno1, valid_info->nary, true);
2444 /* Also do not link it into the undo chain. */
2445 last_inserted_nary = vno1->next;
2446 vno1->next = (vn_nary_op_t)(void *)-1;
2448 if (dump_file && (dump_flags & TDF_DETAILS))
2450 fprintf (dump_file, "Inserting name ");
2451 print_generic_expr (dump_file, result);
2452 fprintf (dump_file, " for expression ");
2453 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
2454 fprintf (dump_file, "\n");
2457 return result;
2460 /* Return a value-number for RCODE OPS... either by looking up an existing
2461 value-number for the simplified result or by inserting the operation. */
2463 static tree
2464 vn_nary_build_or_lookup (gimple_match_op *res_op)
2466 return vn_nary_build_or_lookup_1 (res_op, true, true);
2469 /* Try to simplify the expression represented by NARY and return
2470    its value if present. */
2472 tree
2473 vn_nary_simplify (vn_nary_op_t nary)
2475 if (nary->length > gimple_match_op::MAX_NUM_OPS)
2476 return NULL_TREE;
2477 gimple_match_op op (gimple_match_cond::UNCOND, nary->opcode,
2478 nary->type, nary->length);
2479 memcpy (op.ops, nary->op, sizeof (tree) * nary->length);
2480 return vn_nary_build_or_lookup_1 (&op, false, true);
2483 /* Elimination engine. */
2485 class eliminate_dom_walker : public dom_walker
2487 public:
2488 eliminate_dom_walker (cdi_direction, bitmap);
2489 ~eliminate_dom_walker ();
2491 virtual edge before_dom_children (basic_block);
2492 virtual void after_dom_children (basic_block);
2494 virtual tree eliminate_avail (basic_block, tree op);
2495 virtual void eliminate_push_avail (basic_block, tree op);
2496 tree eliminate_insert (basic_block, gimple_stmt_iterator *gsi, tree val);
2498 void eliminate_stmt (basic_block, gimple_stmt_iterator *);
2500 unsigned eliminate_cleanup (bool region_p = false);
2502 bool do_pre;
2503 unsigned int el_todo;
2504 unsigned int eliminations;
2505 unsigned int insertions;
2507 /* SSA names that had their defs inserted by PRE if do_pre. */
2508 bitmap inserted_exprs;
2510 /* Blocks with statements that have had their EH properties changed. */
2511 bitmap need_eh_cleanup;
2513 /* Blocks with statements that have had their AB properties changed. */
2514 bitmap need_ab_cleanup;
2516 /* Local state for the eliminate domwalk. */
2517 auto_vec<gimple *> to_remove;
2518 auto_vec<gimple *> to_fixup;
2519 auto_vec<tree> avail;
2520 auto_vec<tree> avail_stack;
2523 /* Adaptor to the elimination engine using RPO availability. */
2525 class rpo_elim : public eliminate_dom_walker
2527 public:
2528 rpo_elim(basic_block entry_)
2529 : eliminate_dom_walker (CDI_DOMINATORS, NULL), entry (entry_),
2530 m_avail_freelist (NULL) {}
2532 virtual tree eliminate_avail (basic_block, tree op);
2534 virtual void eliminate_push_avail (basic_block, tree);
2536 basic_block entry;
2537 /* Freelist of avail entries which are allocated from the vn_ssa_aux
2538 obstack. */
2539 vn_avail *m_avail_freelist;
2542 /* Global RPO state for access from hooks. */
2543 static eliminate_dom_walker *rpo_avail;
2544 basic_block vn_context_bb;
2546 /* Return true if BASE1 and BASE2 can be adjusted so they have the
2547 same address and adjust *OFFSET1 and *OFFSET2 accordingly.
2548 Otherwise return false. */
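/* For example MEM[p + 4B] at bit offset 0 and MEM[p] at bit offset 32
   both adjust to base p with bit offset 32 and thus compare equal.  */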
2550 static bool
2551 adjust_offsets_for_equal_base_address (tree base1, poly_int64 *offset1,
2552 tree base2, poly_int64 *offset2)
2554 poly_int64 soff;
2555 if (TREE_CODE (base1) == MEM_REF
2556 && TREE_CODE (base2) == MEM_REF)
2558 if (mem_ref_offset (base1).to_shwi (&soff))
2560 base1 = TREE_OPERAND (base1, 0);
2561 *offset1 += soff * BITS_PER_UNIT;
2563 if (mem_ref_offset (base2).to_shwi (&soff))
2565 base2 = TREE_OPERAND (base2, 0);
2566 *offset2 += soff * BITS_PER_UNIT;
2568 return operand_equal_p (base1, base2, 0);
2570 return operand_equal_p (base1, base2, OEP_ADDRESS_OF);
2573 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
2574 from the statement defining VUSE and if not successful tries to
2575    translate *REF and VR_ through an aggregate copy at the definition
2576 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
2577 of *REF and *VR. If only disambiguation was performed then
2578 *DISAMBIGUATE_ONLY is set to true. */
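/* In overview the cases handled below are: disambiguation against the
   valueized LHS or valueized call arguments, then deriving a value from
   1) a memset, 2) an empty CONSTRUCTOR, 3) a constant store, 4) an SSA
   name store, 5) an aggregate copy and 6) a memcpy-like call, the last
   two by translating the reference to the copy source and continuing
   the walk.  */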
2580 static void *
2581 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
2582 translate_flags *disambiguate_only)
2584 vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
2585 vn_reference_t vr = data->vr;
2586 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
2587 tree base = ao_ref_base (ref);
2588 HOST_WIDE_INT offseti = 0, maxsizei, sizei = 0;
2589 static vec<vn_reference_op_s> lhs_ops;
2590 ao_ref lhs_ref;
2591 bool lhs_ref_ok = false;
2592 poly_int64 copy_size;
2594 /* First try to disambiguate after value-replacing in the definition's LHS. */
2595 if (is_gimple_assign (def_stmt))
2597 tree lhs = gimple_assign_lhs (def_stmt);
2598 bool valueized_anything = false;
2599 /* Avoid re-allocation overhead. */
2600 lhs_ops.truncate (0);
2601 basic_block saved_rpo_bb = vn_context_bb;
2602 vn_context_bb = gimple_bb (def_stmt);
2603 if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE)
2605 copy_reference_ops_from_ref (lhs, &lhs_ops);
2606 valueize_refs_1 (&lhs_ops, &valueized_anything, true);
2608 vn_context_bb = saved_rpo_bb;
2609 ao_ref_init (&lhs_ref, lhs);
2610 lhs_ref_ok = true;
2611 if (valueized_anything
2612 && ao_ref_init_from_vn_reference
2613 (&lhs_ref, ao_ref_alias_set (&lhs_ref),
2614 ao_ref_base_alias_set (&lhs_ref), TREE_TYPE (lhs), lhs_ops)
2615 && !refs_may_alias_p_1 (ref, &lhs_ref, data->tbaa_p))
2617 *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
2618 return NULL;
2621 /* Besides valueizing the LHS we can also use access-path based
2622 disambiguation on the original non-valueized ref. */
2623 if (!ref->ref
2624 && lhs_ref_ok
2625 && data->orig_ref.ref)
2627 /* We want to use the non-valueized LHS for this, but avoid redundant
2628 work. */
2629 ao_ref *lref = &lhs_ref;
2630 ao_ref lref_alt;
2631 if (valueized_anything)
2633 ao_ref_init (&lref_alt, lhs);
2634 lref = &lref_alt;
2636 if (!refs_may_alias_p_1 (&data->orig_ref, lref, data->tbaa_p))
2638 *disambiguate_only = (valueized_anything
2639 ? TR_VALUEIZE_AND_DISAMBIGUATE
2640 : TR_DISAMBIGUATE);
2641 return NULL;
2645 /* If we reach a clobbering statement try to skip it and see if
2646 we find a VN result with exactly the same value as the
2647 possible clobber. In this case we can ignore the clobber
2648 and return the found value. */
2649 if (is_gimple_reg_type (TREE_TYPE (lhs))
2650 && types_compatible_p (TREE_TYPE (lhs), vr->type)
2651 && (ref->ref || data->orig_ref.ref))
2653 tree *saved_last_vuse_ptr = data->last_vuse_ptr;
2654 /* Do not update last_vuse_ptr in vn_reference_lookup_2. */
2655 data->last_vuse_ptr = NULL;
2656 tree saved_vuse = vr->vuse;
2657 hashval_t saved_hashcode = vr->hashcode;
2658 void *res = vn_reference_lookup_2 (ref, gimple_vuse (def_stmt), data);
2659 /* Need to restore vr->vuse and vr->hashcode. */
2660 vr->vuse = saved_vuse;
2661 vr->hashcode = saved_hashcode;
2662 data->last_vuse_ptr = saved_last_vuse_ptr;
2663 if (res && res != (void *)-1)
2665 vn_reference_t vnresult = (vn_reference_t) res;
2666 tree rhs = gimple_assign_rhs1 (def_stmt);
2667 if (TREE_CODE (rhs) == SSA_NAME)
2668 rhs = SSA_VAL (rhs);
2669 if (vnresult->result
2670 && operand_equal_p (vnresult->result, rhs, 0)
2671 /* We have to honor our promise about union type punning
2672 and also support arbitrary overlaps with
2673 -fno-strict-aliasing. So simply resort to alignment to
2674 rule out overlaps. Do this check last because it is
2675 quite expensive compared to the hash-lookup above. */
2676 && multiple_p (get_object_alignment
2677 (ref->ref ? ref->ref : data->orig_ref.ref),
2678 ref->size)
2679 && multiple_p (get_object_alignment (lhs), ref->size))
2680 return res;
2684 else if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE
2685 && gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
2686 && gimple_call_num_args (def_stmt) <= 4)
2688 /* For builtin calls valueize their arguments and call the
2689 alias oracle again. Valueization may improve points-to
2690 info of pointers and constify size and position arguments.
2691 Originally this was motivated by PR61034 which has
2692 conditional calls to free falsely clobbering ref because
2693 of imprecise points-to info of the argument. */
2694 tree oldargs[4];
2695 bool valueized_anything = false;
2696 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2698 oldargs[i] = gimple_call_arg (def_stmt, i);
2699 tree val = vn_valueize (oldargs[i]);
2700 if (val != oldargs[i])
2702 gimple_call_set_arg (def_stmt, i, val);
2703 valueized_anything = true;
2706 if (valueized_anything)
2708 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
2709 ref, data->tbaa_p);
2710 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2711 gimple_call_set_arg (def_stmt, i, oldargs[i]);
2712 if (!res)
2714 *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
2715 return NULL;
2720 if (*disambiguate_only > TR_TRANSLATE)
2721 return (void *)-1;
2723 /* If we cannot constrain the size of the reference we cannot
2724 test if anything kills it. */
2725 if (!ref->max_size_known_p ())
2726 return (void *)-1;
2728 poly_int64 offset = ref->offset;
2729 poly_int64 maxsize = ref->max_size;
2731 /* def_stmt may-defs *ref. See if we can derive a value for *ref
2732 from that definition.
2733 1) Memset. */
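  /* E.g. after memset (&a, 0, sizeof (a)) a read of a field contained
     in the cleared region yields a zero constant of the field's type;
     a nonzero fill byte is handled by filling a buffer with that byte
     and native-interpreting it in the read's type.  */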
2734 if (is_gimple_reg_type (vr->type)
2735 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
2736 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET_CHK))
2737 && (integer_zerop (gimple_call_arg (def_stmt, 1))
2738 || ((TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST
2739 || (INTEGRAL_TYPE_P (vr->type) && known_eq (ref->size, 8)))
2740 && CHAR_BIT == 8
2741 && BITS_PER_UNIT == 8
2742 && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
2743 && offset.is_constant (&offseti)
2744 && ref->size.is_constant (&sizei)
2745 && (offseti % BITS_PER_UNIT == 0
2746 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST)))
2747 && (poly_int_tree_p (gimple_call_arg (def_stmt, 2))
2748 || (TREE_CODE (gimple_call_arg (def_stmt, 2)) == SSA_NAME
2749 && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt, 2)))))
2750 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2751 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME))
2753 tree base2;
2754 poly_int64 offset2, size2, maxsize2;
2755 bool reverse;
2756 tree ref2 = gimple_call_arg (def_stmt, 0);
2757 if (TREE_CODE (ref2) == SSA_NAME)
2759 ref2 = SSA_VAL (ref2);
2760 if (TREE_CODE (ref2) == SSA_NAME
2761 && (TREE_CODE (base) != MEM_REF
2762 || TREE_OPERAND (base, 0) != ref2))
2764 gimple *def_stmt = SSA_NAME_DEF_STMT (ref2);
2765 if (gimple_assign_single_p (def_stmt)
2766 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2767 ref2 = gimple_assign_rhs1 (def_stmt);
2770 if (TREE_CODE (ref2) == ADDR_EXPR)
2772 ref2 = TREE_OPERAND (ref2, 0);
2773 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
2774 &reverse);
2775 if (!known_size_p (maxsize2)
2776 || !known_eq (maxsize2, size2)
2777 || !operand_equal_p (base, base2, OEP_ADDRESS_OF))
2778 return (void *)-1;
2780 else if (TREE_CODE (ref2) == SSA_NAME)
2782 poly_int64 soff;
2783 if (TREE_CODE (base) != MEM_REF
2784 || !(mem_ref_offset (base)
2785 << LOG2_BITS_PER_UNIT).to_shwi (&soff))
2786 return (void *)-1;
2787 offset += soff;
2788 offset2 = 0;
2789 if (TREE_OPERAND (base, 0) != ref2)
2791 gimple *def = SSA_NAME_DEF_STMT (ref2);
2792 if (is_gimple_assign (def)
2793 && gimple_assign_rhs_code (def) == POINTER_PLUS_EXPR
2794 && gimple_assign_rhs1 (def) == TREE_OPERAND (base, 0)
2795 && poly_int_tree_p (gimple_assign_rhs2 (def)))
2797 tree rhs2 = gimple_assign_rhs2 (def);
2798 if (!(poly_offset_int::from (wi::to_poly_wide (rhs2),
2799 SIGNED)
2800 << LOG2_BITS_PER_UNIT).to_shwi (&offset2))
2801 return (void *)-1;
2802 ref2 = gimple_assign_rhs1 (def);
2803 if (TREE_CODE (ref2) == SSA_NAME)
2804 ref2 = SSA_VAL (ref2);
2806 else
2807 return (void *)-1;
2810 else
2811 return (void *)-1;
2812 tree len = gimple_call_arg (def_stmt, 2);
2813 HOST_WIDE_INT leni, offset2i;
2814 if (TREE_CODE (len) == SSA_NAME)
2815 len = SSA_VAL (len);
2816 /* Sometimes the above trickery is smarter than alias analysis. Take
2817 advantage of that. */
2818 if (!ranges_maybe_overlap_p (offset, maxsize, offset2,
2819 (wi::to_poly_offset (len)
2820 << LOG2_BITS_PER_UNIT)))
2821 return NULL;
2822 if (data->partial_defs.is_empty ()
2823 && known_subrange_p (offset, maxsize, offset2,
2824 wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
2826 tree val;
2827 if (integer_zerop (gimple_call_arg (def_stmt, 1)))
2828 val = build_zero_cst (vr->type);
2829 else if (INTEGRAL_TYPE_P (vr->type)
2830 && known_eq (ref->size, 8)
2831 && offseti % BITS_PER_UNIT == 0)
2833 gimple_match_op res_op (gimple_match_cond::UNCOND, NOP_EXPR,
2834 vr->type, gimple_call_arg (def_stmt, 1));
2835 val = vn_nary_build_or_lookup (&res_op);
2836 if (!val
2837 || (TREE_CODE (val) == SSA_NAME
2838 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2839 return (void *)-1;
2841 else
2843 unsigned buflen = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr->type)) + 1;
2844 if (INTEGRAL_TYPE_P (vr->type))
2845 buflen = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (vr->type)) + 1;
2846 unsigned char *buf = XALLOCAVEC (unsigned char, buflen);
2847 memset (buf, TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 1)),
2848 buflen);
2849 if (BYTES_BIG_ENDIAN)
2851 unsigned int amnt
2852 = (((unsigned HOST_WIDE_INT) offseti + sizei)
2853 % BITS_PER_UNIT);
2854 if (amnt)
2856 shift_bytes_in_array_right (buf, buflen,
2857 BITS_PER_UNIT - amnt);
2858 buf++;
2859 buflen--;
2862 else if (offseti % BITS_PER_UNIT != 0)
2864 unsigned int amnt
2865 = BITS_PER_UNIT - ((unsigned HOST_WIDE_INT) offseti
2866 % BITS_PER_UNIT);
2867 shift_bytes_in_array_left (buf, buflen, amnt);
2868 buf++;
2869 buflen--;
2871 val = native_interpret_expr (vr->type, buf, buflen);
2872 if (!val)
2873 return (void *)-1;
2875 return data->finish (0, 0, val);
2877 /* For now handle clearing memory with partial defs. */
2878 else if (known_eq (ref->size, maxsize)
2879 && integer_zerop (gimple_call_arg (def_stmt, 1))
2880 && tree_fits_poly_int64_p (len)
2881 && tree_to_poly_int64 (len).is_constant (&leni)
2882 && leni <= INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT
2883 && offset.is_constant (&offseti)
2884 && offset2.is_constant (&offset2i)
2885 && maxsize.is_constant (&maxsizei)
2886 && ranges_known_overlap_p (offseti, maxsizei, offset2i,
2887 leni << LOG2_BITS_PER_UNIT))
2889 pd_data pd;
2890 pd.rhs = build_constructor (NULL_TREE, NULL);
2891 pd.offset = offset2i;
2892 pd.size = leni << LOG2_BITS_PER_UNIT;
2893 return data->push_partial_def (pd, 0, 0, offseti, maxsizei);
2897 /* 2) Assignment from an empty CONSTRUCTOR. */
2898 else if (is_gimple_reg_type (vr->type)
2899 && gimple_assign_single_p (def_stmt)
2900 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
2901 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
2903 tree base2;
2904 poly_int64 offset2, size2, maxsize2;
2905 HOST_WIDE_INT offset2i, size2i;
2906 gcc_assert (lhs_ref_ok);
2907 base2 = ao_ref_base (&lhs_ref);
2908 offset2 = lhs_ref.offset;
2909 size2 = lhs_ref.size;
2910 maxsize2 = lhs_ref.max_size;
2911 if (known_size_p (maxsize2)
2912 && known_eq (maxsize2, size2)
2913 && adjust_offsets_for_equal_base_address (base, &offset,
2914 base2, &offset2))
2916 if (data->partial_defs.is_empty ()
2917 && known_subrange_p (offset, maxsize, offset2, size2))
2919 /* While technically undefined behavior, do not optimize
2920    a full read from a clobber. */
2921 if (gimple_clobber_p (def_stmt))
2922 return (void *)-1;
2923 tree val = build_zero_cst (vr->type);
2924 return data->finish (ao_ref_alias_set (&lhs_ref),
2925 ao_ref_base_alias_set (&lhs_ref), val);
2927 else if (known_eq (ref->size, maxsize)
2928 && maxsize.is_constant (&maxsizei)
2929 && offset.is_constant (&offseti)
2930 && offset2.is_constant (&offset2i)
2931 && size2.is_constant (&size2i)
2932 && ranges_known_overlap_p (offseti, maxsizei,
2933 offset2i, size2i))
2935 /* Let clobbers be consumed by the partial-def tracker
2936 which can choose to ignore them if they are shadowed
2937 by a later def. */
2938 pd_data pd;
2939 pd.rhs = gimple_assign_rhs1 (def_stmt);
2940 pd.offset = offset2i;
2941 pd.size = size2i;
2942 return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
2943 ao_ref_base_alias_set (&lhs_ref),
2944 offseti, maxsizei);
2949 /* 3) Assignment from a constant. We can use fold's native encode/interpret
2950    routines to extract the assigned bits. */
2951 else if (known_eq (ref->size, maxsize)
2952 && is_gimple_reg_type (vr->type)
2953 && !reverse_storage_order_for_component_p (vr->operands)
2954 && !contains_storage_order_barrier_p (vr->operands)
2955 && gimple_assign_single_p (def_stmt)
2956 && CHAR_BIT == 8
2957 && BITS_PER_UNIT == 8
2958 && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
2959 /* native_encode and native_interpret operate on arrays of bytes
2960    and so fundamentally need a compile-time size and offset. */
2961 && maxsize.is_constant (&maxsizei)
2962 && offset.is_constant (&offseti)
2963 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
2964 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2965 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
2967 tree lhs = gimple_assign_lhs (def_stmt);
2968 tree base2;
2969 poly_int64 offset2, size2, maxsize2;
2970 HOST_WIDE_INT offset2i, size2i;
2971 bool reverse;
2972 gcc_assert (lhs_ref_ok);
2973 base2 = ao_ref_base (&lhs_ref);
2974 offset2 = lhs_ref.offset;
2975 size2 = lhs_ref.size;
2976 maxsize2 = lhs_ref.max_size;
2977 reverse = reverse_storage_order_for_component_p (lhs);
2978 if (base2
2979 && !reverse
2980 && !storage_order_barrier_p (lhs)
2981 && known_eq (maxsize2, size2)
2982 && adjust_offsets_for_equal_base_address (base, &offset,
2983 base2, &offset2)
2984 && offset.is_constant (&offseti)
2985 && offset2.is_constant (&offset2i)
2986 && size2.is_constant (&size2i))
2988 if (data->partial_defs.is_empty ()
2989 && known_subrange_p (offseti, maxsizei, offset2, size2))
2991 /* We support up to 512-bit values (for V8DFmode). */
2992 unsigned char buffer[65];
2993 int len;
2995 tree rhs = gimple_assign_rhs1 (def_stmt);
2996 if (TREE_CODE (rhs) == SSA_NAME)
2997 rhs = SSA_VAL (rhs);
2998 len = native_encode_expr (rhs,
2999 buffer, sizeof (buffer) - 1,
3000 (offseti - offset2i) / BITS_PER_UNIT);
3001 if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
3003 tree type = vr->type;
3004 unsigned char *buf = buffer;
3005 unsigned int amnt = 0;
3006 /* Make sure to interpret in a type that has a range
3007 covering the whole access size. */
3008 if (INTEGRAL_TYPE_P (vr->type)
3009 && maxsizei != TYPE_PRECISION (vr->type))
3010 type = build_nonstandard_integer_type (maxsizei,
3011 TYPE_UNSIGNED (type));
3012 if (BYTES_BIG_ENDIAN)
3014 /* For big-endian native_encode_expr stored the rhs
3015 such that the LSB of it is the LSB of buffer[len - 1].
3016 That bit is stored into memory at position
3017 offset2 + size2 - 1, i.e. in byte
3018 base + (offset2 + size2 - 1) / BITS_PER_UNIT.
3019 E.g. for offset2 1 and size2 14, rhs -1 and memory
3020 previously cleared that is:
3022 01111111|11111110
3023 Now, if we want to extract offset 2 and size 12 from
3024 it using native_interpret_expr (which actually works
3025 for integral bitfield types in terms of byte size of
3026 the mode), the native_encode_expr stored the value
3027 into buffer as
3028 XX111111|11111111
3029 and returned len 2 (the X bits are outside of
3030 precision).
3031 Let sz be maxsize / BITS_PER_UNIT if not extracting
3032 a bitfield, and GET_MODE_SIZE otherwise.
3033 We need to align the LSB of the value we want to
3034 extract as the LSB of buf[sz - 1].
3035 The LSB from memory we need to read is at position
3036 offset + maxsize - 1. */
3037 HOST_WIDE_INT sz = maxsizei / BITS_PER_UNIT;
3038 if (INTEGRAL_TYPE_P (type))
3039 sz = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
3040 amnt = ((unsigned HOST_WIDE_INT) offset2i + size2i
3041 - offseti - maxsizei) % BITS_PER_UNIT;
3042 if (amnt)
3043 shift_bytes_in_array_right (buffer, len, amnt);
3044 amnt = ((unsigned HOST_WIDE_INT) offset2i + size2i
3045 - offseti - maxsizei - amnt) / BITS_PER_UNIT;
3046 if ((unsigned HOST_WIDE_INT) sz + amnt > (unsigned) len)
3047 len = 0;
3048 else
3050 buf = buffer + len - sz - amnt;
3051 len -= (buf - buffer);
3054 else
3056 amnt = ((unsigned HOST_WIDE_INT) offset2i
3057 - offseti) % BITS_PER_UNIT;
3058 if (amnt)
3060 buffer[len] = 0;
3061 shift_bytes_in_array_left (buffer, len + 1, amnt);
3062 buf = buffer + 1;
3065 tree val = native_interpret_expr (type, buf, len);
3066 /* If we chop off bits because the type's precision doesn't
3067    match the memory access size, this is ok when optimizing
3068    reads but not when called from the DSE code during
3069    elimination. */
3070 if (val
3071 && type != vr->type)
3073 if (! int_fits_type_p (val, vr->type))
3074 val = NULL_TREE;
3075 else
3076 val = fold_convert (vr->type, val);
3079 if (val)
3080 return data->finish (ao_ref_alias_set (&lhs_ref),
3081 ao_ref_base_alias_set (&lhs_ref), val);
3084 else if (ranges_known_overlap_p (offseti, maxsizei, offset2i,
3085 size2i))
3087 pd_data pd;
3088 tree rhs = gimple_assign_rhs1 (def_stmt);
3089 if (TREE_CODE (rhs) == SSA_NAME)
3090 rhs = SSA_VAL (rhs);
3091 pd.rhs = rhs;
3092 pd.offset = offset2i;
3093 pd.size = size2i;
3094 return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
3095 ao_ref_base_alias_set (&lhs_ref),
3096 offseti, maxsizei);
3101 /* 4) Assignment from an SSA name whose definition we may be able
3102    to access pieces from, or which we can combine to a larger entity. */
3103 else if (known_eq (ref->size, maxsize)
3104 && is_gimple_reg_type (vr->type)
3105 && !reverse_storage_order_for_component_p (vr->operands)
3106 && !contains_storage_order_barrier_p (vr->operands)
3107 && gimple_assign_single_p (def_stmt)
3108 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
3110 tree lhs = gimple_assign_lhs (def_stmt);
3111 tree base2;
3112 poly_int64 offset2, size2, maxsize2;
3113 HOST_WIDE_INT offset2i, size2i, offseti;
3114 bool reverse;
3115 gcc_assert (lhs_ref_ok);
3116 base2 = ao_ref_base (&lhs_ref);
3117 offset2 = lhs_ref.offset;
3118 size2 = lhs_ref.size;
3119 maxsize2 = lhs_ref.max_size;
3120 reverse = reverse_storage_order_for_component_p (lhs);
3121 tree def_rhs = gimple_assign_rhs1 (def_stmt);
3122 if (!reverse
3123 && !storage_order_barrier_p (lhs)
3124 && known_size_p (maxsize2)
3125 && known_eq (maxsize2, size2)
3126 && adjust_offsets_for_equal_base_address (base, &offset,
3127 base2, &offset2))
3129 if (data->partial_defs.is_empty ()
3130 && known_subrange_p (offset, maxsize, offset2, size2)
3131 /* ??? We can't handle bitfield precision extracts without
3132 either using an alternate type for the BIT_FIELD_REF and
3133 then doing a conversion or possibly adjusting the offset
3134 according to endianness. */
3135 && (! INTEGRAL_TYPE_P (vr->type)
3136 || known_eq (ref->size, TYPE_PRECISION (vr->type)))
3137 && multiple_p (ref->size, BITS_PER_UNIT))
3139 tree val = NULL_TREE;
3140 if (! INTEGRAL_TYPE_P (TREE_TYPE (def_rhs))
3141 || type_has_mode_precision_p (TREE_TYPE (def_rhs)))
3143 gimple_match_op op (gimple_match_cond::UNCOND,
3144 BIT_FIELD_REF, vr->type,
3145 SSA_VAL (def_rhs),
3146 bitsize_int (ref->size),
3147 bitsize_int (offset - offset2));
3148 val = vn_nary_build_or_lookup (&op);
3150 else if (known_eq (ref->size, size2))
3152 gimple_match_op op (gimple_match_cond::UNCOND,
3153 VIEW_CONVERT_EXPR, vr->type,
3154 SSA_VAL (def_rhs));
3155 val = vn_nary_build_or_lookup (&op);
3157 if (val
3158 && (TREE_CODE (val) != SSA_NAME
3159 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
3160 return data->finish (ao_ref_alias_set (&lhs_ref),
3161 ao_ref_base_alias_set (&lhs_ref), val);
3163 else if (maxsize.is_constant (&maxsizei)
3164 && offset.is_constant (&offseti)
3165 && offset2.is_constant (&offset2i)
3166 && size2.is_constant (&size2i)
3167 && ranges_known_overlap_p (offset, maxsize, offset2, size2))
3169 pd_data pd;
3170 pd.rhs = SSA_VAL (def_rhs);
3171 pd.offset = offset2i;
3172 pd.size = size2i;
3173 return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
3174 ao_ref_base_alias_set (&lhs_ref),
3175 offseti, maxsizei);
3180 /* 5) For aggregate copies translate the reference through them if
3181 the copy kills ref. */
3182 else if (data->vn_walk_kind == VN_WALKREWRITE
3183 && gimple_assign_single_p (def_stmt)
3184 && (DECL_P (gimple_assign_rhs1 (def_stmt))
3185 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
3186 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
3188 tree base2;
3189 int i, j, k;
3190 auto_vec<vn_reference_op_s> rhs;
3191 vn_reference_op_t vro;
3192 ao_ref r;
3194 gcc_assert (lhs_ref_ok);
3196 /* See if the assignment kills REF. */
3197 base2 = ao_ref_base (&lhs_ref);
3198 if (!lhs_ref.max_size_known_p ()
3199 || (base != base2
3200 && (TREE_CODE (base) != MEM_REF
3201 || TREE_CODE (base2) != MEM_REF
3202 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
3203 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
3204 TREE_OPERAND (base2, 1))))
3205 || !stmt_kills_ref_p (def_stmt, ref))
3206 return (void *)-1;
3208 /* Find the common base of ref and the lhs. lhs_ops already
3209 contains valueized operands for the lhs. */
3210 i = vr->operands.length () - 1;
3211 j = lhs_ops.length () - 1;
3212 while (j >= 0 && i >= 0
3213 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
3215 i--;
3216 j--;
3219 /* ??? The innermost op should always be a MEM_REF and we already
3220 checked that the assignment to the lhs kills vr. Thus for
3221 aggregate copies using char[] types the vn_reference_op_eq
3222 may fail when comparing types for compatibility. But we really
3223 don't care here - further lookups with the rewritten operands
3224 will simply fail if we messed up types too badly. */
3225 poly_int64 extra_off = 0;
3226 if (j == 0 && i >= 0
3227 && lhs_ops[0].opcode == MEM_REF
3228 && maybe_ne (lhs_ops[0].off, -1))
3230 if (known_eq (lhs_ops[0].off, vr->operands[i].off))
3231 i--, j--;
3232 else if (vr->operands[i].opcode == MEM_REF
3233 && maybe_ne (vr->operands[i].off, -1))
3235 extra_off = vr->operands[i].off - lhs_ops[0].off;
3236 i--, j--;
3240 /* i now points to the first additional op.
3241 ??? LHS may not be completely contained in VR, one or more
3242 VIEW_CONVERT_EXPRs could be in its way. We could at least
3243 try handling outermost VIEW_CONVERT_EXPRs. */
3244 if (j != -1)
3245 return (void *)-1;
3247 /* Punt if the additional ops contain a storage order barrier. */
3248 for (k = i; k >= 0; k--)
3250 vro = &vr->operands[k];
3251 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
3252 return (void *)-1;
3255 /* Now re-write REF to be based on the rhs of the assignment. */
3256 tree rhs1 = gimple_assign_rhs1 (def_stmt);
3257 copy_reference_ops_from_ref (rhs1, &rhs);
3259 /* Apply an extra offset to the inner MEM_REF of the RHS. */
3260 if (maybe_ne (extra_off, 0))
3262 if (rhs.length () < 2)
3263 return (void *)-1;
3264 int ix = rhs.length () - 2;
3265 if (rhs[ix].opcode != MEM_REF
3266 || known_eq (rhs[ix].off, -1))
3267 return (void *)-1;
3268 rhs[ix].off += extra_off;
3269 rhs[ix].op0 = int_const_binop (PLUS_EXPR, rhs[ix].op0,
3270 build_int_cst (TREE_TYPE (rhs[ix].op0),
3271 extra_off));
3274 /* Save the operands since we need to use the original ones for
3275 the hash entry we use. */
3276 if (!data->saved_operands.exists ())
3277 data->saved_operands = vr->operands.copy ();
3279 /* We need to pre-pend vr->operands[0..i] to rhs. */
3280 vec<vn_reference_op_s> old = vr->operands;
3281 if (i + 1 + rhs.length () > vr->operands.length ())
3282 vr->operands.safe_grow (i + 1 + rhs.length (), true);
3283 else
3284 vr->operands.truncate (i + 1 + rhs.length ());
3285 FOR_EACH_VEC_ELT (rhs, j, vro)
3286 vr->operands[i + 1 + j] = *vro;
3287 valueize_refs (&vr->operands);
3288 if (old == shared_lookup_references)
3289 shared_lookup_references = vr->operands;
3290 vr->hashcode = vn_reference_compute_hash (vr);
3292 /* Try folding the new reference to a constant. */
3293 tree val = fully_constant_vn_reference_p (vr);
3294 if (val)
3296 if (data->partial_defs.is_empty ())
3297 return data->finish (ao_ref_alias_set (&lhs_ref),
3298 ao_ref_base_alias_set (&lhs_ref), val);
3299 /* This is the only interesting case for partial-def handling
3300 coming from targets that like to gimplify init-ctors as
3301 aggregate copies from constant data like aarch64 for
3302 PR83518. */
3303 if (maxsize.is_constant (&maxsizei) && known_eq (ref->size, maxsize))
3305 pd_data pd;
3306 pd.rhs = val;
3307 pd.offset = 0;
3308 pd.size = maxsizei;
3309 return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
3310 ao_ref_base_alias_set (&lhs_ref),
3311 0, maxsizei);
3315 /* Continuing with partial defs isn't easily possible here; we
3316    would have to find a full def from further lookups from here. Probably
3317    not worth special-casing everywhere. */
3318 if (!data->partial_defs.is_empty ())
3319 return (void *)-1;
3321 /* Adjust *ref from the new operands. */
3322 ao_ref rhs1_ref;
3323 ao_ref_init (&rhs1_ref, rhs1);
3324 if (!ao_ref_init_from_vn_reference (&r, ao_ref_alias_set (&rhs1_ref),
3325 ao_ref_base_alias_set (&rhs1_ref),
3326 vr->type, vr->operands))
3327 return (void *)-1;
3328 /* This can happen with bitfields. */
3329 if (maybe_ne (ref->size, r.size))
3331 /* If the access lacks some subsetting, simply apply that by
3332    shortening it. That in the end can only be successful
3333    if we can pun the lookup result, which in turn requires
3334    exact offsets. */
3335 if (known_eq (r.size, r.max_size)
3336 && known_lt (ref->size, r.size))
3337 r.size = r.max_size = ref->size;
3338 else
3339 return (void *)-1;
3341 *ref = r;
3343 /* Do not update last seen VUSE after translating. */
3344 data->last_vuse_ptr = NULL;
3345 /* Invalidate the original access path since it now contains
3346 the wrong base. */
3347 data->orig_ref.ref = NULL_TREE;
3348 /* Use the alias-set of this LHS for recording an eventual result. */
3349 if (data->first_set == -2)
3351 data->first_set = ao_ref_alias_set (&lhs_ref);
3352 data->first_base_set = ao_ref_base_alias_set (&lhs_ref);
3355 /* Keep looking for the adjusted *REF / VR pair. */
3356 return NULL;
3359 /* 6) For memcpy copies translate the reference through them if the copy
3360 kills ref. But we cannot (easily) do this translation if the memcpy is
3361 a storage order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that
3362 can modify the storage order of objects (see storage_order_barrier_p). */
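  /* E.g. for memcpy (&d, &s, n) a read fully contained in the
     destination is retried as a lookup of the corresponding bytes of
     the source, i.e. as a MEM_REF based on &s at the translated
     offset.  */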
3363 else if (data->vn_walk_kind == VN_WALKREWRITE
3364 && is_gimple_reg_type (vr->type)
3365 /* ??? Handle BCOPY as well. */
3366 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
3367 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY_CHK)
3368 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
3369 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY_CHK)
3370 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE)
3371 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE_CHK))
3372 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
3373 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
3374 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
3375 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
3376 && (poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size)
3377 || (TREE_CODE (gimple_call_arg (def_stmt, 2)) == SSA_NAME
3378 && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt, 2)),
3379 &copy_size)))
3380 /* Handling this is more complicated, give up for now. */
3381 && data->partial_defs.is_empty ())
3383 tree lhs, rhs;
3384 ao_ref r;
3385 poly_int64 rhs_offset, lhs_offset;
3386 vn_reference_op_s op;
3387 poly_uint64 mem_offset;
3388 poly_int64 at, byte_maxsize;
3390 /* Only handle non-variable, addressable refs. */
3391 if (maybe_ne (ref->size, maxsize)
3392 || !multiple_p (offset, BITS_PER_UNIT, &at)
3393 || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
3394 return (void *)-1;
3396 /* Extract a pointer base and an offset for the destination. */
3397 lhs = gimple_call_arg (def_stmt, 0);
3398 lhs_offset = 0;
3399 if (TREE_CODE (lhs) == SSA_NAME)
3401 lhs = vn_valueize (lhs);
3402 if (TREE_CODE (lhs) == SSA_NAME)
3404 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
3405 if (gimple_assign_single_p (def_stmt)
3406 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
3407 lhs = gimple_assign_rhs1 (def_stmt);
3410 if (TREE_CODE (lhs) == ADDR_EXPR)
3412 if (AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (lhs)))
3413 && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_TYPE (lhs))))
3414 return (void *)-1;
3415 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
3416 &lhs_offset);
3417 if (!tem)
3418 return (void *)-1;
3419 if (TREE_CODE (tem) == MEM_REF
3420 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
3422 lhs = TREE_OPERAND (tem, 0);
3423 if (TREE_CODE (lhs) == SSA_NAME)
3424 lhs = vn_valueize (lhs);
3425 lhs_offset += mem_offset;
3427 else if (DECL_P (tem))
3428 lhs = build_fold_addr_expr (tem);
3429 else
3430 return (void *)-1;
3432 if (TREE_CODE (lhs) != SSA_NAME
3433 && TREE_CODE (lhs) != ADDR_EXPR)
3434 return (void *)-1;
3436 /* Extract a pointer base and an offset for the source. */
3437 rhs = gimple_call_arg (def_stmt, 1);
3438 rhs_offset = 0;
3439 if (TREE_CODE (rhs) == SSA_NAME)
3440 rhs = vn_valueize (rhs);
3441 if (TREE_CODE (rhs) == ADDR_EXPR)
3443 if (AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (rhs)))
3444 && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_TYPE (rhs))))
3445 return (void *)-1;
3446 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
3447 &rhs_offset);
3448 if (!tem)
3449 return (void *)-1;
3450 if (TREE_CODE (tem) == MEM_REF
3451 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
3453 rhs = TREE_OPERAND (tem, 0);
3454 rhs_offset += mem_offset;
3456 else if (DECL_P (tem)
3457 || TREE_CODE (tem) == STRING_CST)
3458 rhs = build_fold_addr_expr (tem);
3459 else
3460 return (void *)-1;
3462 if (TREE_CODE (rhs) == SSA_NAME)
3463 rhs = SSA_VAL (rhs);
3464 else if (TREE_CODE (rhs) != ADDR_EXPR)
3465 return (void *)-1;
3467 /* The bases of the destination and the reference have to agree. */
3468 if (TREE_CODE (base) == MEM_REF)
3470 if (TREE_OPERAND (base, 0) != lhs
3471 || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
3472 return (void *) -1;
3473 at += mem_offset;
3475 else if (!DECL_P (base)
3476 || TREE_CODE (lhs) != ADDR_EXPR
3477 || TREE_OPERAND (lhs, 0) != base)
3478 return (void *)-1;
3480 /* If the access is completely outside of the memcpy destination
3481 area there is no aliasing. */
3482 if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
3483 return NULL;
3484 /* And the access has to be contained within the memcpy destination. */
3485 if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
3486 return (void *)-1;
3488 /* Save the operands since we need to use the original ones for
3489 the hash entry we use. */
3490 if (!data->saved_operands.exists ())
3491 data->saved_operands = vr->operands.copy ();
3493 /* Make room for 2 operands in the new reference. */
3494 if (vr->operands.length () < 2)
3496 vec<vn_reference_op_s> old = vr->operands;
3497 vr->operands.safe_grow_cleared (2, true);
3498 if (old == shared_lookup_references)
3499 shared_lookup_references = vr->operands;
3501 else
3502 vr->operands.truncate (2);
3504 /* The looked-through reference is a simple MEM_REF. */
3505 memset (&op, 0, sizeof (op));
3506 op.type = vr->type;
3507 op.opcode = MEM_REF;
3508 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
3509 op.off = at - lhs_offset + rhs_offset;
3510 vr->operands[0] = op;
3511 op.type = TREE_TYPE (rhs);
3512 op.opcode = TREE_CODE (rhs);
3513 op.op0 = rhs;
3514 op.off = -1;
3515 vr->operands[1] = op;
3516 vr->hashcode = vn_reference_compute_hash (vr);
3518 /* Try folding the new reference to a constant. */
3519 tree val = fully_constant_vn_reference_p (vr);
3520 if (val)
3521 return data->finish (0, 0, val);
3523 /* Adjust *ref from the new operands. */
3524 if (!ao_ref_init_from_vn_reference (&r, 0, 0, vr->type, vr->operands))
3525 return (void *)-1;
3526 /* This can happen with bitfields. */
3527 if (maybe_ne (ref->size, r.size))
3528 return (void *)-1;
3529 *ref = r;
3531 /* Do not update last seen VUSE after translating. */
3532 data->last_vuse_ptr = NULL;
3533 /* Invalidate the original access path since it now contains
3534 the wrong base. */
3535 data->orig_ref.ref = NULL_TREE;
3536 /* Use the alias-set of this stmt for recording an eventual result. */
3537 if (data->first_set == -2)
3539 data->first_set = 0;
3540 data->first_base_set = 0;
3543 /* Keep looking for the adjusted *REF / VR pair. */
3544 return NULL;
3547 /* Bail out and stop walking. */
3548 return (void *)-1;
3551 /* Return a reference op vector from OP that can be used for
3552 vn_reference_lookup_pieces. The caller is responsible for releasing
3553 the vector. */
3555 vec<vn_reference_op_s>
3556 vn_reference_operands_for_lookup (tree op)
3558 bool valueized;
3559 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
3562 /* Lookup a reference operation by its parts, in the current hash table.
3563 Returns the resulting value number if it exists in the hash table,
3564 NULL_TREE otherwise. VNRESULT will be filled in with the actual
3565 vn_reference_t stored in the hashtable if something is found. */
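/* A minimal usage sketch (hypothetical caller):

     vec<vn_reference_op_s> ops = vn_reference_operands_for_lookup (ref);
     vn_reference_t res;
     tree val = vn_reference_lookup_pieces (vuse, set, base_set,
                                            TREE_TYPE (ref), ops,
                                            &res, VN_WALK);
     ops.release ();

   where ref, vuse, set and base_set are assumed to describe the access
   being looked up.  */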
3567 tree
3568 vn_reference_lookup_pieces (tree vuse, alias_set_type set,
3569 alias_set_type base_set, tree type,
3570 vec<vn_reference_op_s> operands,
3571 vn_reference_t *vnresult, vn_lookup_kind kind)
3573 struct vn_reference_s vr1;
3574 vn_reference_t tmp;
3575 tree cst;
3577 if (!vnresult)
3578 vnresult = &tmp;
3579 *vnresult = NULL;
3581 vr1.vuse = vuse_ssa_val (vuse);
3582 shared_lookup_references.truncate (0);
3583 shared_lookup_references.safe_grow (operands.length (), true);
3584 memcpy (shared_lookup_references.address (),
3585 operands.address (),
3586 sizeof (vn_reference_op_s)
3587 * operands.length ());
3588 bool valueized_p;
3589 valueize_refs_1 (&shared_lookup_references, &valueized_p);
3590 vr1.operands = shared_lookup_references;
3591 vr1.type = type;
3592 vr1.set = set;
3593 vr1.base_set = base_set;
3594 vr1.hashcode = vn_reference_compute_hash (&vr1);
3595 if ((cst = fully_constant_vn_reference_p (&vr1)))
3596 return cst;
3598 vn_reference_lookup_1 (&vr1, vnresult);
3599 if (!*vnresult
3600 && kind != VN_NOWALK
3601 && vr1.vuse)
3603 ao_ref r;
3604 unsigned limit = param_sccvn_max_alias_queries_per_access;
3605 vn_walk_cb_data data (&vr1, NULL_TREE, NULL, kind, true, NULL_TREE);
3606 vec<vn_reference_op_s> ops_for_ref;
3607 if (!valueized_p)
3608 ops_for_ref = vr1.operands;
3609 else
3611 /* For ao_ref_from_mem we have to ensure only available SSA names
3612 end up in base and the only convenient way to make this work
3613 for PRE is to re-valueize with that in mind. */
3614 ops_for_ref.create (operands.length ());
3615 ops_for_ref.quick_grow (operands.length ());
3616 memcpy (ops_for_ref.address (),
3617 operands.address (),
3618 sizeof (vn_reference_op_s)
3619 * operands.length ());
3620 valueize_refs_1 (&ops_for_ref, &valueized_p, true);
3622 if (ao_ref_init_from_vn_reference (&r, set, base_set, type,
3623 ops_for_ref))
3624 *vnresult
3625 = ((vn_reference_t)
3626 walk_non_aliased_vuses (&r, vr1.vuse, true, vn_reference_lookup_2,
3627 vn_reference_lookup_3, vuse_valueize,
3628 limit, &data));
3629 if (ops_for_ref != shared_lookup_references)
3630 ops_for_ref.release ();
3631 gcc_checking_assert (vr1.operands == shared_lookup_references);
3634 if (*vnresult)
3635 return (*vnresult)->result;
3637 return NULL_TREE;
3640 /* Lookup OP in the current hash table, and return the resulting value
3641 number if it exists in the hash table. Return NULL_TREE if it does
3642 not exist in the hash table or if the result field of the structure
3643 was NULL.  VNRESULT will be filled in with the vn_reference_t
3644 stored in the hashtable if one exists. When TBAA_P is false assume
3645 we are looking up a store and treat it as having alias-set zero.
3646 *LAST_VUSE_PTR will be updated with the VUSE with which the value lookup succeeded.
3647 MASK is either NULL_TREE, or can be an INTEGER_CST if the result of the
3648 load is bitwise anded with MASK and so we are only interested in a subset
3649 of the bits and can ignore if the other bits are uninitialized or
3650 not initialized with constants. */
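/* For example (hypothetical use), a caller that only consumes the low
   byte of the loaded value can pass MASK 0xff; a successful masked
   lookup hands the combined value back through masked_result and
   inserts nothing into the hash tables.  */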
3652 tree
3653 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
3654 vn_reference_t *vnresult, bool tbaa_p,
3655 tree *last_vuse_ptr, tree mask)
3657 vec<vn_reference_op_s> operands;
3658 struct vn_reference_s vr1;
3659 bool valueized_anything;
3661 if (vnresult)
3662 *vnresult = NULL;
3664 vr1.vuse = vuse_ssa_val (vuse);
3665 vr1.operands = operands
3666 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
3667 vr1.type = TREE_TYPE (op);
3668 ao_ref op_ref;
3669 ao_ref_init (&op_ref, op);
3670 vr1.set = ao_ref_alias_set (&op_ref);
3671 vr1.base_set = ao_ref_base_alias_set (&op_ref);
3672 vr1.hashcode = vn_reference_compute_hash (&vr1);
3673 if (mask == NULL_TREE)
3674 if (tree cst = fully_constant_vn_reference_p (&vr1))
3675 return cst;
3677 if (kind != VN_NOWALK && vr1.vuse)
3679 vn_reference_t wvnresult;
3680 ao_ref r;
3681 unsigned limit = param_sccvn_max_alias_queries_per_access;
3682 auto_vec<vn_reference_op_s> ops_for_ref;
3683 if (valueized_anything)
3685 copy_reference_ops_from_ref (op, &ops_for_ref);
3686 bool tem;
3687 valueize_refs_1 (&ops_for_ref, &tem, true);
3689 /* Make sure to use a valueized reference if we valueized anything.
3690 Otherwise preserve the full reference for advanced TBAA. */
3691 if (!valueized_anything
3692 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.base_set,
3693 vr1.type, ops_for_ref))
3694 ao_ref_init (&r, op);
3695 vn_walk_cb_data data (&vr1, r.ref ? NULL_TREE : op,
3696 last_vuse_ptr, kind, tbaa_p, mask);
3698 wvnresult
3699 = ((vn_reference_t)
3700 walk_non_aliased_vuses (&r, vr1.vuse, tbaa_p, vn_reference_lookup_2,
3701 vn_reference_lookup_3, vuse_valueize, limit,
3702 &data));
3703 gcc_checking_assert (vr1.operands == shared_lookup_references);
3704 if (wvnresult)
3706 gcc_assert (mask == NULL_TREE);
3707 if (vnresult)
3708 *vnresult = wvnresult;
3709 return wvnresult->result;
3711 else if (mask)
3712 return data.masked_result;
3714 return NULL_TREE;
3717 if (last_vuse_ptr)
3718 *last_vuse_ptr = vr1.vuse;
3719 if (mask)
3720 return NULL_TREE;
3721 return vn_reference_lookup_1 (&vr1, vnresult);
3724 /* Lookup CALL in the current hash table and return the entry in
3725 *VNRESULT if found. Populates *VR for the hashtable lookup. */
3727 void
3728 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
3729 vn_reference_t vr)
3731 if (vnresult)
3732 *vnresult = NULL;
3734 tree vuse = gimple_vuse (call);
3736 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
3737 vr->operands = valueize_shared_reference_ops_from_call (call);
3738 tree lhs = gimple_call_lhs (call);
3739 /* For non-SSA return values the reference ops contain the LHS. */
3740 vr->type = ((lhs && TREE_CODE (lhs) == SSA_NAME)
3741 ? TREE_TYPE (lhs) : NULL_TREE);
3742 vr->punned = false;
3743 vr->set = 0;
3744 vr->base_set = 0;
3745 vr->hashcode = vn_reference_compute_hash (vr);
3746 vn_reference_lookup_1 (vr, vnresult);
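3747 /* Note that on a lookup miss nothing is inserted; see
3748 visit_reference_op_call for how call references get recorded. */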
3749 /* Insert OP into the current hash table with a value number of RESULT. */
3751 static void
3752 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
3754 vn_reference_s **slot;
3755 vn_reference_t vr1;
3756 bool tem;
3758 vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
3759 if (TREE_CODE (result) == SSA_NAME)
3760 vr1->value_id = VN_INFO (result)->value_id;
3761 else
3762 vr1->value_id = get_or_alloc_constant_value_id (result);
3763 vr1->vuse = vuse_ssa_val (vuse);
3764 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
3765 vr1->type = TREE_TYPE (op);
3766 vr1->punned = false;
3767 ao_ref op_ref;
3768 ao_ref_init (&op_ref, op);
3769 vr1->set = ao_ref_alias_set (&op_ref);
3770 vr1->base_set = ao_ref_base_alias_set (&op_ref);
3771 vr1->hashcode = vn_reference_compute_hash (vr1);
3772 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
3773 vr1->result_vdef = vdef;
3775 slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
3776 INSERT);
3778 /* Because IL walking on reference lookup can end up visiting
3779 a def that is only to be visited later in iteration order
3780 when we are about to make an irreducible region reducible
3781 the def can effectively be processed and its ref already inserted
3782 by vn_reference_lookup_3. So we cannot assert (!*slot) but
3783 instead save a lookup if we deal with already inserted refs here. */
3784 if (*slot)
3786 /* We cannot assert that we have the same value either because
3787 when disentangling an irreducible region we may end up visiting
3788 a use before the corresponding def. That's a missed optimization
3789 only though. See gcc.dg/tree-ssa/pr87126.c for example. */
3790 if (dump_file && (dump_flags & TDF_DETAILS)
3791 && !operand_equal_p ((*slot)->result, vr1->result, 0))
3793 fprintf (dump_file, "Keeping old value ");
3794 print_generic_expr (dump_file, (*slot)->result);
3795 fprintf (dump_file, " because of collision\n");
3797 free_reference (vr1);
3798 obstack_free (&vn_tables_obstack, vr1);
3799 return;
3802 *slot = vr1;
3803 vr1->next = last_inserted_ref;
3804 last_inserted_ref = vr1;
3807 /* Insert a reference by its pieces into the current hash table with
3808 a value number of RESULT. Return the resulting reference
3809 structure we created. */
3811 vn_reference_t
3812 vn_reference_insert_pieces (tree vuse, alias_set_type set,
3813 alias_set_type base_set, tree type,
3814 vec<vn_reference_op_s> operands,
3815 tree result, unsigned int value_id)
3818 vn_reference_s **slot;
3819 vn_reference_t vr1;
3821 vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
3822 vr1->value_id = value_id;
3823 vr1->vuse = vuse_ssa_val (vuse);
3824 vr1->operands = operands;
3825 valueize_refs (&vr1->operands);
3826 vr1->type = type;
3827 vr1->punned = false;
3828 vr1->set = set;
3829 vr1->base_set = base_set;
3830 vr1->hashcode = vn_reference_compute_hash (vr1);
3831 if (result && TREE_CODE (result) == SSA_NAME)
3832 result = SSA_VAL (result);
3833 vr1->result = result;
3834 vr1->result_vdef = NULL_TREE;
3836 slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
3837 INSERT);
3839 /* At this point we should have all the things inserted that we have
3840 seen before, and we should never try inserting something that
3841 already exists. */
3842 gcc_assert (!*slot);
3844 *slot = vr1;
3845 vr1->next = last_inserted_ref;
3846 last_inserted_ref = vr1;
3847 return vr1;
3850 /* Compute and return the hash value for nary operation VNO1. */
3852 static hashval_t
3853 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
3855 inchash::hash hstate;
3856 unsigned i;
3858 for (i = 0; i < vno1->length; ++i)
3859 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
3860 vno1->op[i] = SSA_VAL (vno1->op[i]);
3862 if (((vno1->length == 2
3863 && commutative_tree_code (vno1->opcode))
3864 || (vno1->length == 3
3865 && commutative_ternary_tree_code (vno1->opcode)))
3866 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
3867 std::swap (vno1->op[0], vno1->op[1]);
3868 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
3869 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
3871 std::swap (vno1->op[0], vno1->op[1]);
3872 vno1->opcode = swap_tree_comparison (vno1->opcode);
3875 hstate.add_int (vno1->opcode);
3876 for (i = 0; i < vno1->length; ++i)
3877 inchash::add_expr (vno1->op[i], hstate);
3879 return hstate.end ();
3882 /* Compare nary operations VNO1 and VNO2 and return true if they are
3883 equivalent. */
3885 bool
3886 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
3888 unsigned i;
3890 if (vno1->hashcode != vno2->hashcode)
3891 return false;
3893 if (vno1->length != vno2->length)
3894 return false;
3896 if (vno1->opcode != vno2->opcode
3897 || !types_compatible_p (vno1->type, vno2->type))
3898 return false;
3900 for (i = 0; i < vno1->length; ++i)
3901 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
3902 return false;
3904 /* BIT_INSERT_EXPR has an implicit operand as the type precision
3905 of op1. Need to check to make sure they are the same. */
3906 if (vno1->opcode == BIT_INSERT_EXPR
3907 && TREE_CODE (vno1->op[1]) == INTEGER_CST
3908 && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
3909 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
3910 return false;
3912 return true;
3915 /* Initialize VNO from the pieces provided. */
3917 static void
3918 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
3919 enum tree_code code, tree type, tree *ops)
3921 vno->opcode = code;
3922 vno->length = length;
3923 vno->type = type;
3924 memcpy (&vno->op[0], ops, sizeof (tree) * length);
3927 /* Return the number of operands for a vn_nary ops structure from STMT. */
3929 static unsigned int
3930 vn_nary_length_from_stmt (gimple *stmt)
3932 switch (gimple_assign_rhs_code (stmt))
3934 case REALPART_EXPR:
3935 case IMAGPART_EXPR:
3936 case VIEW_CONVERT_EXPR:
3937 return 1;
3939 case BIT_FIELD_REF:
3940 return 3;
3942 case CONSTRUCTOR:
3943 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
3945 default:
3946 return gimple_num_ops (stmt) - 1;
3950 /* Initialize VNO from STMT. */
3952 static void
3953 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gassign *stmt)
3955 unsigned i;
3957 vno->opcode = gimple_assign_rhs_code (stmt);
3958 vno->type = TREE_TYPE (gimple_assign_lhs (stmt));
3959 switch (vno->opcode)
3961 case REALPART_EXPR:
3962 case IMAGPART_EXPR:
3963 case VIEW_CONVERT_EXPR:
3964 vno->length = 1;
3965 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
3966 break;
3968 case BIT_FIELD_REF:
3969 vno->length = 3;
3970 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
3971 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
3972 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
3973 break;
3975 case CONSTRUCTOR:
3976 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
3977 for (i = 0; i < vno->length; ++i)
3978 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
3979 break;
3981 default:
3982 gcc_checking_assert (!gimple_assign_single_p (stmt));
3983 vno->length = gimple_num_ops (stmt) - 1;
3984 for (i = 0; i < vno->length; ++i)
3985 vno->op[i] = gimple_op (stmt, i + 1);
3989 /* Compute the hashcode for VNO and look for it in the hash table;
3990 return the resulting value number if it exists in the hash table.
3991 Return NULL_TREE if it does not exist in the hash table or if the
3992 result field of the operation is NULL. VNRESULT will contain the
3993 vn_nary_op_t from the hashtable if it exists. */
3995 static tree
3996 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
3998 vn_nary_op_s **slot;
4000 if (vnresult)
4001 *vnresult = NULL;
4003 vno->hashcode = vn_nary_op_compute_hash (vno);
4004 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
4005 if (!slot)
4006 return NULL_TREE;
4007 if (vnresult)
4008 *vnresult = *slot;
4009 return (*slot)->predicated_values ? NULL_TREE : (*slot)->u.result;
4012 /* Lookup a n-ary operation by its pieces and return the resulting value
4013 number if it exists in the hash table. Return NULL_TREE if it does
4014 not exist in the hash table or if the result field of the operation
4015 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
4016 if it exists. */
4018 tree
4019 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
4020 tree type, tree *ops, vn_nary_op_t *vnresult)
4022 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
4023 sizeof_vn_nary_op (length));
4024 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
4025 return vn_nary_op_lookup_1 (vno1, vnresult);
4028 /* Lookup the rhs of STMT in the current hash table, and return the resulting
4029 value number if it exists in the hash table. Return NULL_TREE if
4030 it does not exist in the hash table. VNRESULT will contain the
4031 vn_nary_op_t from the hashtable if it exists. */
4033 tree
4034 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
4036 vn_nary_op_t vno1
4037 = XALLOCAVAR (struct vn_nary_op_s,
4038 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
4039 init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (stmt));
4040 return vn_nary_op_lookup_1 (vno1, vnresult);
4043 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
4045 static vn_nary_op_t
4046 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
4048 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
4051 /* Allocate and initialize a vn_nary_op_t on the
4052 vn_tables_obstack. */
4054 static vn_nary_op_t
4055 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
4057 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length, &vn_tables_obstack);
4059 vno1->value_id = value_id;
4060 vno1->length = length;
4061 vno1->predicated_values = 0;
4062 vno1->u.result = result;
4064 return vno1;
4067 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
4068 VNO->HASHCODE first. */
4070 static vn_nary_op_t
4071 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
4072 bool compute_hash)
4074 vn_nary_op_s **slot;
4076 if (compute_hash)
4078 vno->hashcode = vn_nary_op_compute_hash (vno);
4079 gcc_assert (! vno->predicated_values
4080 || (! vno->u.values->next
4081 && vno->u.values->n == 1));
4084 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
4085 vno->unwind_to = *slot;
4086 if (*slot)
4088 /* Prefer non-predicated values.
4089 ??? Only if those are constant, otherwise, with constant predicated
4090 value, turn them into predicated values with entry-block validity
4091 (??? but we always find the first valid result currently). */
4092 if ((*slot)->predicated_values
4093 && ! vno->predicated_values)
4095 /* ??? We cannot remove *slot from the unwind stack list.
4096 For the moment we deal with this by skipping not found
4097 entries but this isn't ideal ... */
4098 *slot = vno;
4099 /* ??? Maintain a stack of states we can unwind in
4100 vn_nary_op_s? But how far do we unwind? In reality
4101 we need to push change records somewhere... Or not
4102 unwind vn_nary_op_s and linking them but instead
4103 unwind the results "list", linking that, which also
4104 doesn't move on hashtable resize. */
4105 /* We can also have a ->unwind_to recording *slot there.
4106 That way we can make u.values a fixed size array with
4107 recording the number of entries but of course we then
4108 have always N copies for each unwind_to-state. Or we
4109 make sure to only ever append and each unwinding will
4110 pop off one entry (but how to deal with predicated
4111 replaced with non-predicated here?) */
4112 vno->next = last_inserted_nary;
4113 last_inserted_nary = vno;
4114 return vno;
4116 else if (vno->predicated_values
4117 && ! (*slot)->predicated_values)
4118 return *slot;
4119 else if (vno->predicated_values
4120 && (*slot)->predicated_values)
4122 /* ??? Factor this all into an insert_single_predicated_value
4123 routine. */
4124 gcc_assert (!vno->u.values->next && vno->u.values->n == 1);
4125 basic_block vno_bb
4126 = BASIC_BLOCK_FOR_FN (cfun, vno->u.values->valid_dominated_by_p[0]);
4127 vn_pval *nval = vno->u.values;
4128 vn_pval **next = &vno->u.values;
4129 bool found = false;
4130 for (vn_pval *val = (*slot)->u.values; val; val = val->next)
4132 if (expressions_equal_p (val->result, nval->result))
4134 found = true;
4135 for (unsigned i = 0; i < val->n; ++i)
4137 basic_block val_bb
4138 = BASIC_BLOCK_FOR_FN (cfun,
4139 val->valid_dominated_by_p[i]);
4140 if (dominated_by_p (CDI_DOMINATORS, vno_bb, val_bb))
4141 /* Value registered with more generic predicate. */
4142 return *slot;
4143 else if (dominated_by_p (CDI_DOMINATORS, val_bb, vno_bb))
4144 /* Shouldn't happen, we insert in RPO order. */
4145 gcc_unreachable ();
4147 /* Append value. */
4148 *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
4149 sizeof (vn_pval)
4150 + val->n * sizeof (int));
4151 (*next)->next = NULL;
4152 (*next)->result = val->result;
4153 (*next)->n = val->n + 1;
4154 memcpy ((*next)->valid_dominated_by_p,
4155 val->valid_dominated_by_p,
4156 val->n * sizeof (int));
4157 (*next)->valid_dominated_by_p[val->n] = vno_bb->index;
4158 next = &(*next)->next;
4159 if (dump_file && (dump_flags & TDF_DETAILS))
4160 fprintf (dump_file, "Appending predicate to value.\n");
4161 continue;
4163 /* Copy other predicated values. */
4164 *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
4165 sizeof (vn_pval)
4166 + (val->n-1) * sizeof (int));
4167 memcpy (*next, val, sizeof (vn_pval) + (val->n-1) * sizeof (int));
4168 (*next)->next = NULL;
4169 next = &(*next)->next;
4171 if (!found)
4172 *next = nval;
4174 *slot = vno;
4175 vno->next = last_inserted_nary;
4176 last_inserted_nary = vno;
4177 return vno;
4180 /* While we do not want to insert things twice it's awkward to
4181 avoid it in the case where visit_nary_op pattern-matches stuff
4182 and ends up simplifying the replacement to itself. We then
4183 get two inserts, one from visit_nary_op and one from
4184 vn_nary_build_or_lookup.
4185 So allow inserts with the same value number. */
4186 if ((*slot)->u.result == vno->u.result)
4187 return *slot;
4190 /* ??? There's also optimistic vs. previous committed state merging
4191 that is problematic for the case of unwinding. */
4193 /* ??? We should return NULL if we do not use 'vno' and have the
4194 caller release it. */
4195 gcc_assert (!*slot);
4197 *slot = vno;
4198 vno->next = last_inserted_nary;
4199 last_inserted_nary = vno;
4200 return vno;
4203 /* Insert an n-ary operation into the current hash table using its
4204 pieces. Return the vn_nary_op_t structure we created and put in
4205 the hashtable. */
4207 vn_nary_op_t
4208 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
4209 tree type, tree *ops,
4210 tree result, unsigned int value_id)
4212 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
4213 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
4214 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
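4215 /* As above, but record RESULT as a predicated value, valid only
4216 below the destination of edge PRED_E, or return NULL if not recordable. */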
4217 static vn_nary_op_t
4218 vn_nary_op_insert_pieces_predicated (unsigned int length, enum tree_code code,
4219 tree type, tree *ops,
4220 tree result, unsigned int value_id,
4221 edge pred_e)
4223 /* ??? Currently tracking BBs. */
4224 if (! single_pred_p (pred_e->dest))
4226 /* Never record for backedges. */
4227 if (pred_e->flags & EDGE_DFS_BACK)
4228 return NULL;
4229 edge_iterator ei;
4230 edge e;
4231 int cnt = 0;
4232 /* Ignore backedges. */
4233 FOR_EACH_EDGE (e, ei, pred_e->dest->preds)
4234 if (! dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
4235 cnt++;
4236 if (cnt != 1)
4237 return NULL;
4239 if (dump_file && (dump_flags & TDF_DETAILS)
4240 /* ??? Fix dumping, but currently we only get comparisons. */
4241 && TREE_CODE_CLASS (code) == tcc_comparison)
4243 fprintf (dump_file, "Recording on edge %d->%d ", pred_e->src->index,
4244 pred_e->dest->index);
4245 print_generic_expr (dump_file, ops[0], TDF_SLIM);
4246 fprintf (dump_file, " %s ", get_tree_code_name (code));
4247 print_generic_expr (dump_file, ops[1], TDF_SLIM);
4248 fprintf (dump_file, " == %s\n",
4249 integer_zerop (result) ? "false" : "true");
4251 vn_nary_op_t vno1 = alloc_vn_nary_op (length, NULL_TREE, value_id);
4252 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
4253 vno1->predicated_values = 1;
4254 vno1->u.values = (vn_pval *) obstack_alloc (&vn_tables_obstack,
4255 sizeof (vn_pval));
4256 vno1->u.values->next = NULL;
4257 vno1->u.values->result = result;
4258 vno1->u.values->n = 1;
4259 vno1->u.values->valid_dominated_by_p[0] = pred_e->dest->index;
4260 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
4263 static bool
4264 dominated_by_p_w_unex (basic_block bb1, basic_block bb2, bool);
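4265 /* Return the value of VNO that is valid in BB, or NULL_TREE if none is. */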
4266 static tree
4267 vn_nary_op_get_predicated_value (vn_nary_op_t vno, basic_block bb)
4269 if (! vno->predicated_values)
4270 return vno->u.result;
4271 for (vn_pval *val = vno->u.values; val; val = val->next)
4272 for (unsigned i = 0; i < val->n; ++i)
4273 /* Do not handle backedge executability optimistically since
4274 when figuring out whether to iterate we do not consider
4275 changed predication. */
4276 if (dominated_by_p_w_unex
4277 (bb, BASIC_BLOCK_FOR_FN (cfun, val->valid_dominated_by_p[i]),
4278 false))
4279 return val->result;
4280 return NULL_TREE;
4283 /* Insert the rhs of STMT into the current hash table with a value number of
4284 RESULT. */
4286 static vn_nary_op_t
4287 vn_nary_op_insert_stmt (gimple *stmt, tree result)
4289 vn_nary_op_t vno1
4290 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
4291 result, VN_INFO (result)->value_id);
4292 init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (stmt));
4293 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
4296 /* Compute a hashcode for PHI operation VP1 and return it. */
4298 static inline hashval_t
4299 vn_phi_compute_hash (vn_phi_t vp1)
4301 inchash::hash hstate;
4302 tree phi1op;
4303 tree type;
4304 edge e;
4305 edge_iterator ei;
4307 hstate.add_int (EDGE_COUNT (vp1->block->preds));
4308 switch (EDGE_COUNT (vp1->block->preds))
4310 case 1:
4311 break;
4312 case 2:
4313 if (vp1->block->loop_father->header == vp1->block)
4314 ;
4315 else
4316 break;
4317 /* Fallthru. */
4318 default:
4319 hstate.add_int (vp1->block->index);
4322 /* If all PHI arguments are constants we need to distinguish
4323 the PHI node via its type. */
4324 type = vp1->type;
4325 hstate.merge_hash (vn_hash_type (type));
4327 FOR_EACH_EDGE (e, ei, vp1->block->preds)
4329 /* Don't hash backedge values; they need to be handled as VN_TOP
4330 for optimistic value-numbering. */
4331 if (e->flags & EDGE_DFS_BACK)
4332 continue;
4334 phi1op = vp1->phiargs[e->dest_idx];
4335 if (phi1op == VN_TOP)
4336 continue;
4337 inchash::add_expr (phi1op, hstate);
4340 return hstate.end ();
4344 /* Return true if COND1 and COND2 represent the same condition, set
4345 *INVERTED_P if one needs to be inverted to make it the same as
4346 the other. */
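4347 /* E.g. a < b equals b > a and, if NaNs permit, a >= b with *INVERTED_P set. */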
4348 static bool
4349 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
4350 gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
4352 enum tree_code code1 = gimple_cond_code (cond1);
4353 enum tree_code code2 = gimple_cond_code (cond2);
4355 *inverted_p = false;
4356 if (code1 == code2)
4357 ;
4358 else if (code1 == swap_tree_comparison (code2))
4359 std::swap (lhs2, rhs2);
4360 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
4361 *inverted_p = true;
4362 else if (code1 == invert_tree_comparison
4363 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
4365 std::swap (lhs2, rhs2);
4366 *inverted_p = true;
4368 else
4369 return false;
4371 return ((expressions_equal_p (lhs1, lhs2)
4372 && expressions_equal_p (rhs1, rhs2))
4373 || (commutative_tree_code (code1)
4374 && expressions_equal_p (lhs1, rhs2)
4375 && expressions_equal_p (rhs1, lhs2)));
4378 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
4380 static int
4381 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
4383 if (vp1->hashcode != vp2->hashcode)
4384 return false;
4386 if (vp1->block != vp2->block)
4388 if (EDGE_COUNT (vp1->block->preds) != EDGE_COUNT (vp2->block->preds))
4389 return false;
4391 switch (EDGE_COUNT (vp1->block->preds))
4393 case 1:
4394 /* Single-arg PHIs are just copies. */
4395 break;
4397 case 2:
4399 /* Rule out backedges into the PHI. */
4400 if (vp1->block->loop_father->header == vp1->block
4401 || vp2->block->loop_father->header == vp2->block)
4402 return false;
4404 /* If the PHI nodes do not have compatible types
4405 they are not the same. */
4406 if (!types_compatible_p (vp1->type, vp2->type))
4407 return false;
4409 basic_block idom1
4410 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
4411 basic_block idom2
4412 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
4413 /* If the immediate dominators end in switch stmts multiple
4414 values may end up in the same PHI arg via intermediate
4415 CFG merges. */
4416 if (EDGE_COUNT (idom1->succs) != 2
4417 || EDGE_COUNT (idom2->succs) != 2)
4418 return false;
4420 /* Verify the controlling stmt is the same. */
4421 gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
4422 gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
4423 if (! last1 || ! last2)
4424 return false;
4425 bool inverted_p;
4426 if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
4427 last2, vp2->cclhs, vp2->ccrhs,
4428 &inverted_p))
4429 return false;
4431 /* Get at true/false controlled edges into the PHI. */
4432 edge te1, te2, fe1, fe2;
4433 if (! extract_true_false_controlled_edges (idom1, vp1->block,
4434 &te1, &fe1)
4435 || ! extract_true_false_controlled_edges (idom2, vp2->block,
4436 &te2, &fe2))
4437 return false;
4439 /* Swap edges if the second condition is the inverse of the
4440 first. */
4441 if (inverted_p)
4442 std::swap (te2, fe2);
4444 /* ??? Handle VN_TOP specially. */
4445 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
4446 vp2->phiargs[te2->dest_idx])
4447 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
4448 vp2->phiargs[fe2->dest_idx]))
4449 return false;
4451 return true;
4454 default:
4455 return false;
4459 /* If the PHI nodes do not have compatible types
4460 they are not the same. */
4461 if (!types_compatible_p (vp1->type, vp2->type))
4462 return false;
4464 /* Any phi in the same block will have its arguments in the
4465 same edge order, because of how we store phi nodes. */
4466 unsigned nargs = EDGE_COUNT (vp1->block->preds);
4467 for (unsigned i = 0; i < nargs; ++i)
4469 tree phi1op = vp1->phiargs[i];
4470 tree phi2op = vp2->phiargs[i];
4471 if (phi1op == phi2op)
4472 continue;
4473 if (!expressions_equal_p (phi1op, phi2op))
4474 return false;
4477 return true;
4480 /* Lookup PHI in the current hash table, and return the resulting
4481 value number if it exists in the hash table. Return NULL_TREE if
4482 it does not exist in the hash table. */
4484 static tree
4485 vn_phi_lookup (gimple *phi, bool backedges_varying_p)
4487 vn_phi_s **slot;
4488 struct vn_phi_s *vp1;
4489 edge e;
4490 edge_iterator ei;
4492 vp1 = XALLOCAVAR (struct vn_phi_s,
4493 sizeof (struct vn_phi_s)
4494 + (gimple_phi_num_args (phi) - 1) * sizeof (tree));
4496 /* Canonicalize the SSA_NAMEs to their value number. */
4497 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
4499 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4500 if (TREE_CODE (def) == SSA_NAME
4501 && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
4502 def = SSA_VAL (def);
4503 vp1->phiargs[e->dest_idx] = def;
4505 vp1->type = TREE_TYPE (gimple_phi_result (phi));
4506 vp1->block = gimple_bb (phi);
4507 /* Extract values of the controlling condition. */
4508 vp1->cclhs = NULL_TREE;
4509 vp1->ccrhs = NULL_TREE;
4510 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
4511 if (EDGE_COUNT (idom1->succs) == 2)
4512 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
4514 /* ??? We want to use SSA_VAL here. But possibly not
4515 allow VN_TOP. */
4516 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
4517 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
4519 vp1->hashcode = vn_phi_compute_hash (vp1);
4520 slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, NO_INSERT);
4521 if (!slot)
4522 return NULL_TREE;
4523 return (*slot)->result;
4526 /* Insert PHI into the current hash table with a value number of
4527 RESULT. */
4529 static vn_phi_t
4530 vn_phi_insert (gimple *phi, tree result, bool backedges_varying_p)
4532 vn_phi_s **slot;
4533 vn_phi_t vp1 = (vn_phi_t) obstack_alloc (&vn_tables_obstack,
4534 sizeof (vn_phi_s)
4535 + ((gimple_phi_num_args (phi) - 1)
4536 * sizeof (tree)));
4537 edge e;
4538 edge_iterator ei;
4540 /* Canonicalize the SSA_NAMEs to their value number. */
4541 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
4543 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4544 if (TREE_CODE (def) == SSA_NAME
4545 && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
4546 def = SSA_VAL (def);
4547 vp1->phiargs[e->dest_idx] = def;
4549 vp1->value_id = VN_INFO (result)->value_id;
4550 vp1->type = TREE_TYPE (gimple_phi_result (phi));
4551 vp1->block = gimple_bb (phi);
4552 /* Extract values of the controlling condition. */
4553 vp1->cclhs = NULL_TREE;
4554 vp1->ccrhs = NULL_TREE;
4555 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
4556 if (EDGE_COUNT (idom1->succs) == 2)
4557 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
4559 /* ??? We want to use SSA_VAL here. But possibly not
4560 allow VN_TOP. */
4561 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
4562 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
4564 vp1->result = result;
4565 vp1->hashcode = vn_phi_compute_hash (vp1);
4567 slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
4568 gcc_assert (!*slot);
4570 *slot = vp1;
4571 vp1->next = last_inserted_phi;
4572 last_inserted_phi = vp1;
4573 return vp1;
4577 /* Return true if BB1 is dominated by BB2 taking into account edges
4578 that are not executable. When ALLOW_BACK is false consider
4579 non-executable backedges as executable. */
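4580 /* I.e. approximate dominance as if not-executable edges were removed. */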
4581 static bool
4582 dominated_by_p_w_unex (basic_block bb1, basic_block bb2, bool allow_back)
4584 edge_iterator ei;
4585 edge e;
4587 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4588 return true;
4590 /* Before iterating we'd like to know if there exists an
4591 (executable) path from bb2 to bb1 at all, if not we can
4592 directly return false. For now simply iterate once. */
4594 /* Iterate to the single executable bb1 predecessor. */
4595 if (EDGE_COUNT (bb1->preds) > 1)
4597 edge prede = NULL;
4598 FOR_EACH_EDGE (e, ei, bb1->preds)
4599 if ((e->flags & EDGE_EXECUTABLE)
4600 || (!allow_back && (e->flags & EDGE_DFS_BACK)))
4602 if (prede)
4604 prede = NULL;
4605 break;
4607 prede = e;
4609 if (prede)
4611 bb1 = prede->src;
4613 /* Re-do the dominance check with changed bb1. */
4614 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4615 return true;
4619 /* Iterate to the single executable bb2 successor. */
4620 edge succe = NULL;
4621 FOR_EACH_EDGE (e, ei, bb2->succs)
4622 if ((e->flags & EDGE_EXECUTABLE)
4623 || (!allow_back && (e->flags & EDGE_DFS_BACK)))
4625 if (succe)
4627 succe = NULL;
4628 break;
4630 succe = e;
4632 if (succe)
4634 /* Verify the reached block is only reached through succe.
4635 If there is only one edge we can spare us the dominator
4636 check and iterate directly. */
4637 if (EDGE_COUNT (succe->dest->preds) > 1)
4639 FOR_EACH_EDGE (e, ei, succe->dest->preds)
4640 if (e != succe
4641 && ((e->flags & EDGE_EXECUTABLE)
4642 || (!allow_back && (e->flags & EDGE_DFS_BACK))))
4644 succe = NULL;
4645 break;
4648 if (succe)
4650 bb2 = succe->dest;
4652 /* Re-do the dominance check with changed bb2. */
4653 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4654 return true;
4658 /* We could now iterate updating bb1 / bb2. */
4659 return false;
4662 /* Set the value number of FROM to TO, return true if it has changed
4663 as a result. */
4665 static inline bool
4666 set_ssa_val_to (tree from, tree to)
4668 vn_ssa_aux_t from_info = VN_INFO (from);
4669 tree currval = from_info->valnum; // SSA_VAL (from)
4670 poly_int64 toff, coff;
4671 bool curr_undefined = false;
4672 bool curr_invariant = false;
4674 /* The only thing we allow as value numbers are ssa_names
4675 and invariants. So assert that here. We don't allow VN_TOP
4676 as visiting a stmt should produce a value-number other than
4677 that.
4678 ??? Still VN_TOP can happen for unreachable code, so force
4679 it to varying in that case. Not all code is prepared to
4680 get VN_TOP on valueization. */
4681 if (to == VN_TOP)
4683 /* ??? When iterating and visiting PHI <undef, backedge-value>
4684 for the first time we rightfully get VN_TOP and we need to
4685 preserve that to optimize for example gcc.dg/tree-ssa/ssa-sccvn-2.c.
4686 With SCCVN we were simply lucky we iterated the other PHI
4687 cycles first and thus visited the backedge-value DEF. */
4688 if (currval == VN_TOP)
4689 goto set_and_exit;
4690 if (dump_file && (dump_flags & TDF_DETAILS))
4691 fprintf (dump_file, "Forcing value number to varying on "
4692 "receiving VN_TOP\n");
4693 to = from;
4696 gcc_checking_assert (to != NULL_TREE
4697 && ((TREE_CODE (to) == SSA_NAME
4698 && (to == from || SSA_VAL (to) == to))
4699 || is_gimple_min_invariant (to)));
4701 if (from != to)
4703 if (currval == from)
4705 if (dump_file && (dump_flags & TDF_DETAILS))
4707 fprintf (dump_file, "Not changing value number of ");
4708 print_generic_expr (dump_file, from);
4709 fprintf (dump_file, " from VARYING to ");
4710 print_generic_expr (dump_file, to);
4711 fprintf (dump_file, "\n");
4713 return false;
4715 curr_invariant = is_gimple_min_invariant (currval);
4716 curr_undefined = (TREE_CODE (currval) == SSA_NAME
4717 && ssa_undefined_value_p (currval, false));
4718 if (currval != VN_TOP
4719 && !curr_invariant
4720 && !curr_undefined
4721 && is_gimple_min_invariant (to))
4723 if (dump_file && (dump_flags & TDF_DETAILS))
4725 fprintf (dump_file, "Forcing VARYING instead of changing "
4726 "value number of ");
4727 print_generic_expr (dump_file, from);
4728 fprintf (dump_file, " from ");
4729 print_generic_expr (dump_file, currval);
4730 fprintf (dump_file, " (non-constant) to ");
4731 print_generic_expr (dump_file, to);
4732 fprintf (dump_file, " (constant)\n");
4734 to = from;
4736 else if (currval != VN_TOP
4737 && !curr_undefined
4738 && TREE_CODE (to) == SSA_NAME
4739 && ssa_undefined_value_p (to, false))
4741 if (dump_file && (dump_flags & TDF_DETAILS))
4743 fprintf (dump_file, "Forcing VARYING instead of changing "
4744 "value number of ");
4745 print_generic_expr (dump_file, from);
4746 fprintf (dump_file, " from ");
4747 print_generic_expr (dump_file, currval);
4748 fprintf (dump_file, " (non-undefined) to ");
4749 print_generic_expr (dump_file, to);
4750 fprintf (dump_file, " (undefined)\n");
4752 to = from;
4754 else if (TREE_CODE (to) == SSA_NAME
4755 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
4756 to = from;
4759 set_and_exit:
4760 if (dump_file && (dump_flags & TDF_DETAILS))
4762 fprintf (dump_file, "Setting value number of ");
4763 print_generic_expr (dump_file, from);
4764 fprintf (dump_file, " to ");
4765 print_generic_expr (dump_file, to);
4768 if (currval != to
4769 && !operand_equal_p (currval, to, 0)
4770 /* Different undefined SSA names are not actually different. See
4771 PR82320 for a testcase where we'd otherwise not terminate iteration. */
4772 && !(curr_undefined
4773 && TREE_CODE (to) == SSA_NAME
4774 && ssa_undefined_value_p (to, false))
4775 /* ??? For addresses involving volatile objects or types operand_equal_p
4776 does not reliably detect ADDR_EXPRs as equal. We know we are only
4777 getting invariant gimple addresses here, so can use
4778 get_addr_base_and_unit_offset to do this comparison. */
4779 && !(TREE_CODE (currval) == ADDR_EXPR
4780 && TREE_CODE (to) == ADDR_EXPR
4781 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
4782 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
4783 && known_eq (coff, toff)))
4785 if (to != from
4786 && currval != VN_TOP
4787 && !curr_undefined
4788 /* We do not want to allow lattice transitions from one value
4789 to another since that may lead to not terminating iteration
4790 (see PR95049). Since there's no convenient way to check
4791 for the allowed transition of VAL -> PHI (loop entry value,
4792 same on two PHIs, to same PHI result) we restrict the check
4793 to invariants. */
4794 && curr_invariant
4795 && is_gimple_min_invariant (to))
4797 if (dump_file && (dump_flags & TDF_DETAILS))
4798 fprintf (dump_file, " forced VARYING");
4799 to = from;
4801 if (dump_file && (dump_flags & TDF_DETAILS))
4802 fprintf (dump_file, " (changed)\n");
4803 from_info->valnum = to;
4804 return true;
4806 if (dump_file && (dump_flags & TDF_DETAILS))
4807 fprintf (dump_file, "\n");
4808 return false;
4811 /* Value number all definitions in STMT to themselves.
4812 Return true if a value number changed. */
4814 static bool
4815 defs_to_varying (gimple *stmt)
4817 bool changed = false;
4818 ssa_op_iter iter;
4819 def_operand_p defp;
4821 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
4823 tree def = DEF_FROM_PTR (defp);
4824 changed |= set_ssa_val_to (def, def);
4826 return changed;
4829 /* Visit a copy between LHS and RHS, return true if the value number
4830 changed. */
4832 static bool
4833 visit_copy (tree lhs, tree rhs)
4835 /* Valueize. */
4836 rhs = SSA_VAL (rhs);
4838 return set_ssa_val_to (lhs, rhs);
4841 /* Look up a value for OP in type WIDE_TYPE where the value in the
4842 type of OP is the same. */
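4843 /* E.g. for a short OP and an int WIDE_TYPE return an int holding OP's value. */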
4844 static tree
4845 valueized_wider_op (tree wide_type, tree op, bool allow_truncate)
4847 if (TREE_CODE (op) == SSA_NAME)
4848 op = vn_valueize (op);
4850 /* Either we have the op widened available. */
4851 tree ops[3] = {};
4852 ops[0] = op;
4853 tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
4854 wide_type, ops, NULL);
4855 if (tem)
4856 return tem;
4858 /* Or the op is truncated from some existing value. */
4859 if (allow_truncate && TREE_CODE (op) == SSA_NAME)
4861 gimple *def = SSA_NAME_DEF_STMT (op);
4862 if (is_gimple_assign (def)
4863 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
4865 tem = gimple_assign_rhs1 (def);
4866 if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
4868 if (TREE_CODE (tem) == SSA_NAME)
4869 tem = vn_valueize (tem);
4870 return tem;
4875 /* For constants simply extend it. */
4876 if (TREE_CODE (op) == INTEGER_CST)
4877 return wide_int_to_tree (wide_type, wi::to_wide (op));
4879 return NULL_TREE;
4882 /* Visit a nary operator RHS, value number it, and return true if the
4883 value number of LHS has changed as a result. */
4885 static bool
4886 visit_nary_op (tree lhs, gassign *stmt)
4888 vn_nary_op_t vnresult;
4889 tree result = vn_nary_op_lookup_stmt (stmt, &vnresult);
4890 if (! result && vnresult)
4891 result = vn_nary_op_get_predicated_value (vnresult, gimple_bb (stmt));
4892 if (result)
4893 return set_ssa_val_to (lhs, result);
4895 /* Do some special pattern matching for redundancies of operations
4896 in different types. */
4897 enum tree_code code = gimple_assign_rhs_code (stmt);
4898 tree type = TREE_TYPE (lhs);
4899 tree rhs1 = gimple_assign_rhs1 (stmt);
4900 switch (code)
4902 CASE_CONVERT:
4903 /* Match arithmetic done in a different type where we can easily
4904 substitute the result from some earlier sign-changed or widened
4905 operation. */
4906 if (INTEGRAL_TYPE_P (type)
4907 && TREE_CODE (rhs1) == SSA_NAME
4908 /* We only handle sign-changes, zero-extension -> & mask or
4909 sign-extension if we know the inner operation doesn't
4910 overflow. */
4911 && (((TYPE_UNSIGNED (TREE_TYPE (rhs1))
4912 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
4913 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
4914 && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
4915 || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
4917 gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
4918 if (def
4919 && (gimple_assign_rhs_code (def) == PLUS_EXPR
4920 || gimple_assign_rhs_code (def) == MINUS_EXPR
4921 || gimple_assign_rhs_code (def) == MULT_EXPR))
4923 tree ops[3] = {};
4924 /* When requiring a sign-extension we cannot model a
4925 previous truncation with a single op so don't bother. */
4926 bool allow_truncate = TYPE_UNSIGNED (TREE_TYPE (rhs1));
4927 /* Either we have the op widened available. */
4928 ops[0] = valueized_wider_op (type, gimple_assign_rhs1 (def),
4929 allow_truncate);
4930 if (ops[0])
4931 ops[1] = valueized_wider_op (type, gimple_assign_rhs2 (def),
4932 allow_truncate);
4933 if (ops[0] && ops[1])
4935 ops[0] = vn_nary_op_lookup_pieces
4936 (2, gimple_assign_rhs_code (def), type, ops, NULL);
4937 /* We have wider operation available. */
4938 if (ops[0]
4939 /* If the leader is a wrapping operation we can
4940 insert it for code hoisting w/o introducing
4941 undefined overflow. If it is not it has to
4942 be available. See PR86554. */
4943 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (ops[0]))
4944 || (rpo_avail && vn_context_bb
4945 && rpo_avail->eliminate_avail (vn_context_bb,
4946 ops[0]))))
4948 unsigned lhs_prec = TYPE_PRECISION (type);
4949 unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
4950 if (lhs_prec == rhs_prec
4951 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
4952 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
4954 gimple_match_op match_op (gimple_match_cond::UNCOND,
4955 NOP_EXPR, type, ops[0]);
4956 result = vn_nary_build_or_lookup (&match_op);
4957 if (result)
4959 bool changed = set_ssa_val_to (lhs, result);
4960 vn_nary_op_insert_stmt (stmt, result);
4961 return changed;
4964 else
4966 tree mask = wide_int_to_tree
4967 (type, wi::mask (rhs_prec, false, lhs_prec));
4968 gimple_match_op match_op (gimple_match_cond::UNCOND,
4969 BIT_AND_EXPR,
4970 TREE_TYPE (lhs),
4971 ops[0], mask);
4972 result = vn_nary_build_or_lookup (&match_op);
4973 if (result)
4975 bool changed = set_ssa_val_to (lhs, result);
4976 vn_nary_op_insert_stmt (stmt, result);
4977 return changed;
4984 break;
4985 case BIT_AND_EXPR:
4986 if (INTEGRAL_TYPE_P (type)
4987 && TREE_CODE (rhs1) == SSA_NAME
4988 && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST
4989 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)
4990 && default_vn_walk_kind != VN_NOWALK
4991 && CHAR_BIT == 8
4992 && BITS_PER_UNIT == 8
4993 && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
4994 && !integer_all_onesp (gimple_assign_rhs2 (stmt))
4995 && !integer_zerop (gimple_assign_rhs2 (stmt)))
4997 gassign *ass = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
4998 if (ass
4999 && !gimple_has_volatile_ops (ass)
5000 && vn_get_stmt_kind (ass) == VN_REFERENCE)
5002 tree last_vuse = gimple_vuse (ass);
5003 tree op = gimple_assign_rhs1 (ass);
5004 tree result = vn_reference_lookup (op, gimple_vuse (ass),
5005 default_vn_walk_kind,
5006 NULL, true, &last_vuse,
5007 gimple_assign_rhs2 (stmt));
5008 if (result
5009 && useless_type_conversion_p (TREE_TYPE (result),
5010 TREE_TYPE (op)))
5011 return set_ssa_val_to (lhs, result);
5014 break;
5015 case TRUNC_DIV_EXPR:
5016 if (TYPE_UNSIGNED (type))
5017 break;
5018 /* Fallthru. */
5019 case RDIV_EXPR:
5020 case MULT_EXPR:
5021 /* Match up ([-]a){/,*}([-])b with v=a{/,*}b, replacing it with -v. */
5022 if (! HONOR_SIGN_DEPENDENT_ROUNDING (type))
5024 tree rhs[2];
5025 rhs[0] = rhs1;
5026 rhs[1] = gimple_assign_rhs2 (stmt);
5027 for (unsigned i = 0; i <= 1; ++i)
5029 unsigned j = i == 0 ? 1 : 0;
5030 tree ops[2];
5031 gimple_match_op match_op (gimple_match_cond::UNCOND,
5032 NEGATE_EXPR, type, rhs[i]);
5033 ops[i] = vn_nary_build_or_lookup_1 (&match_op, false, true);
5034 ops[j] = rhs[j];
5035 if (ops[i]
5036 && (ops[0] = vn_nary_op_lookup_pieces (2, code,
5037 type, ops, NULL)))
5039 gimple_match_op match_op (gimple_match_cond::UNCOND,
5040 NEGATE_EXPR, type, ops[0]);
5041 result = vn_nary_build_or_lookup_1 (&match_op, true, false);
5042 if (result)
5044 bool changed = set_ssa_val_to (lhs, result);
5045 vn_nary_op_insert_stmt (stmt, result);
5046 return changed;
5051 break;
5052 default:
5053 break;
5056 bool changed = set_ssa_val_to (lhs, lhs);
5057 vn_nary_op_insert_stmt (stmt, lhs);
5058 return changed;
5061 /* Visit a call STMT storing into LHS. Return true if the value number
5062 of the LHS has changed as a result. */
5064 static bool
5065 visit_reference_op_call (tree lhs, gcall *stmt)
5067 bool changed = false;
5068 struct vn_reference_s vr1;
5069 vn_reference_t vnresult = NULL;
5070 tree vdef = gimple_vdef (stmt);
5072 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
5073 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5074 lhs = NULL_TREE;
5076 vn_reference_lookup_call (stmt, &vnresult, &vr1);
5077 if (vnresult)
5079 if (vnresult->result_vdef && vdef)
5080 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
5081 else if (vdef)
5082 /* If the call was discovered to be pure or const reflect
5083 that as far as possible. */
5084 changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
5086 if (!vnresult->result && lhs)
5087 vnresult->result = lhs;
5089 if (vnresult->result && lhs)
5090 changed |= set_ssa_val_to (lhs, vnresult->result);
5092 else
5094 vn_reference_t vr2;
5095 vn_reference_s **slot;
5096 tree vdef_val = vdef;
5097 if (vdef)
5099 /* If we value numbered the function of an indirect call to
5100 one not clobbering memory, value number its VDEF to its
5101 VUSE. */
5102 tree fn = gimple_call_fn (stmt);
5103 if (fn && TREE_CODE (fn) == SSA_NAME)
5105 fn = SSA_VAL (fn);
5106 if (TREE_CODE (fn) == ADDR_EXPR
5107 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5108 && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
5109 & (ECF_CONST | ECF_PURE)))
5110 vdef_val = vuse_ssa_val (gimple_vuse (stmt));
5112 changed |= set_ssa_val_to (vdef, vdef_val);
5114 if (lhs)
5115 changed |= set_ssa_val_to (lhs, lhs);
5116 vr2 = XOBNEW (&vn_tables_obstack, vn_reference_s);
5117 vr2->vuse = vr1.vuse;
5118 /* As we are not walking the virtual operand chain we know the
5119 shared_lookup_references are still original so we can re-use
5120 them here. */
5121 vr2->operands = vr1.operands.copy ();
5122 vr2->type = vr1.type;
5123 vr2->punned = vr1.punned;
5124 vr2->set = vr1.set;
5125 vr2->base_set = vr1.base_set;
5126 vr2->hashcode = vr1.hashcode;
5127 vr2->result = lhs;
5128 vr2->result_vdef = vdef_val;
5129 vr2->value_id = 0;
5130 slot = valid_info->references->find_slot_with_hash (vr2, vr2->hashcode,
5131 INSERT);
5132 gcc_assert (!*slot);
5133 *slot = vr2;
5134 vr2->next = last_inserted_ref;
5135 last_inserted_ref = vr2;
5138 return changed;
5141 /* Visit a load from a reference operator RHS, part of STMT, value number it,
5142 and return true if the value number of the LHS has changed as a result. */
5144 static bool
5145 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
5147 bool changed = false;
5148 tree result;
5149 vn_reference_t res;
5151 tree vuse = gimple_vuse (stmt);
5152 tree last_vuse = vuse;
5153 result = vn_reference_lookup (op, vuse, default_vn_walk_kind, &res, true, &last_vuse);
5155 /* We handle type-punning through unions by value-numbering based
5156 on offset and size of the access. Be prepared to handle a
5157 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
5158 if (result
5159 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
5161 /* Avoid the type punning in case the result mode has padding where
5162 the op we look up has not. */
5163 if (maybe_lt (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (result))),
5164 GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (op)))))
5165 result = NULL_TREE;
5166 else
5168 /* We will be setting the value number of lhs to the value number
5169 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
5170 So first simplify and lookup this expression to see if it
5171 is already available. */
5172 gimple_match_op res_op (gimple_match_cond::UNCOND,
5173 VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
5174 result = vn_nary_build_or_lookup (&res_op);
5175 if (result
5176 && TREE_CODE (result) == SSA_NAME
5177 && VN_INFO (result)->needs_insertion)
5178 /* Track whether this is the canonical expression for different
5179 typed loads. We use that as a stopgap measure for code
5180 hoisting when dealing with floating point loads. */
5181 res->punned = true;
5184 /* When building the conversion fails, avoid inserting the reference
5185 again. */
5186 if (!result)
5187 return set_ssa_val_to (lhs, lhs);
5190 if (result)
5191 changed = set_ssa_val_to (lhs, result);
5192 else
5194 changed = set_ssa_val_to (lhs, lhs);
5195 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
5196 if (vuse && SSA_VAL (last_vuse) != SSA_VAL (vuse))
5198 if (dump_file && (dump_flags & TDF_DETAILS))
5200 fprintf (dump_file, "Using extra use virtual operand ");
5201 print_generic_expr (dump_file, last_vuse);
5202 fprintf (dump_file, "\n");
5204 vn_reference_insert (op, lhs, vuse, NULL_TREE);
5208 return changed;
5212 /* Visit a store to a reference operator LHS, part of STMT, value number it,
5213 and return true if the value number of the LHS has changed as a result. */
5215 static bool
5216 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
5218 bool changed = false;
5219 vn_reference_t vnresult = NULL;
5220 tree assign;
5221 bool resultsame = false;
5222 tree vuse = gimple_vuse (stmt);
5223 tree vdef = gimple_vdef (stmt);
5225 if (TREE_CODE (op) == SSA_NAME)
5226 op = SSA_VAL (op);
5228 /* First we want to lookup using the *vuses* from the store and see
5229 if the last store to this location with the same address
5230 had the same value.
5232 The vuses represent the memory state before the store. If the
5233 memory state, address, and value of the store are the same as the
5234 last store to this location, then this store will produce the
5235 same memory state as that store.
5237 In this case the vdef versions for this store are value numbered to those
5238 vuse versions, since they represent the same memory state after
5239 this store.
5241 Otherwise, the vdefs for the store are used when inserting into
5242 the table, since the store generates a new memory state. */
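5243 /* E.g. for *p = x; ...; *p = x; the second store leaves memory unchanged. */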
5244 vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
5245 if (vnresult
5246 && vnresult->result)
5248 tree result = vnresult->result;
5249 gcc_checking_assert (TREE_CODE (result) != SSA_NAME
5250 || result == SSA_VAL (result));
5251 resultsame = expressions_equal_p (result, op);
5252 if (resultsame)
5254 /* If the TBAA state isn't compatible for downstream reads
5255 we cannot value-number the VDEFs the same. */
5256 ao_ref lhs_ref;
5257 ao_ref_init (&lhs_ref, lhs);
5258 alias_set_type set = ao_ref_alias_set (&lhs_ref);
5259 alias_set_type base_set = ao_ref_base_alias_set (&lhs_ref);
5260 if ((vnresult->set != set
5261 && ! alias_set_subset_of (set, vnresult->set))
5262 || (vnresult->base_set != base_set
5263 && ! alias_set_subset_of (base_set, vnresult->base_set)))
5264 resultsame = false;
5268 if (!resultsame)
5270 /* Only perform the following when being called from PRE
5271 which embeds tail merging. */
5272 if (default_vn_walk_kind == VN_WALK)
5274 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
5275 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
5276 if (vnresult)
5278 VN_INFO (vdef)->visited = true;
5279 return set_ssa_val_to (vdef, vnresult->result_vdef);
5283 if (dump_file && (dump_flags & TDF_DETAILS))
5285 fprintf (dump_file, "No store match\n");
5286 fprintf (dump_file, "Value numbering store ");
5287 print_generic_expr (dump_file, lhs);
5288 fprintf (dump_file, " to ");
5289 print_generic_expr (dump_file, op);
5290 fprintf (dump_file, "\n");
5292 /* Have to set value numbers before insert, since insert is
5293 going to valueize the references in-place. */
5294 if (vdef)
5295 changed |= set_ssa_val_to (vdef, vdef);
5297 /* Do not insert structure copies into the tables. */
5298 if (is_gimple_min_invariant (op)
5299 || is_gimple_reg (op))
5300 vn_reference_insert (lhs, op, vdef, NULL);
5302 /* Only perform the following when being called from PRE
5303 which embeds tail merging. */
5304 if (default_vn_walk_kind == VN_WALK)
5306 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
5307 vn_reference_insert (assign, lhs, vuse, vdef);
5310 else
5312 /* We had a match, so value number the vdef to have the value
5313 number of the vuse it came from. */
5315 if (dump_file && (dump_flags & TDF_DETAILS))
5316 fprintf (dump_file, "Store matched earlier value, "
5317 "value numbering store vdefs to matching vuses.\n");
5319 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
5322 return changed;
5325 /* Visit and value number PHI, return true if the value number
5326 changed. When BACKEDGES_VARYING_P is true then assume all
5327 backedge values are varying. When INSERTED is not NULL then
5328 this is just an ahead query for a possible iteration, set INSERTED
5329 to true if we'd insert into the hashtable. */
5331 static bool
5332 visit_phi (gimple *phi, bool *inserted, bool backedges_varying_p)
5334 tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
5335 tree backedge_val = NULL_TREE;
5336 bool seen_non_backedge = false;
5337 tree sameval_base = NULL_TREE;
5338 poly_int64 soff, doff;
5339 unsigned n_executable = 0;
5340 edge_iterator ei;
5341 edge e;
5343 /* TODO: We could check for this in initialization, and replace this
5344 with a gcc_assert. */
5345 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
5346 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
5348 /* We track whether a PHI was CSEd to avoid excessive iterations
5349 that would be necessary only because the PHI changed arguments
5350 but not value. */
5351 if (!inserted)
5352 gimple_set_plf (phi, GF_PLF_1, false);
5354 /* See if all non-TOP arguments have the same value. TOP is
5355 equivalent to everything, so we can ignore it. */
5356 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
5357 if (e->flags & EDGE_EXECUTABLE)
5359 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5361 if (def == PHI_RESULT (phi))
5362 continue;
5363 ++n_executable;
5364 if (TREE_CODE (def) == SSA_NAME)
5366 if (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK))
5367 def = SSA_VAL (def);
5368 if (e->flags & EDGE_DFS_BACK)
5369 backedge_val = def;
5371 if (!(e->flags & EDGE_DFS_BACK))
5372 seen_non_backedge = true;
5373 if (def == VN_TOP)
5374 ;
5375 /* Ignore undefined defs for sameval but record one. */
5376 else if (TREE_CODE (def) == SSA_NAME
5377 && ! virtual_operand_p (def)
5378 && ssa_undefined_value_p (def, false))
5379 seen_undef = def;
5380 else if (sameval == VN_TOP)
5381 sameval = def;
5382 else if (!expressions_equal_p (def, sameval))
5384 /* We know we're arriving only with invariant addresses here,
5385 try harder comparing them. We can do some caching here
5386 which we cannot do in expressions_equal_p. */
5387 if (TREE_CODE (def) == ADDR_EXPR
5388 && TREE_CODE (sameval) == ADDR_EXPR
5389 && sameval_base != (void *)-1)
5391 if (!sameval_base)
5392 sameval_base = get_addr_base_and_unit_offset
5393 (TREE_OPERAND (sameval, 0), &soff);
5394 if (!sameval_base)
5395 sameval_base = (tree)(void *)-1;
5396 else if ((get_addr_base_and_unit_offset
5397 (TREE_OPERAND (def, 0), &doff) == sameval_base)
5398 && known_eq (soff, doff))
5399 continue;
5401 sameval = NULL_TREE;
5402 break;
5406 /* If the value we want to use is flowing over the backedge and we
5407 should take it as VARYING but it has a non-VARYING value, drop to
5408 VARYING.
5409 If we value-number a virtual operand never value-number to the
5410 value from the backedge as that confuses the alias-walking code.
5411 See gcc.dg/torture/pr87176.c. If the value is the same on a
5412 non-backedge everything is OK though. */
5413 bool visited_p;
5414 if ((backedge_val
5415 && !seen_non_backedge
5416 && TREE_CODE (backedge_val) == SSA_NAME
5417 && sameval == backedge_val
5418 && (SSA_NAME_IS_VIRTUAL_OPERAND (backedge_val)
5419 || SSA_VAL (backedge_val) != backedge_val))
5420 /* Do not value-number a virtual operand to something not visited,
5421 though, given that allows us to escape a region in alias walking. */
5422 || (sameval
5423 && TREE_CODE (sameval) == SSA_NAME
5424 && !SSA_NAME_IS_DEFAULT_DEF (sameval)
5425 && SSA_NAME_IS_VIRTUAL_OPERAND (sameval)
5426 && (SSA_VAL (sameval, &visited_p), !visited_p)))
5427 /* Note this just drops to VARYING without inserting the PHI into
5428 the hashes. */
5429 result = PHI_RESULT (phi);
5430 /* If none of the edges was executable keep the value-number at VN_TOP,
5431 if only a single edge is executable use its value. */
5432 else if (n_executable <= 1)
5433 result = seen_undef ? seen_undef : sameval;
5434 /* If we saw only undefined values and VN_TOP use one of the
5435 undefined values. */
5436 else if (sameval == VN_TOP)
5437 result = seen_undef ? seen_undef : sameval;
5438 /* First see if it is equivalent to a phi node in this block. We prefer
5439 this as it allows IV elimination - see PRs 66502 and 67167. */
5440 else if ((result = vn_phi_lookup (phi, backedges_varying_p)))
5442 if (!inserted
5443 && TREE_CODE (result) == SSA_NAME
5444 && gimple_code (SSA_NAME_DEF_STMT (result)) == GIMPLE_PHI)
5446 gimple_set_plf (SSA_NAME_DEF_STMT (result), GF_PLF_1, true);
5447 if (dump_file && (dump_flags & TDF_DETAILS))
5449 fprintf (dump_file, "Marking CSEd to PHI node ");
5450 print_gimple_expr (dump_file, SSA_NAME_DEF_STMT (result),
5451 0, TDF_SLIM);
5452 fprintf (dump_file, "\n");
5456 /* If all values are the same use that, unless we've seen undefined
5457 values as well and the value isn't constant.
5458 CCP/copyprop have the same restriction to not remove uninit warnings. */
5459 else if (sameval
5460 && (! seen_undef || is_gimple_min_invariant (sameval)))
5461 result = sameval;
5462 else
5464 result = PHI_RESULT (phi);
5465 /* Only insert PHIs that are varying, for constant value numbers
5466 we mess up equivalences otherwise as we are only comparing
5467 the immediate controlling predicates. */
5468 vn_phi_insert (phi, result, backedges_varying_p);
5469 if (inserted)
5470 *inserted = true;
5473 return set_ssa_val_to (PHI_RESULT (phi), result);
5476 /* Try to simplify RHS using equivalences and constant folding. */
5478 static tree
5479 try_to_simplify (gassign *stmt)
5481 enum tree_code code = gimple_assign_rhs_code (stmt);
5482 tree tem;
5484 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
5485 in this case, there is no point in doing extra work. */
5486 if (code == SSA_NAME)
5487 return NULL_TREE;
5489 /* First try constant folding based on our current lattice. */
5490 mprts_hook = vn_lookup_simplify_result;
5491 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
5492 mprts_hook = NULL;
5493 if (tem
5494 && (TREE_CODE (tem) == SSA_NAME
5495 || is_gimple_min_invariant (tem)))
5496 return tem;
5498 return NULL_TREE;
5501 /* Visit and value number STMT, return true if the value number
5502 changed. */
5504 static bool
5505 visit_stmt (gimple *stmt, bool backedges_varying_p = false)
5507 bool changed = false;
5509 if (dump_file && (dump_flags & TDF_DETAILS))
5511 fprintf (dump_file, "Value numbering stmt = ");
5512 print_gimple_stmt (dump_file, stmt, 0);
5515 if (gimple_code (stmt) == GIMPLE_PHI)
5516 changed = visit_phi (stmt, NULL, backedges_varying_p);
5517 else if (gimple_has_volatile_ops (stmt))
5518 changed = defs_to_varying (stmt);
5519 else if (gassign *ass = dyn_cast <gassign *> (stmt))
5521 enum tree_code code = gimple_assign_rhs_code (ass);
5522 tree lhs = gimple_assign_lhs (ass);
5523 tree rhs1 = gimple_assign_rhs1 (ass);
5524 tree simplified;
5526 /* Shortcut for copies. Simplifying copies is pointless,
5527 since we copy the expression and value they represent. */
5528 if (code == SSA_NAME
5529 && TREE_CODE (lhs) == SSA_NAME)
5531 changed = visit_copy (lhs, rhs1);
5532 goto done;
5534 simplified = try_to_simplify (ass);
5535 if (simplified)
5537 if (dump_file && (dump_flags & TDF_DETAILS))
5539 fprintf (dump_file, "RHS ");
5540 print_gimple_expr (dump_file, ass, 0);
5541 fprintf (dump_file, " simplified to ");
5542 print_generic_expr (dump_file, simplified);
5543 fprintf (dump_file, "\n");
5546 /* Setting value numbers to constants will occasionally
5547 screw up phi congruence because constants are not
5548 uniquely associated with a single ssa name that can be
5549 looked up. */
5550 if (simplified
5551 && is_gimple_min_invariant (simplified)
5552 && TREE_CODE (lhs) == SSA_NAME)
5554 changed = set_ssa_val_to (lhs, simplified);
5555 goto done;
5557 else if (simplified
5558 && TREE_CODE (simplified) == SSA_NAME
5559 && TREE_CODE (lhs) == SSA_NAME)
5561 changed = visit_copy (lhs, simplified);
5562 goto done;
5565 if ((TREE_CODE (lhs) == SSA_NAME
5566 /* We can substitute SSA_NAMEs that are live over
5567 abnormal edges with their constant value. */
5568 && !(gimple_assign_copy_p (ass)
5569 && is_gimple_min_invariant (rhs1))
5570 && !(simplified
5571 && is_gimple_min_invariant (simplified))
5572 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
5573 /* Stores or copies from SSA_NAMEs that are live over
5574 abnormal edges are a problem. */
5575 || (code == SSA_NAME
5576 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
5577 changed = defs_to_varying (ass);
5578 else if (REFERENCE_CLASS_P (lhs)
5579 || DECL_P (lhs))
5580 changed = visit_reference_op_store (lhs, rhs1, ass);
5581 else if (TREE_CODE (lhs) == SSA_NAME)
5583 if ((gimple_assign_copy_p (ass)
5584 && is_gimple_min_invariant (rhs1))
5585 || (simplified
5586 && is_gimple_min_invariant (simplified)))
5588 if (simplified)
5589 changed = set_ssa_val_to (lhs, simplified);
5590 else
5591 changed = set_ssa_val_to (lhs, rhs1);
5593 else
5595 /* Visit the original statement. */
5596 switch (vn_get_stmt_kind (ass))
5598 case VN_NARY:
5599 changed = visit_nary_op (lhs, ass);
5600 break;
5601 case VN_REFERENCE:
5602 changed = visit_reference_op_load (lhs, rhs1, ass);
5603 break;
5604 default:
5605 changed = defs_to_varying (ass);
5606 break;
5610 else
5611 changed = defs_to_varying (ass);
5613 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
5615 tree lhs = gimple_call_lhs (call_stmt);
5616 if (lhs && TREE_CODE (lhs) == SSA_NAME)
5618 /* Try constant folding based on our current lattice. */
5619 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
5620 vn_valueize);
5621 if (simplified)
5623 if (dump_file && (dump_flags & TDF_DETAILS))
5625 fprintf (dump_file, "call ");
5626 print_gimple_expr (dump_file, call_stmt, 0);
5627 fprintf (dump_file, " simplified to ");
5628 print_generic_expr (dump_file, simplified);
5629 fprintf (dump_file, "\n");
5632 /* Setting value numbers to constants will occasionally
5633 screw up phi congruence because constants are not
5634 uniquely associated with a single ssa name that can be
5635 looked up. */
5636 if (simplified
5637 && is_gimple_min_invariant (simplified))
5639 changed = set_ssa_val_to (lhs, simplified);
5640 if (gimple_vdef (call_stmt))
5641 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
5642 SSA_VAL (gimple_vuse (call_stmt)));
5643 goto done;
5645 else if (simplified
5646 && TREE_CODE (simplified) == SSA_NAME)
5648 changed = visit_copy (lhs, simplified);
5649 if (gimple_vdef (call_stmt))
5650 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
5651 SSA_VAL (gimple_vuse (call_stmt)));
5652 goto done;
5654 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
5656 changed = defs_to_varying (call_stmt);
5657 goto done;
5661 /* Pick up flags from a devirtualization target. */
5662 tree fn = gimple_call_fn (stmt);
5663 int extra_fnflags = 0;
5664 if (fn && TREE_CODE (fn) == SSA_NAME)
5666 fn = SSA_VAL (fn);
5667 if (TREE_CODE (fn) == ADDR_EXPR
5668 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
5669 extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
5671 if ((/* Calls to the same function with the same vuse
5672 and the same operands do not necessarily return the same
5673 value, unless they're pure or const. */
5674 ((gimple_call_flags (call_stmt) | extra_fnflags)
5675 & (ECF_PURE | ECF_CONST))
5676 /* If calls have a vdef, subsequent calls won't have
5677 the same incoming vuse. So, if 2 calls with vdef have the
5678 same vuse, we know they're not subsequent.
5679 We can value number two calls to the same function with the
5680 same vuse and the same operands that are not subsequent to
5681 each other the same, because there is no code in the program
5682 that could compare the two values... */
5683 || (gimple_vdef (call_stmt)
5684 /* ... unless the call returns a pointer which does
5685 not alias with anything else. In that case the
5686 information that the values are distinct is encoded
5687 in the IL. */
5688 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
5689 /* Only perform the following when being called from PRE
5690 which embeds tail merging. */
5691 && default_vn_walk_kind == VN_WALK))
5692 /* Do not process .DEFERRED_INIT since that confuses uninit
5693 analysis. */
5694 && !gimple_call_internal_p (call_stmt, IFN_DEFERRED_INIT))
5695 changed = visit_reference_op_call (lhs, call_stmt);
5696 else
5697 changed = defs_to_varying (call_stmt);
5699 else
5700 changed = defs_to_varying (stmt);
5701 done:
5702 return changed;
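/* The call handling above boils down to a small predicate for when two
   calls may be given the same value number.  A standalone sketch under
   that reading (hypothetical names; booleans as ints):  */
#if 0
static int
calls_may_share_value_number (int pure_or_const, int has_vdef,
			      int returns_noalias_ptr, int in_pre_walk)
{
  if (pure_or_const)
    return 1;
  /* Two calls with a vdef and the same vuse cannot be subsequent, and
     nothing can compare their results - unless a noalias return encodes
     distinctness in the IL.  Restricted to the PRE/tail-merging walk.  */
  return has_vdef && !returns_noalias_ptr && in_pre_walk;
}
#endif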
5706 /* Allocate a value number table. */
5708 static void
5709 allocate_vn_table (vn_tables_t table, unsigned size)
5711 table->phis = new vn_phi_table_type (size);
5712 table->nary = new vn_nary_op_table_type (size);
5713 table->references = new vn_reference_table_type (size);
5716 /* Free a value number table. */
5718 static void
5719 free_vn_table (vn_tables_t table)
5721 /* Walk over elements and release vectors. */
5722 vn_reference_iterator_type hir;
5723 vn_reference_t vr;
5724 FOR_EACH_HASH_TABLE_ELEMENT (*table->references, vr, vn_reference_t, hir)
5725 vr->operands.release ();
5726 delete table->phis;
5727 table->phis = NULL;
5728 delete table->nary;
5729 table->nary = NULL;
5730 delete table->references;
5731 table->references = NULL;
5734 /* Set *ID according to RESULT. */
5736 static void
5737 set_value_id_for_result (tree result, unsigned int *id)
5739 if (result && TREE_CODE (result) == SSA_NAME)
5740 *id = VN_INFO (result)->value_id;
5741 else if (result && is_gimple_min_invariant (result))
5742 *id = get_or_alloc_constant_value_id (result);
5743 else
5744 *id = get_next_value_id ();
5747 /* Set the value ids in the valid hash tables. */
5749 static void
5750 set_hashtable_value_ids (void)
5752 vn_nary_op_iterator_type hin;
5753 vn_phi_iterator_type hip;
5754 vn_reference_iterator_type hir;
5755 vn_nary_op_t vno;
5756 vn_reference_t vr;
5757 vn_phi_t vp;
5759 /* Now set the value ids of the things we had put in the hash
5760 table. */
5762 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
5763 if (! vno->predicated_values)
5764 set_value_id_for_result (vno->u.result, &vno->value_id);
5766 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
5767 set_value_id_for_result (vp->result, &vp->value_id);
5769 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
5770 hir)
5771 set_value_id_for_result (vr->result, &vr->value_id);
5774 /* Return the maximum value id we have ever seen. */
5776 unsigned int
5777 get_max_value_id (void)
5779 return next_value_id;
5782 /* Return the maximum constant value id we have ever seen. */
5784 unsigned int
5785 get_max_constant_value_id (void)
5787 return -next_constant_value_id;
5790 /* Return the next unique value id. */
5792 unsigned int
5793 get_next_value_id (void)
5795 gcc_checking_assert ((int)next_value_id > 0);
5796 return next_value_id++;
5799 /* Return the next unique value id for constants. */
5801 unsigned int
5802 get_next_constant_value_id (void)
5804 gcc_checking_assert (next_constant_value_id < 0);
5805 return next_constant_value_id--;
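/* The two counters above carve the id space in half: value ids grow
   upwards from 1, constant value ids downwards from -1, so the ranges
   can never collide.  A standalone model (hypothetical names):  */
#if 0
static int next_id = 1;
static int next_const_id = -1;

static int alloc_id (void)	 { return next_id++; }
static int alloc_const_id (void) { return next_const_id--; }

/* The maxima mirror get_max_value_id / get_max_constant_value_id.  */
static unsigned int max_id (void)	{ return next_id; }
static unsigned int max_const_id (void)	{ return -next_const_id; }
#endif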
5809 /* Compare two expressions E1 and E2 and return true if they are equal. */
5811 bool
5812 expressions_equal_p (tree e1, tree e2)
5814 /* The obvious case. */
5815 if (e1 == e2)
5816 return true;
5818 /* If either one is VN_TOP consider them equal. */
5819 if (e1 == VN_TOP || e2 == VN_TOP)
5820 return true;
5822 /* SSA_NAMEs compare pointer equal. */
5823 if (TREE_CODE (e1) == SSA_NAME || TREE_CODE (e2) == SSA_NAME)
5824 return false;
5826 /* Now perform the actual comparison. */
5827 if (TREE_CODE (e1) == TREE_CODE (e2)
5828 && operand_equal_p (e1, e2, OEP_PURE_SAME))
5829 return true;
5831 return false;
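/* The comparison rules above, condensed: TOP is equal to anything,
   SSA names only to themselves, everything else structurally.  A
   standalone sketch with opaque handles (hypothetical callbacks):  */
#if 0
static int
model_expressions_equal (const void *e1, const void *e2, const void *top,
			 int (*is_name) (const void *),
			 int (*struct_eq) (const void *, const void *))
{
  if (e1 == e2)
    return 1;
  if (e1 == top || e2 == top)
    return 1;
  if (is_name (e1) || is_name (e2))
    return 0;
  return struct_eq (e1, e2);
}
#endif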
5835 /* Return true if the nary operation NARY may trap. This is a copy
5836 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
5838 bool
5839 vn_nary_may_trap (vn_nary_op_t nary)
5841 tree type;
5842 tree rhs2 = NULL_TREE;
5843 bool honor_nans = false;
5844 bool honor_snans = false;
5845 bool fp_operation = false;
5846 bool honor_trapv = false;
5847 bool handled, ret;
5848 unsigned i;
5850 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
5851 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
5852 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
5854 type = nary->type;
5855 fp_operation = FLOAT_TYPE_P (type);
5856 if (fp_operation)
5858 honor_nans = flag_trapping_math && !flag_finite_math_only;
5859 honor_snans = flag_signaling_nans != 0;
5861 else if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_TRAPS (type))
5862 honor_trapv = true;
5864 if (nary->length >= 2)
5865 rhs2 = nary->op[1];
5866 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
5867 honor_trapv, honor_nans, honor_snans,
5868 rhs2, &handled);
5869 if (handled && ret)
5870 return true;
5872 for (i = 0; i < nary->length; ++i)
5873 if (tree_could_trap_p (nary->op[i]))
5874 return true;
5876 return false;
5879 /* Return true if the reference operation REF may trap. */
5881 bool
5882 vn_reference_may_trap (vn_reference_t ref)
5884 switch (ref->operands[0].opcode)
5886 case MODIFY_EXPR:
5887 case CALL_EXPR:
5888 /* We do not handle calls. */
5889 return true;
5890 case ADDR_EXPR:
5891 /* And toplevel address computations never trap. */
5892 return false;
5893 default:;
5896 vn_reference_op_t op;
5897 unsigned i;
5898 FOR_EACH_VEC_ELT (ref->operands, i, op)
5900 switch (op->opcode)
5902 case WITH_SIZE_EXPR:
5903 case TARGET_MEM_REF:
5904 /* Always variable. */
5905 return true;
5906 case COMPONENT_REF:
5907 if (op->op1 && TREE_CODE (op->op1) == SSA_NAME)
5908 return true;
5909 break;
5910 case ARRAY_RANGE_REF:
5911 if (TREE_CODE (op->op0) == SSA_NAME)
5912 return true;
5913 break;
5914 case ARRAY_REF:
5916 if (TREE_CODE (op->op0) != INTEGER_CST)
5917 return true;
5919 /* !in_array_bounds */
5920 tree domain_type = TYPE_DOMAIN (ref->operands[i+1].type);
5921 if (!domain_type)
5922 return true;
5924 tree min = op->op1;
5925 tree max = TYPE_MAX_VALUE (domain_type);
5926 if (!min
5927 || !max
5928 || TREE_CODE (min) != INTEGER_CST
5929 || TREE_CODE (max) != INTEGER_CST)
5930 return true;
5932 if (tree_int_cst_lt (op->op0, min)
5933 || tree_int_cst_lt (max, op->op0))
5934 return true;
5936 break;
5938 case MEM_REF:
5939 /* Nothing interesting in itself, the base is separate. */
5940 break;
5941 /* The following are the address bases. */
5942 case SSA_NAME:
5943 return true;
5944 case ADDR_EXPR:
5945 if (op->op0)
5946 return tree_could_trap_p (TREE_OPERAND (op->op0, 0));
5947 return false;
5948 default:;
5951 return false;
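/* The ARRAY_REF case above is essentially a constant bounds check: the
   access is only known not to trap when the index and both domain
   bounds are compile-time constants and the index lies within them.
   A plain-C sketch of that test (hypothetical representation):  */
#if 0
static int
model_array_ref_may_trap (int idx_is_const, long idx,
			  const long *min, const long *max)
{
  if (!idx_is_const || !min || !max)
    return 1;	/* Cannot prove anything, assume it may trap.  */
  return idx < *min || *max < idx;
}
#endif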
5954 eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
5955 bitmap inserted_exprs_)
5956 : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
5957 el_todo (0), eliminations (0), insertions (0),
5958 inserted_exprs (inserted_exprs_)
5960 need_eh_cleanup = BITMAP_ALLOC (NULL);
5961 need_ab_cleanup = BITMAP_ALLOC (NULL);
5964 eliminate_dom_walker::~eliminate_dom_walker ()
5966 BITMAP_FREE (need_eh_cleanup);
5967 BITMAP_FREE (need_ab_cleanup);
5970 /* Return a leader for OP that is available at the current point of the
5971 eliminate domwalk. */
5973 tree
5974 eliminate_dom_walker::eliminate_avail (basic_block, tree op)
5976 tree valnum = VN_INFO (op)->valnum;
5977 if (TREE_CODE (valnum) == SSA_NAME)
5979 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
5980 return valnum;
5981 if (avail.length () > SSA_NAME_VERSION (valnum))
5982 return avail[SSA_NAME_VERSION (valnum)];
5984 else if (is_gimple_min_invariant (valnum))
5985 return valnum;
5986 return NULL_TREE;
5989 /* At the current point of the eliminate domwalk make OP available. */
5991 void
5992 eliminate_dom_walker::eliminate_push_avail (basic_block, tree op)
5994 tree valnum = VN_INFO (op)->valnum;
5995 if (TREE_CODE (valnum) == SSA_NAME)
5997 if (avail.length () <= SSA_NAME_VERSION (valnum))
5998 avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1, true);
5999 tree pushop = op;
6000 if (avail[SSA_NAME_VERSION (valnum)])
6001 pushop = avail[SSA_NAME_VERSION (valnum)];
6002 avail_stack.safe_push (pushop);
6003 avail[SSA_NAME_VERSION (valnum)] = op;
6007 /* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
6008 the leader for the expression if insertion was successful. */
6010 tree
6011 eliminate_dom_walker::eliminate_insert (basic_block bb,
6012 gimple_stmt_iterator *gsi, tree val)
6014 /* We can insert a sequence with a single assignment only. */
6015 gimple_seq stmts = VN_INFO (val)->expr;
6016 if (!gimple_seq_singleton_p (stmts))
6017 return NULL_TREE;
6018 gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
6019 if (!stmt
6020 || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
6021 && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
6022 && gimple_assign_rhs_code (stmt) != NEGATE_EXPR
6023 && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
6024 && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
6025 || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
6026 return NULL_TREE;
6028 tree op = gimple_assign_rhs1 (stmt);
6029 if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
6030 || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
6031 op = TREE_OPERAND (op, 0);
6032 tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (bb, op) : op;
6033 if (!leader)
6034 return NULL_TREE;
6036 tree res;
6037 stmts = NULL;
6038 if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
6039 res = gimple_build (&stmts, BIT_FIELD_REF,
6040 TREE_TYPE (val), leader,
6041 TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
6042 TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
6043 else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
6044 res = gimple_build (&stmts, BIT_AND_EXPR,
6045 TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
6046 else
6047 res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
6048 TREE_TYPE (val), leader);
6049 if (TREE_CODE (res) != SSA_NAME
6050 || SSA_NAME_IS_DEFAULT_DEF (res)
6051 || gimple_bb (SSA_NAME_DEF_STMT (res)))
6053 gimple_seq_discard (stmts);
6055 /* During propagation we have to treat SSA info conservatively
6056 and thus we can end up simplifying the inserted expression
6057 at elimination time to something not defined in stmts. */
6058 /* But then this is a redundancy we failed to detect, which means
6059 res now has two values. That doesn't play well with how
6060 we track availability here, so give up. */
6061 if (dump_file && (dump_flags & TDF_DETAILS))
6063 if (TREE_CODE (res) == SSA_NAME)
6064 res = eliminate_avail (bb, res);
6065 if (res)
6067 fprintf (dump_file, "Failed to insert expression for value ");
6068 print_generic_expr (dump_file, val);
6069 fprintf (dump_file, " which is really fully redundant to ");
6070 print_generic_expr (dump_file, res);
6071 fprintf (dump_file, "\n");
6075 return NULL_TREE;
6077 else
6079 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
6080 vn_ssa_aux_t vn_info = VN_INFO (res);
6081 vn_info->valnum = val;
6082 vn_info->visited = true;
6085 insertions++;
6086 if (dump_file && (dump_flags & TDF_DETAILS))
6088 fprintf (dump_file, "Inserted ");
6089 print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
6092 return res;
6095 void
6096 eliminate_dom_walker::eliminate_stmt (basic_block b, gimple_stmt_iterator *gsi)
6098 tree sprime = NULL_TREE;
6099 gimple *stmt = gsi_stmt (*gsi);
6100 tree lhs = gimple_get_lhs (stmt);
6101 if (lhs && TREE_CODE (lhs) == SSA_NAME
6102 && !gimple_has_volatile_ops (stmt)
6103 /* See PR43491. Do not replace a global register variable when
6104 it is the RHS of an assignment. Do replace local register
6105 variables since gcc does not guarantee a local variable will
6106 be allocated in a register.
6107 ??? The fix isn't effective here. This should instead
6108 be ensured by not value-numbering them the same but treating
6109 them like volatiles? */
6110 && !(gimple_assign_single_p (stmt)
6111 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
6112 && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
6113 && is_global_var (gimple_assign_rhs1 (stmt)))))
6115 sprime = eliminate_avail (b, lhs);
6116 if (!sprime)
6118 /* If there is no existing usable leader but SCCVN thinks
6119 it has an expression it wants to use as replacement,
6120 insert that. */
6121 tree val = VN_INFO (lhs)->valnum;
6122 vn_ssa_aux_t vn_info;
6123 if (val != VN_TOP
6124 && TREE_CODE (val) == SSA_NAME
6125 && (vn_info = VN_INFO (val), true)
6126 && vn_info->needs_insertion
6127 && vn_info->expr != NULL
6128 && (sprime = eliminate_insert (b, gsi, val)) != NULL_TREE)
6129 eliminate_push_avail (b, sprime);
6132 /* If this now constitutes a copy, duplicate points-to
6133 and range info appropriately. This is especially
6134 important for inserted code. See tree-ssa-copy.c
6135 for similar code. */
6136 if (sprime
6137 && TREE_CODE (sprime) == SSA_NAME)
6139 basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
6140 if (POINTER_TYPE_P (TREE_TYPE (lhs))
6141 && SSA_NAME_PTR_INFO (lhs)
6142 && ! SSA_NAME_PTR_INFO (sprime))
6144 duplicate_ssa_name_ptr_info (sprime,
6145 SSA_NAME_PTR_INFO (lhs));
6146 if (b != sprime_b)
6147 reset_flow_sensitive_info (sprime);
6149 else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
6150 && SSA_NAME_RANGE_INFO (lhs)
6151 && ! SSA_NAME_RANGE_INFO (sprime)
6152 && b == sprime_b)
6153 duplicate_ssa_name_range_info (sprime,
6154 SSA_NAME_RANGE_TYPE (lhs),
6155 SSA_NAME_RANGE_INFO (lhs));
6158 /* Inhibit the use of an inserted PHI on a loop header when
6159 the address of the memory reference is a simple induction
6160 variable. In other cases the vectorizer won't do anything
6161 anyway (either it's loop invariant or a complicated
6162 expression). */
6163 if (sprime
6164 && TREE_CODE (sprime) == SSA_NAME
6165 && do_pre
6166 && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
6167 && loop_outer (b->loop_father)
6168 && has_zero_uses (sprime)
6169 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
6170 && gimple_assign_load_p (stmt))
6172 gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
6173 basic_block def_bb = gimple_bb (def_stmt);
6174 if (gimple_code (def_stmt) == GIMPLE_PHI
6175 && def_bb->loop_father->header == def_bb)
6177 loop_p loop = def_bb->loop_father;
6178 ssa_op_iter iter;
6179 tree op;
6180 bool found = false;
6181 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
6183 affine_iv iv;
6184 def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
6185 if (def_bb
6186 && flow_bb_inside_loop_p (loop, def_bb)
6187 && simple_iv (loop, loop, op, &iv, true))
6189 found = true;
6190 break;
6193 if (found)
6195 if (dump_file && (dump_flags & TDF_DETAILS))
6197 fprintf (dump_file, "Not replacing ");
6198 print_gimple_expr (dump_file, stmt, 0);
6199 fprintf (dump_file, " with ");
6200 print_generic_expr (dump_file, sprime);
6201 fprintf (dump_file, " which would add a loop"
6202 " carried dependence to loop %d\n",
6203 loop->num);
6205 /* Don't keep sprime available. */
6206 sprime = NULL_TREE;
6211 if (sprime)
6213 /* If we can propagate the value computed for LHS into
6214 all uses don't bother doing anything with this stmt. */
6215 if (may_propagate_copy (lhs, sprime))
6217 /* Mark it for removal. */
6218 to_remove.safe_push (stmt);
6220 /* ??? Don't count copy/constant propagations. */
6221 if (gimple_assign_single_p (stmt)
6222 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
6223 || gimple_assign_rhs1 (stmt) == sprime))
6224 return;
6226 if (dump_file && (dump_flags & TDF_DETAILS))
6228 fprintf (dump_file, "Replaced ");
6229 print_gimple_expr (dump_file, stmt, 0);
6230 fprintf (dump_file, " with ");
6231 print_generic_expr (dump_file, sprime);
6232 fprintf (dump_file, " in all uses of ");
6233 print_gimple_stmt (dump_file, stmt, 0);
6236 eliminations++;
6237 return;
6240 /* If this is an assignment from our leader (which
6241 happens in the case the value-number is a constant)
6242 then there is nothing to do. Likewise if we run into
6243 inserted code that needed a conversion because of
6244 our type-agnostic value-numbering of loads. */
6245 if ((gimple_assign_single_p (stmt)
6246 || (is_gimple_assign (stmt)
6247 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
6248 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)))
6249 && sprime == gimple_assign_rhs1 (stmt))
6250 return;
6252 /* Else replace its RHS. */
6253 if (dump_file && (dump_flags & TDF_DETAILS))
6255 fprintf (dump_file, "Replaced ");
6256 print_gimple_expr (dump_file, stmt, 0);
6257 fprintf (dump_file, " with ");
6258 print_generic_expr (dump_file, sprime);
6259 fprintf (dump_file, " in ");
6260 print_gimple_stmt (dump_file, stmt, 0);
6262 eliminations++;
6264 bool can_make_abnormal_goto = (is_gimple_call (stmt)
6265 && stmt_can_make_abnormal_goto (stmt));
6266 gimple *orig_stmt = stmt;
6267 if (!useless_type_conversion_p (TREE_TYPE (lhs),
6268 TREE_TYPE (sprime)))
6270 /* We preserve conversions to but not from function or method
6271 types. This asymmetry makes it necessary to re-instantiate
6272 conversions here. */
6273 if (POINTER_TYPE_P (TREE_TYPE (lhs))
6274 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (lhs))))
6275 sprime = fold_convert (TREE_TYPE (lhs), sprime);
6276 else
6277 gcc_unreachable ();
6279 tree vdef = gimple_vdef (stmt);
6280 tree vuse = gimple_vuse (stmt);
6281 propagate_tree_value_into_stmt (gsi, sprime);
6282 stmt = gsi_stmt (*gsi);
6283 update_stmt (stmt);
6284 /* In case the VDEF on the original stmt was released, value-number
6285 it to the VUSE. This is to make vuse_ssa_val able to skip
6286 released virtual operands. */
6287 if (vdef != gimple_vdef (stmt))
6289 gcc_assert (SSA_NAME_IN_FREE_LIST (vdef));
6290 VN_INFO (vdef)->valnum = vuse;
6293 /* If we removed EH side-effects from the statement, clean
6294 its EH information. */
6295 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
6297 bitmap_set_bit (need_eh_cleanup,
6298 gimple_bb (stmt)->index);
6299 if (dump_file && (dump_flags & TDF_DETAILS))
6300 fprintf (dump_file, " Removed EH side-effects.\n");
6303 /* Likewise for AB side-effects. */
6304 if (can_make_abnormal_goto
6305 && !stmt_can_make_abnormal_goto (stmt))
6307 bitmap_set_bit (need_ab_cleanup,
6308 gimple_bb (stmt)->index);
6309 if (dump_file && (dump_flags & TDF_DETAILS))
6310 fprintf (dump_file, " Removed AB side-effects.\n");
6313 return;
6317 /* If the statement is a scalar store, see if the expression
6318 has the same value number as its rhs. If so, the store is
6319 dead. */
6320 if (gimple_assign_single_p (stmt)
6321 && !gimple_has_volatile_ops (stmt)
6322 && !is_gimple_reg (gimple_assign_lhs (stmt))
6323 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
6324 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
6326 tree rhs = gimple_assign_rhs1 (stmt);
6327 vn_reference_t vnresult;
6328 /* ??? gcc.dg/torture/pr91445.c shows that we look up a boolean
6329 typed load of a byte known to be 0x11 as 1 so a store of
6330 a boolean 1 is detected as redundant. Because of this we
6331 have to make sure to look up with a ref whose size
6332 matches the precision. */
6333 tree lookup_lhs = lhs;
6334 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
6335 && (TREE_CODE (lhs) != COMPONENT_REF
6336 || !DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
6337 && !type_has_mode_precision_p (TREE_TYPE (lhs)))
6339 if (TREE_CODE (lhs) == COMPONENT_REF
6340 || TREE_CODE (lhs) == MEM_REF)
6342 tree ltype = build_nonstandard_integer_type
6343 (TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (lhs))),
6344 TYPE_UNSIGNED (TREE_TYPE (lhs)));
6345 if (TREE_CODE (lhs) == COMPONENT_REF)
6347 tree foff = component_ref_field_offset (lhs);
6348 tree f = TREE_OPERAND (lhs, 1);
6349 if (!poly_int_tree_p (foff))
6350 lookup_lhs = NULL_TREE;
6351 else
6352 lookup_lhs = build3 (BIT_FIELD_REF, ltype,
6353 TREE_OPERAND (lhs, 0),
6354 TYPE_SIZE (TREE_TYPE (lhs)),
6355 bit_from_pos
6356 (foff, DECL_FIELD_BIT_OFFSET (f)));
6358 else
6359 lookup_lhs = build2 (MEM_REF, ltype,
6360 TREE_OPERAND (lhs, 0),
6361 TREE_OPERAND (lhs, 1));
6363 else
6364 lookup_lhs = NULL_TREE;
6366 tree val = NULL_TREE;
6367 if (lookup_lhs)
6368 val = vn_reference_lookup (lookup_lhs, gimple_vuse (stmt),
6369 VN_WALKREWRITE, &vnresult, false);
6370 if (TREE_CODE (rhs) == SSA_NAME)
6371 rhs = VN_INFO (rhs)->valnum;
6372 if (val
6373 && (operand_equal_p (val, rhs, 0)
6374 /* Due to the bitfield lookups above we can get bit
6375 interpretations of the same RHS as values here. Those
6376 are redundant as well. */
6377 || (TREE_CODE (val) == SSA_NAME
6378 && gimple_assign_single_p (SSA_NAME_DEF_STMT (val))
6379 && (val = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (val)))
6380 && TREE_CODE (val) == VIEW_CONVERT_EXPR
6381 && TREE_OPERAND (val, 0) == rhs)))
6383 /* We can only remove the later store if the former aliases
6384 at least all accesses the later one does or if the store
6385 was to readonly memory storing the same value. */
6386 ao_ref lhs_ref;
6387 ao_ref_init (&lhs_ref, lhs);
6388 alias_set_type set = ao_ref_alias_set (&lhs_ref);
6389 alias_set_type base_set = ao_ref_base_alias_set (&lhs_ref);
6390 if (! vnresult
6391 || ((vnresult->set == set
6392 || alias_set_subset_of (set, vnresult->set))
6393 && (vnresult->base_set == base_set
6394 || alias_set_subset_of (base_set, vnresult->base_set))))
6396 if (dump_file && (dump_flags & TDF_DETAILS))
6398 fprintf (dump_file, "Deleted redundant store ");
6399 print_gimple_stmt (dump_file, stmt, 0);
6402 /* Queue stmt for removal. */
6403 to_remove.safe_push (stmt);
6404 return;
6409 /* If this is a control statement for which value numbering
6410 left one of the outgoing edges unexecuted, force the
6411 condition in a way consistent with that. */
6412 if (gcond *cond = dyn_cast <gcond *> (stmt))
6414 if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
6415 ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
6417 if (dump_file && (dump_flags & TDF_DETAILS))
6419 fprintf (dump_file, "Removing unexecutable edge from ");
6420 print_gimple_stmt (dump_file, stmt, 0);
6422 if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
6423 == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
6424 gimple_cond_make_true (cond);
6425 else
6426 gimple_cond_make_false (cond);
6427 update_stmt (cond);
6428 el_todo |= TODO_cleanup_cfg;
6429 return;
6433 bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
6434 bool was_noreturn = (is_gimple_call (stmt)
6435 && gimple_call_noreturn_p (stmt));
6436 tree vdef = gimple_vdef (stmt);
6437 tree vuse = gimple_vuse (stmt);
6439 /* If we didn't replace the whole stmt (or propagate the result
6440 into all uses), replace all uses on this stmt with their
6441 leaders. */
6442 bool modified = false;
6443 use_operand_p use_p;
6444 ssa_op_iter iter;
6445 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
6447 tree use = USE_FROM_PTR (use_p);
6448 /* ??? The call code above leaves stmt operands un-updated. */
6449 if (TREE_CODE (use) != SSA_NAME)
6450 continue;
6451 tree sprime;
6452 if (SSA_NAME_IS_DEFAULT_DEF (use))
6453 /* ??? For default defs BB shouldn't matter, but we have to
6454 solve the inconsistency between rpo eliminate and
6455 dom eliminate avail valueization first. */
6456 sprime = eliminate_avail (b, use);
6457 else
6458 /* Look for something available at the definition block of the argument.
6459 This avoids inconsistencies between availability there which
6460 decides if the stmt can be removed and availability at the
6461 use site. The SSA property ensures that things available
6462 at the definition are also available at uses. */
6463 sprime = eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (use)), use);
6464 if (sprime && sprime != use
6465 && may_propagate_copy (use, sprime)
6466 /* We substitute into debug stmts to avoid excessive
6467 debug temporaries created by removed stmts, but we need
6468 to avoid doing so for inserted sprimes as we never want
6469 to create debug temporaries for them. */
6470 && (!inserted_exprs
6471 || TREE_CODE (sprime) != SSA_NAME
6472 || !is_gimple_debug (stmt)
6473 || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
6475 propagate_value (use_p, sprime);
6476 modified = true;
6480 /* Fold the stmt if modified, this canonicalizes MEM_REFs we propagated
6481 into which is a requirement for the IPA devirt machinery. */
6482 gimple *old_stmt = stmt;
6483 if (modified)
6485 /* If a formerly non-invariant ADDR_EXPR is turned into an
6486 invariant one it was on a separate stmt. */
6487 if (gimple_assign_single_p (stmt)
6488 && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
6489 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
6490 gimple_stmt_iterator prev = *gsi;
6491 gsi_prev (&prev);
6492 if (fold_stmt (gsi, follow_all_ssa_edges))
6494 /* fold_stmt may have created new stmts in between
6495 the previous stmt and the folded stmt. Mark
6496 all defs created there as varying to not confuse
6497 the SCCVN machinery as we're using that even during
6498 elimination. */
6499 if (gsi_end_p (prev))
6500 prev = gsi_start_bb (b);
6501 else
6502 gsi_next (&prev);
6503 if (gsi_stmt (prev) != gsi_stmt (*gsi))
6506 tree def;
6507 ssa_op_iter dit;
6508 FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
6509 dit, SSA_OP_ALL_DEFS)
6510 /* As existing DEFs may move between stmts
6511 only process new ones. */
6512 if (! has_VN_INFO (def))
6514 vn_ssa_aux_t vn_info = VN_INFO (def);
6515 vn_info->valnum = def;
6516 vn_info->visited = true;
6518 if (gsi_stmt (prev) == gsi_stmt (*gsi))
6519 break;
6520 gsi_next (&prev);
6522 while (1);
6524 stmt = gsi_stmt (*gsi);
6525 /* In case we folded the stmt away schedule the NOP for removal. */
6526 if (gimple_nop_p (stmt))
6527 to_remove.safe_push (stmt);
6530 /* Visit indirect calls and turn them into direct calls if
6531 possible using the devirtualization machinery. Do this before
6532 checking for required EH/abnormal/noreturn cleanup as devirt
6533 may expose more of those. */
6534 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
6536 tree fn = gimple_call_fn (call_stmt);
6537 if (fn
6538 && flag_devirtualize
6539 && virtual_method_call_p (fn))
6541 tree otr_type = obj_type_ref_class (fn);
6542 unsigned HOST_WIDE_INT otr_tok
6543 = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
6544 tree instance;
6545 ipa_polymorphic_call_context context (current_function_decl,
6546 fn, stmt, &instance);
6547 context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
6548 otr_type, stmt, NULL);
6549 bool final;
6550 vec <cgraph_node *> targets
6551 = possible_polymorphic_call_targets (obj_type_ref_class (fn),
6552 otr_tok, context, &final);
6553 if (dump_file)
6554 dump_possible_polymorphic_call_targets (dump_file,
6555 obj_type_ref_class (fn),
6556 otr_tok, context);
6557 if (final && targets.length () <= 1 && dbg_cnt (devirt))
6559 tree fn;
6560 if (targets.length () == 1)
6561 fn = targets[0]->decl;
6562 else
6563 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
6564 if (dump_enabled_p ())
6566 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
6567 "converting indirect call to "
6568 "function %s\n",
6569 lang_hooks.decl_printable_name (fn, 2));
6571 gimple_call_set_fndecl (call_stmt, fn);
6572 /* If changing the call to __builtin_unreachable
6573 or similar noreturn function, adjust gimple_call_fntype
6574 too. */
6575 if (gimple_call_noreturn_p (call_stmt)
6576 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
6577 && TYPE_ARG_TYPES (TREE_TYPE (fn))
6578 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
6579 == void_type_node))
6580 gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
6581 maybe_remove_unused_call_args (cfun, call_stmt);
6582 modified = true;
6587 if (modified)
6589 /* When changing a call into a noreturn call, cfg cleanup
6590 is needed to fix up the noreturn call. */
6591 if (!was_noreturn
6592 && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
6593 to_fixup.safe_push (stmt);
6594 /* When changing a condition or switch into one we know what
6595 edge will be executed, schedule a cfg cleanup. */
6596 if ((gimple_code (stmt) == GIMPLE_COND
6597 && (gimple_cond_true_p (as_a <gcond *> (stmt))
6598 || gimple_cond_false_p (as_a <gcond *> (stmt))))
6599 || (gimple_code (stmt) == GIMPLE_SWITCH
6600 && TREE_CODE (gimple_switch_index
6601 (as_a <gswitch *> (stmt))) == INTEGER_CST))
6602 el_todo |= TODO_cleanup_cfg;
6603 /* If we removed EH side-effects from the statement, clean
6604 its EH information. */
6605 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
6607 bitmap_set_bit (need_eh_cleanup,
6608 gimple_bb (stmt)->index);
6609 if (dump_file && (dump_flags & TDF_DETAILS))
6610 fprintf (dump_file, " Removed EH side-effects.\n");
6612 /* Likewise for AB side-effects. */
6613 if (can_make_abnormal_goto
6614 && !stmt_can_make_abnormal_goto (stmt))
6616 bitmap_set_bit (need_ab_cleanup,
6617 gimple_bb (stmt)->index);
6618 if (dump_file && (dump_flags & TDF_DETAILS))
6619 fprintf (dump_file, " Removed AB side-effects.\n");
6621 update_stmt (stmt);
6622 /* In case the VDEF on the original stmt was released, value-number
6623 it to the VUSE. This is to make vuse_ssa_val able to skip
6624 released virtual operands. */
6625 if (vdef && SSA_NAME_IN_FREE_LIST (vdef))
6626 VN_INFO (vdef)->valnum = vuse;
6629 /* Make new values available - for fully redundant LHS we
6630 continue with the next stmt above and skip this. */
6631 def_operand_p defp;
6632 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
6633 eliminate_push_avail (b, DEF_FROM_PTR (defp));
6636 /* Perform elimination for the basic-block B during the domwalk. */
6638 edge
6639 eliminate_dom_walker::before_dom_children (basic_block b)
6641 /* Mark new bb. */
6642 avail_stack.safe_push (NULL_TREE);
6644 /* Skip unreachable blocks marked unreachable during the SCCVN domwalk. */
6645 if (!(b->flags & BB_EXECUTABLE))
6646 return NULL;
6648 vn_context_bb = b;
6650 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
6652 gphi *phi = gsi.phi ();
6653 tree res = PHI_RESULT (phi);
6655 if (virtual_operand_p (res))
6657 gsi_next (&gsi);
6658 continue;
6661 tree sprime = eliminate_avail (b, res);
6662 if (sprime
6663 && sprime != res)
6665 if (dump_file && (dump_flags & TDF_DETAILS))
6667 fprintf (dump_file, "Replaced redundant PHI node defining ");
6668 print_generic_expr (dump_file, res);
6669 fprintf (dump_file, " with ");
6670 print_generic_expr (dump_file, sprime);
6671 fprintf (dump_file, "\n");
6674 /* If we inserted this PHI node ourself, it's not an elimination. */
6675 if (! inserted_exprs
6676 || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
6677 eliminations++;
6679 /* If we will propagate into all uses don't bother to do
6680 anything. */
6681 if (may_propagate_copy (res, sprime))
6683 /* Mark the PHI for removal. */
6684 to_remove.safe_push (phi);
6685 gsi_next (&gsi);
6686 continue;
6689 remove_phi_node (&gsi, false);
6691 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
6692 sprime = fold_convert (TREE_TYPE (res), sprime);
6693 gimple *stmt = gimple_build_assign (res, sprime);
6694 gimple_stmt_iterator gsi2 = gsi_after_labels (b);
6695 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
6696 continue;
6699 eliminate_push_avail (b, res);
6700 gsi_next (&gsi);
6703 for (gimple_stmt_iterator gsi = gsi_start_bb (b);
6704 !gsi_end_p (gsi);
6705 gsi_next (&gsi))
6706 eliminate_stmt (b, &gsi);
6708 /* Replace destination PHI arguments. */
6709 edge_iterator ei;
6710 edge e;
6711 FOR_EACH_EDGE (e, ei, b->succs)
6712 if (e->flags & EDGE_EXECUTABLE)
6713 for (gphi_iterator gsi = gsi_start_phis (e->dest);
6714 !gsi_end_p (gsi);
6715 gsi_next (&gsi))
6717 gphi *phi = gsi.phi ();
6718 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
6719 tree arg = USE_FROM_PTR (use_p);
6720 if (TREE_CODE (arg) != SSA_NAME
6721 || virtual_operand_p (arg))
6722 continue;
6723 tree sprime = eliminate_avail (b, arg);
6724 if (sprime && may_propagate_copy (arg, sprime))
6725 propagate_value (use_p, sprime);
6728 vn_context_bb = NULL;
6730 return NULL;
6733 /* Make no longer available leaders no longer available. */
6735 void
6736 eliminate_dom_walker::after_dom_children (basic_block)
6738 tree entry;
6739 while ((entry = avail_stack.pop ()) != NULL_TREE)
6741 tree valnum = VN_INFO (entry)->valnum;
6742 tree old = avail[SSA_NAME_VERSION (valnum)];
6743 if (old == entry)
6744 avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
6745 else
6746 avail[SSA_NAME_VERSION (valnum)] = entry;
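/* Together with the NULL_TREE marker pushed in before_dom_children and
   the pushes in eliminate_push_avail this forms a scoped undo stack.
   A standalone model of that discipline (hypothetical names and fixed
   sizes; the real code stores SSA names and recovers the value through
   VN_INFO):  */
#if 0
enum { NVALS = 1024 };
static int avail_tab[NVALS];		/* value id -> leader, 0 = none */
static struct { int value; int old; } undo[NVALS];
static int undo_top;

static void
enter_block (void)
{
  undo[undo_top++].value = 0;		/* sentinel per basic block */
}

static void
make_avail (int value, int leader)
{
  undo[undo_top].value = value;
  undo[undo_top].old = avail_tab[value];	/* possibly 0 */
  undo_top++;
  avail_tab[value] = leader;
}

static void
leave_block (void)
{
  while (undo_top > 0 && undo[--undo_top].value != 0)
    avail_tab[undo[undo_top].value] = undo[undo_top].old;
}
#endif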
6750 /* Remove queued stmts and perform delayed cleanups. */
6752 unsigned
6753 eliminate_dom_walker::eliminate_cleanup (bool region_p)
6755 statistics_counter_event (cfun, "Eliminated", eliminations);
6756 statistics_counter_event (cfun, "Insertions", insertions);
6758 /* We cannot remove stmts during BB walk, especially not release SSA
6759 names there as this confuses the VN machinery. The stmts ending
6760 up in to_remove are either stores or simple copies.
6761 Remove stmts in reverse order to make debug stmt creation possible. */
6762 while (!to_remove.is_empty ())
6764 bool do_release_defs = true;
6765 gimple *stmt = to_remove.pop ();
6767 /* When we are value-numbering a region we do not require exit PHIs to
6768 be present, so we have to make sure to deal with uses outside of the
6769 region of stmts that we thought were eliminated.
6770 ??? Note we may be confused by uses in dead regions we didn't run
6771 elimination on. Rather than checking individual uses we accept
6772 dead copies to be generated here (gcc.c-torture/execute/20060905-1.c
6773 contains such an example). */
6774 if (region_p)
6776 if (gphi *phi = dyn_cast <gphi *> (stmt))
6778 tree lhs = gimple_phi_result (phi);
6779 if (!has_zero_uses (lhs))
6781 if (dump_file && (dump_flags & TDF_DETAILS))
6782 fprintf (dump_file, "Keeping eliminated stmt live "
6783 "as copy because of out-of-region uses\n");
6784 tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
6785 gimple *copy = gimple_build_assign (lhs, sprime);
6786 gimple_stmt_iterator gsi
6787 = gsi_after_labels (gimple_bb (stmt));
6788 gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
6789 do_release_defs = false;
6792 else if (tree lhs = gimple_get_lhs (stmt))
6793 if (TREE_CODE (lhs) == SSA_NAME
6794 && !has_zero_uses (lhs))
6796 if (dump_file && (dump_flags & TDF_DETAILS))
6797 fprintf (dump_file, "Keeping eliminated stmt live "
6798 "as copy because of out-of-region uses\n");
6799 tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
6800 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
6801 if (is_gimple_assign (stmt))
6803 gimple_assign_set_rhs_from_tree (&gsi, sprime);
6804 stmt = gsi_stmt (gsi);
6805 update_stmt (stmt);
6806 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
6807 bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
6808 continue;
6810 else
6812 gimple *copy = gimple_build_assign (lhs, sprime);
6813 gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
6814 do_release_defs = false;
6819 if (dump_file && (dump_flags & TDF_DETAILS))
6821 fprintf (dump_file, "Removing dead stmt ");
6822 print_gimple_stmt (dump_file, stmt, 0, TDF_NONE);
6825 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
6826 if (gimple_code (stmt) == GIMPLE_PHI)
6827 remove_phi_node (&gsi, do_release_defs);
6828 else
6830 basic_block bb = gimple_bb (stmt);
6831 unlink_stmt_vdef (stmt);
6832 if (gsi_remove (&gsi, true))
6833 bitmap_set_bit (need_eh_cleanup, bb->index);
6834 if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
6835 bitmap_set_bit (need_ab_cleanup, bb->index);
6836 if (do_release_defs)
6837 release_defs (stmt);
6840 /* Removing a stmt may expose a forwarder block. */
6841 el_todo |= TODO_cleanup_cfg;
6844 /* Fixup stmts that became noreturn calls. This may require splitting
6845 blocks and thus isn't possible during the dominator walk. Do this
6846 in reverse order so we don't inadvertently remove a stmt we want to
6847 fixup by visiting a dominating now noreturn call first. */
6848 while (!to_fixup.is_empty ())
6850 gimple *stmt = to_fixup.pop ();
6852 if (dump_file && (dump_flags & TDF_DETAILS))
6854 fprintf (dump_file, "Fixing up noreturn call ");
6855 print_gimple_stmt (dump_file, stmt, 0);
6858 if (fixup_noreturn_call (stmt))
6859 el_todo |= TODO_cleanup_cfg;
6862 bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
6863 bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);
6865 if (do_eh_cleanup)
6866 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
6868 if (do_ab_cleanup)
6869 gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
6871 if (do_eh_cleanup || do_ab_cleanup)
6872 el_todo |= TODO_cleanup_cfg;
6874 return el_todo;
6877 /* Eliminate fully redundant computations. */
6879 unsigned
6880 eliminate_with_rpo_vn (bitmap inserted_exprs)
6882 eliminate_dom_walker walker (CDI_DOMINATORS, inserted_exprs);
6884 eliminate_dom_walker *saved_rpo_avail = rpo_avail;
6885 rpo_avail = &walker;
6886 walker.walk (cfun->cfg->x_entry_block_ptr);
6887 rpo_avail = saved_rpo_avail;
6889 return walker.eliminate_cleanup ();
6892 static unsigned
6893 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
6894 bool iterate, bool eliminate);
6896 void
6897 run_rpo_vn (vn_lookup_kind kind)
6899 default_vn_walk_kind = kind;
6900 do_rpo_vn (cfun, NULL, NULL, true, false);
6902 /* ??? Prune requirement of these. */
6903 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
6905 /* Initialize the value ids and prune out remaining VN_TOPs
6906 from dead code. */
6907 tree name;
6908 unsigned i;
6909 FOR_EACH_SSA_NAME (i, name, cfun)
6911 vn_ssa_aux_t info = VN_INFO (name);
6912 if (!info->visited
6913 || info->valnum == VN_TOP)
6914 info->valnum = name;
6915 if (info->valnum == name)
6916 info->value_id = get_next_value_id ();
6917 else if (is_gimple_min_invariant (info->valnum))
6918 info->value_id = get_or_alloc_constant_value_id (info->valnum);
6921 /* Propagate. */
6922 FOR_EACH_SSA_NAME (i, name, cfun)
6924 vn_ssa_aux_t info = VN_INFO (name);
6925 if (TREE_CODE (info->valnum) == SSA_NAME
6926 && info->valnum != name
6927 && info->value_id != VN_INFO (info->valnum)->value_id)
6928 info->value_id = VN_INFO (info->valnum)->value_id;
6931 set_hashtable_value_ids ();
6933 if (dump_file && (dump_flags & TDF_DETAILS))
6935 fprintf (dump_file, "Value numbers:\n");
6936 FOR_EACH_SSA_NAME (i, name, cfun)
6938 if (VN_INFO (name)->visited
6939 && SSA_VAL (name) != name)
6941 print_generic_expr (dump_file, name);
6942 fprintf (dump_file, " = ");
6943 print_generic_expr (dump_file, SSA_VAL (name));
6944 fprintf (dump_file, " (%04d)\n", VN_INFO (name)->value_id);
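/* The two loops above implement a simple two-phase scheme: first every
   value representative gets a fresh id (constants draw from the
   negative-side counter), then every name inherits the id of its value.
   A standalone model (hypothetical names; indices stand in for SSA
   names, valnum[i] == i meaning "name is its own value"):  */
#if 0
static void
model_assign_value_ids (int *valnum, int *value_id, int n, int *next_id)
{
  for (int i = 1; i < n; ++i)
    if (valnum[i] == i)
      value_id[i] = (*next_id)++;
  for (int i = 1; i < n; ++i)
    if (valnum[i] != i)
      value_id[i] = value_id[valnum[i]];	/* propagate */
}
#endif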
6950 /* Free VN associated data structures. */
6952 void
6953 free_rpo_vn (void)
6955 free_vn_table (valid_info);
6956 XDELETE (valid_info);
6957 obstack_free (&vn_tables_obstack, NULL);
6958 obstack_free (&vn_tables_insert_obstack, NULL);
6960 vn_ssa_aux_iterator_type it;
6961 vn_ssa_aux_t info;
6962 FOR_EACH_HASH_TABLE_ELEMENT (*vn_ssa_aux_hash, info, vn_ssa_aux_t, it)
6963 if (info->needs_insertion)
6964 release_ssa_name (info->name);
6965 obstack_free (&vn_ssa_aux_obstack, NULL);
6966 delete vn_ssa_aux_hash;
6968 delete constant_to_value_id;
6969 constant_to_value_id = NULL;
6972 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
6974 static tree
6975 vn_lookup_simplify_result (gimple_match_op *res_op)
6977 if (!res_op->code.is_tree_code ())
6978 return NULL_TREE;
6979 tree *ops = res_op->ops;
6980 unsigned int length = res_op->num_ops;
6981 if (res_op->code == CONSTRUCTOR
6982 /* ??? We arrive here with SCCVN's view, a decomposed CONSTRUCTOR,
6983 while GIMPLE / match-and-simplify use CONSTRUCTOR as a GENERIC tree. */
6984 && TREE_CODE (res_op->ops[0]) == CONSTRUCTOR)
6986 length = CONSTRUCTOR_NELTS (res_op->ops[0]);
6987 ops = XALLOCAVEC (tree, length);
6988 for (unsigned i = 0; i < length; ++i)
6989 ops[i] = CONSTRUCTOR_ELT (res_op->ops[0], i)->value;
6991 vn_nary_op_t vnresult = NULL;
6992 tree res = vn_nary_op_lookup_pieces (length, (tree_code) res_op->code,
6993 res_op->type, ops, &vnresult);
6994 /* If this is used from expression simplification make sure to
6995 return an available expression. */
6996 if (res && TREE_CODE (res) == SSA_NAME && mprts_hook && rpo_avail)
6997 res = rpo_avail->eliminate_avail (vn_context_bb, res);
6998 return res;
7001 /* Return a leader for OPs value that is valid at BB. */
7003 tree
7004 rpo_elim::eliminate_avail (basic_block bb, tree op)
7006 bool visited;
7007 tree valnum = SSA_VAL (op, &visited);
7008 /* If we didn't visit OP then it must be defined outside of the
7009 region we process and also dominate it. So it is available. */
7010 if (!visited)
7011 return op;
7012 if (TREE_CODE (valnum) == SSA_NAME)
7014 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
7015 return valnum;
7016 vn_avail *av = VN_INFO (valnum)->avail;
7017 if (!av)
7018 return NULL_TREE;
7019 if (av->location == bb->index)
7020 /* On tramp3d 90% of the cases are here. */
7021 return ssa_name (av->leader);
7024 basic_block abb = BASIC_BLOCK_FOR_FN (cfun, av->location);
7025 /* ??? During elimination we have to use availability at the
7026 definition site of a use we try to replace. This
7027 is required to not run into inconsistencies because
7028 of dominated_by_p_w_unex behavior and removing a definition
7029 while not replacing all uses.
7030 ??? We could try to consistently walk dominators
7031 ignoring non-executable regions. The nearest common
7032 dominator of bb and abb is where we can stop walking. We
7033 may also be able to "pre-compute" (bits of) the next immediate
7034 (non-)dominator during the RPO walk when marking edges as
7035 executable. */
7036 if (dominated_by_p_w_unex (bb, abb, true))
7038 tree leader = ssa_name (av->leader);
7039 /* Prevent eliminations that break loop-closed SSA. */
7040 if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
7041 && ! SSA_NAME_IS_DEFAULT_DEF (leader)
7042 && ! flow_bb_inside_loop_p (gimple_bb (SSA_NAME_DEF_STMT
7043 (leader))->loop_father,
7044 bb))
7045 return NULL_TREE;
7046 if (dump_file && (dump_flags & TDF_DETAILS))
7048 print_generic_expr (dump_file, leader);
7049 fprintf (dump_file, " is available for ");
7050 print_generic_expr (dump_file, valnum);
7051 fprintf (dump_file, "\n");
7053 /* On tramp3d 99% of the _remaining_ cases succeed at
7054 the first entry. */
7055 return leader;
7057 /* ??? Can we somehow skip to the immediate dominator
7058 RPO index (bb_to_rpo)? Again, maybe not worth, on
7059 tramp3d the worst number of elements in the vector is 9. */
7060 av = av->next;
7062 while (av);
7064 else if (valnum != VN_TOP)
7065 /* valnum is is_gimple_min_invariant. */
7066 return valnum;
7067 return NULL_TREE;
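/* The loop above is a walk of a per-value chain of (location, leader)
   records, newest first, picking the first record whose block dominates
   the lookup point.  A standalone sketch (hypothetical names; the
   dominance test is passed in):  */
#if 0
struct avail_rec { int location; int leader; struct avail_rec *next; };

static int
model_lookup_leader (const struct avail_rec *av, int bb,
		     int (*dominated_by) (int bb, int dom))
{
  for (; av; av = av->next)
    if (av->location == bb || dominated_by (bb, av->location))
      return av->leader;
  return 0;	/* No leader available at BB.  */
}
#endif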
7070 /* Make LEADER a leader for its value at BB. */
7072 void
7073 rpo_elim::eliminate_push_avail (basic_block bb, tree leader)
7075 tree valnum = VN_INFO (leader)->valnum;
7076 if (valnum == VN_TOP
7077 || is_gimple_min_invariant (valnum))
7078 return;
7079 if (dump_file && (dump_flags & TDF_DETAILS))
7081 fprintf (dump_file, "Making available beyond BB%d ", bb->index);
7082 print_generic_expr (dump_file, leader);
7083 fprintf (dump_file, " for value ");
7084 print_generic_expr (dump_file, valnum);
7085 fprintf (dump_file, "\n");
7087 vn_ssa_aux_t value = VN_INFO (valnum);
7088 vn_avail *av;
7089 if (m_avail_freelist)
7091 av = m_avail_freelist;
7092 m_avail_freelist = m_avail_freelist->next;
7094 else
7095 av = XOBNEW (&vn_ssa_aux_obstack, vn_avail);
7096 av->location = bb->index;
7097 av->leader = SSA_NAME_VERSION (leader);
7098 av->next = value->avail;
7099 av->next_undo = last_pushed_avail;
7100 last_pushed_avail = value;
7101 value->avail = av;
7104 /* Valueization hook for RPO VN plus required state. */
7106 tree
7107 rpo_vn_valueize (tree name)
7109 if (TREE_CODE (name) == SSA_NAME)
7111 vn_ssa_aux_t val = VN_INFO (name);
7112 if (val)
7114 tree tem = val->valnum;
7115 if (tem != VN_TOP && tem != name)
7117 if (TREE_CODE (tem) != SSA_NAME)
7118 return tem;
7119 /* For all values we only valueize to an available leader
7120 which means we can use SSA name info without restriction. */
7121 tem = rpo_avail->eliminate_avail (vn_context_bb, tem);
7122 if (tem)
7123 return tem;
7127 return name;
7130 /* Insert on PRED_E predicates derived from CODE applied to OPS being
7131 true, in addition to the inverted condition. */
7133 static void
7134 insert_related_predicates_on_edge (enum tree_code code, tree *ops, edge pred_e)
7136 switch (code)
7138 case LT_EXPR:
7139 /* a < b -> a {!,<}= b */
7140 vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
7141 ops, boolean_true_node, 0, pred_e);
7142 vn_nary_op_insert_pieces_predicated (2, LE_EXPR, boolean_type_node,
7143 ops, boolean_true_node, 0, pred_e);
7144 /* a < b -> ! a {>,=} b */
7145 vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
7146 ops, boolean_false_node, 0, pred_e);
7147 vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
7148 ops, boolean_false_node, 0, pred_e);
7149 break;
7150 case GT_EXPR:
7151 /* a > b -> a {!,>}= b */
7152 vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
7153 ops, boolean_true_node, 0, pred_e);
7154 vn_nary_op_insert_pieces_predicated (2, GE_EXPR, boolean_type_node,
7155 ops, boolean_true_node, 0, pred_e);
7156 /* a > b -> ! a {<,=} b */
7157 vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
7158 ops, boolean_false_node, 0, pred_e);
7159 vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
7160 ops, boolean_false_node, 0, pred_e);
7161 break;
7162 case EQ_EXPR:
7163 /* a == b -> ! a {<,>} b */
7164 vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
7165 ops, boolean_false_node, 0, pred_e);
7166 vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
7167 ops, boolean_false_node, 0, pred_e);
7168 break;
7169 case LE_EXPR:
7170 case GE_EXPR:
7171 case NE_EXPR:
7172 /* Nothing besides inverted condition. */
7173 break;
7174 default:;
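/* Worked instance: for "if (a < b)" the true edge gets, besides the
   condition itself, the four derived facts a != b (true), a <= b (true),
   a > b (false) and a == b (false), so a later dominated lookup of any
   of them folds directly.  A standalone model of recording such
   per-edge facts (hypothetical representation):  */
#if 0
struct fact { int code; int lhs, rhs; int value; int edge; };
static struct fact facts[64];
static int nfacts;

static void
record_fact (int code, int lhs, int rhs, int value, int edge)
{
  facts[nfacts++] = (struct fact) { code, lhs, rhs, value, edge };
}

static void
on_lt_true_edge (int a, int b, int e)	/* a < b was taken */
{
  record_fact ('n' /* NE */, a, b, 1, e);
  record_fact ('l' /* LE */, a, b, 1, e);
  record_fact ('g' /* GT */, a, b, 0, e);
  record_fact ('e' /* EQ */, a, b, 0, e);
}
#endif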
7178 /* Main stmt worker for RPO VN, process BB. */
7180 static unsigned
7181 process_bb (rpo_elim &avail, basic_block bb,
7182 bool bb_visited, bool iterate_phis, bool iterate, bool eliminate,
7183 bool do_region, bitmap exit_bbs, bool skip_phis)
7185 unsigned todo = 0;
7186 edge_iterator ei;
7187 edge e;
7189 vn_context_bb = bb;
7191 /* If we are in loop-closed SSA preserve this state. This is
7192 relevant when called on regions from outside of FRE/PRE. */
7193 bool lc_phi_nodes = false;
7194 if (!skip_phis
7195 && loops_state_satisfies_p (LOOP_CLOSED_SSA))
7196 FOR_EACH_EDGE (e, ei, bb->preds)
7197 if (e->src->loop_father != e->dest->loop_father
7198 && flow_loop_nested_p (e->dest->loop_father,
7199 e->src->loop_father))
7201 lc_phi_nodes = true;
7202 break;
7205 /* When we visit a loop header, substitute into loop info. */
7206 if (!iterate && eliminate && bb->loop_father->header == bb)
7208 /* Keep fields in sync with substitute_in_loop_info. */
7209 if (bb->loop_father->nb_iterations)
7210 bb->loop_father->nb_iterations
7211 = simplify_replace_tree (bb->loop_father->nb_iterations,
7212 NULL_TREE, NULL_TREE, &vn_valueize_for_srt);
7215 /* Value-number all defs in the basic-block. */
7216 if (!skip_phis)
7217 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
7218 gsi_next (&gsi))
7220 gphi *phi = gsi.phi ();
7221 tree res = PHI_RESULT (phi);
7222 vn_ssa_aux_t res_info = VN_INFO (res);
7223 if (!bb_visited)
7225 gcc_assert (!res_info->visited);
7226 res_info->valnum = VN_TOP;
7227 res_info->visited = true;
7230 /* When not iterating force backedge values to varying. */
7231 visit_stmt (phi, !iterate_phis);
7232 if (virtual_operand_p (res))
7233 continue;
7235 /* Eliminate */
7236 /* The interesting case is gcc.dg/tree-ssa/pr22230.c for correctness
7237 how we handle backedges and availability.
7238 And gcc.dg/tree-ssa/ssa-sccvn-2.c for optimization. */
7239 tree val = res_info->valnum;
7240 if (res != val && !iterate && eliminate)
7242 if (tree leader = avail.eliminate_avail (bb, res))
7244 if (leader != res
7245 /* Preserve loop-closed SSA form. */
7246 && (! lc_phi_nodes
7247 || is_gimple_min_invariant (leader)))
7249 if (dump_file && (dump_flags & TDF_DETAILS))
7251 fprintf (dump_file, "Replaced redundant PHI node "
7252 "defining ");
7253 print_generic_expr (dump_file, res);
7254 fprintf (dump_file, " with ");
7255 print_generic_expr (dump_file, leader);
7256 fprintf (dump_file, "\n");
7258 avail.eliminations++;
7260 if (may_propagate_copy (res, leader))
7262 /* Schedule for removal. */
7263 avail.to_remove.safe_push (phi);
7264 continue;
7266 /* ??? Else generate a copy stmt. */
7270 /* Only make defs available that not already are. But make
7271 sure loop-closed SSA PHI node defs are picked up for
7272 downstream uses. */
7273 if (lc_phi_nodes
7274 || res == val
7275 || ! avail.eliminate_avail (bb, res))
7276 avail.eliminate_push_avail (bb, res);
7279 /* For empty BBs mark outgoing edges executable. For non-empty BBs
7280 we do this when processing the last stmt as we have to do this
7281 before elimination, which otherwise forces GIMPLE_CONDs to an
7282 if (1 != 0) style when seeing non-executable edges. */
7283 if (gsi_end_p (gsi_start_bb (bb)))
7285 FOR_EACH_EDGE (e, ei, bb->succs)
7287 if (!(e->flags & EDGE_EXECUTABLE))
7289 if (dump_file && (dump_flags & TDF_DETAILS))
7290 fprintf (dump_file,
7291 "marking outgoing edge %d -> %d executable\n",
7292 e->src->index, e->dest->index);
7293 e->flags |= EDGE_EXECUTABLE;
7294 e->dest->flags |= BB_EXECUTABLE;
7296 else if (!(e->dest->flags & BB_EXECUTABLE))
7298 if (dump_file && (dump_flags & TDF_DETAILS))
7299 fprintf (dump_file,
7300 "marking destination block %d reachable\n",
7301 e->dest->index);
7302 e->dest->flags |= BB_EXECUTABLE;
7306 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
7307 !gsi_end_p (gsi); gsi_next (&gsi))
7309 ssa_op_iter i;
7310 tree op;
7311 if (!bb_visited)
7313 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
7315 vn_ssa_aux_t op_info = VN_INFO (op);
7316 gcc_assert (!op_info->visited);
7317 op_info->valnum = VN_TOP;
7318 op_info->visited = true;
7321 /* We somehow have to deal with uses that are not defined
7322 in the processed region. Forcing unvisited uses to
7323 varying here doesn't play well with def-use following during
7324 expression simplification, so we deal with this by checking
7325 the visited flag in SSA_VAL. */
7328 visit_stmt (gsi_stmt (gsi));
7330 gimple *last = gsi_stmt (gsi);
7331 e = NULL;
7332 switch (gimple_code (last))
7334 case GIMPLE_SWITCH:
7335 e = find_taken_edge (bb, vn_valueize (gimple_switch_index
7336 (as_a <gswitch *> (last))));
7337 break;
7338 case GIMPLE_COND:
7340 tree lhs = vn_valueize (gimple_cond_lhs (last));
7341 tree rhs = vn_valueize (gimple_cond_rhs (last));
7342 tree val = gimple_simplify (gimple_cond_code (last),
7343 boolean_type_node, lhs, rhs,
7344 NULL, vn_valueize);
7345 /* If the condition didn't simplify, see if we have recorded
7346 an expression from edges taken so far. */
7347 if (! val || TREE_CODE (val) != INTEGER_CST)
7349 vn_nary_op_t vnresult;
7350 tree ops[2];
7351 ops[0] = lhs;
7352 ops[1] = rhs;
7353 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (last),
7354 boolean_type_node, ops,
7355 &vnresult);
7356 /* Did we get a predicated value? */
7357 if (! val && vnresult && vnresult->predicated_values)
7359 val = vn_nary_op_get_predicated_value (vnresult, bb);
7360 if (val && dump_file && (dump_flags & TDF_DETAILS))
7362 fprintf (dump_file, "Got predicated value ");
7363 print_generic_expr (dump_file, val, TDF_NONE);
7364 fprintf (dump_file, " for ");
7365 print_gimple_stmt (dump_file, last, TDF_SLIM);
7369 if (val)
7370 e = find_taken_edge (bb, val);
7371 if (! e)
7373 /* If we didn't manage to compute the taken edge then
7374 push predicated expressions for the condition itself
7375 and related conditions to the hashtables. This allows
7376 simplification of redundant conditions which is
7377 important as early cleanup. */
7378 edge true_e, false_e;
7379 extract_true_false_edges_from_block (bb, &true_e, &false_e);
7380 enum tree_code code = gimple_cond_code (last);
7381 enum tree_code icode
7382 = invert_tree_comparison (code, HONOR_NANS (lhs));
7383 tree ops[2];
7384 ops[0] = lhs;
7385 ops[1] = rhs;
7386 if (do_region
7387 && bitmap_bit_p (exit_bbs, true_e->dest->index))
7388 true_e = NULL;
7389 if (do_region
7390 && bitmap_bit_p (exit_bbs, false_e->dest->index))
7391 false_e = NULL;
7392 if (true_e)
7393 vn_nary_op_insert_pieces_predicated
7394 (2, code, boolean_type_node, ops,
7395 boolean_true_node, 0, true_e);
7396 if (false_e)
7397 vn_nary_op_insert_pieces_predicated
7398 (2, code, boolean_type_node, ops,
7399 boolean_false_node, 0, false_e);
7400 if (icode != ERROR_MARK)
7402 if (true_e)
7403 vn_nary_op_insert_pieces_predicated
7404 (2, icode, boolean_type_node, ops,
7405 boolean_false_node, 0, true_e);
7406 if (false_e)
7407 vn_nary_op_insert_pieces_predicated
7408 (2, icode, boolean_type_node, ops,
7409 boolean_true_node, 0, false_e);
7411 /* Only integral types get the related predicates; the
7412 inverted condition was handled above. */
7413 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
7415 if (true_e)
7416 insert_related_predicates_on_edge (code, ops, true_e);
7417 if (false_e)
7418 insert_related_predicates_on_edge (icode, ops, false_e);
7421 break;
7423 case GIMPLE_GOTO:
7424 e = find_taken_edge (bb, vn_valueize (gimple_goto_dest (last)));
7425 break;
7426 default:
7427 e = NULL;
7429 if (e)
7431 todo = TODO_cleanup_cfg;
7432 if (!(e->flags & EDGE_EXECUTABLE))
7434 if (dump_file && (dump_flags & TDF_DETAILS))
7435 fprintf (dump_file,
7436 "marking known outgoing %sedge %d -> %d executable\n",
7437 e->flags & EDGE_DFS_BACK ? "back-" : "",
7438 e->src->index, e->dest->index);
7439 e->flags |= EDGE_EXECUTABLE;
7440 e->dest->flags |= BB_EXECUTABLE;
7442 else if (!(e->dest->flags & BB_EXECUTABLE))
7444 if (dump_file && (dump_flags & TDF_DETAILS))
7445 fprintf (dump_file,
7446 "marking destination block %d reachable\n",
7447 e->dest->index);
7448 e->dest->flags |= BB_EXECUTABLE;
7451 else if (gsi_one_before_end_p (gsi))
7453 FOR_EACH_EDGE (e, ei, bb->succs)
7455 if (!(e->flags & EDGE_EXECUTABLE))
7457 if (dump_file && (dump_flags & TDF_DETAILS))
7458 fprintf (dump_file,
7459 "marking outgoing edge %d -> %d executable\n",
7460 e->src->index, e->dest->index);
7461 e->flags |= EDGE_EXECUTABLE;
7462 e->dest->flags |= BB_EXECUTABLE;
7464 else if (!(e->dest->flags & BB_EXECUTABLE))
7466 if (dump_file && (dump_flags & TDF_DETAILS))
7467 fprintf (dump_file,
7468 "marking destination block %d reachable\n",
7469 e->dest->index);
7470 e->dest->flags |= BB_EXECUTABLE;
7475 /* Eliminate. That also pushes to avail. */
7476 if (eliminate && ! iterate)
7477 avail.eliminate_stmt (bb, &gsi);
7478 else
7479 /* If not eliminating, make available all defs that are
7480 not already available. */
7481 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_DEF)
7482 if (! avail.eliminate_avail (bb, op))
7483 avail.eliminate_push_avail (bb, op);
7486 /* Eliminate in destination PHI arguments. Always substitute in dest
7487 PHIs, even for non-executable edges. This handles region
7488 exit PHIs. */
7489 if (!iterate && eliminate)
7490 FOR_EACH_EDGE (e, ei, bb->succs)
7491 for (gphi_iterator gsi = gsi_start_phis (e->dest);
7492 !gsi_end_p (gsi); gsi_next (&gsi))
7494 gphi *phi = gsi.phi ();
7495 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
7496 tree arg = USE_FROM_PTR (use_p);
7497 if (TREE_CODE (arg) != SSA_NAME
7498 || virtual_operand_p (arg))
7499 continue;
7500 tree sprime;
7501 if (SSA_NAME_IS_DEFAULT_DEF (arg))
7503 sprime = SSA_VAL (arg);
7504 gcc_assert (TREE_CODE (sprime) != SSA_NAME
7505 || SSA_NAME_IS_DEFAULT_DEF (sprime));
7507 else
7508 /* Look for something available at the definition block of the argument.
7509 This avoids inconsistencies between availability there which
7510 decides if the stmt can be removed and availability at the
7511 use site. The SSA property ensures that things available
7512 at the definition are also available at uses. */
7513 sprime = avail.eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (arg)),
7514 arg);
7515 if (sprime
7516 && sprime != arg
7517 && may_propagate_copy (arg, sprime))
7518 propagate_value (use_p, sprime);
7521 vn_context_bb = NULL;
7522 return todo;
7525 /* Unwind state per basic-block. */
7527 struct unwind_state
7529 /* Times this block has been visited. */
7530 unsigned visited;
7531 /* Whether to handle this as an iteration point or to treat
7532 incoming backedge PHI values as varying. */
7533 bool iterate;
7534 /* Maximum RPO index this block is reachable from. */
7535 int max_rpo;
7536 /* Unwind state. */
7537 void *ob_top;
7538 vn_reference_t ref_top;
7539 vn_phi_t phi_top;
7540 vn_nary_op_t nary_top;
7541 vn_avail *avail_top;
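/* The *_top members snapshot the heads of the insert-ordered lists
   (last_inserted_*, last_pushed_avail) so do_unwind below can pop
   exactly the entries inserted after the snapshot was taken.  */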
7544 /* Unwind the RPO VN state for iteration. */
7546 static void
7547 do_unwind (unwind_state *to, rpo_elim &avail)
7549 gcc_assert (to->iterate);
7550 for (; last_inserted_nary != to->nary_top;
7551 last_inserted_nary = last_inserted_nary->next)
7553 vn_nary_op_t *slot;
7554 slot = valid_info->nary->find_slot_with_hash
7555 (last_inserted_nary, last_inserted_nary->hashcode, NO_INSERT);
7556 /* Predication may require restoring a previously recorded state. */
7557 if ((*slot)->unwind_to)
7558 *slot = (*slot)->unwind_to;
7559 else
7560 valid_info->nary->clear_slot (slot);
7562 for (; last_inserted_phi != to->phi_top;
7563 last_inserted_phi = last_inserted_phi->next)
7565 vn_phi_t *slot;
7566 slot = valid_info->phis->find_slot_with_hash
7567 (last_inserted_phi, last_inserted_phi->hashcode, NO_INSERT);
7568 valid_info->phis->clear_slot (slot);
7570 for (; last_inserted_ref != to->ref_top;
7571 last_inserted_ref = last_inserted_ref->next)
7573 vn_reference_t *slot;
7574 slot = valid_info->references->find_slot_with_hash
7575 (last_inserted_ref, last_inserted_ref->hashcode, NO_INSERT);
7576 (*slot)->operands.release ();
7577 valid_info->references->clear_slot (slot);
7579 obstack_free (&vn_tables_obstack, to->ob_top);
7581 /* Prune all avail entries pushed after the unwind point from AVAIL. */
7582 for (; last_pushed_avail && last_pushed_avail->avail != to->avail_top;)
7584 vn_ssa_aux_t val = last_pushed_avail;
7585 vn_avail *av = val->avail;
7586 val->avail = av->next;
7587 last_pushed_avail = av->next_undo;
7588 av->next = avail.m_avail_freelist;
7589 avail.m_avail_freelist = av;
7593 /* Do VN on a SEME region specified by ENTRY and EXIT_BBS in FN.
7594 If ITERATE is true then treat backedges optimistically as not
7595 executed and iterate. If ELIMINATE is true then perform
7596 elimination, otherwise leave that to the caller. */
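/* A minimal whole-function invocation, mirroring pass_fre::execute
   further below (a sketch, not a separate entry point):
     calculate_dominance_info (CDI_DOMINATORS);
     loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
     default_vn_walk_kind = VN_WALKREWRITE;
     unsigned todo = do_rpo_vn (cfun, NULL, NULL, true, true);
     free_rpo_vn ();
     loop_optimizer_finalize ();
   Loop info has to be up to date when ITERATE is true, see the
   asserts at the start of the function.  */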
7598 static unsigned
7599 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
7600 bool iterate, bool eliminate)
7602 unsigned todo = 0;
7604 /* We currently do not support region-based iteration when
7605 elimination is requested. */
7606 gcc_assert (!entry || !iterate || !eliminate);
7607 /* When iterating we need loop info up-to-date. */
7608 gcc_assert (!iterate || !loops_state_satisfies_p (LOOPS_NEED_FIXUP));
7610 bool do_region = entry != NULL;
7611 if (!do_region)
7613 entry = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fn));
7614 exit_bbs = BITMAP_ALLOC (NULL);
7615 bitmap_set_bit (exit_bbs, EXIT_BLOCK);
7618 /* Clear EDGE_DFS_BACK on "all" entry edges; the RPO order computation
7619 will re-mark those that are contained in the region. */
7620 edge_iterator ei;
7621 edge e;
7622 FOR_EACH_EDGE (e, ei, entry->dest->preds)
7623 e->flags &= ~EDGE_DFS_BACK;
7625 int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS);
7626 auto_vec<std::pair<int, int> > toplevel_scc_extents;
7627 int n = rev_post_order_and_mark_dfs_back_seme
7628 (fn, entry, exit_bbs, true, rpo, !iterate ? &toplevel_scc_extents : NULL);
7630 if (!do_region)
7631 BITMAP_FREE (exit_bbs);
7633 /* If there are any non-DFS_BACK edges into entry->dest, skip
7634 processing PHI nodes for that block. This supports
7635 value-numbering loop bodies without the actual loop. */
7636 FOR_EACH_EDGE (e, ei, entry->dest->preds)
7637 if (e != entry
7638 && !(e->flags & EDGE_DFS_BACK))
7639 break;
7640 bool skip_entry_phis = e != NULL;
7641 if (skip_entry_phis && dump_file && (dump_flags & TDF_DETAILS))
7642 fprintf (dump_file, "Region does not contain all edges into "
7643 "the entry block, skipping its PHIs.\n");
7645 int *bb_to_rpo = XNEWVEC (int, last_basic_block_for_fn (fn));
7646 for (int i = 0; i < n; ++i)
7647 bb_to_rpo[rpo[i]] = i;
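/* From here on bb_to_rpo[bb->index] gives the position of BB in the
   RPO order, i.e. bb_to_rpo[rpo[i]] == i for all i in [0, n).  */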
7649 unwind_state *rpo_state = XNEWVEC (unwind_state, n);
7651 rpo_elim avail (entry->dest);
7652 rpo_avail = &avail;
7654 /* Verify we have no extra entries into the region. */
7655 if (flag_checking && do_region)
7657 auto_bb_flag bb_in_region (fn);
7658 for (int i = 0; i < n; ++i)
7660 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7661 bb->flags |= bb_in_region;
7663 /* We can't merge the first two loops because we cannot rely
7664 on EDGE_DFS_BACK for edges not within the region. But if
7665 we decide to always have the bb_in_region flag we could
7666 do the checking during the RPO walk itself (but then it would
7667 also be easy to handle MEME regions conservatively). */
7668 for (int i = 0; i < n; ++i)
7670 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7671 edge e;
7672 edge_iterator ei;
7673 FOR_EACH_EDGE (e, ei, bb->preds)
7674 gcc_assert (e == entry
7675 || (skip_entry_phis && bb == entry->dest)
7676 || (e->src->flags & bb_in_region));
7678 for (int i = 0; i < n; ++i)
7680 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7681 bb->flags &= ~bb_in_region;
7685 /* Create the VN state. For the initial size of the various hashtables
7686 use a heuristic based on region size and number of SSA names. */
7687 unsigned region_size = (((unsigned HOST_WIDE_INT)n * num_ssa_names)
7688 / (n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS));
7689 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
7690 next_value_id = 1;
7691 next_constant_value_id = -1;
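/* Value-ids for non-constants are handed out upwards from 1, those
   for constants downwards from -1, keeping both ranges disjoint.  */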
7693 vn_ssa_aux_hash = new hash_table <vn_ssa_aux_hasher> (region_size * 2);
7694 gcc_obstack_init (&vn_ssa_aux_obstack);
7696 gcc_obstack_init (&vn_tables_obstack);
7697 gcc_obstack_init (&vn_tables_insert_obstack);
7698 valid_info = XCNEW (struct vn_tables_s);
7699 allocate_vn_table (valid_info, region_size);
7700 last_inserted_ref = NULL;
7701 last_inserted_phi = NULL;
7702 last_inserted_nary = NULL;
7703 last_pushed_avail = NULL;
7705 vn_valueize = rpo_vn_valueize;
7707 /* Initialize the unwind state and edge/BB executable state. */
7708 unsigned curr_scc = 0;
7709 for (int i = 0; i < n; ++i)
7711 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7712 rpo_state[i].visited = 0;
7713 rpo_state[i].max_rpo = i;
7714 if (!iterate && curr_scc < toplevel_scc_extents.length ())
7716 if (i >= toplevel_scc_extents[curr_scc].first
7717 && i <= toplevel_scc_extents[curr_scc].second)
7718 rpo_state[i].max_rpo = toplevel_scc_extents[curr_scc].second;
7719 if (i == toplevel_scc_extents[curr_scc].second)
7720 curr_scc++;
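/* Thus for a toplevel SCC spanning RPO indexes [first, second]
   every member block gets max_rpo == second; the non-iterating
   walk below uses this to detect predecessors that may still
   become reachable.  */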
7722 bb->flags &= ~BB_EXECUTABLE;
7723 bool has_backedges = false;
7724 edge e;
7725 edge_iterator ei;
7726 FOR_EACH_EDGE (e, ei, bb->preds)
7728 if (e->flags & EDGE_DFS_BACK)
7729 has_backedges = true;
7730 e->flags &= ~EDGE_EXECUTABLE;
7731 if (iterate || e == entry || (skip_entry_phis && bb == entry->dest))
7732 continue;
7734 rpo_state[i].iterate = iterate && has_backedges;
7736 entry->flags |= EDGE_EXECUTABLE;
7737 entry->dest->flags |= BB_EXECUTABLE;
7739 /* As a heuristic to improve compile time we handle only the N innermost
7740 loops and the outermost one optimistically. */
7741 if (iterate)
7743 unsigned max_depth = param_rpo_vn_max_loop_depth;
7744 for (auto loop : loops_list (cfun, LI_ONLY_INNERMOST))
7745 if (loop_depth (loop) > max_depth)
7746 for (unsigned i = 2;
7747 i < loop_depth (loop) - max_depth; ++i)
7749 basic_block header = superloop_at_depth (loop, i)->header;
7750 bool non_latch_backedge = false;
7751 edge e;
7752 edge_iterator ei;
7753 FOR_EACH_EDGE (e, ei, header->preds)
7754 if (e->flags & EDGE_DFS_BACK)
7756 /* There can be a non-latch backedge into the header
7757 which is part of an outer irreducible region. We
7758 cannot avoid iterating this block then. */
7759 if (!dominated_by_p (CDI_DOMINATORS,
7760 e->src, e->dest))
7762 if (dump_file && (dump_flags & TDF_DETAILS))
7763 fprintf (dump_file, "non-latch backedge %d -> %d "
7764 "forces iteration of loop %d\n",
7765 e->src->index, e->dest->index, loop->num);
7766 non_latch_backedge = true;
7768 else
7769 e->flags |= EDGE_EXECUTABLE;
7771 rpo_state[bb_to_rpo[header->index]].iterate = non_latch_backedge;
7775 uint64_t nblk = 0;
7776 int idx = 0;
7777 if (iterate)
7778 /* Go and process all blocks, iterating as necessary. */
7781 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
7783 /* If the block has incoming backedges remember unwind state. This
7784 is required even for non-executable blocks since in irreducible
7785 regions we might reach them via the backedge and re-start iterating
7786 from there.
7787 Note we can mark individual blocks with incoming backedges as
7788 not iterating, in which case PHIs are handled conservatively.
7789 We do that heuristically to reduce compile time for degenerate cases. */
7790 if (rpo_state[idx].iterate)
7792 rpo_state[idx].ob_top = obstack_alloc (&vn_tables_obstack, 0);
7793 rpo_state[idx].ref_top = last_inserted_ref;
7794 rpo_state[idx].phi_top = last_inserted_phi;
7795 rpo_state[idx].nary_top = last_inserted_nary;
7796 rpo_state[idx].avail_top
7797 = last_pushed_avail ? last_pushed_avail->avail : NULL;
7800 if (!(bb->flags & BB_EXECUTABLE))
7802 if (dump_file && (dump_flags & TDF_DETAILS))
7803 fprintf (dump_file, "Block %d: BB%d found not executable\n",
7804 idx, bb->index);
7805 idx++;
7806 continue;
7809 if (dump_file && (dump_flags & TDF_DETAILS))
7810 fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
7811 nblk++;
7812 todo |= process_bb (avail, bb,
7813 rpo_state[idx].visited != 0,
7814 rpo_state[idx].iterate,
7815 iterate, eliminate, do_region, exit_bbs, false);
7816 rpo_state[idx].visited++;
7818 /* Check whether changed values flow over executable outgoing backedges
7819 and whether those change destination PHI values (that is what we
7820 can easily verify). Reduce over all such edges to the farthest-away
7821 PHI. */
7822 int iterate_to = -1;
7823 edge_iterator ei;
7824 edge e;
7825 FOR_EACH_EDGE (e, ei, bb->succs)
7826 if ((e->flags & (EDGE_DFS_BACK|EDGE_EXECUTABLE))
7827 == (EDGE_DFS_BACK|EDGE_EXECUTABLE)
7828 && rpo_state[bb_to_rpo[e->dest->index]].iterate)
7830 int destidx = bb_to_rpo[e->dest->index];
7831 if (!rpo_state[destidx].visited)
7833 if (dump_file && (dump_flags & TDF_DETAILS))
7834 fprintf (dump_file, "Unvisited destination %d\n",
7835 e->dest->index);
7836 if (iterate_to == -1 || destidx < iterate_to)
7837 iterate_to = destidx;
7838 continue;
7840 if (dump_file && (dump_flags & TDF_DETAILS))
7841 fprintf (dump_file, "Looking for changed values of backedge"
7842 " %d->%d destination PHIs\n",
7843 e->src->index, e->dest->index);
7844 vn_context_bb = e->dest;
7845 gphi_iterator gsi;
7846 for (gsi = gsi_start_phis (e->dest);
7847 !gsi_end_p (gsi); gsi_next (&gsi))
7849 bool inserted = false;
7850 /* While we'd ideally just iterate on value changes,
7851 we CSE PHIs and do that even across basic-block
7852 boundaries. So even hashtable state changes can
7853 be important (they are roughly equivalent to
7854 PHI argument value changes). To avoid excessive
7855 iteration because of that we track with GF_PLF_1
7856 whether a PHI was CSEd to. */
7857 bool phival_changed;
7858 if ((phival_changed = visit_phi (gsi.phi (),
7859 &inserted, false))
7860 || (inserted && gimple_plf (gsi.phi (), GF_PLF_1)))
7862 if (!phival_changed
7863 && dump_file && (dump_flags & TDF_DETAILS))
7864 fprintf (dump_file, "PHI was CSEd and hashtable "
7865 "state changed\n");
7866 if (iterate_to == -1 || destidx < iterate_to)
7867 iterate_to = destidx;
7868 break;
7871 vn_context_bb = NULL;
7873 if (iterate_to != -1)
7875 do_unwind (&rpo_state[iterate_to], avail);
7876 idx = iterate_to;
7877 if (dump_file && (dump_flags & TDF_DETAILS))
7878 fprintf (dump_file, "Iterating to %d BB%d\n",
7879 iterate_to, rpo[iterate_to]);
7880 continue;
7883 idx++;
7885 while (idx < n);
7887 else /* !iterate */
7889 /* Process all blocks greedily with a worklist that enforces RPO
7890 processing of reachable blocks. */
7891 auto_bitmap worklist;
7892 bitmap_set_bit (worklist, 0);
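/* Bits in the worklist are RPO indexes, so extracting the lowest
   set bit always yields the earliest unprocessed reachable block
   in RPO order.  */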
7893 while (!bitmap_empty_p (worklist))
7895 int idx = bitmap_first_set_bit (worklist);
7896 bitmap_clear_bit (worklist, idx);
7897 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
7898 gcc_assert ((bb->flags & BB_EXECUTABLE)
7899 && !rpo_state[idx].visited);
7901 if (dump_file && (dump_flags & TDF_DETAILS))
7902 fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
7904 /* When we run into predecessor edges where we cannot trust their
7905 executable state, mark them executable so PHI processing will
7906 be conservative.
7907 ??? Do we need to force arguments flowing over such an edge
7908 to be varying, or will they always be? */
7909 edge_iterator ei;
7910 edge e;
7911 FOR_EACH_EDGE (e, ei, bb->preds)
7912 if (!(e->flags & EDGE_EXECUTABLE)
7913 && (bb == entry->dest
7914 || (!rpo_state[bb_to_rpo[e->src->index]].visited
7915 && (rpo_state[bb_to_rpo[e->src->index]].max_rpo
7916 >= (int)idx))))
7918 if (dump_file && (dump_flags & TDF_DETAILS))
7919 fprintf (dump_file, "Cannot trust state of predecessor "
7920 "edge %d -> %d, marking executable\n",
7921 e->src->index, e->dest->index);
7922 e->flags |= EDGE_EXECUTABLE;
7925 nblk++;
7926 todo |= process_bb (avail, bb, false, false, false, eliminate,
7927 do_region, exit_bbs,
7928 skip_entry_phis && bb == entry->dest);
7929 rpo_state[idx].visited++;
7931 FOR_EACH_EDGE (e, ei, bb->succs)
7932 if ((e->flags & EDGE_EXECUTABLE)
7933 && e->dest->index != EXIT_BLOCK
7934 && (!do_region || !bitmap_bit_p (exit_bbs, e->dest->index))
7935 && !rpo_state[bb_to_rpo[e->dest->index]].visited)
7936 bitmap_set_bit (worklist, bb_to_rpo[e->dest->index]);
7940 /* Summary statistics, reported when statistics or a dump file are active. */
7941 int nex = 0;
7942 unsigned max_visited = 1;
7943 for (int i = 0; i < n; ++i)
7945 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7946 if (bb->flags & BB_EXECUTABLE)
7947 nex++;
7948 statistics_histogram_event (cfun, "RPO block visited times",
7949 rpo_state[i].visited);
7950 if (rpo_state[i].visited > max_visited)
7951 max_visited = rpo_state[i].visited;
7953 unsigned nvalues = 0, navail = 0;
7954 for (hash_table<vn_ssa_aux_hasher>::iterator i = vn_ssa_aux_hash->begin ();
7955 i != vn_ssa_aux_hash->end (); ++i)
7957 nvalues++;
7958 vn_avail *av = (*i)->avail;
7959 while (av)
7961 navail++;
7962 av = av->next;
7965 statistics_counter_event (cfun, "RPO blocks", n);
7966 statistics_counter_event (cfun, "RPO blocks visited", nblk);
7967 statistics_counter_event (cfun, "RPO blocks executable", nex);
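/* nblk scaled by 10 and divided by nex gives the average number of
   visits per executable block in tenths, matching the %d.%d dump
   output below.  */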
7968 statistics_histogram_event (cfun, "RPO iterations", 10*nblk / nex);
7969 statistics_histogram_event (cfun, "RPO num values", nvalues);
7970 statistics_histogram_event (cfun, "RPO num avail", navail);
7971 statistics_histogram_event (cfun, "RPO num lattice",
7972 vn_ssa_aux_hash->elements ());
7973 if (dump_file && (dump_flags & (TDF_DETAILS|TDF_STATS)))
7975 fprintf (dump_file, "RPO iteration over %d blocks visited %" PRIu64
7976 " blocks in total discovering %d executable blocks iterating "
7977 "%d.%d times, a block was visited max. %u times\n",
7978 n, nblk, nex,
7979 (int)((10*nblk / nex)/10), (int)((10*nblk / nex)%10),
7980 max_visited);
7981 fprintf (dump_file, "RPO tracked %d values available at %d locations "
7982 "and %" PRIu64 " lattice elements\n",
7983 nvalues, navail, (uint64_t) vn_ssa_aux_hash->elements ());
7986 if (eliminate)
7988 /* When !iterate we already performed elimination during the RPO
7989 walk. */
7990 if (iterate)
7992 /* Elimination for region-based VN needs to be done within the
7993 RPO walk. */
7994 gcc_assert (! do_region);
7995 /* Note we can't use avail.walk here because that gets confused
7996 by the existing availability and it will be less efficient
7997 as well. */
7998 todo |= eliminate_with_rpo_vn (NULL);
8000 else
8001 todo |= avail.eliminate_cleanup (do_region);
8004 vn_valueize = NULL;
8005 rpo_avail = NULL;
8007 XDELETEVEC (bb_to_rpo);
8008 XDELETEVEC (rpo);
8009 XDELETEVEC (rpo_state);
8011 return todo;
8014 /* Region-based entry for RPO VN. Performs value-numbering and elimination
8015 on the SEME region specified by ENTRY and EXIT_BBS. If ENTRY is not
8016 the only edge into the region at ENTRY->dest, PHI nodes in ENTRY->dest
8017 are not considered. */
8019 unsigned
8020 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs)
8022 default_vn_walk_kind = VN_WALKREWRITE;
8023 unsigned todo = do_rpo_vn (fn, entry, exit_bbs, false, true);
8024 free_rpo_vn ();
8025 return todo;
8029 namespace {
8031 const pass_data pass_data_fre =
8033 GIMPLE_PASS, /* type */
8034 "fre", /* name */
8035 OPTGROUP_NONE, /* optinfo_flags */
8036 TV_TREE_FRE, /* tv_id */
8037 ( PROP_cfg | PROP_ssa ), /* properties_required */
8038 0, /* properties_provided */
8039 0, /* properties_destroyed */
8040 0, /* todo_flags_start */
8041 0, /* todo_flags_finish */
8044 class pass_fre : public gimple_opt_pass
8046 public:
8047 pass_fre (gcc::context *ctxt)
8048 : gimple_opt_pass (pass_data_fre, ctxt), may_iterate (true)
8051 /* opt_pass methods: */
8052 opt_pass * clone () { return new pass_fre (m_ctxt); }
8053 void set_pass_param (unsigned int n, bool param)
8055 gcc_assert (n == 0);
8056 may_iterate = param;
8058 virtual bool gate (function *)
8060 return flag_tree_fre != 0 && (may_iterate || optimize > 1);
8062 virtual unsigned int execute (function *);
8064 private:
8065 bool may_iterate;
8066 }; // class pass_fre
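/* The pass is instantiated more than once via clone (); passes.def
   passes false as the parameter for the late instance, which pins
   it to the non-iterating mode and, via gate (), to optimize > 1.  */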
8068 unsigned int
8069 pass_fre::execute (function *fun)
8071 unsigned todo = 0;
8073 /* At -O[1g] use the cheap non-iterating mode. */
8074 bool iterate_p = may_iterate && (optimize > 1);
8075 calculate_dominance_info (CDI_DOMINATORS);
8076 if (iterate_p)
8077 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
8079 default_vn_walk_kind = VN_WALKREWRITE;
8080 todo = do_rpo_vn (fun, NULL, NULL, iterate_p, true);
8081 free_rpo_vn ();
8083 if (iterate_p)
8084 loop_optimizer_finalize ();
8086 if (scev_initialized_p ())
8087 scev_reset_htab ();
8089 /* For late FRE after IVOPTs and unrolling, see if we can
8090 remove some TREE_ADDRESSABLE flags and rewrite variables into SSA. */
8091 if (!may_iterate)
8092 todo |= TODO_update_address_taken;
8094 return todo;
8097 } // anon namespace
8099 gimple_opt_pass *
8100 make_pass_fre (gcc::context *ctxt)
8102 return new pass_fre (ctxt);
8105 #undef BB_EXECUTABLE