Fix warnings building linux-atomic.c and fptr.c on hppa64-linux
[official-gcc.git] / gcc / tree-ssa-sccvn.c
blobd524259768410d438d1882073bf457ed5b714bee
1 /* SCC value numbering for trees
2 Copyright (C) 2006-2021 Free Software Foundation, Inc.
3 Contributed by Daniel Berlin <dan@dberlin.org>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
10 any later version.
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "splay-tree.h"
25 #include "backend.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "gimple.h"
29 #include "ssa.h"
30 #include "expmed.h"
31 #include "insn-config.h"
32 #include "memmodel.h"
33 #include "emit-rtl.h"
34 #include "cgraph.h"
35 #include "gimple-pretty-print.h"
36 #include "alias.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
39 #include "cfganal.h"
40 #include "tree-inline.h"
41 #include "internal-fn.h"
42 #include "gimple-fold.h"
43 #include "tree-eh.h"
44 #include "gimplify.h"
45 #include "flags.h"
46 #include "dojump.h"
47 #include "explow.h"
48 #include "calls.h"
49 #include "varasm.h"
50 #include "stmt.h"
51 #include "expr.h"
52 #include "tree-dfa.h"
53 #include "tree-ssa.h"
54 #include "dumpfile.h"
55 #include "cfgloop.h"
56 #include "tree-ssa-propagate.h"
57 #include "tree-cfg.h"
58 #include "domwalk.h"
59 #include "gimple-iterator.h"
60 #include "gimple-match.h"
61 #include "stringpool.h"
62 #include "attribs.h"
63 #include "tree-pass.h"
64 #include "statistics.h"
65 #include "langhooks.h"
66 #include "ipa-utils.h"
67 #include "dbgcnt.h"
68 #include "tree-cfgcleanup.h"
69 #include "tree-ssa-loop.h"
70 #include "tree-scalar-evolution.h"
71 #include "tree-ssa-loop-niter.h"
72 #include "builtins.h"
73 #include "fold-const-call.h"
74 #include "tree-ssa-sccvn.h"
76 /* This algorithm is based on the SCC algorithm presented by Keith
77 Cooper and L. Taylor Simpson in "SCC-Based Value numbering"
78 (http://citeseer.ist.psu.edu/41805.html). In
79 straight line code, it is equivalent to a regular hash based value
80 numbering that is performed in reverse postorder.
82 For code with cycles, there are two alternatives, both of which
83 require keeping the hashtables separate from the actual list of
84 value numbers for SSA names.
86 1. Iterate value numbering in an RPO walk of the blocks, removing
87 all the entries from the hashtable after each iteration (but
88 keeping the SSA name->value number mapping between iterations).
89 Iterate until it does not change.
91 2. Perform value numbering as part of an SCC walk on the SSA graph,
92 iterating only the cycles in the SSA graph until they do not change
93 (using a separate, optimistic hashtable for value numbering the SCC
94 operands).
96 The second is not just faster in practice (because most SSA graph
97 cycles do not involve all the variables in the graph), it also has
98 some nice properties.
100 One of these nice properties is that when we pop an SCC off the
101 stack, we are guaranteed to have processed all the operands coming from
102 *outside of that SCC*, so we do not need to do anything special to
103 ensure they have value numbers.
105 Another nice property is that the SCC walk is done as part of a DFS
106 of the SSA graph, which makes it easy to perform combining and
107 simplifying operations at the same time.
109 The code below is deliberately written in a way that makes it easy
110 to separate the SCC walk from the other work it does.
112 In order to propagate constants through the code, we track which
113 expressions contain constants, and use those while folding. In
114 theory, we could also track expressions whose value numbers are
115 replaced, in case we end up folding based on expression
116 identities.
118 In order to value number memory, we assign value numbers to vuses.
119 This enables us to note that, for example, stores to the same
120 address of the same value from the same starting memory states are
121 equivalent.
122 TODO:
124 1. We can iterate only the changing portions of the SCC's, but
125 I have not seen an SCC big enough for this to be a win.
126 2. If you differentiate between phi nodes for loops and phi nodes
127 for if-then-else, you can properly consider phi nodes in different
128 blocks for equivalence.
129 3. We could value number vuses in more cases, particularly, whole
130 structure copies.
133 /* There's no BB_EXECUTABLE but we can use BB_VISITED. */
134 #define BB_EXECUTABLE BB_VISITED
136 static vn_lookup_kind default_vn_walk_kind;
138 /* vn_nary_op hashtable helpers. */
140 struct vn_nary_op_hasher : nofree_ptr_hash <vn_nary_op_s>
142 typedef vn_nary_op_s *compare_type;
143 static inline hashval_t hash (const vn_nary_op_s *);
144 static inline bool equal (const vn_nary_op_s *, const vn_nary_op_s *);
147 /* Return the computed hashcode for nary operation P1. */
149 inline hashval_t
150 vn_nary_op_hasher::hash (const vn_nary_op_s *vno1)
152 return vno1->hashcode;
155 /* Compare nary operations P1 and P2 and return true if they are
156 equivalent. */
158 inline bool
159 vn_nary_op_hasher::equal (const vn_nary_op_s *vno1, const vn_nary_op_s *vno2)
161 return vno1 == vno2 || vn_nary_op_eq (vno1, vno2);
164 typedef hash_table<vn_nary_op_hasher> vn_nary_op_table_type;
165 typedef vn_nary_op_table_type::iterator vn_nary_op_iterator_type;
168 /* vn_phi hashtable helpers. */
170 static int
171 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2);
173 struct vn_phi_hasher : nofree_ptr_hash <vn_phi_s>
175 static inline hashval_t hash (const vn_phi_s *);
176 static inline bool equal (const vn_phi_s *, const vn_phi_s *);
179 /* Return the computed hashcode for phi operation P1. */
181 inline hashval_t
182 vn_phi_hasher::hash (const vn_phi_s *vp1)
184 return vp1->hashcode;
187 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
189 inline bool
190 vn_phi_hasher::equal (const vn_phi_s *vp1, const vn_phi_s *vp2)
192 return vp1 == vp2 || vn_phi_eq (vp1, vp2);
195 typedef hash_table<vn_phi_hasher> vn_phi_table_type;
196 typedef vn_phi_table_type::iterator vn_phi_iterator_type;
199 /* Compare two reference operands P1 and P2 for equality. Return true if
200 they are equal, and false otherwise. */
202 static int
203 vn_reference_op_eq (const void *p1, const void *p2)
205 const_vn_reference_op_t const vro1 = (const_vn_reference_op_t) p1;
206 const_vn_reference_op_t const vro2 = (const_vn_reference_op_t) p2;
208 return (vro1->opcode == vro2->opcode
209 /* We do not care for differences in type qualification. */
210 && (vro1->type == vro2->type
211 || (vro1->type && vro2->type
212 && types_compatible_p (TYPE_MAIN_VARIANT (vro1->type),
213 TYPE_MAIN_VARIANT (vro2->type))))
214 && expressions_equal_p (vro1->op0, vro2->op0)
215 && expressions_equal_p (vro1->op1, vro2->op1)
216 && expressions_equal_p (vro1->op2, vro2->op2)
217 && (vro1->opcode != CALL_EXPR || vro1->clique == vro2->clique));
220 /* Free a reference operation structure VP. */
222 static inline void
223 free_reference (vn_reference_s *vr)
225 vr->operands.release ();
229 /* vn_reference hashtable helpers. */
231 struct vn_reference_hasher : nofree_ptr_hash <vn_reference_s>
233 static inline hashval_t hash (const vn_reference_s *);
234 static inline bool equal (const vn_reference_s *, const vn_reference_s *);
237 /* Return the hashcode for a given reference operation P1. */
239 inline hashval_t
240 vn_reference_hasher::hash (const vn_reference_s *vr1)
242 return vr1->hashcode;
245 inline bool
246 vn_reference_hasher::equal (const vn_reference_s *v, const vn_reference_s *c)
248 return v == c || vn_reference_eq (v, c);
251 typedef hash_table<vn_reference_hasher> vn_reference_table_type;
252 typedef vn_reference_table_type::iterator vn_reference_iterator_type;
254 /* Pretty-print OPS to OUTFILE. */
256 void
257 print_vn_reference_ops (FILE *outfile, const vec<vn_reference_op_s> ops)
259 vn_reference_op_t vro;
260 unsigned int i;
261 fprintf (outfile, "{");
262 for (i = 0; ops.iterate (i, &vro); i++)
264 bool closebrace = false;
265 if (vro->opcode != SSA_NAME
266 && TREE_CODE_CLASS (vro->opcode) != tcc_declaration)
268 fprintf (outfile, "%s", get_tree_code_name (vro->opcode));
269 if (vro->op0 || vro->opcode == CALL_EXPR)
271 fprintf (outfile, "<");
272 closebrace = true;
275 if (vro->op0 || vro->opcode == CALL_EXPR)
277 if (!vro->op0)
278 fprintf (outfile, internal_fn_name ((internal_fn)vro->clique));
279 else
280 print_generic_expr (outfile, vro->op0);
281 if (vro->op1)
283 fprintf (outfile, ",");
284 print_generic_expr (outfile, vro->op1);
286 if (vro->op2)
288 fprintf (outfile, ",");
289 print_generic_expr (outfile, vro->op2);
292 if (closebrace)
293 fprintf (outfile, ">");
294 if (i != ops.length () - 1)
295 fprintf (outfile, ",");
297 fprintf (outfile, "}");
300 DEBUG_FUNCTION void
301 debug_vn_reference_ops (const vec<vn_reference_op_s> ops)
303 print_vn_reference_ops (stderr, ops);
304 fputc ('\n', stderr);
307 /* The set of VN hashtables. */
309 typedef struct vn_tables_s
311 vn_nary_op_table_type *nary;
312 vn_phi_table_type *phis;
313 vn_reference_table_type *references;
314 } *vn_tables_t;
317 /* vn_constant hashtable helpers. */
319 struct vn_constant_hasher : free_ptr_hash <vn_constant_s>
321 static inline hashval_t hash (const vn_constant_s *);
322 static inline bool equal (const vn_constant_s *, const vn_constant_s *);
325 /* Hash table hash function for vn_constant_t. */
327 inline hashval_t
328 vn_constant_hasher::hash (const vn_constant_s *vc1)
330 return vc1->hashcode;
333 /* Hash table equality function for vn_constant_t. */
335 inline bool
336 vn_constant_hasher::equal (const vn_constant_s *vc1, const vn_constant_s *vc2)
338 if (vc1->hashcode != vc2->hashcode)
339 return false;
341 return vn_constant_eq_with_type (vc1->constant, vc2->constant);
344 static hash_table<vn_constant_hasher> *constant_to_value_id;
347 /* Obstack we allocate the vn-tables elements from. */
348 static obstack vn_tables_obstack;
349 /* Special obstack we never unwind. */
350 static obstack vn_tables_insert_obstack;
352 static vn_reference_t last_inserted_ref;
353 static vn_phi_t last_inserted_phi;
354 static vn_nary_op_t last_inserted_nary;
355 static vn_ssa_aux_t last_pushed_avail;
357 /* Valid hashtables storing information we have proven to be
358 correct. */
359 static vn_tables_t valid_info;
362 /* Valueization hook for simplify_replace_tree. Valueize NAME if it is
363 an SSA name, otherwise just return it. */
364 tree (*vn_valueize) (tree);
365 static tree
366 vn_valueize_for_srt (tree t, void* context ATTRIBUTE_UNUSED)
368 basic_block saved_vn_context_bb = vn_context_bb;
369 /* Look for sth available at the definition block of the argument.
370 This avoids inconsistencies between availability there which
371 decides if the stmt can be removed and availability at the
372 use site. The SSA property ensures that things available
373 at the definition are also available at uses. */
374 if (!SSA_NAME_IS_DEFAULT_DEF (t))
375 vn_context_bb = gimple_bb (SSA_NAME_DEF_STMT (t));
376 tree res = vn_valueize (t);
377 vn_context_bb = saved_vn_context_bb;
378 return res;
382 /* This represents the top of the VN lattice, which is the universal
383 value. */
385 tree VN_TOP;
387 /* Unique counter for our value ids. */
389 static unsigned int next_value_id;
390 static int next_constant_value_id;
393 /* Table of vn_ssa_aux_t's, one per ssa_name. The vn_ssa_aux_t objects
394 are allocated on an obstack for locality reasons, and to free them
395 without looping over the vec. */
397 struct vn_ssa_aux_hasher : typed_noop_remove <vn_ssa_aux_t>
399 typedef vn_ssa_aux_t value_type;
400 typedef tree compare_type;
401 static inline hashval_t hash (const value_type &);
402 static inline bool equal (const value_type &, const compare_type &);
403 static inline void mark_deleted (value_type &) {}
404 static const bool empty_zero_p = true;
405 static inline void mark_empty (value_type &e) { e = NULL; }
406 static inline bool is_deleted (value_type &) { return false; }
407 static inline bool is_empty (value_type &e) { return e == NULL; }
410 hashval_t
411 vn_ssa_aux_hasher::hash (const value_type &entry)
413 return SSA_NAME_VERSION (entry->name);
416 bool
417 vn_ssa_aux_hasher::equal (const value_type &entry, const compare_type &name)
419 return name == entry->name;
422 static hash_table<vn_ssa_aux_hasher> *vn_ssa_aux_hash;
423 typedef hash_table<vn_ssa_aux_hasher>::iterator vn_ssa_aux_iterator_type;
424 static struct obstack vn_ssa_aux_obstack;
426 static vn_nary_op_t vn_nary_op_insert_stmt (gimple *, tree);
427 static unsigned int vn_nary_length_from_stmt (gimple *);
428 static vn_nary_op_t alloc_vn_nary_op_noinit (unsigned int, obstack *);
429 static vn_nary_op_t vn_nary_op_insert_into (vn_nary_op_t,
430 vn_nary_op_table_type *, bool);
431 static void init_vn_nary_op_from_stmt (vn_nary_op_t, gassign *);
432 static void init_vn_nary_op_from_pieces (vn_nary_op_t, unsigned int,
433 enum tree_code, tree, tree *);
434 static tree vn_lookup_simplify_result (gimple_match_op *);
435 static vn_reference_t vn_reference_lookup_or_insert_for_pieces
436 (tree, alias_set_type, alias_set_type, tree,
437 vec<vn_reference_op_s, va_heap>, tree);
439 /* Return whether there is value numbering information for a given SSA name. */
441 bool
442 has_VN_INFO (tree name)
444 return vn_ssa_aux_hash->find_with_hash (name, SSA_NAME_VERSION (name));
447 vn_ssa_aux_t
448 VN_INFO (tree name)
450 vn_ssa_aux_t *res
451 = vn_ssa_aux_hash->find_slot_with_hash (name, SSA_NAME_VERSION (name),
452 INSERT);
453 if (*res != NULL)
454 return *res;
456 vn_ssa_aux_t newinfo = *res = XOBNEW (&vn_ssa_aux_obstack, struct vn_ssa_aux);
457 memset (newinfo, 0, sizeof (struct vn_ssa_aux));
458 newinfo->name = name;
459 newinfo->valnum = VN_TOP;
460 /* We are using the visited flag to handle uses with defs not within the
461 region being value-numbered. */
462 newinfo->visited = false;
464 /* Given we create the VN_INFOs on-demand now we have to do initialization
465 different than VN_TOP here. */
466 if (SSA_NAME_IS_DEFAULT_DEF (name))
467 switch (TREE_CODE (SSA_NAME_VAR (name)))
469 case VAR_DECL:
470 /* All undefined vars are VARYING. */
471 newinfo->valnum = name;
472 newinfo->visited = true;
473 break;
475 case PARM_DECL:
476 /* Parameters are VARYING but we can record a condition
477 if we know it is a non-NULL pointer. */
478 newinfo->visited = true;
479 newinfo->valnum = name;
480 if (POINTER_TYPE_P (TREE_TYPE (name))
481 && nonnull_arg_p (SSA_NAME_VAR (name)))
483 tree ops[2];
484 ops[0] = name;
485 ops[1] = build_int_cst (TREE_TYPE (name), 0);
486 vn_nary_op_t nary;
487 /* Allocate from non-unwinding stack. */
488 nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
489 init_vn_nary_op_from_pieces (nary, 2, NE_EXPR,
490 boolean_type_node, ops);
491 nary->predicated_values = 0;
492 nary->u.result = boolean_true_node;
493 vn_nary_op_insert_into (nary, valid_info->nary, true);
494 gcc_assert (nary->unwind_to == NULL);
495 /* Also do not link it into the undo chain. */
496 last_inserted_nary = nary->next;
497 nary->next = (vn_nary_op_t)(void *)-1;
498 nary = alloc_vn_nary_op_noinit (2, &vn_tables_insert_obstack);
499 init_vn_nary_op_from_pieces (nary, 2, EQ_EXPR,
500 boolean_type_node, ops);
501 nary->predicated_values = 0;
502 nary->u.result = boolean_false_node;
503 vn_nary_op_insert_into (nary, valid_info->nary, true);
504 gcc_assert (nary->unwind_to == NULL);
505 last_inserted_nary = nary->next;
506 nary->next = (vn_nary_op_t)(void *)-1;
507 if (dump_file && (dump_flags & TDF_DETAILS))
509 fprintf (dump_file, "Recording ");
510 print_generic_expr (dump_file, name, TDF_SLIM);
511 fprintf (dump_file, " != 0\n");
514 break;
516 case RESULT_DECL:
517 /* If the result is passed by invisible reference the default
518 def is initialized, otherwise it's uninitialized. Still
519 undefined is varying. */
520 newinfo->visited = true;
521 newinfo->valnum = name;
522 break;
524 default:
525 gcc_unreachable ();
527 return newinfo;
530 /* Return the SSA value of X. */
532 inline tree
533 SSA_VAL (tree x, bool *visited = NULL)
535 vn_ssa_aux_t tem = vn_ssa_aux_hash->find_with_hash (x, SSA_NAME_VERSION (x));
536 if (visited)
537 *visited = tem && tem->visited;
538 return tem && tem->visited ? tem->valnum : x;
541 /* Return the SSA value of the VUSE x, supporting released VDEFs
542 during elimination which will value-number the VDEF to the
543 associated VUSE (but not substitute in the whole lattice). */
545 static inline tree
546 vuse_ssa_val (tree x)
548 if (!x)
549 return NULL_TREE;
553 x = SSA_VAL (x);
554 gcc_assert (x != VN_TOP);
556 while (SSA_NAME_IN_FREE_LIST (x));
558 return x;
561 /* Similar to the above but used as callback for walk_non_aliased_vuses
562 and thus should stop at unvisited VUSE to not walk across region
563 boundaries. */
565 static tree
566 vuse_valueize (tree vuse)
570 bool visited;
571 vuse = SSA_VAL (vuse, &visited);
572 if (!visited)
573 return NULL_TREE;
574 gcc_assert (vuse != VN_TOP);
576 while (SSA_NAME_IN_FREE_LIST (vuse));
577 return vuse;
581 /* Return the vn_kind the expression computed by the stmt should be
582 associated with. */
584 enum vn_kind
585 vn_get_stmt_kind (gimple *stmt)
587 switch (gimple_code (stmt))
589 case GIMPLE_CALL:
590 return VN_REFERENCE;
591 case GIMPLE_PHI:
592 return VN_PHI;
593 case GIMPLE_ASSIGN:
595 enum tree_code code = gimple_assign_rhs_code (stmt);
596 tree rhs1 = gimple_assign_rhs1 (stmt);
597 switch (get_gimple_rhs_class (code))
599 case GIMPLE_UNARY_RHS:
600 case GIMPLE_BINARY_RHS:
601 case GIMPLE_TERNARY_RHS:
602 return VN_NARY;
603 case GIMPLE_SINGLE_RHS:
604 switch (TREE_CODE_CLASS (code))
606 case tcc_reference:
607 /* VOP-less references can go through unary case. */
608 if ((code == REALPART_EXPR
609 || code == IMAGPART_EXPR
610 || code == VIEW_CONVERT_EXPR
611 || code == BIT_FIELD_REF)
612 && (TREE_CODE (TREE_OPERAND (rhs1, 0)) == SSA_NAME
613 || is_gimple_min_invariant (TREE_OPERAND (rhs1, 0))))
614 return VN_NARY;
616 /* Fallthrough. */
617 case tcc_declaration:
618 return VN_REFERENCE;
620 case tcc_constant:
621 return VN_CONSTANT;
623 default:
624 if (code == ADDR_EXPR)
625 return (is_gimple_min_invariant (rhs1)
626 ? VN_CONSTANT : VN_REFERENCE);
627 else if (code == CONSTRUCTOR)
628 return VN_NARY;
629 return VN_NONE;
631 default:
632 return VN_NONE;
635 default:
636 return VN_NONE;
640 /* Lookup a value id for CONSTANT and return it. If it does not
641 exist returns 0. */
643 unsigned int
644 get_constant_value_id (tree constant)
646 vn_constant_s **slot;
647 struct vn_constant_s vc;
649 vc.hashcode = vn_hash_constant_with_type (constant);
650 vc.constant = constant;
651 slot = constant_to_value_id->find_slot (&vc, NO_INSERT);
652 if (slot)
653 return (*slot)->value_id;
654 return 0;
657 /* Lookup a value id for CONSTANT, and if it does not exist, create a
658 new one and return it. If it does exist, return it. */
660 unsigned int
661 get_or_alloc_constant_value_id (tree constant)
663 vn_constant_s **slot;
664 struct vn_constant_s vc;
665 vn_constant_t vcp;
667 /* If the hashtable isn't initialized we're not running from PRE and thus
668 do not need value-ids. */
669 if (!constant_to_value_id)
670 return 0;
672 vc.hashcode = vn_hash_constant_with_type (constant);
673 vc.constant = constant;
674 slot = constant_to_value_id->find_slot (&vc, INSERT);
675 if (*slot)
676 return (*slot)->value_id;
678 vcp = XNEW (struct vn_constant_s);
679 vcp->hashcode = vc.hashcode;
680 vcp->constant = constant;
681 vcp->value_id = get_next_constant_value_id ();
682 *slot = vcp;
683 return vcp->value_id;
686 /* Compute the hash for a reference operand VRO1. */
688 static void
689 vn_reference_op_compute_hash (const vn_reference_op_t vro1, inchash::hash &hstate)
691 hstate.add_int (vro1->opcode);
692 if (vro1->opcode == CALL_EXPR && !vro1->op0)
693 hstate.add_int (vro1->clique);
694 if (vro1->op0)
695 inchash::add_expr (vro1->op0, hstate);
696 if (vro1->op1)
697 inchash::add_expr (vro1->op1, hstate);
698 if (vro1->op2)
699 inchash::add_expr (vro1->op2, hstate);
702 /* Compute a hash for the reference operation VR1 and return it. */
704 static hashval_t
705 vn_reference_compute_hash (const vn_reference_t vr1)
707 inchash::hash hstate;
708 hashval_t result;
709 int i;
710 vn_reference_op_t vro;
711 poly_int64 off = -1;
712 bool deref = false;
714 FOR_EACH_VEC_ELT (vr1->operands, i, vro)
716 if (vro->opcode == MEM_REF)
717 deref = true;
718 else if (vro->opcode != ADDR_EXPR)
719 deref = false;
720 if (maybe_ne (vro->off, -1))
722 if (known_eq (off, -1))
723 off = 0;
724 off += vro->off;
726 else
728 if (maybe_ne (off, -1)
729 && maybe_ne (off, 0))
730 hstate.add_poly_int (off);
731 off = -1;
732 if (deref
733 && vro->opcode == ADDR_EXPR)
735 if (vro->op0)
737 tree op = TREE_OPERAND (vro->op0, 0);
738 hstate.add_int (TREE_CODE (op));
739 inchash::add_expr (op, hstate);
742 else
743 vn_reference_op_compute_hash (vro, hstate);
746 result = hstate.end ();
747 /* ??? We would ICE later if we hash instead of adding that in. */
748 if (vr1->vuse)
749 result += SSA_NAME_VERSION (vr1->vuse);
751 return result;
754 /* Return true if reference operations VR1 and VR2 are equivalent. This
755 means they have the same set of operands and vuses. */
757 bool
758 vn_reference_eq (const_vn_reference_t const vr1, const_vn_reference_t const vr2)
760 unsigned i, j;
762 /* Early out if this is not a hash collision. */
763 if (vr1->hashcode != vr2->hashcode)
764 return false;
766 /* The VOP needs to be the same. */
767 if (vr1->vuse != vr2->vuse)
768 return false;
770 /* If the operands are the same we are done. */
771 if (vr1->operands == vr2->operands)
772 return true;
774 if (!vr1->type || !vr2->type)
776 if (vr1->type != vr2->type)
777 return false;
779 else if (vr1->type == vr2->type)
781 else if (COMPLETE_TYPE_P (vr1->type) != COMPLETE_TYPE_P (vr2->type)
782 || (COMPLETE_TYPE_P (vr1->type)
783 && !expressions_equal_p (TYPE_SIZE (vr1->type),
784 TYPE_SIZE (vr2->type))))
785 return false;
786 else if (vr1->operands[0].opcode == CALL_EXPR
787 && !types_compatible_p (vr1->type, vr2->type))
788 return false;
789 else if (INTEGRAL_TYPE_P (vr1->type)
790 && INTEGRAL_TYPE_P (vr2->type))
792 if (TYPE_PRECISION (vr1->type) != TYPE_PRECISION (vr2->type))
793 return false;
795 else if (INTEGRAL_TYPE_P (vr1->type)
796 && (TYPE_PRECISION (vr1->type)
797 != TREE_INT_CST_LOW (TYPE_SIZE (vr1->type))))
798 return false;
799 else if (INTEGRAL_TYPE_P (vr2->type)
800 && (TYPE_PRECISION (vr2->type)
801 != TREE_INT_CST_LOW (TYPE_SIZE (vr2->type))))
802 return false;
804 i = 0;
805 j = 0;
808 poly_int64 off1 = 0, off2 = 0;
809 vn_reference_op_t vro1, vro2;
810 vn_reference_op_s tem1, tem2;
811 bool deref1 = false, deref2 = false;
812 bool reverse1 = false, reverse2 = false;
813 for (; vr1->operands.iterate (i, &vro1); i++)
815 if (vro1->opcode == MEM_REF)
816 deref1 = true;
817 /* Do not look through a storage order barrier. */
818 else if (vro1->opcode == VIEW_CONVERT_EXPR && vro1->reverse)
819 return false;
820 reverse1 |= vro1->reverse;
821 if (known_eq (vro1->off, -1))
822 break;
823 off1 += vro1->off;
825 for (; vr2->operands.iterate (j, &vro2); j++)
827 if (vro2->opcode == MEM_REF)
828 deref2 = true;
829 /* Do not look through a storage order barrier. */
830 else if (vro2->opcode == VIEW_CONVERT_EXPR && vro2->reverse)
831 return false;
832 reverse2 |= vro2->reverse;
833 if (known_eq (vro2->off, -1))
834 break;
835 off2 += vro2->off;
837 if (maybe_ne (off1, off2) || reverse1 != reverse2)
838 return false;
839 if (deref1 && vro1->opcode == ADDR_EXPR)
841 memset (&tem1, 0, sizeof (tem1));
842 tem1.op0 = TREE_OPERAND (vro1->op0, 0);
843 tem1.type = TREE_TYPE (tem1.op0);
844 tem1.opcode = TREE_CODE (tem1.op0);
845 vro1 = &tem1;
846 deref1 = false;
848 if (deref2 && vro2->opcode == ADDR_EXPR)
850 memset (&tem2, 0, sizeof (tem2));
851 tem2.op0 = TREE_OPERAND (vro2->op0, 0);
852 tem2.type = TREE_TYPE (tem2.op0);
853 tem2.opcode = TREE_CODE (tem2.op0);
854 vro2 = &tem2;
855 deref2 = false;
857 if (deref1 != deref2)
858 return false;
859 if (!vn_reference_op_eq (vro1, vro2))
860 return false;
861 ++j;
862 ++i;
864 while (vr1->operands.length () != i
865 || vr2->operands.length () != j);
867 return true;
870 /* Copy the operations present in load/store REF into RESULT, a vector of
871 vn_reference_op_s's. */
873 static void
874 copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
876 /* For non-calls, store the information that makes up the address. */
877 tree orig = ref;
878 while (ref)
880 vn_reference_op_s temp;
882 memset (&temp, 0, sizeof (temp));
883 temp.type = TREE_TYPE (ref);
884 temp.opcode = TREE_CODE (ref);
885 temp.off = -1;
887 switch (temp.opcode)
889 case MODIFY_EXPR:
890 temp.op0 = TREE_OPERAND (ref, 1);
891 break;
892 case WITH_SIZE_EXPR:
893 temp.op0 = TREE_OPERAND (ref, 1);
894 temp.off = 0;
895 break;
896 case MEM_REF:
897 /* The base address gets its own vn_reference_op_s structure. */
898 temp.op0 = TREE_OPERAND (ref, 1);
899 if (!mem_ref_offset (ref).to_shwi (&temp.off))
900 temp.off = -1;
901 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
902 temp.base = MR_DEPENDENCE_BASE (ref);
903 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
904 break;
905 case TARGET_MEM_REF:
906 /* The base address gets its own vn_reference_op_s structure. */
907 temp.op0 = TMR_INDEX (ref);
908 temp.op1 = TMR_STEP (ref);
909 temp.op2 = TMR_OFFSET (ref);
910 temp.clique = MR_DEPENDENCE_CLIQUE (ref);
911 temp.base = MR_DEPENDENCE_BASE (ref);
912 result->safe_push (temp);
913 memset (&temp, 0, sizeof (temp));
914 temp.type = NULL_TREE;
915 temp.opcode = ERROR_MARK;
916 temp.op0 = TMR_INDEX2 (ref);
917 temp.off = -1;
918 break;
919 case BIT_FIELD_REF:
920 /* Record bits, position and storage order. */
921 temp.op0 = TREE_OPERAND (ref, 1);
922 temp.op1 = TREE_OPERAND (ref, 2);
923 if (!multiple_p (bit_field_offset (ref), BITS_PER_UNIT, &temp.off))
924 temp.off = -1;
925 temp.reverse = REF_REVERSE_STORAGE_ORDER (ref);
926 break;
927 case COMPONENT_REF:
928 /* The field decl is enough to unambiguously specify the field,
929 so use its type here. */
930 temp.type = TREE_TYPE (TREE_OPERAND (ref, 1));
931 temp.op0 = TREE_OPERAND (ref, 1);
932 temp.op1 = TREE_OPERAND (ref, 2);
933 temp.reverse = (AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (ref, 0)))
934 && TYPE_REVERSE_STORAGE_ORDER
935 (TREE_TYPE (TREE_OPERAND (ref, 0))));
937 tree this_offset = component_ref_field_offset (ref);
938 if (this_offset
939 && poly_int_tree_p (this_offset))
941 tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
942 if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
944 poly_offset_int off
945 = (wi::to_poly_offset (this_offset)
946 + (wi::to_offset (bit_offset) >> LOG2_BITS_PER_UNIT));
947 /* Probibit value-numbering zero offset components
948 of addresses the same before the pass folding
949 __builtin_object_size had a chance to run. */
950 if (TREE_CODE (orig) != ADDR_EXPR
951 || maybe_ne (off, 0)
952 || (cfun->curr_properties & PROP_objsz))
953 off.to_shwi (&temp.off);
957 break;
958 case ARRAY_RANGE_REF:
959 case ARRAY_REF:
961 tree eltype = TREE_TYPE (TREE_TYPE (TREE_OPERAND (ref, 0)));
962 /* Record index as operand. */
963 temp.op0 = TREE_OPERAND (ref, 1);
964 /* Always record lower bounds and element size. */
965 temp.op1 = array_ref_low_bound (ref);
966 /* But record element size in units of the type alignment. */
967 temp.op2 = TREE_OPERAND (ref, 3);
968 temp.align = eltype->type_common.align;
969 if (! temp.op2)
970 temp.op2 = size_binop (EXACT_DIV_EXPR, TYPE_SIZE_UNIT (eltype),
971 size_int (TYPE_ALIGN_UNIT (eltype)));
972 if (poly_int_tree_p (temp.op0)
973 && poly_int_tree_p (temp.op1)
974 && TREE_CODE (temp.op2) == INTEGER_CST)
976 poly_offset_int off = ((wi::to_poly_offset (temp.op0)
977 - wi::to_poly_offset (temp.op1))
978 * wi::to_offset (temp.op2)
979 * vn_ref_op_align_unit (&temp));
980 off.to_shwi (&temp.off);
982 temp.reverse = (AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (ref, 0)))
983 && TYPE_REVERSE_STORAGE_ORDER
984 (TREE_TYPE (TREE_OPERAND (ref, 0))));
986 break;
987 case VAR_DECL:
988 if (DECL_HARD_REGISTER (ref))
990 temp.op0 = ref;
991 break;
993 /* Fallthru. */
994 case PARM_DECL:
995 case CONST_DECL:
996 case RESULT_DECL:
997 /* Canonicalize decls to MEM[&decl] which is what we end up with
998 when valueizing MEM[ptr] with ptr = &decl. */
999 temp.opcode = MEM_REF;
1000 temp.op0 = build_int_cst (build_pointer_type (TREE_TYPE (ref)), 0);
1001 temp.off = 0;
1002 result->safe_push (temp);
1003 temp.opcode = ADDR_EXPR;
1004 temp.op0 = build1 (ADDR_EXPR, TREE_TYPE (temp.op0), ref);
1005 temp.type = TREE_TYPE (temp.op0);
1006 temp.off = -1;
1007 break;
1008 case STRING_CST:
1009 case INTEGER_CST:
1010 case POLY_INT_CST:
1011 case COMPLEX_CST:
1012 case VECTOR_CST:
1013 case REAL_CST:
1014 case FIXED_CST:
1015 case CONSTRUCTOR:
1016 case SSA_NAME:
1017 temp.op0 = ref;
1018 break;
1019 case ADDR_EXPR:
1020 if (is_gimple_min_invariant (ref))
1022 temp.op0 = ref;
1023 break;
1025 break;
1026 /* These are only interesting for their operands, their
1027 existence, and their type. They will never be the last
1028 ref in the chain of references (IE they require an
1029 operand), so we don't have to put anything
1030 for op* as it will be handled by the iteration */
1031 case REALPART_EXPR:
1032 temp.off = 0;
1033 break;
1034 case VIEW_CONVERT_EXPR:
1035 temp.off = 0;
1036 temp.reverse = storage_order_barrier_p (ref);
1037 break;
1038 case IMAGPART_EXPR:
1039 /* This is only interesting for its constant offset. */
1040 temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
1041 break;
1042 default:
1043 gcc_unreachable ();
1045 result->safe_push (temp);
1047 if (REFERENCE_CLASS_P (ref)
1048 || TREE_CODE (ref) == MODIFY_EXPR
1049 || TREE_CODE (ref) == WITH_SIZE_EXPR
1050 || (TREE_CODE (ref) == ADDR_EXPR
1051 && !is_gimple_min_invariant (ref)))
1052 ref = TREE_OPERAND (ref, 0);
1053 else
1054 ref = NULL_TREE;
1058 /* Build a alias-oracle reference abstraction in *REF from the vn_reference
1059 operands in *OPS, the reference alias set SET and the reference type TYPE.
1060 Return true if something useful was produced. */
1062 bool
1063 ao_ref_init_from_vn_reference (ao_ref *ref,
1064 alias_set_type set, alias_set_type base_set,
1065 tree type, const vec<vn_reference_op_s> &ops)
1067 unsigned i;
1068 tree base = NULL_TREE;
1069 tree *op0_p = &base;
1070 poly_offset_int offset = 0;
1071 poly_offset_int max_size;
1072 poly_offset_int size = -1;
1073 tree size_tree = NULL_TREE;
1075 /* We don't handle calls. */
1076 if (!type)
1077 return false;
1079 machine_mode mode = TYPE_MODE (type);
1080 if (mode == BLKmode)
1081 size_tree = TYPE_SIZE (type);
1082 else
1083 size = GET_MODE_BITSIZE (mode);
1084 if (size_tree != NULL_TREE
1085 && poly_int_tree_p (size_tree))
1086 size = wi::to_poly_offset (size_tree);
1088 /* Lower the final access size from the outermost expression. */
1089 const_vn_reference_op_t cst_op = &ops[0];
1090 /* Cast away constness for the sake of the const-unsafe
1091 FOR_EACH_VEC_ELT(). */
1092 vn_reference_op_t op = const_cast<vn_reference_op_t>(cst_op);
1093 size_tree = NULL_TREE;
1094 if (op->opcode == COMPONENT_REF)
1095 size_tree = DECL_SIZE (op->op0);
1096 else if (op->opcode == BIT_FIELD_REF)
1097 size_tree = op->op0;
1098 if (size_tree != NULL_TREE
1099 && poly_int_tree_p (size_tree)
1100 && (!known_size_p (size)
1101 || known_lt (wi::to_poly_offset (size_tree), size)))
1102 size = wi::to_poly_offset (size_tree);
1104 /* Initially, maxsize is the same as the accessed element size.
1105 In the following it will only grow (or become -1). */
1106 max_size = size;
1108 /* Compute cumulative bit-offset for nested component-refs and array-refs,
1109 and find the ultimate containing object. */
1110 FOR_EACH_VEC_ELT (ops, i, op)
1112 switch (op->opcode)
1114 /* These may be in the reference ops, but we cannot do anything
1115 sensible with them here. */
1116 case ADDR_EXPR:
1117 /* Apart from ADDR_EXPR arguments to MEM_REF. */
1118 if (base != NULL_TREE
1119 && TREE_CODE (base) == MEM_REF
1120 && op->op0
1121 && DECL_P (TREE_OPERAND (op->op0, 0)))
1123 const_vn_reference_op_t pop = &ops[i-1];
1124 base = TREE_OPERAND (op->op0, 0);
1125 if (known_eq (pop->off, -1))
1127 max_size = -1;
1128 offset = 0;
1130 else
1131 offset += pop->off * BITS_PER_UNIT;
1132 op0_p = NULL;
1133 break;
1135 /* Fallthru. */
1136 case CALL_EXPR:
1137 return false;
1139 /* Record the base objects. */
1140 case MEM_REF:
1141 *op0_p = build2 (MEM_REF, op->type,
1142 NULL_TREE, op->op0);
1143 MR_DEPENDENCE_CLIQUE (*op0_p) = op->clique;
1144 MR_DEPENDENCE_BASE (*op0_p) = op->base;
1145 op0_p = &TREE_OPERAND (*op0_p, 0);
1146 break;
1148 case VAR_DECL:
1149 case PARM_DECL:
1150 case RESULT_DECL:
1151 case SSA_NAME:
1152 *op0_p = op->op0;
1153 op0_p = NULL;
1154 break;
1156 /* And now the usual component-reference style ops. */
1157 case BIT_FIELD_REF:
1158 offset += wi::to_poly_offset (op->op1);
1159 break;
1161 case COMPONENT_REF:
1163 tree field = op->op0;
1164 /* We do not have a complete COMPONENT_REF tree here so we
1165 cannot use component_ref_field_offset. Do the interesting
1166 parts manually. */
1167 tree this_offset = DECL_FIELD_OFFSET (field);
1169 if (op->op1 || !poly_int_tree_p (this_offset))
1170 max_size = -1;
1171 else
1173 poly_offset_int woffset = (wi::to_poly_offset (this_offset)
1174 << LOG2_BITS_PER_UNIT);
1175 woffset += wi::to_offset (DECL_FIELD_BIT_OFFSET (field));
1176 offset += woffset;
1178 break;
1181 case ARRAY_RANGE_REF:
1182 case ARRAY_REF:
1183 /* We recorded the lower bound and the element size. */
1184 if (!poly_int_tree_p (op->op0)
1185 || !poly_int_tree_p (op->op1)
1186 || TREE_CODE (op->op2) != INTEGER_CST)
1187 max_size = -1;
1188 else
1190 poly_offset_int woffset
1191 = wi::sext (wi::to_poly_offset (op->op0)
1192 - wi::to_poly_offset (op->op1),
1193 TYPE_PRECISION (sizetype));
1194 woffset *= wi::to_offset (op->op2) * vn_ref_op_align_unit (op);
1195 woffset <<= LOG2_BITS_PER_UNIT;
1196 offset += woffset;
1198 break;
1200 case REALPART_EXPR:
1201 break;
1203 case IMAGPART_EXPR:
1204 offset += size;
1205 break;
1207 case VIEW_CONVERT_EXPR:
1208 break;
1210 case STRING_CST:
1211 case INTEGER_CST:
1212 case COMPLEX_CST:
1213 case VECTOR_CST:
1214 case REAL_CST:
1215 case CONSTRUCTOR:
1216 case CONST_DECL:
1217 return false;
1219 default:
1220 return false;
1224 if (base == NULL_TREE)
1225 return false;
1227 ref->ref = NULL_TREE;
1228 ref->base = base;
1229 ref->ref_alias_set = set;
1230 ref->base_alias_set = base_set;
1231 /* We discount volatiles from value-numbering elsewhere. */
1232 ref->volatile_p = false;
1234 if (!size.to_shwi (&ref->size) || maybe_lt (ref->size, 0))
1236 ref->offset = 0;
1237 ref->size = -1;
1238 ref->max_size = -1;
1239 return true;
1242 if (!offset.to_shwi (&ref->offset))
1244 ref->offset = 0;
1245 ref->max_size = -1;
1246 return true;
1249 if (!max_size.to_shwi (&ref->max_size) || maybe_lt (ref->max_size, 0))
1250 ref->max_size = -1;
1252 return true;
1255 /* Copy the operations present in load/store/call REF into RESULT, a vector of
1256 vn_reference_op_s's. */
1258 static void
1259 copy_reference_ops_from_call (gcall *call,
1260 vec<vn_reference_op_s> *result)
1262 vn_reference_op_s temp;
1263 unsigned i;
1264 tree lhs = gimple_call_lhs (call);
1265 int lr;
1267 /* If 2 calls have a different non-ssa lhs, vdef value numbers should be
1268 different. By adding the lhs here in the vector, we ensure that the
1269 hashcode is different, guaranteeing a different value number. */
1270 if (lhs && TREE_CODE (lhs) != SSA_NAME)
1272 memset (&temp, 0, sizeof (temp));
1273 temp.opcode = MODIFY_EXPR;
1274 temp.type = TREE_TYPE (lhs);
1275 temp.op0 = lhs;
1276 temp.off = -1;
1277 result->safe_push (temp);
1280 /* Copy the type, opcode, function, static chain and EH region, if any. */
1281 memset (&temp, 0, sizeof (temp));
1282 temp.type = gimple_call_fntype (call);
1283 temp.opcode = CALL_EXPR;
1284 temp.op0 = gimple_call_fn (call);
1285 if (gimple_call_internal_p (call))
1286 temp.clique = gimple_call_internal_fn (call);
1287 temp.op1 = gimple_call_chain (call);
1288 if (stmt_could_throw_p (cfun, call) && (lr = lookup_stmt_eh_lp (call)) > 0)
1289 temp.op2 = size_int (lr);
1290 temp.off = -1;
1291 result->safe_push (temp);
1293 /* Copy the call arguments. As they can be references as well,
1294 just chain them together. */
1295 for (i = 0; i < gimple_call_num_args (call); ++i)
1297 tree callarg = gimple_call_arg (call, i);
1298 copy_reference_ops_from_ref (callarg, result);
1302 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1303 *I_P to point to the last element of the replacement. */
1304 static bool
1305 vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
1306 unsigned int *i_p)
1308 unsigned int i = *i_p;
1309 vn_reference_op_t op = &(*ops)[i];
1310 vn_reference_op_t mem_op = &(*ops)[i - 1];
1311 tree addr_base;
1312 poly_int64 addr_offset = 0;
1314 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1315 from .foo.bar to the preceding MEM_REF offset and replace the
1316 address with &OBJ. */
1317 addr_base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (op->op0, 0),
1318 &addr_offset, vn_valueize);
1319 gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
1320 if (addr_base != TREE_OPERAND (op->op0, 0))
1322 poly_offset_int off
1323 = (poly_offset_int::from (wi::to_poly_wide (mem_op->op0),
1324 SIGNED)
1325 + addr_offset);
1326 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1327 op->op0 = build_fold_addr_expr (addr_base);
1328 if (tree_fits_shwi_p (mem_op->op0))
1329 mem_op->off = tree_to_shwi (mem_op->op0);
1330 else
1331 mem_op->off = -1;
1332 return true;
1334 return false;
1337 /* Fold *& at position *I_P in a vn_reference_op_s vector *OPS. Updates
1338 *I_P to point to the last element of the replacement. */
1339 static bool
1340 vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
1341 unsigned int *i_p)
1343 bool changed = false;
1344 vn_reference_op_t op;
1348 unsigned int i = *i_p;
1349 op = &(*ops)[i];
1350 vn_reference_op_t mem_op = &(*ops)[i - 1];
1351 gimple *def_stmt;
1352 enum tree_code code;
1353 poly_offset_int off;
1355 def_stmt = SSA_NAME_DEF_STMT (op->op0);
1356 if (!is_gimple_assign (def_stmt))
1357 return changed;
1359 code = gimple_assign_rhs_code (def_stmt);
1360 if (code != ADDR_EXPR
1361 && code != POINTER_PLUS_EXPR)
1362 return changed;
1364 off = poly_offset_int::from (wi::to_poly_wide (mem_op->op0), SIGNED);
1366 /* The only thing we have to do is from &OBJ.foo.bar add the offset
1367 from .foo.bar to the preceding MEM_REF offset and replace the
1368 address with &OBJ. */
1369 if (code == ADDR_EXPR)
1371 tree addr, addr_base;
1372 poly_int64 addr_offset;
1374 addr = gimple_assign_rhs1 (def_stmt);
1375 addr_base = get_addr_base_and_unit_offset_1 (TREE_OPERAND (addr, 0),
1376 &addr_offset,
1377 vn_valueize);
1378 /* If that didn't work because the address isn't invariant propagate
1379 the reference tree from the address operation in case the current
1380 dereference isn't offsetted. */
1381 if (!addr_base
1382 && *i_p == ops->length () - 1
1383 && known_eq (off, 0)
1384 /* This makes us disable this transform for PRE where the
1385 reference ops might be also used for code insertion which
1386 is invalid. */
1387 && default_vn_walk_kind == VN_WALKREWRITE)
1389 auto_vec<vn_reference_op_s, 32> tem;
1390 copy_reference_ops_from_ref (TREE_OPERAND (addr, 0), &tem);
1391 /* Make sure to preserve TBAA info. The only objects not
1392 wrapped in MEM_REFs that can have their address taken are
1393 STRING_CSTs. */
1394 if (tem.length () >= 2
1395 && tem[tem.length () - 2].opcode == MEM_REF)
1397 vn_reference_op_t new_mem_op = &tem[tem.length () - 2];
1398 new_mem_op->op0
1399 = wide_int_to_tree (TREE_TYPE (mem_op->op0),
1400 wi::to_poly_wide (new_mem_op->op0));
1402 else
1403 gcc_assert (tem.last ().opcode == STRING_CST);
1404 ops->pop ();
1405 ops->pop ();
1406 ops->safe_splice (tem);
1407 --*i_p;
1408 return true;
1410 if (!addr_base
1411 || TREE_CODE (addr_base) != MEM_REF
1412 || (TREE_CODE (TREE_OPERAND (addr_base, 0)) == SSA_NAME
1413 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (TREE_OPERAND (addr_base,
1414 0))))
1415 return changed;
1417 off += addr_offset;
1418 off += mem_ref_offset (addr_base);
1419 op->op0 = TREE_OPERAND (addr_base, 0);
1421 else
1423 tree ptr, ptroff;
1424 ptr = gimple_assign_rhs1 (def_stmt);
1425 ptroff = gimple_assign_rhs2 (def_stmt);
1426 if (TREE_CODE (ptr) != SSA_NAME
1427 || SSA_NAME_OCCURS_IN_ABNORMAL_PHI (ptr)
1428 /* Make sure to not endlessly recurse.
1429 See gcc.dg/tree-ssa/20040408-1.c for an example. Can easily
1430 happen when we value-number a PHI to its backedge value. */
1431 || SSA_VAL (ptr) == op->op0
1432 || !poly_int_tree_p (ptroff))
1433 return changed;
1435 off += wi::to_poly_offset (ptroff);
1436 op->op0 = ptr;
1439 mem_op->op0 = wide_int_to_tree (TREE_TYPE (mem_op->op0), off);
1440 if (tree_fits_shwi_p (mem_op->op0))
1441 mem_op->off = tree_to_shwi (mem_op->op0);
1442 else
1443 mem_op->off = -1;
1444 /* ??? Can end up with endless recursion here!?
1445 gcc.c-torture/execute/strcmp-1.c */
1446 if (TREE_CODE (op->op0) == SSA_NAME)
1447 op->op0 = SSA_VAL (op->op0);
1448 if (TREE_CODE (op->op0) != SSA_NAME)
1449 op->opcode = TREE_CODE (op->op0);
1451 changed = true;
1453 /* Tail-recurse. */
1454 while (TREE_CODE (op->op0) == SSA_NAME);
1456 /* Fold a remaining *&. */
1457 if (TREE_CODE (op->op0) == ADDR_EXPR)
1458 vn_reference_fold_indirect (ops, i_p);
1460 return changed;
1463 /* Optimize the reference REF to a constant if possible or return
1464 NULL_TREE if not. */
1466 tree
1467 fully_constant_vn_reference_p (vn_reference_t ref)
1469 vec<vn_reference_op_s> operands = ref->operands;
1470 vn_reference_op_t op;
1472 /* Try to simplify the translated expression if it is
1473 a call to a builtin function with at most two arguments. */
1474 op = &operands[0];
1475 if (op->opcode == CALL_EXPR
1476 && (!op->op0
1477 || (TREE_CODE (op->op0) == ADDR_EXPR
1478 && TREE_CODE (TREE_OPERAND (op->op0, 0)) == FUNCTION_DECL
1479 && fndecl_built_in_p (TREE_OPERAND (op->op0, 0),
1480 BUILT_IN_NORMAL)))
1481 && operands.length () >= 2
1482 && operands.length () <= 3)
1484 vn_reference_op_t arg0, arg1 = NULL;
1485 bool anyconst = false;
1486 arg0 = &operands[1];
1487 if (operands.length () > 2)
1488 arg1 = &operands[2];
1489 if (TREE_CODE_CLASS (arg0->opcode) == tcc_constant
1490 || (arg0->opcode == ADDR_EXPR
1491 && is_gimple_min_invariant (arg0->op0)))
1492 anyconst = true;
1493 if (arg1
1494 && (TREE_CODE_CLASS (arg1->opcode) == tcc_constant
1495 || (arg1->opcode == ADDR_EXPR
1496 && is_gimple_min_invariant (arg1->op0))))
1497 anyconst = true;
1498 if (anyconst)
1500 combined_fn fn;
1501 if (op->op0)
1502 fn = as_combined_fn (DECL_FUNCTION_CODE
1503 (TREE_OPERAND (op->op0, 0)));
1504 else
1505 fn = as_combined_fn ((internal_fn) op->clique);
1506 tree folded;
1507 if (arg1)
1508 folded = fold_const_call (fn, ref->type, arg0->op0, arg1->op0);
1509 else
1510 folded = fold_const_call (fn, ref->type, arg0->op0);
1511 if (folded
1512 && is_gimple_min_invariant (folded))
1513 return folded;
1517 /* Simplify reads from constants or constant initializers. */
1518 else if (BITS_PER_UNIT == 8
1519 && ref->type
1520 && COMPLETE_TYPE_P (ref->type)
1521 && is_gimple_reg_type (ref->type))
1523 poly_int64 off = 0;
1524 HOST_WIDE_INT size;
1525 if (INTEGRAL_TYPE_P (ref->type))
1526 size = TYPE_PRECISION (ref->type);
1527 else if (tree_fits_shwi_p (TYPE_SIZE (ref->type)))
1528 size = tree_to_shwi (TYPE_SIZE (ref->type));
1529 else
1530 return NULL_TREE;
1531 if (size % BITS_PER_UNIT != 0
1532 || size > MAX_BITSIZE_MODE_ANY_MODE)
1533 return NULL_TREE;
1534 size /= BITS_PER_UNIT;
1535 unsigned i;
1536 for (i = 0; i < operands.length (); ++i)
1538 if (TREE_CODE_CLASS (operands[i].opcode) == tcc_constant)
1540 ++i;
1541 break;
1543 if (known_eq (operands[i].off, -1))
1544 return NULL_TREE;
1545 off += operands[i].off;
1546 if (operands[i].opcode == MEM_REF)
1548 ++i;
1549 break;
1552 vn_reference_op_t base = &operands[--i];
1553 tree ctor = error_mark_node;
1554 tree decl = NULL_TREE;
1555 if (TREE_CODE_CLASS (base->opcode) == tcc_constant)
1556 ctor = base->op0;
1557 else if (base->opcode == MEM_REF
1558 && base[1].opcode == ADDR_EXPR
1559 && (TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == VAR_DECL
1560 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == CONST_DECL
1561 || TREE_CODE (TREE_OPERAND (base[1].op0, 0)) == STRING_CST))
1563 decl = TREE_OPERAND (base[1].op0, 0);
1564 if (TREE_CODE (decl) == STRING_CST)
1565 ctor = decl;
1566 else
1567 ctor = ctor_for_folding (decl);
1569 if (ctor == NULL_TREE)
1570 return build_zero_cst (ref->type);
1571 else if (ctor != error_mark_node)
1573 HOST_WIDE_INT const_off;
1574 if (decl)
1576 tree res = fold_ctor_reference (ref->type, ctor,
1577 off * BITS_PER_UNIT,
1578 size * BITS_PER_UNIT, decl);
1579 if (res)
1581 STRIP_USELESS_TYPE_CONVERSION (res);
1582 if (is_gimple_min_invariant (res))
1583 return res;
1586 else if (off.is_constant (&const_off))
1588 unsigned char buf[MAX_BITSIZE_MODE_ANY_MODE / BITS_PER_UNIT];
1589 int len = native_encode_expr (ctor, buf, size, const_off);
1590 if (len > 0)
1591 return native_interpret_expr (ref->type, buf, len);
1596 return NULL_TREE;
1599 /* Return true if OPS contain a storage order barrier. */
1601 static bool
1602 contains_storage_order_barrier_p (vec<vn_reference_op_s> ops)
1604 vn_reference_op_t op;
1605 unsigned i;
1607 FOR_EACH_VEC_ELT (ops, i, op)
1608 if (op->opcode == VIEW_CONVERT_EXPR && op->reverse)
1609 return true;
1611 return false;
1614 /* Return true if OPS represent an access with reverse storage order. */
1616 static bool
1617 reverse_storage_order_for_component_p (vec<vn_reference_op_s> ops)
1619 unsigned i = 0;
1620 if (ops[i].opcode == REALPART_EXPR || ops[i].opcode == IMAGPART_EXPR)
1621 ++i;
1622 switch (ops[i].opcode)
1624 case ARRAY_REF:
1625 case COMPONENT_REF:
1626 case BIT_FIELD_REF:
1627 case MEM_REF:
1628 return ops[i].reverse;
1629 default:
1630 return false;
1634 /* Transform any SSA_NAME's in a vector of vn_reference_op_s
1635 structures into their value numbers. This is done in-place, and
1636 the vector passed in is returned. *VALUEIZED_ANYTHING will specify
1637 whether any operands were valueized. */
1639 static void
1640 valueize_refs_1 (vec<vn_reference_op_s> *orig, bool *valueized_anything,
1641 bool with_avail = false)
1643 vn_reference_op_t vro;
1644 unsigned int i;
1646 *valueized_anything = false;
1648 FOR_EACH_VEC_ELT (*orig, i, vro)
1650 if (vro->opcode == SSA_NAME
1651 || (vro->op0 && TREE_CODE (vro->op0) == SSA_NAME))
1653 tree tem = with_avail ? vn_valueize (vro->op0) : SSA_VAL (vro->op0);
1654 if (tem != vro->op0)
1656 *valueized_anything = true;
1657 vro->op0 = tem;
1659 /* If it transforms from an SSA_NAME to a constant, update
1660 the opcode. */
1661 if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
1662 vro->opcode = TREE_CODE (vro->op0);
1664 if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
1666 tree tem = with_avail ? vn_valueize (vro->op1) : SSA_VAL (vro->op1);
1667 if (tem != vro->op1)
1669 *valueized_anything = true;
1670 vro->op1 = tem;
1673 if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
1675 tree tem = with_avail ? vn_valueize (vro->op2) : SSA_VAL (vro->op2);
1676 if (tem != vro->op2)
1678 *valueized_anything = true;
1679 vro->op2 = tem;
1682 /* If it transforms from an SSA_NAME to an address, fold with
1683 a preceding indirect reference. */
1684 if (i > 0
1685 && vro->op0
1686 && TREE_CODE (vro->op0) == ADDR_EXPR
1687 && (*orig)[i - 1].opcode == MEM_REF)
1689 if (vn_reference_fold_indirect (orig, &i))
1690 *valueized_anything = true;
1692 else if (i > 0
1693 && vro->opcode == SSA_NAME
1694 && (*orig)[i - 1].opcode == MEM_REF)
1696 if (vn_reference_maybe_forwprop_address (orig, &i))
1697 *valueized_anything = true;
1699 /* If it transforms a non-constant ARRAY_REF into a constant
1700 one, adjust the constant offset. */
1701 else if (vro->opcode == ARRAY_REF
1702 && known_eq (vro->off, -1)
1703 && poly_int_tree_p (vro->op0)
1704 && poly_int_tree_p (vro->op1)
1705 && TREE_CODE (vro->op2) == INTEGER_CST)
1707 poly_offset_int off = ((wi::to_poly_offset (vro->op0)
1708 - wi::to_poly_offset (vro->op1))
1709 * wi::to_offset (vro->op2)
1710 * vn_ref_op_align_unit (vro));
1711 off.to_shwi (&vro->off);
1716 static void
1717 valueize_refs (vec<vn_reference_op_s> *orig)
1719 bool tem;
1720 valueize_refs_1 (orig, &tem);
1723 static vec<vn_reference_op_s> shared_lookup_references;
1725 /* Create a vector of vn_reference_op_s structures from REF, a
1726 REFERENCE_CLASS_P tree. The vector is shared among all callers of
1727 this function. *VALUEIZED_ANYTHING will specify whether any
1728 operands were valueized. */
1730 static vec<vn_reference_op_s>
1731 valueize_shared_reference_ops_from_ref (tree ref, bool *valueized_anything)
1733 if (!ref)
1734 return vNULL;
1735 shared_lookup_references.truncate (0);
1736 copy_reference_ops_from_ref (ref, &shared_lookup_references);
1737 valueize_refs_1 (&shared_lookup_references, valueized_anything);
1738 return shared_lookup_references;
1741 /* Create a vector of vn_reference_op_s structures from CALL, a
1742 call statement. The vector is shared among all callers of
1743 this function. */
1745 static vec<vn_reference_op_s>
1746 valueize_shared_reference_ops_from_call (gcall *call)
1748 if (!call)
1749 return vNULL;
1750 shared_lookup_references.truncate (0);
1751 copy_reference_ops_from_call (call, &shared_lookup_references);
1752 valueize_refs (&shared_lookup_references);
1753 return shared_lookup_references;
1756 /* Lookup a SCCVN reference operation VR in the current hash table.
1757 Returns the resulting value number if it exists in the hash table,
1758 NULL_TREE otherwise. VNRESULT will be filled in with the actual
1759 vn_reference_t stored in the hashtable if something is found. */
1761 static tree
1762 vn_reference_lookup_1 (vn_reference_t vr, vn_reference_t *vnresult)
1764 vn_reference_s **slot;
1765 hashval_t hash;
1767 hash = vr->hashcode;
1768 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
1769 if (slot)
1771 if (vnresult)
1772 *vnresult = (vn_reference_t)*slot;
1773 return ((vn_reference_t)*slot)->result;
1776 return NULL_TREE;
1780 /* Partial definition tracking support. */
1782 struct pd_range
1784 HOST_WIDE_INT offset;
1785 HOST_WIDE_INT size;
1788 struct pd_data
1790 tree rhs;
1791 HOST_WIDE_INT offset;
1792 HOST_WIDE_INT size;
1795 /* Context for alias walking. */
1797 struct vn_walk_cb_data
1799 vn_walk_cb_data (vn_reference_t vr_, tree orig_ref_, tree *last_vuse_ptr_,
1800 vn_lookup_kind vn_walk_kind_, bool tbaa_p_, tree mask_)
1801 : vr (vr_), last_vuse_ptr (last_vuse_ptr_), last_vuse (NULL_TREE),
1802 mask (mask_), masked_result (NULL_TREE), vn_walk_kind (vn_walk_kind_),
1803 tbaa_p (tbaa_p_), saved_operands (vNULL), first_set (-2),
1804 first_base_set (-2), known_ranges (NULL)
1806 if (!last_vuse_ptr)
1807 last_vuse_ptr = &last_vuse;
1808 ao_ref_init (&orig_ref, orig_ref_);
1809 if (mask)
1811 wide_int w = wi::to_wide (mask);
1812 unsigned int pos = 0, prec = w.get_precision ();
1813 pd_data pd;
1814 pd.rhs = build_constructor (NULL_TREE, NULL);
1815 /* When bitwise and with a constant is done on a memory load,
1816 we don't really need all the bits to be defined or defined
1817 to constants, we don't really care what is in the position
1818 corresponding to 0 bits in the mask.
1819 So, push the ranges of those 0 bits in the mask as artificial
1820 zero stores and let the partial def handling code do the
1821 rest. */
1822 while (pos < prec)
1824 int tz = wi::ctz (w);
1825 if (pos + tz > prec)
1826 tz = prec - pos;
1827 if (tz)
1829 if (BYTES_BIG_ENDIAN)
1830 pd.offset = prec - pos - tz;
1831 else
1832 pd.offset = pos;
1833 pd.size = tz;
1834 void *r = push_partial_def (pd, 0, 0, 0, prec);
1835 gcc_assert (r == NULL_TREE);
1837 pos += tz;
1838 if (pos == prec)
1839 break;
1840 w = wi::lrshift (w, tz);
1841 tz = wi::ctz (wi::bit_not (w));
1842 if (pos + tz > prec)
1843 tz = prec - pos;
1844 pos += tz;
1845 w = wi::lrshift (w, tz);
1849 ~vn_walk_cb_data ();
1850 void *finish (alias_set_type, alias_set_type, tree);
1851 void *push_partial_def (pd_data pd,
1852 alias_set_type, alias_set_type, HOST_WIDE_INT,
1853 HOST_WIDE_INT);
1855 vn_reference_t vr;
1856 ao_ref orig_ref;
1857 tree *last_vuse_ptr;
1858 tree last_vuse;
1859 tree mask;
1860 tree masked_result;
1861 vn_lookup_kind vn_walk_kind;
1862 bool tbaa_p;
1863 vec<vn_reference_op_s> saved_operands;
1865 /* The VDEFs of partial defs we come along. */
1866 auto_vec<pd_data, 2> partial_defs;
1867 /* The first defs range to avoid splay tree setup in most cases. */
1868 pd_range first_range;
1869 alias_set_type first_set;
1870 alias_set_type first_base_set;
1871 splay_tree known_ranges;
1872 obstack ranges_obstack;
1875 vn_walk_cb_data::~vn_walk_cb_data ()
1877 if (known_ranges)
1879 splay_tree_delete (known_ranges);
1880 obstack_free (&ranges_obstack, NULL);
1882 saved_operands.release ();
1885 void *
1886 vn_walk_cb_data::finish (alias_set_type set, alias_set_type base_set, tree val)
1888 if (first_set != -2)
1890 set = first_set;
1891 base_set = first_base_set;
1893 if (mask)
1895 masked_result = val;
1896 return (void *) -1;
1898 vec<vn_reference_op_s> &operands
1899 = saved_operands.exists () ? saved_operands : vr->operands;
1900 return vn_reference_lookup_or_insert_for_pieces (last_vuse, set, base_set,
1901 vr->type, operands, val);
1904 /* pd_range splay-tree helpers. */
1906 static int
1907 pd_range_compare (splay_tree_key offset1p, splay_tree_key offset2p)
1909 HOST_WIDE_INT offset1 = *(HOST_WIDE_INT *)offset1p;
1910 HOST_WIDE_INT offset2 = *(HOST_WIDE_INT *)offset2p;
1911 if (offset1 < offset2)
1912 return -1;
1913 else if (offset1 > offset2)
1914 return 1;
1915 return 0;
1918 static void *
1919 pd_tree_alloc (int size, void *data_)
1921 vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
1922 return obstack_alloc (&data->ranges_obstack, size);
1925 static void
1926 pd_tree_dealloc (void *, void *)
1930 /* Push PD to the vector of partial definitions returning a
1931 value when we are ready to combine things with VUSE, SET and MAXSIZEI,
1932 NULL when we want to continue looking for partial defs or -1
1933 on failure. */
1935 void *
1936 vn_walk_cb_data::push_partial_def (pd_data pd,
1937 alias_set_type set, alias_set_type base_set,
1938 HOST_WIDE_INT offseti,
1939 HOST_WIDE_INT maxsizei)
1941 const HOST_WIDE_INT bufsize = 64;
1942 /* We're using a fixed buffer for encoding so fail early if the object
1943 we want to interpret is bigger. */
1944 if (maxsizei > bufsize * BITS_PER_UNIT
1945 || CHAR_BIT != 8
1946 || BITS_PER_UNIT != 8
1947 /* Not prepared to handle PDP endian. */
1948 || BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN)
1949 return (void *)-1;
1951 /* Turn too large constant stores into non-constant stores. */
1952 if (CONSTANT_CLASS_P (pd.rhs) && pd.size > bufsize * BITS_PER_UNIT)
1953 pd.rhs = error_mark_node;
1955 /* And for non-constant or CONSTRUCTOR stores shrink them to only keep at
1956 most a partial byte before and/or after the region. */
1957 if (!CONSTANT_CLASS_P (pd.rhs))
1959 if (pd.offset < offseti)
1961 HOST_WIDE_INT o = ROUND_DOWN (offseti - pd.offset, BITS_PER_UNIT);
1962 gcc_assert (pd.size > o);
1963 pd.size -= o;
1964 pd.offset += o;
1966 if (pd.size > maxsizei)
1967 pd.size = maxsizei + ((pd.size - maxsizei) % BITS_PER_UNIT);
1970 pd.offset -= offseti;
1972 bool pd_constant_p = (TREE_CODE (pd.rhs) == CONSTRUCTOR
1973 || CONSTANT_CLASS_P (pd.rhs));
1974 if (partial_defs.is_empty ())
1976 /* If we get a clobber upfront, fail. */
1977 if (TREE_CLOBBER_P (pd.rhs))
1978 return (void *)-1;
1979 if (!pd_constant_p)
1980 return (void *)-1;
1981 partial_defs.safe_push (pd);
1982 first_range.offset = pd.offset;
1983 first_range.size = pd.size;
1984 first_set = set;
1985 first_base_set = base_set;
1986 last_vuse_ptr = NULL;
1987 /* Continue looking for partial defs. */
1988 return NULL;
1991 if (!known_ranges)
1993 /* ??? Optimize the case where the 2nd partial def completes things. */
1994 gcc_obstack_init (&ranges_obstack);
1995 known_ranges = splay_tree_new_with_allocator (pd_range_compare, 0, 0,
1996 pd_tree_alloc,
1997 pd_tree_dealloc, this);
1998 splay_tree_insert (known_ranges,
1999 (splay_tree_key)&first_range.offset,
2000 (splay_tree_value)&first_range);
2003 pd_range newr = { pd.offset, pd.size };
2004 splay_tree_node n;
2005 pd_range *r;
2006 /* Lookup the predecessor of offset + 1 and see if we need to merge. */
2007 HOST_WIDE_INT loffset = newr.offset + 1;
2008 if ((n = splay_tree_predecessor (known_ranges, (splay_tree_key)&loffset))
2009 && ((r = (pd_range *)n->value), true)
2010 && ranges_known_overlap_p (r->offset, r->size + 1,
2011 newr.offset, newr.size))
2013 /* Ignore partial defs already covered. Here we also drop shadowed
2014 clobbers arriving here at the floor. */
2015 if (known_subrange_p (newr.offset, newr.size, r->offset, r->size))
2016 return NULL;
2017 r->size = MAX (r->offset + r->size, newr.offset + newr.size) - r->offset;
2019 else
2021 /* newr.offset wasn't covered yet, insert the range. */
2022 r = XOBNEW (&ranges_obstack, pd_range);
2023 *r = newr;
2024 splay_tree_insert (known_ranges, (splay_tree_key)&r->offset,
2025 (splay_tree_value)r);
2027 /* Merge r which now contains newr and is a member of the splay tree with
2028 adjacent overlapping ranges. */
2029 pd_range *rafter;
2030 while ((n = splay_tree_successor (known_ranges, (splay_tree_key)&r->offset))
2031 && ((rafter = (pd_range *)n->value), true)
2032 && ranges_known_overlap_p (r->offset, r->size + 1,
2033 rafter->offset, rafter->size))
2035 r->size = MAX (r->offset + r->size,
2036 rafter->offset + rafter->size) - r->offset;
2037 splay_tree_remove (known_ranges, (splay_tree_key)&rafter->offset);
2039 /* If we get a clobber, fail. */
2040 if (TREE_CLOBBER_P (pd.rhs))
2041 return (void *)-1;
2042 /* Non-constants are OK as long as they are shadowed by a constant. */
2043 if (!pd_constant_p)
2044 return (void *)-1;
2045 partial_defs.safe_push (pd);
2047 /* Now we have merged newr into the range tree. When we have covered
2048 [offseti, sizei] then the tree will contain exactly one node which has
2049 the desired properties and it will be 'r'. */
2050 if (!known_subrange_p (0, maxsizei, r->offset, r->size))
2051 /* Continue looking for partial defs. */
2052 return NULL;
2054 /* Now simply native encode all partial defs in reverse order. */
2055 unsigned ndefs = partial_defs.length ();
2056 /* We support up to 512-bit values (for V8DFmode). */
2057 unsigned char buffer[bufsize + 1];
2058 unsigned char this_buffer[bufsize + 1];
2059 int len;
2061 memset (buffer, 0, bufsize + 1);
2062 unsigned needed_len = ROUND_UP (maxsizei, BITS_PER_UNIT) / BITS_PER_UNIT;
2063 while (!partial_defs.is_empty ())
2065 pd_data pd = partial_defs.pop ();
2066 unsigned int amnt;
2067 if (TREE_CODE (pd.rhs) == CONSTRUCTOR)
2069 /* Empty CONSTRUCTOR. */
2070 if (pd.size >= needed_len * BITS_PER_UNIT)
2071 len = needed_len;
2072 else
2073 len = ROUND_UP (pd.size, BITS_PER_UNIT) / BITS_PER_UNIT;
2074 memset (this_buffer, 0, len);
2076 else
2078 len = native_encode_expr (pd.rhs, this_buffer, bufsize,
2079 MAX (0, -pd.offset) / BITS_PER_UNIT);
2080 if (len <= 0
2081 || len < (ROUND_UP (pd.size, BITS_PER_UNIT) / BITS_PER_UNIT
2082 - MAX (0, -pd.offset) / BITS_PER_UNIT))
2084 if (dump_file && (dump_flags & TDF_DETAILS))
2085 fprintf (dump_file, "Failed to encode %u "
2086 "partial definitions\n", ndefs);
2087 return (void *)-1;
2091 unsigned char *p = buffer;
2092 HOST_WIDE_INT size = pd.size;
2093 if (pd.offset < 0)
2094 size -= ROUND_DOWN (-pd.offset, BITS_PER_UNIT);
2095 this_buffer[len] = 0;
2096 if (BYTES_BIG_ENDIAN)
2098 /* LSB of this_buffer[len - 1] byte should be at
2099 pd.offset + pd.size - 1 bits in buffer. */
2100 amnt = ((unsigned HOST_WIDE_INT) pd.offset
2101 + pd.size) % BITS_PER_UNIT;
2102 if (amnt)
2103 shift_bytes_in_array_right (this_buffer, len + 1, amnt);
2104 unsigned char *q = this_buffer;
2105 unsigned int off = 0;
2106 if (pd.offset >= 0)
2108 unsigned int msk;
2109 off = pd.offset / BITS_PER_UNIT;
2110 gcc_assert (off < needed_len);
2111 p = buffer + off;
2112 if (size <= amnt)
2114 msk = ((1 << size) - 1) << (BITS_PER_UNIT - amnt);
2115 *p = (*p & ~msk) | (this_buffer[len] & msk);
2116 size = 0;
2118 else
2120 if (TREE_CODE (pd.rhs) != CONSTRUCTOR)
2121 q = (this_buffer + len
2122 - (ROUND_UP (size - amnt, BITS_PER_UNIT)
2123 / BITS_PER_UNIT));
2124 if (pd.offset % BITS_PER_UNIT)
2126 msk = -1U << (BITS_PER_UNIT
2127 - (pd.offset % BITS_PER_UNIT));
2128 *p = (*p & msk) | (*q & ~msk);
2129 p++;
2130 q++;
2131 off++;
2132 size -= BITS_PER_UNIT - (pd.offset % BITS_PER_UNIT);
2133 gcc_assert (size >= 0);
2137 else if (TREE_CODE (pd.rhs) != CONSTRUCTOR)
2139 q = (this_buffer + len
2140 - (ROUND_UP (size - amnt, BITS_PER_UNIT)
2141 / BITS_PER_UNIT));
2142 if (pd.offset % BITS_PER_UNIT)
2144 q++;
2145 size -= BITS_PER_UNIT - ((unsigned HOST_WIDE_INT) pd.offset
2146 % BITS_PER_UNIT);
2147 gcc_assert (size >= 0);
2150 if ((unsigned HOST_WIDE_INT) size / BITS_PER_UNIT + off
2151 > needed_len)
2152 size = (needed_len - off) * BITS_PER_UNIT;
2153 memcpy (p, q, size / BITS_PER_UNIT);
2154 if (size % BITS_PER_UNIT)
2156 unsigned int msk
2157 = -1U << (BITS_PER_UNIT - (size % BITS_PER_UNIT));
2158 p += size / BITS_PER_UNIT;
2159 q += size / BITS_PER_UNIT;
2160 *p = (*q & msk) | (*p & ~msk);
2163 else
2165 if (pd.offset >= 0)
2167 /* LSB of this_buffer[0] byte should be at pd.offset bits
2168 in buffer. */
2169 unsigned int msk;
2170 size = MIN (size, (HOST_WIDE_INT) needed_len * BITS_PER_UNIT);
2171 amnt = pd.offset % BITS_PER_UNIT;
2172 if (amnt)
2173 shift_bytes_in_array_left (this_buffer, len + 1, amnt);
2174 unsigned int off = pd.offset / BITS_PER_UNIT;
2175 gcc_assert (off < needed_len);
2176 size = MIN (size,
2177 (HOST_WIDE_INT) (needed_len - off) * BITS_PER_UNIT);
2178 p = buffer + off;
2179 if (amnt + size < BITS_PER_UNIT)
2181 /* Low amnt bits come from *p, then size bits
2182 from this_buffer[0] and the remaining again from
2183 *p. */
2184 msk = ((1 << size) - 1) << amnt;
2185 *p = (*p & ~msk) | (this_buffer[0] & msk);
2186 size = 0;
2188 else if (amnt)
2190 msk = -1U << amnt;
2191 *p = (*p & ~msk) | (this_buffer[0] & msk);
2192 p++;
2193 size -= (BITS_PER_UNIT - amnt);
2196 else
2198 amnt = (unsigned HOST_WIDE_INT) pd.offset % BITS_PER_UNIT;
2199 if (amnt)
2200 size -= BITS_PER_UNIT - amnt;
2201 size = MIN (size, (HOST_WIDE_INT) needed_len * BITS_PER_UNIT);
2202 if (amnt)
2203 shift_bytes_in_array_left (this_buffer, len + 1, amnt);
2205 memcpy (p, this_buffer + (amnt != 0), size / BITS_PER_UNIT);
2206 p += size / BITS_PER_UNIT;
2207 if (size % BITS_PER_UNIT)
2209 unsigned int msk = -1U << (size % BITS_PER_UNIT);
2210 *p = (this_buffer[(amnt != 0) + size / BITS_PER_UNIT]
2211 & ~msk) | (*p & msk);
2216 tree type = vr->type;
2217 /* Make sure to interpret in a type that has a range covering the whole
2218 access size. */
2219 if (INTEGRAL_TYPE_P (vr->type) && maxsizei != TYPE_PRECISION (vr->type))
2220 type = build_nonstandard_integer_type (maxsizei, TYPE_UNSIGNED (type));
2221 tree val;
2222 if (BYTES_BIG_ENDIAN)
2224 unsigned sz = needed_len;
2225 if (maxsizei % BITS_PER_UNIT)
2226 shift_bytes_in_array_right (buffer, needed_len,
2227 BITS_PER_UNIT
2228 - (maxsizei % BITS_PER_UNIT));
2229 if (INTEGRAL_TYPE_P (type))
2230 sz = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
2231 if (sz > needed_len)
2233 memcpy (this_buffer + (sz - needed_len), buffer, needed_len);
2234 val = native_interpret_expr (type, this_buffer, sz);
2236 else
2237 val = native_interpret_expr (type, buffer, needed_len);
2239 else
2240 val = native_interpret_expr (type, buffer, bufsize);
2241 /* If we chop off bits because the types precision doesn't match the memory
2242 access size this is ok when optimizing reads but not when called from
2243 the DSE code during elimination. */
2244 if (val && type != vr->type)
2246 if (! int_fits_type_p (val, vr->type))
2247 val = NULL_TREE;
2248 else
2249 val = fold_convert (vr->type, val);
2252 if (val)
2254 if (dump_file && (dump_flags & TDF_DETAILS))
2255 fprintf (dump_file,
2256 "Successfully combined %u partial definitions\n", ndefs);
2257 /* We are using the alias-set of the first store we encounter which
2258 should be appropriate here. */
2259 return finish (first_set, first_base_set, val);
2261 else
2263 if (dump_file && (dump_flags & TDF_DETAILS))
2264 fprintf (dump_file,
2265 "Failed to interpret %u encoded partial definitions\n", ndefs);
2266 return (void *)-1;
2270 /* Callback for walk_non_aliased_vuses. Adjusts the vn_reference_t VR_
2271 with the current VUSE and performs the expression lookup. */
2273 static void *
2274 vn_reference_lookup_2 (ao_ref *op ATTRIBUTE_UNUSED, tree vuse, void *data_)
2276 vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
2277 vn_reference_t vr = data->vr;
2278 vn_reference_s **slot;
2279 hashval_t hash;
2281 /* If we have partial definitions recorded we have to go through
2282 vn_reference_lookup_3. */
2283 if (!data->partial_defs.is_empty ())
2284 return NULL;
2286 if (data->last_vuse_ptr)
2288 *data->last_vuse_ptr = vuse;
2289 data->last_vuse = vuse;
2292 /* Fixup vuse and hash. */
2293 if (vr->vuse)
2294 vr->hashcode = vr->hashcode - SSA_NAME_VERSION (vr->vuse);
2295 vr->vuse = vuse_ssa_val (vuse);
2296 if (vr->vuse)
2297 vr->hashcode = vr->hashcode + SSA_NAME_VERSION (vr->vuse);
2299 hash = vr->hashcode;
2300 slot = valid_info->references->find_slot_with_hash (vr, hash, NO_INSERT);
2301 if (slot)
2303 if ((*slot)->result && data->saved_operands.exists ())
2304 return data->finish (vr->set, vr->base_set, (*slot)->result);
2305 return *slot;
2308 return NULL;
2311 /* Lookup an existing or insert a new vn_reference entry into the
2312 value table for the VUSE, SET, TYPE, OPERANDS reference which
2313 has the value VALUE which is either a constant or an SSA name. */
2315 static vn_reference_t
2316 vn_reference_lookup_or_insert_for_pieces (tree vuse,
2317 alias_set_type set,
2318 alias_set_type base_set,
2319 tree type,
2320 vec<vn_reference_op_s,
2321 va_heap> operands,
2322 tree value)
2324 vn_reference_s vr1;
2325 vn_reference_t result;
2326 unsigned value_id;
2327 vr1.vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
2328 vr1.operands = operands;
2329 vr1.type = type;
2330 vr1.set = set;
2331 vr1.base_set = base_set;
2332 vr1.hashcode = vn_reference_compute_hash (&vr1);
2333 if (vn_reference_lookup_1 (&vr1, &result))
2334 return result;
2335 if (TREE_CODE (value) == SSA_NAME)
2336 value_id = VN_INFO (value)->value_id;
2337 else
2338 value_id = get_or_alloc_constant_value_id (value);
2339 return vn_reference_insert_pieces (vuse, set, base_set, type,
2340 operands.copy (), value, value_id);
2343 /* Return a value-number for RCODE OPS... either by looking up an existing
2344 value-number for the possibly simplified result or by inserting the
2345 operation if INSERT is true. If SIMPLIFY is false, return a value
2346 number for the unsimplified expression. */
2348 static tree
2349 vn_nary_build_or_lookup_1 (gimple_match_op *res_op, bool insert,
2350 bool simplify)
2352 tree result = NULL_TREE;
2353 /* We will be creating a value number for
2354 RCODE (OPS...).
2355 So first simplify and lookup this expression to see if it
2356 is already available. */
2357 /* For simplification valueize. */
2358 unsigned i = 0;
2359 if (simplify)
2360 for (i = 0; i < res_op->num_ops; ++i)
2361 if (TREE_CODE (res_op->ops[i]) == SSA_NAME)
2363 tree tem = vn_valueize (res_op->ops[i]);
2364 if (!tem)
2365 break;
2366 res_op->ops[i] = tem;
2368 /* If valueization of an operand fails (it is not available), skip
2369 simplification. */
2370 bool res = false;
2371 if (i == res_op->num_ops)
2373 mprts_hook = vn_lookup_simplify_result;
2374 res = res_op->resimplify (NULL, vn_valueize);
2375 mprts_hook = NULL;
2377 gimple *new_stmt = NULL;
2378 if (res
2379 && gimple_simplified_result_is_gimple_val (res_op))
2381 /* The expression is already available. */
2382 result = res_op->ops[0];
2383 /* Valueize it, simplification returns sth in AVAIL only. */
2384 if (TREE_CODE (result) == SSA_NAME)
2385 result = SSA_VAL (result);
2387 else
2389 tree val = vn_lookup_simplify_result (res_op);
2390 if (!val && insert)
2392 gimple_seq stmts = NULL;
2393 result = maybe_push_res_to_seq (res_op, &stmts);
2394 if (result)
2396 gcc_assert (gimple_seq_singleton_p (stmts));
2397 new_stmt = gimple_seq_first_stmt (stmts);
2400 else
2401 /* The expression is already available. */
2402 result = val;
2404 if (new_stmt)
2406 /* The expression is not yet available, value-number lhs to
2407 the new SSA_NAME we created. */
2408 /* Initialize value-number information properly. */
2409 vn_ssa_aux_t result_info = VN_INFO (result);
2410 result_info->valnum = result;
2411 result_info->value_id = get_next_value_id ();
2412 result_info->visited = 1;
2413 gimple_seq_add_stmt_without_update (&VN_INFO (result)->expr,
2414 new_stmt);
2415 result_info->needs_insertion = true;
2416 /* ??? PRE phi-translation inserts NARYs without corresponding
2417 SSA name result. Re-use those but set their result according
2418 to the stmt we just built. */
2419 vn_nary_op_t nary = NULL;
2420 vn_nary_op_lookup_stmt (new_stmt, &nary);
2421 if (nary)
2423 gcc_assert (! nary->predicated_values && nary->u.result == NULL_TREE);
2424 nary->u.result = gimple_assign_lhs (new_stmt);
2426 /* As all "inserted" statements are singleton SCCs, insert
2427 to the valid table. This is strictly needed to
2428 avoid re-generating new value SSA_NAMEs for the same
2429 expression during SCC iteration over and over (the
2430 optimistic table gets cleared after each iteration).
2431 We do not need to insert into the optimistic table, as
2432 lookups there will fall back to the valid table. */
2433 else
2435 unsigned int length = vn_nary_length_from_stmt (new_stmt);
2436 vn_nary_op_t vno1
2437 = alloc_vn_nary_op_noinit (length, &vn_tables_insert_obstack);
2438 vno1->value_id = result_info->value_id;
2439 vno1->length = length;
2440 vno1->predicated_values = 0;
2441 vno1->u.result = result;
2442 init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (new_stmt));
2443 vn_nary_op_insert_into (vno1, valid_info->nary, true);
2444 /* Also do not link it into the undo chain. */
2445 last_inserted_nary = vno1->next;
2446 vno1->next = (vn_nary_op_t)(void *)-1;
2448 if (dump_file && (dump_flags & TDF_DETAILS))
2450 fprintf (dump_file, "Inserting name ");
2451 print_generic_expr (dump_file, result);
2452 fprintf (dump_file, " for expression ");
2453 print_gimple_expr (dump_file, new_stmt, 0, TDF_SLIM);
2454 fprintf (dump_file, "\n");
2457 return result;
2460 /* Return a value-number for RCODE OPS... either by looking up an existing
2461 value-number for the simplified result or by inserting the operation. */
2463 static tree
2464 vn_nary_build_or_lookup (gimple_match_op *res_op)
2466 return vn_nary_build_or_lookup_1 (res_op, true, true);
2469 /* Try to simplify the expression RCODE OPS... of type TYPE and return
2470 its value if present. */
2472 tree
2473 vn_nary_simplify (vn_nary_op_t nary)
2475 if (nary->length > gimple_match_op::MAX_NUM_OPS)
2476 return NULL_TREE;
2477 gimple_match_op op (gimple_match_cond::UNCOND, nary->opcode,
2478 nary->type, nary->length);
2479 memcpy (op.ops, nary->op, sizeof (tree) * nary->length);
2480 return vn_nary_build_or_lookup_1 (&op, false, true);
2483 /* Elimination engine. */
2485 class eliminate_dom_walker : public dom_walker
2487 public:
2488 eliminate_dom_walker (cdi_direction, bitmap);
2489 ~eliminate_dom_walker ();
2491 virtual edge before_dom_children (basic_block);
2492 virtual void after_dom_children (basic_block);
2494 virtual tree eliminate_avail (basic_block, tree op);
2495 virtual void eliminate_push_avail (basic_block, tree op);
2496 tree eliminate_insert (basic_block, gimple_stmt_iterator *gsi, tree val);
2498 void eliminate_stmt (basic_block, gimple_stmt_iterator *);
2500 unsigned eliminate_cleanup (bool region_p = false);
2502 bool do_pre;
2503 unsigned int el_todo;
2504 unsigned int eliminations;
2505 unsigned int insertions;
2507 /* SSA names that had their defs inserted by PRE if do_pre. */
2508 bitmap inserted_exprs;
2510 /* Blocks with statements that have had their EH properties changed. */
2511 bitmap need_eh_cleanup;
2513 /* Blocks with statements that have had their AB properties changed. */
2514 bitmap need_ab_cleanup;
2516 /* Local state for the eliminate domwalk. */
2517 auto_vec<gimple *> to_remove;
2518 auto_vec<gimple *> to_fixup;
2519 auto_vec<tree> avail;
2520 auto_vec<tree> avail_stack;
2523 /* Adaptor to the elimination engine using RPO availability. */
2525 class rpo_elim : public eliminate_dom_walker
2527 public:
2528 rpo_elim(basic_block entry_)
2529 : eliminate_dom_walker (CDI_DOMINATORS, NULL), entry (entry_),
2530 m_avail_freelist (NULL) {}
2532 virtual tree eliminate_avail (basic_block, tree op);
2534 virtual void eliminate_push_avail (basic_block, tree);
2536 basic_block entry;
2537 /* Freelist of avail entries which are allocated from the vn_ssa_aux
2538 obstack. */
2539 vn_avail *m_avail_freelist;
2542 /* Global RPO state for access from hooks. */
2543 static eliminate_dom_walker *rpo_avail;
2544 basic_block vn_context_bb;
2546 /* Return true if BASE1 and BASE2 can be adjusted so they have the
2547 same address and adjust *OFFSET1 and *OFFSET2 accordingly.
2548 Otherwise return false. */
2550 static bool
2551 adjust_offsets_for_equal_base_address (tree base1, poly_int64 *offset1,
2552 tree base2, poly_int64 *offset2)
2554 poly_int64 soff;
2555 if (TREE_CODE (base1) == MEM_REF
2556 && TREE_CODE (base2) == MEM_REF)
2558 if (mem_ref_offset (base1).to_shwi (&soff))
2560 base1 = TREE_OPERAND (base1, 0);
2561 *offset1 += soff * BITS_PER_UNIT;
2563 if (mem_ref_offset (base2).to_shwi (&soff))
2565 base2 = TREE_OPERAND (base2, 0);
2566 *offset2 += soff * BITS_PER_UNIT;
2568 return operand_equal_p (base1, base2, 0);
2570 return operand_equal_p (base1, base2, OEP_ADDRESS_OF);
2573 /* Callback for walk_non_aliased_vuses. Tries to perform a lookup
2574 from the statement defining VUSE and if not successful tries to
2575 translate *REFP and VR_ through an aggregate copy at the definition
2576 of VUSE. If *DISAMBIGUATE_ONLY is true then do not perform translation
2577 of *REF and *VR. If only disambiguation was performed then
2578 *DISAMBIGUATE_ONLY is set to true. */
2580 static void *
2581 vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *data_,
2582 translate_flags *disambiguate_only)
2584 vn_walk_cb_data *data = (vn_walk_cb_data *)data_;
2585 vn_reference_t vr = data->vr;
2586 gimple *def_stmt = SSA_NAME_DEF_STMT (vuse);
2587 tree base = ao_ref_base (ref);
2588 HOST_WIDE_INT offseti = 0, maxsizei, sizei = 0;
2589 static vec<vn_reference_op_s> lhs_ops;
2590 ao_ref lhs_ref;
2591 bool lhs_ref_ok = false;
2592 poly_int64 copy_size;
2594 /* First try to disambiguate after value-replacing in the definitions LHS. */
2595 if (is_gimple_assign (def_stmt))
2597 tree lhs = gimple_assign_lhs (def_stmt);
2598 bool valueized_anything = false;
2599 /* Avoid re-allocation overhead. */
2600 lhs_ops.truncate (0);
2601 basic_block saved_rpo_bb = vn_context_bb;
2602 vn_context_bb = gimple_bb (def_stmt);
2603 if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE)
2605 copy_reference_ops_from_ref (lhs, &lhs_ops);
2606 valueize_refs_1 (&lhs_ops, &valueized_anything, true);
2608 vn_context_bb = saved_rpo_bb;
2609 ao_ref_init (&lhs_ref, lhs);
2610 lhs_ref_ok = true;
2611 if (valueized_anything
2612 && ao_ref_init_from_vn_reference
2613 (&lhs_ref, ao_ref_alias_set (&lhs_ref),
2614 ao_ref_base_alias_set (&lhs_ref), TREE_TYPE (lhs), lhs_ops)
2615 && !refs_may_alias_p_1 (ref, &lhs_ref, data->tbaa_p))
2617 *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
2618 return NULL;
2621 /* Besides valueizing the LHS we can also use access-path based
2622 disambiguation on the original non-valueized ref. */
2623 if (!ref->ref
2624 && lhs_ref_ok
2625 && data->orig_ref.ref)
2627 /* We want to use the non-valueized LHS for this, but avoid redundant
2628 work. */
2629 ao_ref *lref = &lhs_ref;
2630 ao_ref lref_alt;
2631 if (valueized_anything)
2633 ao_ref_init (&lref_alt, lhs);
2634 lref = &lref_alt;
2636 if (!refs_may_alias_p_1 (&data->orig_ref, lref, data->tbaa_p))
2638 *disambiguate_only = (valueized_anything
2639 ? TR_VALUEIZE_AND_DISAMBIGUATE
2640 : TR_DISAMBIGUATE);
2641 return NULL;
2645 /* If we reach a clobbering statement try to skip it and see if
2646 we find a VN result with exactly the same value as the
2647 possible clobber. In this case we can ignore the clobber
2648 and return the found value. */
2649 if (is_gimple_reg_type (TREE_TYPE (lhs))
2650 && types_compatible_p (TREE_TYPE (lhs), vr->type)
2651 && (ref->ref || data->orig_ref.ref))
2653 tree *saved_last_vuse_ptr = data->last_vuse_ptr;
2654 /* Do not update last_vuse_ptr in vn_reference_lookup_2. */
2655 data->last_vuse_ptr = NULL;
2656 tree saved_vuse = vr->vuse;
2657 hashval_t saved_hashcode = vr->hashcode;
2658 void *res = vn_reference_lookup_2 (ref, gimple_vuse (def_stmt), data);
2659 /* Need to restore vr->vuse and vr->hashcode. */
2660 vr->vuse = saved_vuse;
2661 vr->hashcode = saved_hashcode;
2662 data->last_vuse_ptr = saved_last_vuse_ptr;
2663 if (res && res != (void *)-1)
2665 vn_reference_t vnresult = (vn_reference_t) res;
2666 tree rhs = gimple_assign_rhs1 (def_stmt);
2667 if (TREE_CODE (rhs) == SSA_NAME)
2668 rhs = SSA_VAL (rhs);
2669 if (vnresult->result
2670 && operand_equal_p (vnresult->result, rhs, 0)
2671 /* We have to honor our promise about union type punning
2672 and also support arbitrary overlaps with
2673 -fno-strict-aliasing. So simply resort to alignment to
2674 rule out overlaps. Do this check last because it is
2675 quite expensive compared to the hash-lookup above. */
2676 && multiple_p (get_object_alignment
2677 (ref->ref ? ref->ref : data->orig_ref.ref),
2678 ref->size)
2679 && multiple_p (get_object_alignment (lhs), ref->size))
2680 return res;
2684 else if (*disambiguate_only <= TR_VALUEIZE_AND_DISAMBIGUATE
2685 && gimple_call_builtin_p (def_stmt, BUILT_IN_NORMAL)
2686 && gimple_call_num_args (def_stmt) <= 4)
2688 /* For builtin calls valueize its arguments and call the
2689 alias oracle again. Valueization may improve points-to
2690 info of pointers and constify size and position arguments.
2691 Originally this was motivated by PR61034 which has
2692 conditional calls to free falsely clobbering ref because
2693 of imprecise points-to info of the argument. */
2694 tree oldargs[4];
2695 bool valueized_anything = false;
2696 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2698 oldargs[i] = gimple_call_arg (def_stmt, i);
2699 tree val = vn_valueize (oldargs[i]);
2700 if (val != oldargs[i])
2702 gimple_call_set_arg (def_stmt, i, val);
2703 valueized_anything = true;
2706 if (valueized_anything)
2708 bool res = call_may_clobber_ref_p_1 (as_a <gcall *> (def_stmt),
2709 ref, data->tbaa_p);
2710 for (unsigned i = 0; i < gimple_call_num_args (def_stmt); ++i)
2711 gimple_call_set_arg (def_stmt, i, oldargs[i]);
2712 if (!res)
2714 *disambiguate_only = TR_VALUEIZE_AND_DISAMBIGUATE;
2715 return NULL;
2720 if (*disambiguate_only > TR_TRANSLATE)
2721 return (void *)-1;
2723 /* If we cannot constrain the size of the reference we cannot
2724 test if anything kills it. */
2725 if (!ref->max_size_known_p ())
2726 return (void *)-1;
2728 poly_int64 offset = ref->offset;
2729 poly_int64 maxsize = ref->max_size;
2731 /* def_stmt may-defs *ref. See if we can derive a value for *ref
2732 from that definition.
2733 1) Memset. */
2734 if (is_gimple_reg_type (vr->type)
2735 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET)
2736 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMSET_CHK))
2737 && (integer_zerop (gimple_call_arg (def_stmt, 1))
2738 || ((TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST
2739 || (INTEGRAL_TYPE_P (vr->type) && known_eq (ref->size, 8)))
2740 && CHAR_BIT == 8
2741 && BITS_PER_UNIT == 8
2742 && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
2743 && offset.is_constant (&offseti)
2744 && ref->size.is_constant (&sizei)
2745 && (offseti % BITS_PER_UNIT == 0
2746 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == INTEGER_CST)))
2747 && (poly_int_tree_p (gimple_call_arg (def_stmt, 2))
2748 || (TREE_CODE (gimple_call_arg (def_stmt, 2)) == SSA_NAME
2749 && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt, 2)))))
2750 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
2751 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME))
2753 tree base2;
2754 poly_int64 offset2, size2, maxsize2;
2755 bool reverse;
2756 tree ref2 = gimple_call_arg (def_stmt, 0);
2757 if (TREE_CODE (ref2) == SSA_NAME)
2759 ref2 = SSA_VAL (ref2);
2760 if (TREE_CODE (ref2) == SSA_NAME
2761 && (TREE_CODE (base) != MEM_REF
2762 || TREE_OPERAND (base, 0) != ref2))
2764 gimple *def_stmt = SSA_NAME_DEF_STMT (ref2);
2765 if (gimple_assign_single_p (def_stmt)
2766 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
2767 ref2 = gimple_assign_rhs1 (def_stmt);
2770 if (TREE_CODE (ref2) == ADDR_EXPR)
2772 ref2 = TREE_OPERAND (ref2, 0);
2773 base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2,
2774 &reverse);
2775 if (!known_size_p (maxsize2)
2776 || !known_eq (maxsize2, size2)
2777 || !operand_equal_p (base, base2, OEP_ADDRESS_OF))
2778 return (void *)-1;
2780 else if (TREE_CODE (ref2) == SSA_NAME)
2782 poly_int64 soff;
2783 if (TREE_CODE (base) != MEM_REF
2784 || !(mem_ref_offset (base)
2785 << LOG2_BITS_PER_UNIT).to_shwi (&soff))
2786 return (void *)-1;
2787 offset += soff;
2788 offset2 = 0;
2789 if (TREE_OPERAND (base, 0) != ref2)
2791 gimple *def = SSA_NAME_DEF_STMT (ref2);
2792 if (is_gimple_assign (def)
2793 && gimple_assign_rhs_code (def) == POINTER_PLUS_EXPR
2794 && gimple_assign_rhs1 (def) == TREE_OPERAND (base, 0)
2795 && poly_int_tree_p (gimple_assign_rhs2 (def)))
2797 tree rhs2 = gimple_assign_rhs2 (def);
2798 if (!(poly_offset_int::from (wi::to_poly_wide (rhs2),
2799 SIGNED)
2800 << LOG2_BITS_PER_UNIT).to_shwi (&offset2))
2801 return (void *)-1;
2802 ref2 = gimple_assign_rhs1 (def);
2803 if (TREE_CODE (ref2) == SSA_NAME)
2804 ref2 = SSA_VAL (ref2);
2806 else
2807 return (void *)-1;
2810 else
2811 return (void *)-1;
2812 tree len = gimple_call_arg (def_stmt, 2);
2813 HOST_WIDE_INT leni, offset2i;
2814 if (TREE_CODE (len) == SSA_NAME)
2815 len = SSA_VAL (len);
2816 /* Sometimes the above trickery is smarter than alias analysis. Take
2817 advantage of that. */
2818 if (!ranges_maybe_overlap_p (offset, maxsize, offset2,
2819 (wi::to_poly_offset (len)
2820 << LOG2_BITS_PER_UNIT)))
2821 return NULL;
2822 if (data->partial_defs.is_empty ()
2823 && known_subrange_p (offset, maxsize, offset2,
2824 wi::to_poly_offset (len) << LOG2_BITS_PER_UNIT))
2826 tree val;
2827 if (integer_zerop (gimple_call_arg (def_stmt, 1)))
2828 val = build_zero_cst (vr->type);
2829 else if (INTEGRAL_TYPE_P (vr->type)
2830 && known_eq (ref->size, 8)
2831 && offseti % BITS_PER_UNIT == 0)
2833 gimple_match_op res_op (gimple_match_cond::UNCOND, NOP_EXPR,
2834 vr->type, gimple_call_arg (def_stmt, 1));
2835 val = vn_nary_build_or_lookup (&res_op);
2836 if (!val
2837 || (TREE_CODE (val) == SSA_NAME
2838 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
2839 return (void *)-1;
2841 else
2843 unsigned buflen = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (vr->type)) + 1;
2844 if (INTEGRAL_TYPE_P (vr->type))
2845 buflen = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (vr->type)) + 1;
2846 unsigned char *buf = XALLOCAVEC (unsigned char, buflen);
2847 memset (buf, TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 1)),
2848 buflen);
2849 if (BYTES_BIG_ENDIAN)
2851 unsigned int amnt
2852 = (((unsigned HOST_WIDE_INT) offseti + sizei)
2853 % BITS_PER_UNIT);
2854 if (amnt)
2856 shift_bytes_in_array_right (buf, buflen,
2857 BITS_PER_UNIT - amnt);
2858 buf++;
2859 buflen--;
2862 else if (offseti % BITS_PER_UNIT != 0)
2864 unsigned int amnt
2865 = BITS_PER_UNIT - ((unsigned HOST_WIDE_INT) offseti
2866 % BITS_PER_UNIT);
2867 shift_bytes_in_array_left (buf, buflen, amnt);
2868 buf++;
2869 buflen--;
2871 val = native_interpret_expr (vr->type, buf, buflen);
2872 if (!val)
2873 return (void *)-1;
2875 return data->finish (0, 0, val);
2877 /* For now handle clearing memory with partial defs. */
2878 else if (known_eq (ref->size, maxsize)
2879 && integer_zerop (gimple_call_arg (def_stmt, 1))
2880 && tree_fits_poly_int64_p (len)
2881 && tree_to_poly_int64 (len).is_constant (&leni)
2882 && leni <= INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT
2883 && offset.is_constant (&offseti)
2884 && offset2.is_constant (&offset2i)
2885 && maxsize.is_constant (&maxsizei)
2886 && ranges_known_overlap_p (offseti, maxsizei, offset2i,
2887 leni << LOG2_BITS_PER_UNIT))
2889 pd_data pd;
2890 pd.rhs = build_constructor (NULL_TREE, NULL);
2891 pd.offset = offset2i;
2892 pd.size = leni << LOG2_BITS_PER_UNIT;
2893 return data->push_partial_def (pd, 0, 0, offseti, maxsizei);
2897 /* 2) Assignment from an empty CONSTRUCTOR. */
2898 else if (is_gimple_reg_type (vr->type)
2899 && gimple_assign_single_p (def_stmt)
2900 && gimple_assign_rhs_code (def_stmt) == CONSTRUCTOR
2901 && CONSTRUCTOR_NELTS (gimple_assign_rhs1 (def_stmt)) == 0)
2903 tree base2;
2904 poly_int64 offset2, size2, maxsize2;
2905 HOST_WIDE_INT offset2i, size2i;
2906 gcc_assert (lhs_ref_ok);
2907 base2 = ao_ref_base (&lhs_ref);
2908 offset2 = lhs_ref.offset;
2909 size2 = lhs_ref.size;
2910 maxsize2 = lhs_ref.max_size;
2911 if (known_size_p (maxsize2)
2912 && known_eq (maxsize2, size2)
2913 && adjust_offsets_for_equal_base_address (base, &offset,
2914 base2, &offset2))
2916 if (data->partial_defs.is_empty ()
2917 && known_subrange_p (offset, maxsize, offset2, size2))
2919 /* While technically undefined behavior do not optimize
2920 a full read from a clobber. */
2921 if (gimple_clobber_p (def_stmt))
2922 return (void *)-1;
2923 tree val = build_zero_cst (vr->type);
2924 return data->finish (ao_ref_alias_set (&lhs_ref),
2925 ao_ref_base_alias_set (&lhs_ref), val);
2927 else if (known_eq (ref->size, maxsize)
2928 && maxsize.is_constant (&maxsizei)
2929 && offset.is_constant (&offseti)
2930 && offset2.is_constant (&offset2i)
2931 && size2.is_constant (&size2i)
2932 && ranges_known_overlap_p (offseti, maxsizei,
2933 offset2i, size2i))
2935 /* Let clobbers be consumed by the partial-def tracker
2936 which can choose to ignore them if they are shadowed
2937 by a later def. */
2938 pd_data pd;
2939 pd.rhs = gimple_assign_rhs1 (def_stmt);
2940 pd.offset = offset2i;
2941 pd.size = size2i;
2942 return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
2943 ao_ref_base_alias_set (&lhs_ref),
2944 offseti, maxsizei);
2949 /* 3) Assignment from a constant. We can use folds native encode/interpret
2950 routines to extract the assigned bits. */
2951 else if (known_eq (ref->size, maxsize)
2952 && is_gimple_reg_type (vr->type)
2953 && !reverse_storage_order_for_component_p (vr->operands)
2954 && !contains_storage_order_barrier_p (vr->operands)
2955 && gimple_assign_single_p (def_stmt)
2956 && CHAR_BIT == 8
2957 && BITS_PER_UNIT == 8
2958 && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
2959 /* native_encode and native_decode operate on arrays of bytes
2960 and so fundamentally need a compile-time size and offset. */
2961 && maxsize.is_constant (&maxsizei)
2962 && offset.is_constant (&offseti)
2963 && (is_gimple_min_invariant (gimple_assign_rhs1 (def_stmt))
2964 || (TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
2965 && is_gimple_min_invariant (SSA_VAL (gimple_assign_rhs1 (def_stmt))))))
2967 tree lhs = gimple_assign_lhs (def_stmt);
2968 tree base2;
2969 poly_int64 offset2, size2, maxsize2;
2970 HOST_WIDE_INT offset2i, size2i;
2971 bool reverse;
2972 gcc_assert (lhs_ref_ok);
2973 base2 = ao_ref_base (&lhs_ref);
2974 offset2 = lhs_ref.offset;
2975 size2 = lhs_ref.size;
2976 maxsize2 = lhs_ref.max_size;
2977 reverse = reverse_storage_order_for_component_p (lhs);
2978 if (base2
2979 && !reverse
2980 && !storage_order_barrier_p (lhs)
2981 && known_eq (maxsize2, size2)
2982 && adjust_offsets_for_equal_base_address (base, &offset,
2983 base2, &offset2)
2984 && offset.is_constant (&offseti)
2985 && offset2.is_constant (&offset2i)
2986 && size2.is_constant (&size2i))
2988 if (data->partial_defs.is_empty ()
2989 && known_subrange_p (offseti, maxsizei, offset2, size2))
2991 /* We support up to 512-bit values (for V8DFmode). */
2992 unsigned char buffer[65];
2993 int len;
2995 tree rhs = gimple_assign_rhs1 (def_stmt);
2996 if (TREE_CODE (rhs) == SSA_NAME)
2997 rhs = SSA_VAL (rhs);
2998 len = native_encode_expr (rhs,
2999 buffer, sizeof (buffer) - 1,
3000 (offseti - offset2i) / BITS_PER_UNIT);
3001 if (len > 0 && len * BITS_PER_UNIT >= maxsizei)
3003 tree type = vr->type;
3004 unsigned char *buf = buffer;
3005 unsigned int amnt = 0;
3006 /* Make sure to interpret in a type that has a range
3007 covering the whole access size. */
3008 if (INTEGRAL_TYPE_P (vr->type)
3009 && maxsizei != TYPE_PRECISION (vr->type))
3010 type = build_nonstandard_integer_type (maxsizei,
3011 TYPE_UNSIGNED (type));
3012 if (BYTES_BIG_ENDIAN)
3014 /* For big-endian native_encode_expr stored the rhs
3015 such that the LSB of it is the LSB of buffer[len - 1].
3016 That bit is stored into memory at position
3017 offset2 + size2 - 1, i.e. in byte
3018 base + (offset2 + size2 - 1) / BITS_PER_UNIT.
3019 E.g. for offset2 1 and size2 14, rhs -1 and memory
3020 previously cleared that is:
3022 01111111|11111110
3023 Now, if we want to extract offset 2 and size 12 from
3024 it using native_interpret_expr (which actually works
3025 for integral bitfield types in terms of byte size of
3026 the mode), the native_encode_expr stored the value
3027 into buffer as
3028 XX111111|11111111
3029 and returned len 2 (the X bits are outside of
3030 precision).
3031 Let sz be maxsize / BITS_PER_UNIT if not extracting
3032 a bitfield, and GET_MODE_SIZE otherwise.
3033 We need to align the LSB of the value we want to
3034 extract as the LSB of buf[sz - 1].
3035 The LSB from memory we need to read is at position
3036 offset + maxsize - 1. */
3037 HOST_WIDE_INT sz = maxsizei / BITS_PER_UNIT;
3038 if (INTEGRAL_TYPE_P (type))
3039 sz = GET_MODE_SIZE (SCALAR_INT_TYPE_MODE (type));
3040 amnt = ((unsigned HOST_WIDE_INT) offset2i + size2i
3041 - offseti - maxsizei) % BITS_PER_UNIT;
3042 if (amnt)
3043 shift_bytes_in_array_right (buffer, len, amnt);
3044 amnt = ((unsigned HOST_WIDE_INT) offset2i + size2i
3045 - offseti - maxsizei - amnt) / BITS_PER_UNIT;
3046 if ((unsigned HOST_WIDE_INT) sz + amnt > (unsigned) len)
3047 len = 0;
3048 else
3050 buf = buffer + len - sz - amnt;
3051 len -= (buf - buffer);
3054 else
3056 amnt = ((unsigned HOST_WIDE_INT) offset2i
3057 - offseti) % BITS_PER_UNIT;
3058 if (amnt)
3060 buffer[len] = 0;
3061 shift_bytes_in_array_left (buffer, len + 1, amnt);
3062 buf = buffer + 1;
3065 tree val = native_interpret_expr (type, buf, len);
3066 /* If we chop off bits because the types precision doesn't
3067 match the memory access size this is ok when optimizing
3068 reads but not when called from the DSE code during
3069 elimination. */
3070 if (val
3071 && type != vr->type)
3073 if (! int_fits_type_p (val, vr->type))
3074 val = NULL_TREE;
3075 else
3076 val = fold_convert (vr->type, val);
3079 if (val)
3080 return data->finish (ao_ref_alias_set (&lhs_ref),
3081 ao_ref_base_alias_set (&lhs_ref), val);
3084 else if (ranges_known_overlap_p (offseti, maxsizei, offset2i,
3085 size2i))
3087 pd_data pd;
3088 tree rhs = gimple_assign_rhs1 (def_stmt);
3089 if (TREE_CODE (rhs) == SSA_NAME)
3090 rhs = SSA_VAL (rhs);
3091 pd.rhs = rhs;
3092 pd.offset = offset2i;
3093 pd.size = size2i;
3094 return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
3095 ao_ref_base_alias_set (&lhs_ref),
3096 offseti, maxsizei);
3101 /* 4) Assignment from an SSA name which definition we may be able
3102 to access pieces from or we can combine to a larger entity. */
3103 else if (known_eq (ref->size, maxsize)
3104 && is_gimple_reg_type (vr->type)
3105 && !reverse_storage_order_for_component_p (vr->operands)
3106 && !contains_storage_order_barrier_p (vr->operands)
3107 && gimple_assign_single_p (def_stmt)
3108 && TREE_CODE (gimple_assign_rhs1 (def_stmt)) == SSA_NAME)
3110 tree lhs = gimple_assign_lhs (def_stmt);
3111 tree base2;
3112 poly_int64 offset2, size2, maxsize2;
3113 HOST_WIDE_INT offset2i, size2i, offseti;
3114 bool reverse;
3115 gcc_assert (lhs_ref_ok);
3116 base2 = ao_ref_base (&lhs_ref);
3117 offset2 = lhs_ref.offset;
3118 size2 = lhs_ref.size;
3119 maxsize2 = lhs_ref.max_size;
3120 reverse = reverse_storage_order_for_component_p (lhs);
3121 tree def_rhs = gimple_assign_rhs1 (def_stmt);
3122 if (!reverse
3123 && !storage_order_barrier_p (lhs)
3124 && known_size_p (maxsize2)
3125 && known_eq (maxsize2, size2)
3126 && adjust_offsets_for_equal_base_address (base, &offset,
3127 base2, &offset2))
3129 if (data->partial_defs.is_empty ()
3130 && known_subrange_p (offset, maxsize, offset2, size2)
3131 /* ??? We can't handle bitfield precision extracts without
3132 either using an alternate type for the BIT_FIELD_REF and
3133 then doing a conversion or possibly adjusting the offset
3134 according to endianness. */
3135 && (! INTEGRAL_TYPE_P (vr->type)
3136 || known_eq (ref->size, TYPE_PRECISION (vr->type)))
3137 && multiple_p (ref->size, BITS_PER_UNIT))
3139 tree val = NULL_TREE;
3140 if (! INTEGRAL_TYPE_P (TREE_TYPE (def_rhs))
3141 || type_has_mode_precision_p (TREE_TYPE (def_rhs)))
3143 gimple_match_op op (gimple_match_cond::UNCOND,
3144 BIT_FIELD_REF, vr->type,
3145 SSA_VAL (def_rhs),
3146 bitsize_int (ref->size),
3147 bitsize_int (offset - offset2));
3148 val = vn_nary_build_or_lookup (&op);
3150 else if (known_eq (ref->size, size2))
3152 gimple_match_op op (gimple_match_cond::UNCOND,
3153 VIEW_CONVERT_EXPR, vr->type,
3154 SSA_VAL (def_rhs));
3155 val = vn_nary_build_or_lookup (&op);
3157 if (val
3158 && (TREE_CODE (val) != SSA_NAME
3159 || ! SSA_NAME_OCCURS_IN_ABNORMAL_PHI (val)))
3160 return data->finish (ao_ref_alias_set (&lhs_ref),
3161 ao_ref_base_alias_set (&lhs_ref), val);
3163 else if (maxsize.is_constant (&maxsizei)
3164 && offset.is_constant (&offseti)
3165 && offset2.is_constant (&offset2i)
3166 && size2.is_constant (&size2i)
3167 && ranges_known_overlap_p (offset, maxsize, offset2, size2))
3169 pd_data pd;
3170 pd.rhs = SSA_VAL (def_rhs);
3171 pd.offset = offset2i;
3172 pd.size = size2i;
3173 return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
3174 ao_ref_base_alias_set (&lhs_ref),
3175 offseti, maxsizei);
3180 /* 5) For aggregate copies translate the reference through them if
3181 the copy kills ref. */
3182 else if (data->vn_walk_kind == VN_WALKREWRITE
3183 && gimple_assign_single_p (def_stmt)
3184 && (DECL_P (gimple_assign_rhs1 (def_stmt))
3185 || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
3186 || handled_component_p (gimple_assign_rhs1 (def_stmt))))
3188 tree base2;
3189 int i, j, k;
3190 auto_vec<vn_reference_op_s> rhs;
3191 vn_reference_op_t vro;
3192 ao_ref r;
3194 gcc_assert (lhs_ref_ok);
3196 /* See if the assignment kills REF. */
3197 base2 = ao_ref_base (&lhs_ref);
3198 if (!lhs_ref.max_size_known_p ()
3199 || (base != base2
3200 && (TREE_CODE (base) != MEM_REF
3201 || TREE_CODE (base2) != MEM_REF
3202 || TREE_OPERAND (base, 0) != TREE_OPERAND (base2, 0)
3203 || !tree_int_cst_equal (TREE_OPERAND (base, 1),
3204 TREE_OPERAND (base2, 1))))
3205 || !stmt_kills_ref_p (def_stmt, ref))
3206 return (void *)-1;
3208 /* Find the common base of ref and the lhs. lhs_ops already
3209 contains valueized operands for the lhs. */
3210 i = vr->operands.length () - 1;
3211 j = lhs_ops.length () - 1;
3212 while (j >= 0 && i >= 0
3213 && vn_reference_op_eq (&vr->operands[i], &lhs_ops[j]))
3215 i--;
3216 j--;
3219 /* ??? The innermost op should always be a MEM_REF and we already
3220 checked that the assignment to the lhs kills vr. Thus for
3221 aggregate copies using char[] types the vn_reference_op_eq
3222 may fail when comparing types for compatibility. But we really
3223 don't care here - further lookups with the rewritten operands
3224 will simply fail if we messed up types too badly. */
3225 poly_int64 extra_off = 0;
3226 if (j == 0 && i >= 0
3227 && lhs_ops[0].opcode == MEM_REF
3228 && maybe_ne (lhs_ops[0].off, -1))
3230 if (known_eq (lhs_ops[0].off, vr->operands[i].off))
3231 i--, j--;
3232 else if (vr->operands[i].opcode == MEM_REF
3233 && maybe_ne (vr->operands[i].off, -1))
3235 extra_off = vr->operands[i].off - lhs_ops[0].off;
3236 i--, j--;
3240 /* i now points to the first additional op.
3241 ??? LHS may not be completely contained in VR, one or more
3242 VIEW_CONVERT_EXPRs could be in its way. We could at least
3243 try handling outermost VIEW_CONVERT_EXPRs. */
3244 if (j != -1)
3245 return (void *)-1;
3247 /* Punt if the additional ops contain a storage order barrier. */
3248 for (k = i; k >= 0; k--)
3250 vro = &vr->operands[k];
3251 if (vro->opcode == VIEW_CONVERT_EXPR && vro->reverse)
3252 return (void *)-1;
3255 /* Now re-write REF to be based on the rhs of the assignment. */
3256 tree rhs1 = gimple_assign_rhs1 (def_stmt);
3257 copy_reference_ops_from_ref (rhs1, &rhs);
3259 /* Apply an extra offset to the inner MEM_REF of the RHS. */
3260 if (maybe_ne (extra_off, 0))
3262 if (rhs.length () < 2)
3263 return (void *)-1;
3264 int ix = rhs.length () - 2;
3265 if (rhs[ix].opcode != MEM_REF
3266 || known_eq (rhs[ix].off, -1))
3267 return (void *)-1;
3268 rhs[ix].off += extra_off;
3269 rhs[ix].op0 = int_const_binop (PLUS_EXPR, rhs[ix].op0,
3270 build_int_cst (TREE_TYPE (rhs[ix].op0),
3271 extra_off));
3274 /* Save the operands since we need to use the original ones for
3275 the hash entry we use. */
3276 if (!data->saved_operands.exists ())
3277 data->saved_operands = vr->operands.copy ();
3279 /* We need to pre-pend vr->operands[0..i] to rhs. */
3280 vec<vn_reference_op_s> old = vr->operands;
3281 if (i + 1 + rhs.length () > vr->operands.length ())
3282 vr->operands.safe_grow (i + 1 + rhs.length (), true);
3283 else
3284 vr->operands.truncate (i + 1 + rhs.length ());
3285 FOR_EACH_VEC_ELT (rhs, j, vro)
3286 vr->operands[i + 1 + j] = *vro;
3287 valueize_refs (&vr->operands);
3288 if (old == shared_lookup_references)
3289 shared_lookup_references = vr->operands;
3290 vr->hashcode = vn_reference_compute_hash (vr);
3292 /* Try folding the new reference to a constant. */
3293 tree val = fully_constant_vn_reference_p (vr);
3294 if (val)
3296 if (data->partial_defs.is_empty ())
3297 return data->finish (ao_ref_alias_set (&lhs_ref),
3298 ao_ref_base_alias_set (&lhs_ref), val);
3299 /* This is the only interesting case for partial-def handling
3300 coming from targets that like to gimplify init-ctors as
3301 aggregate copies from constant data like aarch64 for
3302 PR83518. */
3303 if (maxsize.is_constant (&maxsizei) && known_eq (ref->size, maxsize))
3305 pd_data pd;
3306 pd.rhs = val;
3307 pd.offset = 0;
3308 pd.size = maxsizei;
3309 return data->push_partial_def (pd, ao_ref_alias_set (&lhs_ref),
3310 ao_ref_base_alias_set (&lhs_ref),
3311 0, maxsizei);
3315 /* Continuing with partial defs isn't easily possible here, we
3316 have to find a full def from further lookups from here. Probably
3317 not worth the special-casing everywhere. */
3318 if (!data->partial_defs.is_empty ())
3319 return (void *)-1;
3321 /* Adjust *ref from the new operands. */
3322 ao_ref rhs1_ref;
3323 ao_ref_init (&rhs1_ref, rhs1);
3324 if (!ao_ref_init_from_vn_reference (&r, ao_ref_alias_set (&rhs1_ref),
3325 ao_ref_base_alias_set (&rhs1_ref),
3326 vr->type, vr->operands))
3327 return (void *)-1;
3328 /* This can happen with bitfields. */
3329 if (maybe_ne (ref->size, r.size))
3331 /* If the access lacks some subsetting simply apply that by
3332 shortening it. That in the end can only be successful
3333 if we can pun the lookup result which in turn requires
3334 exact offsets. */
3335 if (known_eq (r.size, r.max_size)
3336 && known_lt (ref->size, r.size))
3337 r.size = r.max_size = ref->size;
3338 else
3339 return (void *)-1;
3341 *ref = r;
3343 /* Do not update last seen VUSE after translating. */
3344 data->last_vuse_ptr = NULL;
3345 /* Invalidate the original access path since it now contains
3346 the wrong base. */
3347 data->orig_ref.ref = NULL_TREE;
3348 /* Use the alias-set of this LHS for recording an eventual result. */
3349 if (data->first_set == -2)
3351 data->first_set = ao_ref_alias_set (&lhs_ref);
3352 data->first_base_set = ao_ref_base_alias_set (&lhs_ref);
3355 /* Keep looking for the adjusted *REF / VR pair. */
3356 return NULL;
3359 /* 6) For memcpy copies translate the reference through them if the copy
3360 kills ref. But we cannot (easily) do this translation if the memcpy is
3361 a storage order barrier, i.e. is equivalent to a VIEW_CONVERT_EXPR that
3362 can modify the storage order of objects (see storage_order_barrier_p). */
3363 else if (data->vn_walk_kind == VN_WALKREWRITE
3364 && is_gimple_reg_type (vr->type)
3365 /* ??? Handle BCOPY as well. */
3366 && (gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY)
3367 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMCPY_CHK)
3368 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY)
3369 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMPCPY_CHK)
3370 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE)
3371 || gimple_call_builtin_p (def_stmt, BUILT_IN_MEMMOVE_CHK))
3372 && (TREE_CODE (gimple_call_arg (def_stmt, 0)) == ADDR_EXPR
3373 || TREE_CODE (gimple_call_arg (def_stmt, 0)) == SSA_NAME)
3374 && (TREE_CODE (gimple_call_arg (def_stmt, 1)) == ADDR_EXPR
3375 || TREE_CODE (gimple_call_arg (def_stmt, 1)) == SSA_NAME)
3376 && (poly_int_tree_p (gimple_call_arg (def_stmt, 2), &copy_size)
3377 || (TREE_CODE (gimple_call_arg (def_stmt, 2)) == SSA_NAME
3378 && poly_int_tree_p (SSA_VAL (gimple_call_arg (def_stmt, 2)),
3379 &copy_size)))
3380 /* Handling this is more complicated, give up for now. */
3381 && data->partial_defs.is_empty ())
3383 tree lhs, rhs;
3384 ao_ref r;
3385 poly_int64 rhs_offset, lhs_offset;
3386 vn_reference_op_s op;
3387 poly_uint64 mem_offset;
3388 poly_int64 at, byte_maxsize;
3390 /* Only handle non-variable, addressable refs. */
3391 if (maybe_ne (ref->size, maxsize)
3392 || !multiple_p (offset, BITS_PER_UNIT, &at)
3393 || !multiple_p (maxsize, BITS_PER_UNIT, &byte_maxsize))
3394 return (void *)-1;
3396 /* Extract a pointer base and an offset for the destination. */
3397 lhs = gimple_call_arg (def_stmt, 0);
3398 lhs_offset = 0;
3399 if (TREE_CODE (lhs) == SSA_NAME)
3401 lhs = vn_valueize (lhs);
3402 if (TREE_CODE (lhs) == SSA_NAME)
3404 gimple *def_stmt = SSA_NAME_DEF_STMT (lhs);
3405 if (gimple_assign_single_p (def_stmt)
3406 && gimple_assign_rhs_code (def_stmt) == ADDR_EXPR)
3407 lhs = gimple_assign_rhs1 (def_stmt);
3410 if (TREE_CODE (lhs) == ADDR_EXPR)
3412 if (AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (lhs)))
3413 && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_TYPE (lhs))))
3414 return (void *)-1;
3415 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (lhs, 0),
3416 &lhs_offset);
3417 if (!tem)
3418 return (void *)-1;
3419 if (TREE_CODE (tem) == MEM_REF
3420 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
3422 lhs = TREE_OPERAND (tem, 0);
3423 if (TREE_CODE (lhs) == SSA_NAME)
3424 lhs = vn_valueize (lhs);
3425 lhs_offset += mem_offset;
3427 else if (DECL_P (tem))
3428 lhs = build_fold_addr_expr (tem);
3429 else
3430 return (void *)-1;
3432 if (TREE_CODE (lhs) != SSA_NAME
3433 && TREE_CODE (lhs) != ADDR_EXPR)
3434 return (void *)-1;
3436 /* Extract a pointer base and an offset for the source. */
3437 rhs = gimple_call_arg (def_stmt, 1);
3438 rhs_offset = 0;
3439 if (TREE_CODE (rhs) == SSA_NAME)
3440 rhs = vn_valueize (rhs);
3441 if (TREE_CODE (rhs) == ADDR_EXPR)
3443 if (AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (rhs)))
3444 && TYPE_REVERSE_STORAGE_ORDER (TREE_TYPE (TREE_TYPE (rhs))))
3445 return (void *)-1;
3446 tree tem = get_addr_base_and_unit_offset (TREE_OPERAND (rhs, 0),
3447 &rhs_offset);
3448 if (!tem)
3449 return (void *)-1;
3450 if (TREE_CODE (tem) == MEM_REF
3451 && poly_int_tree_p (TREE_OPERAND (tem, 1), &mem_offset))
3453 rhs = TREE_OPERAND (tem, 0);
3454 rhs_offset += mem_offset;
3456 else if (DECL_P (tem)
3457 || TREE_CODE (tem) == STRING_CST)
3458 rhs = build_fold_addr_expr (tem);
3459 else
3460 return (void *)-1;
3462 if (TREE_CODE (rhs) == SSA_NAME)
3463 rhs = SSA_VAL (rhs);
3464 else if (TREE_CODE (rhs) != ADDR_EXPR)
3465 return (void *)-1;
3467 /* The bases of the destination and the references have to agree. */
3468 if (TREE_CODE (base) == MEM_REF)
3470 if (TREE_OPERAND (base, 0) != lhs
3471 || !poly_int_tree_p (TREE_OPERAND (base, 1), &mem_offset))
3472 return (void *) -1;
3473 at += mem_offset;
3475 else if (!DECL_P (base)
3476 || TREE_CODE (lhs) != ADDR_EXPR
3477 || TREE_OPERAND (lhs, 0) != base)
3478 return (void *)-1;
3480 /* If the access is completely outside of the memcpy destination
3481 area there is no aliasing. */
3482 if (!ranges_maybe_overlap_p (lhs_offset, copy_size, at, byte_maxsize))
3483 return NULL;
3484 /* And the access has to be contained within the memcpy destination. */
3485 if (!known_subrange_p (at, byte_maxsize, lhs_offset, copy_size))
3486 return (void *)-1;
3488 /* Save the operands since we need to use the original ones for
3489 the hash entry we use. */
3490 if (!data->saved_operands.exists ())
3491 data->saved_operands = vr->operands.copy ();
3493 /* Make room for 2 operands in the new reference. */
3494 if (vr->operands.length () < 2)
3496 vec<vn_reference_op_s> old = vr->operands;
3497 vr->operands.safe_grow_cleared (2, true);
3498 if (old == shared_lookup_references)
3499 shared_lookup_references = vr->operands;
3501 else
3502 vr->operands.truncate (2);
3504 /* The looked-through reference is a simple MEM_REF. */
3505 memset (&op, 0, sizeof (op));
3506 op.type = vr->type;
3507 op.opcode = MEM_REF;
3508 op.op0 = build_int_cst (ptr_type_node, at - lhs_offset + rhs_offset);
3509 op.off = at - lhs_offset + rhs_offset;
3510 vr->operands[0] = op;
3511 op.type = TREE_TYPE (rhs);
3512 op.opcode = TREE_CODE (rhs);
3513 op.op0 = rhs;
3514 op.off = -1;
3515 vr->operands[1] = op;
3516 vr->hashcode = vn_reference_compute_hash (vr);
3518 /* Try folding the new reference to a constant. */
3519 tree val = fully_constant_vn_reference_p (vr);
3520 if (val)
3521 return data->finish (0, 0, val);
3523 /* Adjust *ref from the new operands. */
3524 if (!ao_ref_init_from_vn_reference (&r, 0, 0, vr->type, vr->operands))
3525 return (void *)-1;
3526 /* This can happen with bitfields. */
3527 if (maybe_ne (ref->size, r.size))
3528 return (void *)-1;
3529 *ref = r;
3531 /* Do not update last seen VUSE after translating. */
3532 data->last_vuse_ptr = NULL;
3533 /* Invalidate the original access path since it now contains
3534 the wrong base. */
3535 data->orig_ref.ref = NULL_TREE;
3536 /* Use the alias-set of this stmt for recording an eventual result. */
3537 if (data->first_set == -2)
3539 data->first_set = 0;
3540 data->first_base_set = 0;
3543 /* Keep looking for the adjusted *REF / VR pair. */
3544 return NULL;
3547 /* Bail out and stop walking. */
3548 return (void *)-1;
3551 /* Return a reference op vector from OP that can be used for
3552 vn_reference_lookup_pieces. The caller is responsible for releasing
3553 the vector. */
3555 vec<vn_reference_op_s>
3556 vn_reference_operands_for_lookup (tree op)
3558 bool valueized;
3559 return valueize_shared_reference_ops_from_ref (op, &valueized).copy ();
3562 /* Lookup a reference operation by it's parts, in the current hash table.
3563 Returns the resulting value number if it exists in the hash table,
3564 NULL_TREE otherwise. VNRESULT will be filled in with the actual
3565 vn_reference_t stored in the hashtable if something is found. */
3567 tree
3568 vn_reference_lookup_pieces (tree vuse, alias_set_type set,
3569 alias_set_type base_set, tree type,
3570 vec<vn_reference_op_s> operands,
3571 vn_reference_t *vnresult, vn_lookup_kind kind)
3573 struct vn_reference_s vr1;
3574 vn_reference_t tmp;
3575 tree cst;
3577 if (!vnresult)
3578 vnresult = &tmp;
3579 *vnresult = NULL;
3581 vr1.vuse = vuse_ssa_val (vuse);
3582 shared_lookup_references.truncate (0);
3583 shared_lookup_references.safe_grow (operands.length (), true);
3584 memcpy (shared_lookup_references.address (),
3585 operands.address (),
3586 sizeof (vn_reference_op_s)
3587 * operands.length ());
3588 bool valueized_p;
3589 valueize_refs_1 (&shared_lookup_references, &valueized_p);
3590 vr1.operands = shared_lookup_references;
3591 vr1.type = type;
3592 vr1.set = set;
3593 vr1.base_set = base_set;
3594 vr1.hashcode = vn_reference_compute_hash (&vr1);
3595 if ((cst = fully_constant_vn_reference_p (&vr1)))
3596 return cst;
3598 vn_reference_lookup_1 (&vr1, vnresult);
3599 if (!*vnresult
3600 && kind != VN_NOWALK
3601 && vr1.vuse)
3603 ao_ref r;
3604 unsigned limit = param_sccvn_max_alias_queries_per_access;
3605 vn_walk_cb_data data (&vr1, NULL_TREE, NULL, kind, true, NULL_TREE);
3606 vec<vn_reference_op_s> ops_for_ref;
3607 if (!valueized_p)
3608 ops_for_ref = vr1.operands;
3609 else
3611 /* For ao_ref_from_mem we have to ensure only available SSA names
3612 end up in base and the only convenient way to make this work
3613 for PRE is to re-valueize with that in mind. */
3614 ops_for_ref.create (operands.length ());
3615 ops_for_ref.quick_grow (operands.length ());
3616 memcpy (ops_for_ref.address (),
3617 operands.address (),
3618 sizeof (vn_reference_op_s)
3619 * operands.length ());
3620 valueize_refs_1 (&ops_for_ref, &valueized_p, true);
3622 if (ao_ref_init_from_vn_reference (&r, set, base_set, type,
3623 ops_for_ref))
3624 *vnresult
3625 = ((vn_reference_t)
3626 walk_non_aliased_vuses (&r, vr1.vuse, true, vn_reference_lookup_2,
3627 vn_reference_lookup_3, vuse_valueize,
3628 limit, &data));
3629 if (ops_for_ref != shared_lookup_references)
3630 ops_for_ref.release ();
3631 gcc_checking_assert (vr1.operands == shared_lookup_references);
3634 if (*vnresult)
3635 return (*vnresult)->result;
3637 return NULL_TREE;
3640 /* Lookup OP in the current hash table, and return the resulting value
3641 number if it exists in the hash table. Return NULL_TREE if it does
3642 not exist in the hash table or if the result field of the structure
3643 was NULL.. VNRESULT will be filled in with the vn_reference_t
3644 stored in the hashtable if one exists. When TBAA_P is false assume
3645 we are looking up a store and treat it as having alias-set zero.
3646 *LAST_VUSE_PTR will be updated with the VUSE the value lookup succeeded.
3647 MASK is either NULL_TREE, or can be an INTEGER_CST if the result of the
3648 load is bitwise anded with MASK and so we are only interested in a subset
3649 of the bits and can ignore if the other bits are uninitialized or
3650 not initialized with constants. */
3652 tree
3653 vn_reference_lookup (tree op, tree vuse, vn_lookup_kind kind,
3654 vn_reference_t *vnresult, bool tbaa_p,
3655 tree *last_vuse_ptr, tree mask)
3657 vec<vn_reference_op_s> operands;
3658 struct vn_reference_s vr1;
3659 bool valueized_anything;
3661 if (vnresult)
3662 *vnresult = NULL;
3664 vr1.vuse = vuse_ssa_val (vuse);
3665 vr1.operands = operands
3666 = valueize_shared_reference_ops_from_ref (op, &valueized_anything);
3667 vr1.type = TREE_TYPE (op);
3668 ao_ref op_ref;
3669 ao_ref_init (&op_ref, op);
3670 vr1.set = ao_ref_alias_set (&op_ref);
3671 vr1.base_set = ao_ref_base_alias_set (&op_ref);
3672 vr1.hashcode = vn_reference_compute_hash (&vr1);
3673 if (mask == NULL_TREE)
3674 if (tree cst = fully_constant_vn_reference_p (&vr1))
3675 return cst;
3677 if (kind != VN_NOWALK && vr1.vuse)
3679 vn_reference_t wvnresult;
3680 ao_ref r;
3681 unsigned limit = param_sccvn_max_alias_queries_per_access;
3682 auto_vec<vn_reference_op_s> ops_for_ref;
3683 if (valueized_anything)
3685 copy_reference_ops_from_ref (op, &ops_for_ref);
3686 bool tem;
3687 valueize_refs_1 (&ops_for_ref, &tem, true);
3689 /* Make sure to use a valueized reference if we valueized anything.
3690 Otherwise preserve the full reference for advanced TBAA. */
3691 if (!valueized_anything
3692 || !ao_ref_init_from_vn_reference (&r, vr1.set, vr1.base_set,
3693 vr1.type, ops_for_ref))
3694 ao_ref_init (&r, op);
3695 vn_walk_cb_data data (&vr1, r.ref ? NULL_TREE : op,
3696 last_vuse_ptr, kind, tbaa_p, mask);
3698 wvnresult
3699 = ((vn_reference_t)
3700 walk_non_aliased_vuses (&r, vr1.vuse, tbaa_p, vn_reference_lookup_2,
3701 vn_reference_lookup_3, vuse_valueize, limit,
3702 &data));
3703 gcc_checking_assert (vr1.operands == shared_lookup_references);
3704 if (wvnresult)
3706 gcc_assert (mask == NULL_TREE);
3707 if (vnresult)
3708 *vnresult = wvnresult;
3709 return wvnresult->result;
3711 else if (mask)
3712 return data.masked_result;
3714 return NULL_TREE;
3717 if (last_vuse_ptr)
3718 *last_vuse_ptr = vr1.vuse;
3719 if (mask)
3720 return NULL_TREE;
3721 return vn_reference_lookup_1 (&vr1, vnresult);
3724 /* Lookup CALL in the current hash table and return the entry in
3725 *VNRESULT if found. Populates *VR for the hashtable lookup. */
3727 void
3728 vn_reference_lookup_call (gcall *call, vn_reference_t *vnresult,
3729 vn_reference_t vr)
3731 if (vnresult)
3732 *vnresult = NULL;
3734 tree vuse = gimple_vuse (call);
3736 vr->vuse = vuse ? SSA_VAL (vuse) : NULL_TREE;
3737 vr->operands = valueize_shared_reference_ops_from_call (call);
3738 tree lhs = gimple_call_lhs (call);
3739 /* For non-SSA return values the referece ops contain the LHS. */
3740 vr->type = ((lhs && TREE_CODE (lhs) == SSA_NAME)
3741 ? TREE_TYPE (lhs) : NULL_TREE);
3742 vr->punned = false;
3743 vr->set = 0;
3744 vr->base_set = 0;
3745 vr->hashcode = vn_reference_compute_hash (vr);
3746 vn_reference_lookup_1 (vr, vnresult);
3749 /* Insert OP into the current hash table with a value number of RESULT. */
3751 static void
3752 vn_reference_insert (tree op, tree result, tree vuse, tree vdef)
3754 vn_reference_s **slot;
3755 vn_reference_t vr1;
3756 bool tem;
3758 vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
3759 if (TREE_CODE (result) == SSA_NAME)
3760 vr1->value_id = VN_INFO (result)->value_id;
3761 else
3762 vr1->value_id = get_or_alloc_constant_value_id (result);
3763 vr1->vuse = vuse_ssa_val (vuse);
3764 vr1->operands = valueize_shared_reference_ops_from_ref (op, &tem).copy ();
3765 vr1->type = TREE_TYPE (op);
3766 vr1->punned = false;
3767 ao_ref op_ref;
3768 ao_ref_init (&op_ref, op);
3769 vr1->set = ao_ref_alias_set (&op_ref);
3770 vr1->base_set = ao_ref_base_alias_set (&op_ref);
3771 vr1->hashcode = vn_reference_compute_hash (vr1);
3772 vr1->result = TREE_CODE (result) == SSA_NAME ? SSA_VAL (result) : result;
3773 vr1->result_vdef = vdef;
3775 slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
3776 INSERT);
3778 /* Because IL walking on reference lookup can end up visiting
3779 a def that is only to be visited later in iteration order
3780 when we are about to make an irreducible region reducible
3781 the def can be effectively processed and its ref being inserted
3782 by vn_reference_lookup_3 already. So we cannot assert (!*slot)
3783 but save a lookup if we deal with already inserted refs here. */
3784 if (*slot)
3786 /* We cannot assert that we have the same value either because
3787 when disentangling an irreducible region we may end up visiting
3788 a use before the corresponding def. That's a missed optimization
3789 only though. See gcc.dg/tree-ssa/pr87126.c for example. */
3790 if (dump_file && (dump_flags & TDF_DETAILS)
3791 && !operand_equal_p ((*slot)->result, vr1->result, 0))
3793 fprintf (dump_file, "Keeping old value ");
3794 print_generic_expr (dump_file, (*slot)->result);
3795 fprintf (dump_file, " because of collision\n");
3797 free_reference (vr1);
3798 obstack_free (&vn_tables_obstack, vr1);
3799 return;
3802 *slot = vr1;
3803 vr1->next = last_inserted_ref;
3804 last_inserted_ref = vr1;
3807 /* Insert a reference by it's pieces into the current hash table with
3808 a value number of RESULT. Return the resulting reference
3809 structure we created. */
3811 vn_reference_t
3812 vn_reference_insert_pieces (tree vuse, alias_set_type set,
3813 alias_set_type base_set, tree type,
3814 vec<vn_reference_op_s> operands,
3815 tree result, unsigned int value_id)
3818 vn_reference_s **slot;
3819 vn_reference_t vr1;
3821 vr1 = XOBNEW (&vn_tables_obstack, vn_reference_s);
3822 vr1->value_id = value_id;
3823 vr1->vuse = vuse_ssa_val (vuse);
3824 vr1->operands = operands;
3825 valueize_refs (&vr1->operands);
3826 vr1->type = type;
3827 vr1->punned = false;
3828 vr1->set = set;
3829 vr1->base_set = base_set;
3830 vr1->hashcode = vn_reference_compute_hash (vr1);
3831 if (result && TREE_CODE (result) == SSA_NAME)
3832 result = SSA_VAL (result);
3833 vr1->result = result;
3834 vr1->result_vdef = NULL_TREE;
3836 slot = valid_info->references->find_slot_with_hash (vr1, vr1->hashcode,
3837 INSERT);
3839 /* At this point we should have all the things inserted that we have
3840 seen before, and we should never try inserting something that
3841 already exists. */
3842 gcc_assert (!*slot);
3844 *slot = vr1;
3845 vr1->next = last_inserted_ref;
3846 last_inserted_ref = vr1;
3847 return vr1;
3850 /* Compute and return the hash value for nary operation VBO1. */
3852 static hashval_t
3853 vn_nary_op_compute_hash (const vn_nary_op_t vno1)
3855 inchash::hash hstate;
3856 unsigned i;
3858 for (i = 0; i < vno1->length; ++i)
3859 if (TREE_CODE (vno1->op[i]) == SSA_NAME)
3860 vno1->op[i] = SSA_VAL (vno1->op[i]);
3862 if (((vno1->length == 2
3863 && commutative_tree_code (vno1->opcode))
3864 || (vno1->length == 3
3865 && commutative_ternary_tree_code (vno1->opcode)))
3866 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
3867 std::swap (vno1->op[0], vno1->op[1]);
3868 else if (TREE_CODE_CLASS (vno1->opcode) == tcc_comparison
3869 && tree_swap_operands_p (vno1->op[0], vno1->op[1]))
3871 std::swap (vno1->op[0], vno1->op[1]);
3872 vno1->opcode = swap_tree_comparison (vno1->opcode);
3875 hstate.add_int (vno1->opcode);
3876 for (i = 0; i < vno1->length; ++i)
3877 inchash::add_expr (vno1->op[i], hstate);
3879 return hstate.end ();
3882 /* Compare nary operations VNO1 and VNO2 and return true if they are
3883 equivalent. */
3885 bool
3886 vn_nary_op_eq (const_vn_nary_op_t const vno1, const_vn_nary_op_t const vno2)
3888 unsigned i;
3890 if (vno1->hashcode != vno2->hashcode)
3891 return false;
3893 if (vno1->length != vno2->length)
3894 return false;
3896 if (vno1->opcode != vno2->opcode
3897 || !types_compatible_p (vno1->type, vno2->type))
3898 return false;
3900 for (i = 0; i < vno1->length; ++i)
3901 if (!expressions_equal_p (vno1->op[i], vno2->op[i]))
3902 return false;
3904 /* BIT_INSERT_EXPR has an implict operand as the type precision
3905 of op1. Need to check to make sure they are the same. */
3906 if (vno1->opcode == BIT_INSERT_EXPR
3907 && TREE_CODE (vno1->op[1]) == INTEGER_CST
3908 && TYPE_PRECISION (TREE_TYPE (vno1->op[1]))
3909 != TYPE_PRECISION (TREE_TYPE (vno2->op[1])))
3910 return false;
3912 return true;
3915 /* Initialize VNO from the pieces provided. */
3917 static void
3918 init_vn_nary_op_from_pieces (vn_nary_op_t vno, unsigned int length,
3919 enum tree_code code, tree type, tree *ops)
3921 vno->opcode = code;
3922 vno->length = length;
3923 vno->type = type;
3924 memcpy (&vno->op[0], ops, sizeof (tree) * length);
3927 /* Return the number of operands for a vn_nary ops structure from STMT. */
3929 static unsigned int
3930 vn_nary_length_from_stmt (gimple *stmt)
3932 switch (gimple_assign_rhs_code (stmt))
3934 case REALPART_EXPR:
3935 case IMAGPART_EXPR:
3936 case VIEW_CONVERT_EXPR:
3937 return 1;
3939 case BIT_FIELD_REF:
3940 return 3;
3942 case CONSTRUCTOR:
3943 return CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
3945 default:
3946 return gimple_num_ops (stmt) - 1;
3950 /* Initialize VNO from STMT. */
3952 static void
3953 init_vn_nary_op_from_stmt (vn_nary_op_t vno, gassign *stmt)
3955 unsigned i;
3957 vno->opcode = gimple_assign_rhs_code (stmt);
3958 vno->type = TREE_TYPE (gimple_assign_lhs (stmt));
3959 switch (vno->opcode)
3961 case REALPART_EXPR:
3962 case IMAGPART_EXPR:
3963 case VIEW_CONVERT_EXPR:
3964 vno->length = 1;
3965 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
3966 break;
3968 case BIT_FIELD_REF:
3969 vno->length = 3;
3970 vno->op[0] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
3971 vno->op[1] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 1);
3972 vno->op[2] = TREE_OPERAND (gimple_assign_rhs1 (stmt), 2);
3973 break;
3975 case CONSTRUCTOR:
3976 vno->length = CONSTRUCTOR_NELTS (gimple_assign_rhs1 (stmt));
3977 for (i = 0; i < vno->length; ++i)
3978 vno->op[i] = CONSTRUCTOR_ELT (gimple_assign_rhs1 (stmt), i)->value;
3979 break;
3981 default:
3982 gcc_checking_assert (!gimple_assign_single_p (stmt));
3983 vno->length = gimple_num_ops (stmt) - 1;
3984 for (i = 0; i < vno->length; ++i)
3985 vno->op[i] = gimple_op (stmt, i + 1);
3989 /* Compute the hashcode for VNO and look for it in the hash table;
3990 return the resulting value number if it exists in the hash table.
3991 Return NULL_TREE if it does not exist in the hash table or if the
3992 result field of the operation is NULL. VNRESULT will contain the
3993 vn_nary_op_t from the hashtable if it exists. */
3995 static tree
3996 vn_nary_op_lookup_1 (vn_nary_op_t vno, vn_nary_op_t *vnresult)
3998 vn_nary_op_s **slot;
4000 if (vnresult)
4001 *vnresult = NULL;
4003 vno->hashcode = vn_nary_op_compute_hash (vno);
4004 slot = valid_info->nary->find_slot_with_hash (vno, vno->hashcode, NO_INSERT);
4005 if (!slot)
4006 return NULL_TREE;
4007 if (vnresult)
4008 *vnresult = *slot;
4009 return (*slot)->predicated_values ? NULL_TREE : (*slot)->u.result;
4012 /* Lookup a n-ary operation by its pieces and return the resulting value
4013 number if it exists in the hash table. Return NULL_TREE if it does
4014 not exist in the hash table or if the result field of the operation
4015 is NULL. VNRESULT will contain the vn_nary_op_t from the hashtable
4016 if it exists. */
4018 tree
4019 vn_nary_op_lookup_pieces (unsigned int length, enum tree_code code,
4020 tree type, tree *ops, vn_nary_op_t *vnresult)
4022 vn_nary_op_t vno1 = XALLOCAVAR (struct vn_nary_op_s,
4023 sizeof_vn_nary_op (length));
4024 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
4025 return vn_nary_op_lookup_1 (vno1, vnresult);
4028 /* Lookup the rhs of STMT in the current hash table, and return the resulting
4029 value number if it exists in the hash table. Return NULL_TREE if
4030 it does not exist in the hash table. VNRESULT will contain the
4031 vn_nary_op_t from the hashtable if it exists. */
4033 tree
4034 vn_nary_op_lookup_stmt (gimple *stmt, vn_nary_op_t *vnresult)
4036 vn_nary_op_t vno1
4037 = XALLOCAVAR (struct vn_nary_op_s,
4038 sizeof_vn_nary_op (vn_nary_length_from_stmt (stmt)));
4039 init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (stmt));
4040 return vn_nary_op_lookup_1 (vno1, vnresult);
4043 /* Allocate a vn_nary_op_t with LENGTH operands on STACK. */
4045 static vn_nary_op_t
4046 alloc_vn_nary_op_noinit (unsigned int length, struct obstack *stack)
4048 return (vn_nary_op_t) obstack_alloc (stack, sizeof_vn_nary_op (length));
4051 /* Allocate and initialize a vn_nary_op_t on CURRENT_INFO's
4052 obstack. */
4054 static vn_nary_op_t
4055 alloc_vn_nary_op (unsigned int length, tree result, unsigned int value_id)
4057 vn_nary_op_t vno1 = alloc_vn_nary_op_noinit (length, &vn_tables_obstack);
4059 vno1->value_id = value_id;
4060 vno1->length = length;
4061 vno1->predicated_values = 0;
4062 vno1->u.result = result;
4064 return vno1;
4067 /* Insert VNO into TABLE. If COMPUTE_HASH is true, then compute
4068 VNO->HASHCODE first. */
4070 static vn_nary_op_t
4071 vn_nary_op_insert_into (vn_nary_op_t vno, vn_nary_op_table_type *table,
4072 bool compute_hash)
4074 vn_nary_op_s **slot;
4076 if (compute_hash)
4078 vno->hashcode = vn_nary_op_compute_hash (vno);
4079 gcc_assert (! vno->predicated_values
4080 || (! vno->u.values->next
4081 && vno->u.values->n == 1));
4084 slot = table->find_slot_with_hash (vno, vno->hashcode, INSERT);
4085 vno->unwind_to = *slot;
4086 if (*slot)
4088 /* Prefer non-predicated values.
4089 ??? Only if those are constant, otherwise, with constant predicated
4090 value, turn them into predicated values with entry-block validity
4091 (??? but we always find the first valid result currently). */
4092 if ((*slot)->predicated_values
4093 && ! vno->predicated_values)
4095 /* ??? We cannot remove *slot from the unwind stack list.
4096 For the moment we deal with this by skipping not found
4097 entries but this isn't ideal ... */
4098 *slot = vno;
4099 /* ??? Maintain a stack of states we can unwind in
4100 vn_nary_op_s? But how far do we unwind? In reality
4101 we need to push change records somewhere... Or not
4102 unwind vn_nary_op_s and linking them but instead
4103 unwind the results "list", linking that, which also
4104 doesn't move on hashtable resize. */
4105 /* We can also have a ->unwind_to recording *slot there.
4106 That way we can make u.values a fixed size array with
4107 recording the number of entries but of course we then
4108 have always N copies for each unwind_to-state. Or we
4109 make sure to only ever append and each unwinding will
4110 pop off one entry (but how to deal with predicated
4111 replaced with non-predicated here?) */
4112 vno->next = last_inserted_nary;
4113 last_inserted_nary = vno;
4114 return vno;
4116 else if (vno->predicated_values
4117 && ! (*slot)->predicated_values)
4118 return *slot;
4119 else if (vno->predicated_values
4120 && (*slot)->predicated_values)
4122 /* ??? Factor this all into a insert_single_predicated_value
4123 routine. */
4124 gcc_assert (!vno->u.values->next && vno->u.values->n == 1);
4125 basic_block vno_bb
4126 = BASIC_BLOCK_FOR_FN (cfun, vno->u.values->valid_dominated_by_p[0]);
4127 vn_pval *nval = vno->u.values;
4128 vn_pval **next = &vno->u.values;
4129 bool found = false;
4130 for (vn_pval *val = (*slot)->u.values; val; val = val->next)
4132 if (expressions_equal_p (val->result, nval->result))
4134 found = true;
4135 for (unsigned i = 0; i < val->n; ++i)
4137 basic_block val_bb
4138 = BASIC_BLOCK_FOR_FN (cfun,
4139 val->valid_dominated_by_p[i]);
4140 if (dominated_by_p (CDI_DOMINATORS, vno_bb, val_bb))
4141 /* Value registered with more generic predicate. */
4142 return *slot;
4143 else if (dominated_by_p (CDI_DOMINATORS, val_bb, vno_bb))
4144 /* Shouldn't happen, we insert in RPO order. */
4145 gcc_unreachable ();
4147 /* Append value. */
4148 *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
4149 sizeof (vn_pval)
4150 + val->n * sizeof (int));
4151 (*next)->next = NULL;
4152 (*next)->result = val->result;
4153 (*next)->n = val->n + 1;
4154 memcpy ((*next)->valid_dominated_by_p,
4155 val->valid_dominated_by_p,
4156 val->n * sizeof (int));
4157 (*next)->valid_dominated_by_p[val->n] = vno_bb->index;
4158 next = &(*next)->next;
4159 if (dump_file && (dump_flags & TDF_DETAILS))
4160 fprintf (dump_file, "Appending predicate to value.\n");
4161 continue;
4163 /* Copy other predicated values. */
4164 *next = (vn_pval *) obstack_alloc (&vn_tables_obstack,
4165 sizeof (vn_pval)
4166 + (val->n-1) * sizeof (int));
4167 memcpy (*next, val, sizeof (vn_pval) + (val->n-1) * sizeof (int));
4168 (*next)->next = NULL;
4169 next = &(*next)->next;
4171 if (!found)
4172 *next = nval;
4174 *slot = vno;
4175 vno->next = last_inserted_nary;
4176 last_inserted_nary = vno;
4177 return vno;
4180 /* While we do not want to insert things twice it's awkward to
4181 avoid it in the case where visit_nary_op pattern-matches stuff
4182 and ends up simplifying the replacement to itself. We then
4183 get two inserts, one from visit_nary_op and one from
4184 vn_nary_build_or_lookup.
4185 So allow inserts with the same value number. */
4186 if ((*slot)->u.result == vno->u.result)
4187 return *slot;
4190 /* ??? There's also optimistic vs. previous commited state merging
4191 that is problematic for the case of unwinding. */
4193 /* ??? We should return NULL if we do not use 'vno' and have the
4194 caller release it. */
4195 gcc_assert (!*slot);
4197 *slot = vno;
4198 vno->next = last_inserted_nary;
4199 last_inserted_nary = vno;
4200 return vno;
4203 /* Insert a n-ary operation into the current hash table using it's
4204 pieces. Return the vn_nary_op_t structure we created and put in
4205 the hashtable. */
4207 vn_nary_op_t
4208 vn_nary_op_insert_pieces (unsigned int length, enum tree_code code,
4209 tree type, tree *ops,
4210 tree result, unsigned int value_id)
4212 vn_nary_op_t vno1 = alloc_vn_nary_op (length, result, value_id);
4213 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
4214 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
4217 static vn_nary_op_t
4218 vn_nary_op_insert_pieces_predicated (unsigned int length, enum tree_code code,
4219 tree type, tree *ops,
4220 tree result, unsigned int value_id,
4221 edge pred_e)
4223 /* ??? Currently tracking BBs. */
4224 if (! single_pred_p (pred_e->dest))
4226 /* Never record for backedges. */
4227 if (pred_e->flags & EDGE_DFS_BACK)
4228 return NULL;
4229 edge_iterator ei;
4230 edge e;
4231 int cnt = 0;
4232 /* Ignore backedges. */
4233 FOR_EACH_EDGE (e, ei, pred_e->dest->preds)
4234 if (! dominated_by_p (CDI_DOMINATORS, e->src, e->dest))
4235 cnt++;
4236 if (cnt != 1)
4237 return NULL;
4239 if (dump_file && (dump_flags & TDF_DETAILS)
4240 /* ??? Fix dumping, but currently we only get comparisons. */
4241 && TREE_CODE_CLASS (code) == tcc_comparison)
4243 fprintf (dump_file, "Recording on edge %d->%d ", pred_e->src->index,
4244 pred_e->dest->index);
4245 print_generic_expr (dump_file, ops[0], TDF_SLIM);
4246 fprintf (dump_file, " %s ", get_tree_code_name (code));
4247 print_generic_expr (dump_file, ops[1], TDF_SLIM);
4248 fprintf (dump_file, " == %s\n",
4249 integer_zerop (result) ? "false" : "true");
4251 vn_nary_op_t vno1 = alloc_vn_nary_op (length, NULL_TREE, value_id);
4252 init_vn_nary_op_from_pieces (vno1, length, code, type, ops);
4253 vno1->predicated_values = 1;
4254 vno1->u.values = (vn_pval *) obstack_alloc (&vn_tables_obstack,
4255 sizeof (vn_pval));
4256 vno1->u.values->next = NULL;
4257 vno1->u.values->result = result;
4258 vno1->u.values->n = 1;
4259 vno1->u.values->valid_dominated_by_p[0] = pred_e->dest->index;
4260 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
4263 static bool
4264 dominated_by_p_w_unex (basic_block bb1, basic_block bb2, bool);
4266 static tree
4267 vn_nary_op_get_predicated_value (vn_nary_op_t vno, basic_block bb)
4269 if (! vno->predicated_values)
4270 return vno->u.result;
4271 for (vn_pval *val = vno->u.values; val; val = val->next)
4272 for (unsigned i = 0; i < val->n; ++i)
4273 /* Do not handle backedge executability optimistically since
4274 when figuring out whether to iterate we do not consider
4275 changed predication. */
4276 if (dominated_by_p_w_unex
4277 (bb, BASIC_BLOCK_FOR_FN (cfun, val->valid_dominated_by_p[i]),
4278 false))
4279 return val->result;
4280 return NULL_TREE;
4283 /* Insert the rhs of STMT into the current hash table with a value number of
4284 RESULT. */
4286 static vn_nary_op_t
4287 vn_nary_op_insert_stmt (gimple *stmt, tree result)
4289 vn_nary_op_t vno1
4290 = alloc_vn_nary_op (vn_nary_length_from_stmt (stmt),
4291 result, VN_INFO (result)->value_id);
4292 init_vn_nary_op_from_stmt (vno1, as_a <gassign *> (stmt));
4293 return vn_nary_op_insert_into (vno1, valid_info->nary, true);
4296 /* Compute a hashcode for PHI operation VP1 and return it. */
4298 static inline hashval_t
4299 vn_phi_compute_hash (vn_phi_t vp1)
4301 inchash::hash hstate;
4302 tree phi1op;
4303 tree type;
4304 edge e;
4305 edge_iterator ei;
4307 hstate.add_int (EDGE_COUNT (vp1->block->preds));
4308 switch (EDGE_COUNT (vp1->block->preds))
4310 case 1:
4311 break;
4312 case 2:
4313 if (vp1->block->loop_father->header == vp1->block)
4315 else
4316 break;
4317 /* Fallthru. */
4318 default:
4319 hstate.add_int (vp1->block->index);
4322 /* If all PHI arguments are constants we need to distinguish
4323 the PHI node via its type. */
4324 type = vp1->type;
4325 hstate.merge_hash (vn_hash_type (type));
4327 FOR_EACH_EDGE (e, ei, vp1->block->preds)
4329 /* Don't hash backedge values they need to be handled as VN_TOP
4330 for optimistic value-numbering. */
4331 if (e->flags & EDGE_DFS_BACK)
4332 continue;
4334 phi1op = vp1->phiargs[e->dest_idx];
4335 if (phi1op == VN_TOP)
4336 continue;
4337 inchash::add_expr (phi1op, hstate);
4340 return hstate.end ();
4344 /* Return true if COND1 and COND2 represent the same condition, set
4345 *INVERTED_P if one needs to be inverted to make it the same as
4346 the other. */
4348 static bool
4349 cond_stmts_equal_p (gcond *cond1, tree lhs1, tree rhs1,
4350 gcond *cond2, tree lhs2, tree rhs2, bool *inverted_p)
4352 enum tree_code code1 = gimple_cond_code (cond1);
4353 enum tree_code code2 = gimple_cond_code (cond2);
4355 *inverted_p = false;
4356 if (code1 == code2)
4358 else if (code1 == swap_tree_comparison (code2))
4359 std::swap (lhs2, rhs2);
4360 else if (code1 == invert_tree_comparison (code2, HONOR_NANS (lhs2)))
4361 *inverted_p = true;
4362 else if (code1 == invert_tree_comparison
4363 (swap_tree_comparison (code2), HONOR_NANS (lhs2)))
4365 std::swap (lhs2, rhs2);
4366 *inverted_p = true;
4368 else
4369 return false;
4371 return ((expressions_equal_p (lhs1, lhs2)
4372 && expressions_equal_p (rhs1, rhs2))
4373 || (commutative_tree_code (code1)
4374 && expressions_equal_p (lhs1, rhs2)
4375 && expressions_equal_p (rhs1, lhs2)));
4378 /* Compare two phi entries for equality, ignoring VN_TOP arguments. */
4380 static int
4381 vn_phi_eq (const_vn_phi_t const vp1, const_vn_phi_t const vp2)
4383 if (vp1->hashcode != vp2->hashcode)
4384 return false;
4386 if (vp1->block != vp2->block)
4388 if (EDGE_COUNT (vp1->block->preds) != EDGE_COUNT (vp2->block->preds))
4389 return false;
4391 switch (EDGE_COUNT (vp1->block->preds))
4393 case 1:
4394 /* Single-arg PHIs are just copies. */
4395 break;
4397 case 2:
4399 /* Rule out backedges into the PHI. */
4400 if (vp1->block->loop_father->header == vp1->block
4401 || vp2->block->loop_father->header == vp2->block)
4402 return false;
4404 /* If the PHI nodes do not have compatible types
4405 they are not the same. */
4406 if (!types_compatible_p (vp1->type, vp2->type))
4407 return false;
4409 basic_block idom1
4410 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
4411 basic_block idom2
4412 = get_immediate_dominator (CDI_DOMINATORS, vp2->block);
4413 /* If the immediate dominator end in switch stmts multiple
4414 values may end up in the same PHI arg via intermediate
4415 CFG merges. */
4416 if (EDGE_COUNT (idom1->succs) != 2
4417 || EDGE_COUNT (idom2->succs) != 2)
4418 return false;
4420 /* Verify the controlling stmt is the same. */
4421 gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1));
4422 gcond *last2 = safe_dyn_cast <gcond *> (last_stmt (idom2));
4423 if (! last1 || ! last2)
4424 return false;
4425 bool inverted_p;
4426 if (! cond_stmts_equal_p (last1, vp1->cclhs, vp1->ccrhs,
4427 last2, vp2->cclhs, vp2->ccrhs,
4428 &inverted_p))
4429 return false;
4431 /* Get at true/false controlled edges into the PHI. */
4432 edge te1, te2, fe1, fe2;
4433 if (! extract_true_false_controlled_edges (idom1, vp1->block,
4434 &te1, &fe1)
4435 || ! extract_true_false_controlled_edges (idom2, vp2->block,
4436 &te2, &fe2))
4437 return false;
4439 /* Swap edges if the second condition is the inverted of the
4440 first. */
4441 if (inverted_p)
4442 std::swap (te2, fe2);
4444 /* Since we do not know which edge will be executed we have
4445 to be careful when matching VN_TOP. Be conservative and
4446 only match VN_TOP == VN_TOP for now, we could allow
4447 VN_TOP on the not prevailing PHI though. See for example
4448 PR102920. */
4449 if (! expressions_equal_p (vp1->phiargs[te1->dest_idx],
4450 vp2->phiargs[te2->dest_idx], false)
4451 || ! expressions_equal_p (vp1->phiargs[fe1->dest_idx],
4452 vp2->phiargs[fe2->dest_idx], false))
4453 return false;
4455 return true;
4458 default:
4459 return false;
4463 /* If the PHI nodes do not have compatible types
4464 they are not the same. */
4465 if (!types_compatible_p (vp1->type, vp2->type))
4466 return false;
4468 /* Any phi in the same block will have it's arguments in the
4469 same edge order, because of how we store phi nodes. */
4470 unsigned nargs = EDGE_COUNT (vp1->block->preds);
4471 for (unsigned i = 0; i < nargs; ++i)
4473 tree phi1op = vp1->phiargs[i];
4474 tree phi2op = vp2->phiargs[i];
4475 if (phi1op == phi2op)
4476 continue;
4477 if (!expressions_equal_p (phi1op, phi2op, false))
4478 return false;
4481 return true;
4484 /* Lookup PHI in the current hash table, and return the resulting
4485 value number if it exists in the hash table. Return NULL_TREE if
4486 it does not exist in the hash table. */
4488 static tree
4489 vn_phi_lookup (gimple *phi, bool backedges_varying_p)
4491 vn_phi_s **slot;
4492 struct vn_phi_s *vp1;
4493 edge e;
4494 edge_iterator ei;
4496 vp1 = XALLOCAVAR (struct vn_phi_s,
4497 sizeof (struct vn_phi_s)
4498 + (gimple_phi_num_args (phi) - 1) * sizeof (tree));
4500 /* Canonicalize the SSA_NAME's to their value number. */
4501 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
4503 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4504 if (TREE_CODE (def) == SSA_NAME
4505 && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
4507 if (ssa_undefined_value_p (def, false))
4508 def = VN_TOP;
4509 else
4510 def = SSA_VAL (def);
4512 vp1->phiargs[e->dest_idx] = def;
4514 vp1->type = TREE_TYPE (gimple_phi_result (phi));
4515 vp1->block = gimple_bb (phi);
4516 /* Extract values of the controlling condition. */
4517 vp1->cclhs = NULL_TREE;
4518 vp1->ccrhs = NULL_TREE;
4519 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
4520 if (EDGE_COUNT (idom1->succs) == 2)
4521 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
4523 /* ??? We want to use SSA_VAL here. But possibly not
4524 allow VN_TOP. */
4525 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
4526 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
4528 vp1->hashcode = vn_phi_compute_hash (vp1);
4529 slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, NO_INSERT);
4530 if (!slot)
4531 return NULL_TREE;
4532 return (*slot)->result;
4535 /* Insert PHI into the current hash table with a value number of
4536 RESULT. */
4538 static vn_phi_t
4539 vn_phi_insert (gimple *phi, tree result, bool backedges_varying_p)
4541 vn_phi_s **slot;
4542 vn_phi_t vp1 = (vn_phi_t) obstack_alloc (&vn_tables_obstack,
4543 sizeof (vn_phi_s)
4544 + ((gimple_phi_num_args (phi) - 1)
4545 * sizeof (tree)));
4546 edge e;
4547 edge_iterator ei;
4549 /* Canonicalize the SSA_NAME's to their value number. */
4550 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
4552 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
4553 if (TREE_CODE (def) == SSA_NAME
4554 && (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK)))
4556 if (ssa_undefined_value_p (def, false))
4557 def = VN_TOP;
4558 else
4559 def = SSA_VAL (def);
4561 vp1->phiargs[e->dest_idx] = def;
4563 vp1->value_id = VN_INFO (result)->value_id;
4564 vp1->type = TREE_TYPE (gimple_phi_result (phi));
4565 vp1->block = gimple_bb (phi);
4566 /* Extract values of the controlling condition. */
4567 vp1->cclhs = NULL_TREE;
4568 vp1->ccrhs = NULL_TREE;
4569 basic_block idom1 = get_immediate_dominator (CDI_DOMINATORS, vp1->block);
4570 if (EDGE_COUNT (idom1->succs) == 2)
4571 if (gcond *last1 = safe_dyn_cast <gcond *> (last_stmt (idom1)))
4573 /* ??? We want to use SSA_VAL here. But possibly not
4574 allow VN_TOP. */
4575 vp1->cclhs = vn_valueize (gimple_cond_lhs (last1));
4576 vp1->ccrhs = vn_valueize (gimple_cond_rhs (last1));
4578 vp1->result = result;
4579 vp1->hashcode = vn_phi_compute_hash (vp1);
4581 slot = valid_info->phis->find_slot_with_hash (vp1, vp1->hashcode, INSERT);
4582 gcc_assert (!*slot);
4584 *slot = vp1;
4585 vp1->next = last_inserted_phi;
4586 last_inserted_phi = vp1;
4587 return vp1;
4591 /* Return true if BB1 is dominated by BB2 taking into account edges
4592 that are not executable. When ALLOW_BACK is false consider not
4593 executable backedges as executable. */
4595 static bool
4596 dominated_by_p_w_unex (basic_block bb1, basic_block bb2, bool allow_back)
4598 edge_iterator ei;
4599 edge e;
4601 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4602 return true;
4604 /* Before iterating we'd like to know if there exists a
4605 (executable) path from bb2 to bb1 at all, if not we can
4606 directly return false. For now simply iterate once. */
4608 /* Iterate to the single executable bb1 predecessor. */
4609 if (EDGE_COUNT (bb1->preds) > 1)
4611 edge prede = NULL;
4612 FOR_EACH_EDGE (e, ei, bb1->preds)
4613 if ((e->flags & EDGE_EXECUTABLE)
4614 || (!allow_back && (e->flags & EDGE_DFS_BACK)))
4616 if (prede)
4618 prede = NULL;
4619 break;
4621 prede = e;
4623 if (prede)
4625 bb1 = prede->src;
4627 /* Re-do the dominance check with changed bb1. */
4628 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4629 return true;
4633 /* Iterate to the single executable bb2 successor. */
4634 edge succe = NULL;
4635 FOR_EACH_EDGE (e, ei, bb2->succs)
4636 if ((e->flags & EDGE_EXECUTABLE)
4637 || (!allow_back && (e->flags & EDGE_DFS_BACK)))
4639 if (succe)
4641 succe = NULL;
4642 break;
4644 succe = e;
4646 if (succe)
4648 /* Verify the reached block is only reached through succe.
4649 If there is only one edge we can spare us the dominator
4650 check and iterate directly. */
4651 if (EDGE_COUNT (succe->dest->preds) > 1)
4653 FOR_EACH_EDGE (e, ei, succe->dest->preds)
4654 if (e != succe
4655 && ((e->flags & EDGE_EXECUTABLE)
4656 || (!allow_back && (e->flags & EDGE_DFS_BACK))))
4658 succe = NULL;
4659 break;
4662 if (succe)
4664 bb2 = succe->dest;
4666 /* Re-do the dominance check with changed bb2. */
4667 if (dominated_by_p (CDI_DOMINATORS, bb1, bb2))
4668 return true;
4672 /* We could now iterate updating bb1 / bb2. */
4673 return false;
4676 /* Set the value number of FROM to TO, return true if it has changed
4677 as a result. */
4679 static inline bool
4680 set_ssa_val_to (tree from, tree to)
4682 vn_ssa_aux_t from_info = VN_INFO (from);
4683 tree currval = from_info->valnum; // SSA_VAL (from)
4684 poly_int64 toff, coff;
4685 bool curr_undefined = false;
4686 bool curr_invariant = false;
4688 /* The only thing we allow as value numbers are ssa_names
4689 and invariants. So assert that here. We don't allow VN_TOP
4690 as visiting a stmt should produce a value-number other than
4691 that.
4692 ??? Still VN_TOP can happen for unreachable code, so force
4693 it to varying in that case. Not all code is prepared to
4694 get VN_TOP on valueization. */
4695 if (to == VN_TOP)
4697 /* ??? When iterating and visiting PHI <undef, backedge-value>
4698 for the first time we rightfully get VN_TOP and we need to
4699 preserve that to optimize for example gcc.dg/tree-ssa/ssa-sccvn-2.c.
4700 With SCCVN we were simply lucky we iterated the other PHI
4701 cycles first and thus visited the backedge-value DEF. */
4702 if (currval == VN_TOP)
4703 goto set_and_exit;
4704 if (dump_file && (dump_flags & TDF_DETAILS))
4705 fprintf (dump_file, "Forcing value number to varying on "
4706 "receiving VN_TOP\n");
4707 to = from;
4710 gcc_checking_assert (to != NULL_TREE
4711 && ((TREE_CODE (to) == SSA_NAME
4712 && (to == from || SSA_VAL (to) == to))
4713 || is_gimple_min_invariant (to)));
4715 if (from != to)
4717 if (currval == from)
4719 if (dump_file && (dump_flags & TDF_DETAILS))
4721 fprintf (dump_file, "Not changing value number of ");
4722 print_generic_expr (dump_file, from);
4723 fprintf (dump_file, " from VARYING to ");
4724 print_generic_expr (dump_file, to);
4725 fprintf (dump_file, "\n");
4727 return false;
4729 curr_invariant = is_gimple_min_invariant (currval);
4730 curr_undefined = (TREE_CODE (currval) == SSA_NAME
4731 && ssa_undefined_value_p (currval, false));
4732 if (currval != VN_TOP
4733 && !curr_invariant
4734 && !curr_undefined
4735 && is_gimple_min_invariant (to))
4737 if (dump_file && (dump_flags & TDF_DETAILS))
4739 fprintf (dump_file, "Forcing VARYING instead of changing "
4740 "value number of ");
4741 print_generic_expr (dump_file, from);
4742 fprintf (dump_file, " from ");
4743 print_generic_expr (dump_file, currval);
4744 fprintf (dump_file, " (non-constant) to ");
4745 print_generic_expr (dump_file, to);
4746 fprintf (dump_file, " (constant)\n");
4748 to = from;
4750 else if (currval != VN_TOP
4751 && !curr_undefined
4752 && TREE_CODE (to) == SSA_NAME
4753 && ssa_undefined_value_p (to, false))
4755 if (dump_file && (dump_flags & TDF_DETAILS))
4757 fprintf (dump_file, "Forcing VARYING instead of changing "
4758 "value number of ");
4759 print_generic_expr (dump_file, from);
4760 fprintf (dump_file, " from ");
4761 print_generic_expr (dump_file, currval);
4762 fprintf (dump_file, " (non-undefined) to ");
4763 print_generic_expr (dump_file, to);
4764 fprintf (dump_file, " (undefined)\n");
4766 to = from;
4768 else if (TREE_CODE (to) == SSA_NAME
4769 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (to))
4770 to = from;
4773 set_and_exit:
4774 if (dump_file && (dump_flags & TDF_DETAILS))
4776 fprintf (dump_file, "Setting value number of ");
4777 print_generic_expr (dump_file, from);
4778 fprintf (dump_file, " to ");
4779 print_generic_expr (dump_file, to);
4782 if (currval != to
4783 && !operand_equal_p (currval, to, 0)
4784 /* Different undefined SSA names are not actually different. See
4785 PR82320 for a testcase were we'd otherwise not terminate iteration. */
4786 && !(curr_undefined
4787 && TREE_CODE (to) == SSA_NAME
4788 && ssa_undefined_value_p (to, false))
4789 /* ??? For addresses involving volatile objects or types operand_equal_p
4790 does not reliably detect ADDR_EXPRs as equal. We know we are only
4791 getting invariant gimple addresses here, so can use
4792 get_addr_base_and_unit_offset to do this comparison. */
4793 && !(TREE_CODE (currval) == ADDR_EXPR
4794 && TREE_CODE (to) == ADDR_EXPR
4795 && (get_addr_base_and_unit_offset (TREE_OPERAND (currval, 0), &coff)
4796 == get_addr_base_and_unit_offset (TREE_OPERAND (to, 0), &toff))
4797 && known_eq (coff, toff)))
4799 if (to != from
4800 && currval != VN_TOP
4801 && !curr_undefined
4802 /* We do not want to allow lattice transitions from one value
4803 to another since that may lead to not terminating iteration
4804 (see PR95049). Since there's no convenient way to check
4805 for the allowed transition of VAL -> PHI (loop entry value,
4806 same on two PHIs, to same PHI result) we restrict the check
4807 to invariants. */
4808 && curr_invariant
4809 && is_gimple_min_invariant (to))
4811 if (dump_file && (dump_flags & TDF_DETAILS))
4812 fprintf (dump_file, " forced VARYING");
4813 to = from;
4815 if (dump_file && (dump_flags & TDF_DETAILS))
4816 fprintf (dump_file, " (changed)\n");
4817 from_info->valnum = to;
4818 return true;
4820 if (dump_file && (dump_flags & TDF_DETAILS))
4821 fprintf (dump_file, "\n");
4822 return false;
4825 /* Set all definitions in STMT to value number to themselves.
4826 Return true if a value number changed. */
4828 static bool
4829 defs_to_varying (gimple *stmt)
4831 bool changed = false;
4832 ssa_op_iter iter;
4833 def_operand_p defp;
4835 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_ALL_DEFS)
4837 tree def = DEF_FROM_PTR (defp);
4838 changed |= set_ssa_val_to (def, def);
4840 return changed;
4843 /* Visit a copy between LHS and RHS, return true if the value number
4844 changed. */
4846 static bool
4847 visit_copy (tree lhs, tree rhs)
4849 /* Valueize. */
4850 rhs = SSA_VAL (rhs);
4852 return set_ssa_val_to (lhs, rhs);
4855 /* Lookup a value for OP in type WIDE_TYPE where the value in type of OP
4856 is the same. */
4858 static tree
4859 valueized_wider_op (tree wide_type, tree op, bool allow_truncate)
4861 if (TREE_CODE (op) == SSA_NAME)
4862 op = vn_valueize (op);
4864 /* Either we have the op widened available. */
4865 tree ops[3] = {};
4866 ops[0] = op;
4867 tree tem = vn_nary_op_lookup_pieces (1, NOP_EXPR,
4868 wide_type, ops, NULL);
4869 if (tem)
4870 return tem;
4872 /* Or the op is truncated from some existing value. */
4873 if (allow_truncate && TREE_CODE (op) == SSA_NAME)
4875 gimple *def = SSA_NAME_DEF_STMT (op);
4876 if (is_gimple_assign (def)
4877 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def)))
4879 tem = gimple_assign_rhs1 (def);
4880 if (useless_type_conversion_p (wide_type, TREE_TYPE (tem)))
4882 if (TREE_CODE (tem) == SSA_NAME)
4883 tem = vn_valueize (tem);
4884 return tem;
4889 /* For constants simply extend it. */
4890 if (TREE_CODE (op) == INTEGER_CST)
4891 return wide_int_to_tree (wide_type, wi::to_wide (op));
4893 return NULL_TREE;
4896 /* Visit a nary operator RHS, value number it, and return true if the
4897 value number of LHS has changed as a result. */
4899 static bool
4900 visit_nary_op (tree lhs, gassign *stmt)
4902 vn_nary_op_t vnresult;
4903 tree result = vn_nary_op_lookup_stmt (stmt, &vnresult);
4904 if (! result && vnresult)
4905 result = vn_nary_op_get_predicated_value (vnresult, gimple_bb (stmt));
4906 if (result)
4907 return set_ssa_val_to (lhs, result);
4909 /* Do some special pattern matching for redundancies of operations
4910 in different types. */
4911 enum tree_code code = gimple_assign_rhs_code (stmt);
4912 tree type = TREE_TYPE (lhs);
4913 tree rhs1 = gimple_assign_rhs1 (stmt);
4914 switch (code)
4916 CASE_CONVERT:
4917 /* Match arithmetic done in a different type where we can easily
4918 substitute the result from some earlier sign-changed or widened
4919 operation. */
4920 if (INTEGRAL_TYPE_P (type)
4921 && TREE_CODE (rhs1) == SSA_NAME
4922 /* We only handle sign-changes, zero-extension -> & mask or
4923 sign-extension if we know the inner operation doesn't
4924 overflow. */
4925 && (((TYPE_UNSIGNED (TREE_TYPE (rhs1))
4926 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
4927 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
4928 && TYPE_PRECISION (type) > TYPE_PRECISION (TREE_TYPE (rhs1)))
4929 || TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (rhs1))))
4931 gassign *def = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
4932 if (def
4933 && (gimple_assign_rhs_code (def) == PLUS_EXPR
4934 || gimple_assign_rhs_code (def) == MINUS_EXPR
4935 || gimple_assign_rhs_code (def) == MULT_EXPR))
4937 tree ops[3] = {};
4938 /* When requiring a sign-extension we cannot model a
4939 previous truncation with a single op so don't bother. */
4940 bool allow_truncate = TYPE_UNSIGNED (TREE_TYPE (rhs1));
4941 /* Either we have the op widened available. */
4942 ops[0] = valueized_wider_op (type, gimple_assign_rhs1 (def),
4943 allow_truncate);
4944 if (ops[0])
4945 ops[1] = valueized_wider_op (type, gimple_assign_rhs2 (def),
4946 allow_truncate);
4947 if (ops[0] && ops[1])
4949 ops[0] = vn_nary_op_lookup_pieces
4950 (2, gimple_assign_rhs_code (def), type, ops, NULL);
4951 /* We have wider operation available. */
4952 if (ops[0]
4953 /* If the leader is a wrapping operation we can
4954 insert it for code hoisting w/o introducing
4955 undefined overflow. If it is not it has to
4956 be available. See PR86554. */
4957 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (ops[0]))
4958 || (rpo_avail && vn_context_bb
4959 && rpo_avail->eliminate_avail (vn_context_bb,
4960 ops[0]))))
4962 unsigned lhs_prec = TYPE_PRECISION (type);
4963 unsigned rhs_prec = TYPE_PRECISION (TREE_TYPE (rhs1));
4964 if (lhs_prec == rhs_prec
4965 || (INTEGRAL_TYPE_P (TREE_TYPE (rhs1))
4966 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (rhs1))))
4968 gimple_match_op match_op (gimple_match_cond::UNCOND,
4969 NOP_EXPR, type, ops[0]);
4970 result = vn_nary_build_or_lookup (&match_op);
4971 if (result)
4973 bool changed = set_ssa_val_to (lhs, result);
4974 vn_nary_op_insert_stmt (stmt, result);
4975 return changed;
4978 else
4980 tree mask = wide_int_to_tree
4981 (type, wi::mask (rhs_prec, false, lhs_prec));
4982 gimple_match_op match_op (gimple_match_cond::UNCOND,
4983 BIT_AND_EXPR,
4984 TREE_TYPE (lhs),
4985 ops[0], mask);
4986 result = vn_nary_build_or_lookup (&match_op);
4987 if (result)
4989 bool changed = set_ssa_val_to (lhs, result);
4990 vn_nary_op_insert_stmt (stmt, result);
4991 return changed;
4998 break;
4999 case BIT_AND_EXPR:
5000 if (INTEGRAL_TYPE_P (type)
5001 && TREE_CODE (rhs1) == SSA_NAME
5002 && TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST
5003 && !SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)
5004 && default_vn_walk_kind != VN_NOWALK
5005 && CHAR_BIT == 8
5006 && BITS_PER_UNIT == 8
5007 && BYTES_BIG_ENDIAN == WORDS_BIG_ENDIAN
5008 && !integer_all_onesp (gimple_assign_rhs2 (stmt))
5009 && !integer_zerop (gimple_assign_rhs2 (stmt)))
5011 gassign *ass = dyn_cast <gassign *> (SSA_NAME_DEF_STMT (rhs1));
5012 if (ass
5013 && !gimple_has_volatile_ops (ass)
5014 && vn_get_stmt_kind (ass) == VN_REFERENCE)
5016 tree last_vuse = gimple_vuse (ass);
5017 tree op = gimple_assign_rhs1 (ass);
5018 tree result = vn_reference_lookup (op, gimple_vuse (ass),
5019 default_vn_walk_kind,
5020 NULL, true, &last_vuse,
5021 gimple_assign_rhs2 (stmt));
5022 if (result
5023 && useless_type_conversion_p (TREE_TYPE (result),
5024 TREE_TYPE (op)))
5025 return set_ssa_val_to (lhs, result);
5028 break;
5029 case TRUNC_DIV_EXPR:
5030 if (TYPE_UNSIGNED (type))
5031 break;
5032 /* Fallthru. */
5033 case RDIV_EXPR:
5034 case MULT_EXPR:
5035 /* Match up ([-]a){/,*}([-])b with v=a{/,*}b, replacing it with -v. */
5036 if (! HONOR_SIGN_DEPENDENT_ROUNDING (type))
5038 tree rhs[2];
5039 rhs[0] = rhs1;
5040 rhs[1] = gimple_assign_rhs2 (stmt);
5041 for (unsigned i = 0; i <= 1; ++i)
5043 unsigned j = i == 0 ? 1 : 0;
5044 tree ops[2];
5045 gimple_match_op match_op (gimple_match_cond::UNCOND,
5046 NEGATE_EXPR, type, rhs[i]);
5047 ops[i] = vn_nary_build_or_lookup_1 (&match_op, false, true);
5048 ops[j] = rhs[j];
5049 if (ops[i]
5050 && (ops[0] = vn_nary_op_lookup_pieces (2, code,
5051 type, ops, NULL)))
5053 gimple_match_op match_op (gimple_match_cond::UNCOND,
5054 NEGATE_EXPR, type, ops[0]);
5055 result = vn_nary_build_or_lookup_1 (&match_op, true, false);
5056 if (result)
5058 bool changed = set_ssa_val_to (lhs, result);
5059 vn_nary_op_insert_stmt (stmt, result);
5060 return changed;
5065 break;
5066 default:
5067 break;
5070 bool changed = set_ssa_val_to (lhs, lhs);
5071 vn_nary_op_insert_stmt (stmt, lhs);
5072 return changed;
5075 /* Visit a call STMT storing into LHS. Return true if the value number
5076 of the LHS has changed as a result. */
5078 static bool
5079 visit_reference_op_call (tree lhs, gcall *stmt)
5081 bool changed = false;
5082 struct vn_reference_s vr1;
5083 vn_reference_t vnresult = NULL;
5084 tree vdef = gimple_vdef (stmt);
5086 /* Non-ssa lhs is handled in copy_reference_ops_from_call. */
5087 if (lhs && TREE_CODE (lhs) != SSA_NAME)
5088 lhs = NULL_TREE;
5090 vn_reference_lookup_call (stmt, &vnresult, &vr1);
5091 if (vnresult)
5093 if (vnresult->result_vdef && vdef)
5094 changed |= set_ssa_val_to (vdef, vnresult->result_vdef);
5095 else if (vdef)
5096 /* If the call was discovered to be pure or const reflect
5097 that as far as possible. */
5098 changed |= set_ssa_val_to (vdef, vuse_ssa_val (gimple_vuse (stmt)));
5100 if (!vnresult->result && lhs)
5101 vnresult->result = lhs;
5103 if (vnresult->result && lhs)
5104 changed |= set_ssa_val_to (lhs, vnresult->result);
5106 else
5108 vn_reference_t vr2;
5109 vn_reference_s **slot;
5110 tree vdef_val = vdef;
5111 if (vdef)
5113 /* If we value numbered an indirect functions function to
5114 one not clobbering memory value number its VDEF to its
5115 VUSE. */
5116 tree fn = gimple_call_fn (stmt);
5117 if (fn && TREE_CODE (fn) == SSA_NAME)
5119 fn = SSA_VAL (fn);
5120 if (TREE_CODE (fn) == ADDR_EXPR
5121 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL
5122 && (flags_from_decl_or_type (TREE_OPERAND (fn, 0))
5123 & (ECF_CONST | ECF_PURE)))
5124 vdef_val = vuse_ssa_val (gimple_vuse (stmt));
5126 changed |= set_ssa_val_to (vdef, vdef_val);
5128 if (lhs)
5129 changed |= set_ssa_val_to (lhs, lhs);
5130 vr2 = XOBNEW (&vn_tables_obstack, vn_reference_s);
5131 vr2->vuse = vr1.vuse;
5132 /* As we are not walking the virtual operand chain we know the
5133 shared_lookup_references are still original so we can re-use
5134 them here. */
5135 vr2->operands = vr1.operands.copy ();
5136 vr2->type = vr1.type;
5137 vr2->punned = vr1.punned;
5138 vr2->set = vr1.set;
5139 vr2->base_set = vr1.base_set;
5140 vr2->hashcode = vr1.hashcode;
5141 vr2->result = lhs;
5142 vr2->result_vdef = vdef_val;
5143 vr2->value_id = 0;
5144 slot = valid_info->references->find_slot_with_hash (vr2, vr2->hashcode,
5145 INSERT);
5146 gcc_assert (!*slot);
5147 *slot = vr2;
5148 vr2->next = last_inserted_ref;
5149 last_inserted_ref = vr2;
5152 return changed;
5155 /* Visit a load from a reference operator RHS, part of STMT, value number it,
5156 and return true if the value number of the LHS has changed as a result. */
5158 static bool
5159 visit_reference_op_load (tree lhs, tree op, gimple *stmt)
5161 bool changed = false;
5162 tree result;
5163 vn_reference_t res;
5165 tree vuse = gimple_vuse (stmt);
5166 tree last_vuse = vuse;
5167 result = vn_reference_lookup (op, vuse, default_vn_walk_kind, &res, true, &last_vuse);
5169 /* We handle type-punning through unions by value-numbering based
5170 on offset and size of the access. Be prepared to handle a
5171 type-mismatch here via creating a VIEW_CONVERT_EXPR. */
5172 if (result
5173 && !useless_type_conversion_p (TREE_TYPE (result), TREE_TYPE (op)))
5175 /* Avoid the type punning in case the result mode has padding where
5176 the op we lookup has not. */
5177 if (maybe_lt (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (result))),
5178 GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (op)))))
5179 result = NULL_TREE;
5180 else
5182 /* We will be setting the value number of lhs to the value number
5183 of VIEW_CONVERT_EXPR <TREE_TYPE (result)> (result).
5184 So first simplify and lookup this expression to see if it
5185 is already available. */
5186 gimple_match_op res_op (gimple_match_cond::UNCOND,
5187 VIEW_CONVERT_EXPR, TREE_TYPE (op), result);
5188 result = vn_nary_build_or_lookup (&res_op);
5189 if (result
5190 && TREE_CODE (result) == SSA_NAME
5191 && VN_INFO (result)->needs_insertion)
5192 /* Track whether this is the canonical expression for different
5193 typed loads. We use that as a stopgap measure for code
5194 hoisting when dealing with floating point loads. */
5195 res->punned = true;
5198 /* When building the conversion fails avoid inserting the reference
5199 again. */
5200 if (!result)
5201 return set_ssa_val_to (lhs, lhs);
5204 if (result)
5205 changed = set_ssa_val_to (lhs, result);
5206 else
5208 changed = set_ssa_val_to (lhs, lhs);
5209 vn_reference_insert (op, lhs, last_vuse, NULL_TREE);
5210 if (vuse && SSA_VAL (last_vuse) != SSA_VAL (vuse))
5212 if (dump_file && (dump_flags & TDF_DETAILS))
5214 fprintf (dump_file, "Using extra use virtual operand ");
5215 print_generic_expr (dump_file, last_vuse);
5216 fprintf (dump_file, "\n");
5218 vn_reference_insert (op, lhs, vuse, NULL_TREE);
5222 return changed;
5226 /* Visit a store to a reference operator LHS, part of STMT, value number it,
5227 and return true if the value number of the LHS has changed as a result. */
5229 static bool
5230 visit_reference_op_store (tree lhs, tree op, gimple *stmt)
5232 bool changed = false;
5233 vn_reference_t vnresult = NULL;
5234 tree assign;
5235 bool resultsame = false;
5236 tree vuse = gimple_vuse (stmt);
5237 tree vdef = gimple_vdef (stmt);
5239 if (TREE_CODE (op) == SSA_NAME)
5240 op = SSA_VAL (op);
5242 /* First we want to lookup using the *vuses* from the store and see
5243 if there the last store to this location with the same address
5244 had the same value.
5246 The vuses represent the memory state before the store. If the
5247 memory state, address, and value of the store is the same as the
5248 last store to this location, then this store will produce the
5249 same memory state as that store.
5251 In this case the vdef versions for this store are value numbered to those
5252 vuse versions, since they represent the same memory state after
5253 this store.
5255 Otherwise, the vdefs for the store are used when inserting into
5256 the table, since the store generates a new memory state. */
5258 vn_reference_lookup (lhs, vuse, VN_NOWALK, &vnresult, false);
5259 if (vnresult
5260 && vnresult->result)
5262 tree result = vnresult->result;
5263 gcc_checking_assert (TREE_CODE (result) != SSA_NAME
5264 || result == SSA_VAL (result));
5265 resultsame = expressions_equal_p (result, op);
5266 if (resultsame)
5268 /* If the TBAA state isn't compatible for downstream reads
5269 we cannot value-number the VDEFs the same. */
5270 ao_ref lhs_ref;
5271 ao_ref_init (&lhs_ref, lhs);
5272 alias_set_type set = ao_ref_alias_set (&lhs_ref);
5273 alias_set_type base_set = ao_ref_base_alias_set (&lhs_ref);
5274 if ((vnresult->set != set
5275 && ! alias_set_subset_of (set, vnresult->set))
5276 || (vnresult->base_set != base_set
5277 && ! alias_set_subset_of (base_set, vnresult->base_set)))
5278 resultsame = false;
5282 if (!resultsame)
5284 /* Only perform the following when being called from PRE
5285 which embeds tail merging. */
5286 if (default_vn_walk_kind == VN_WALK)
5288 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
5289 vn_reference_lookup (assign, vuse, VN_NOWALK, &vnresult, false);
5290 if (vnresult)
5292 VN_INFO (vdef)->visited = true;
5293 return set_ssa_val_to (vdef, vnresult->result_vdef);
5297 if (dump_file && (dump_flags & TDF_DETAILS))
5299 fprintf (dump_file, "No store match\n");
5300 fprintf (dump_file, "Value numbering store ");
5301 print_generic_expr (dump_file, lhs);
5302 fprintf (dump_file, " to ");
5303 print_generic_expr (dump_file, op);
5304 fprintf (dump_file, "\n");
5306 /* Have to set value numbers before insert, since insert is
5307 going to valueize the references in-place. */
5308 if (vdef)
5309 changed |= set_ssa_val_to (vdef, vdef);
5311 /* Do not insert structure copies into the tables. */
5312 if (is_gimple_min_invariant (op)
5313 || is_gimple_reg (op))
5314 vn_reference_insert (lhs, op, vdef, NULL);
5316 /* Only perform the following when being called from PRE
5317 which embeds tail merging. */
5318 if (default_vn_walk_kind == VN_WALK)
5320 assign = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, op);
5321 vn_reference_insert (assign, lhs, vuse, vdef);
5324 else
5326 /* We had a match, so value number the vdef to have the value
5327 number of the vuse it came from. */
5329 if (dump_file && (dump_flags & TDF_DETAILS))
5330 fprintf (dump_file, "Store matched earlier value, "
5331 "value numbering store vdefs to matching vuses.\n");
5333 changed |= set_ssa_val_to (vdef, SSA_VAL (vuse));
5336 return changed;
5339 /* Visit and value number PHI, return true if the value number
5340 changed. When BACKEDGES_VARYING_P is true then assume all
5341 backedge values are varying. When INSERTED is not NULL then
5342 this is just a ahead query for a possible iteration, set INSERTED
5343 to true if we'd insert into the hashtable. */
5345 static bool
5346 visit_phi (gimple *phi, bool *inserted, bool backedges_varying_p)
5348 tree result, sameval = VN_TOP, seen_undef = NULL_TREE;
5349 tree backedge_val = NULL_TREE;
5350 bool seen_non_backedge = false;
5351 tree sameval_base = NULL_TREE;
5352 poly_int64 soff, doff;
5353 unsigned n_executable = 0;
5354 edge_iterator ei;
5355 edge e;
5357 /* TODO: We could check for this in initialization, and replace this
5358 with a gcc_assert. */
5359 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (PHI_RESULT (phi)))
5360 return set_ssa_val_to (PHI_RESULT (phi), PHI_RESULT (phi));
5362 /* We track whether a PHI was CSEd to to avoid excessive iterations
5363 that would be necessary only because the PHI changed arguments
5364 but not value. */
5365 if (!inserted)
5366 gimple_set_plf (phi, GF_PLF_1, false);
5368 /* See if all non-TOP arguments have the same value. TOP is
5369 equivalent to everything, so we can ignore it. */
5370 FOR_EACH_EDGE (e, ei, gimple_bb (phi)->preds)
5371 if (e->flags & EDGE_EXECUTABLE)
5373 tree def = PHI_ARG_DEF_FROM_EDGE (phi, e);
5375 if (def == PHI_RESULT (phi))
5376 continue;
5377 ++n_executable;
5378 if (TREE_CODE (def) == SSA_NAME)
5380 if (!backedges_varying_p || !(e->flags & EDGE_DFS_BACK))
5381 def = SSA_VAL (def);
5382 if (e->flags & EDGE_DFS_BACK)
5383 backedge_val = def;
5385 if (!(e->flags & EDGE_DFS_BACK))
5386 seen_non_backedge = true;
5387 if (def == VN_TOP)
5389 /* Ignore undefined defs for sameval but record one. */
5390 else if (TREE_CODE (def) == SSA_NAME
5391 && ! virtual_operand_p (def)
5392 && ssa_undefined_value_p (def, false))
5393 seen_undef = def;
5394 else if (sameval == VN_TOP)
5395 sameval = def;
5396 else if (!expressions_equal_p (def, sameval))
5398 /* We know we're arriving only with invariant addresses here,
5399 try harder comparing them. We can do some caching here
5400 which we cannot do in expressions_equal_p. */
5401 if (TREE_CODE (def) == ADDR_EXPR
5402 && TREE_CODE (sameval) == ADDR_EXPR
5403 && sameval_base != (void *)-1)
5405 if (!sameval_base)
5406 sameval_base = get_addr_base_and_unit_offset
5407 (TREE_OPERAND (sameval, 0), &soff);
5408 if (!sameval_base)
5409 sameval_base = (tree)(void *)-1;
5410 else if ((get_addr_base_and_unit_offset
5411 (TREE_OPERAND (def, 0), &doff) == sameval_base)
5412 && known_eq (soff, doff))
5413 continue;
5415 sameval = NULL_TREE;
5416 break;
5420 /* If the value we want to use is flowing over the backedge and we
5421 should take it as VARYING but it has a non-VARYING value drop to
5422 VARYING.
5423 If we value-number a virtual operand never value-number to the
5424 value from the backedge as that confuses the alias-walking code.
5425 See gcc.dg/torture/pr87176.c. If the value is the same on a
5426 non-backedge everything is OK though. */
5427 bool visited_p;
5428 if ((backedge_val
5429 && !seen_non_backedge
5430 && TREE_CODE (backedge_val) == SSA_NAME
5431 && sameval == backedge_val
5432 && (SSA_NAME_IS_VIRTUAL_OPERAND (backedge_val)
5433 || SSA_VAL (backedge_val) != backedge_val))
5434 /* Do not value-number a virtual operand to sth not visited though
5435 given that allows us to escape a region in alias walking. */
5436 || (sameval
5437 && TREE_CODE (sameval) == SSA_NAME
5438 && !SSA_NAME_IS_DEFAULT_DEF (sameval)
5439 && SSA_NAME_IS_VIRTUAL_OPERAND (sameval)
5440 && (SSA_VAL (sameval, &visited_p), !visited_p)))
5441 /* Note this just drops to VARYING without inserting the PHI into
5442 the hashes. */
5443 result = PHI_RESULT (phi);
5444 /* If none of the edges was executable keep the value-number at VN_TOP,
5445 if only a single edge is exectuable use its value. */
5446 else if (n_executable <= 1)
5447 result = seen_undef ? seen_undef : sameval;
5448 /* If we saw only undefined values and VN_TOP use one of the
5449 undefined values. */
5450 else if (sameval == VN_TOP)
5451 result = seen_undef ? seen_undef : sameval;
5452 /* First see if it is equivalent to a phi node in this block. We prefer
5453 this as it allows IV elimination - see PRs 66502 and 67167. */
5454 else if ((result = vn_phi_lookup (phi, backedges_varying_p)))
5456 if (!inserted
5457 && TREE_CODE (result) == SSA_NAME
5458 && gimple_code (SSA_NAME_DEF_STMT (result)) == GIMPLE_PHI)
5460 gimple_set_plf (SSA_NAME_DEF_STMT (result), GF_PLF_1, true);
5461 if (dump_file && (dump_flags & TDF_DETAILS))
5463 fprintf (dump_file, "Marking CSEd to PHI node ");
5464 print_gimple_expr (dump_file, SSA_NAME_DEF_STMT (result),
5465 0, TDF_SLIM);
5466 fprintf (dump_file, "\n");
5470 /* If all values are the same use that, unless we've seen undefined
5471 values as well and the value isn't constant.
5472 CCP/copyprop have the same restriction to not remove uninit warnings. */
5473 else if (sameval
5474 && (! seen_undef || is_gimple_min_invariant (sameval)))
5475 result = sameval;
5476 else
5478 result = PHI_RESULT (phi);
5479 /* Only insert PHIs that are varying, for constant value numbers
5480 we mess up equivalences otherwise as we are only comparing
5481 the immediate controlling predicates. */
5482 vn_phi_insert (phi, result, backedges_varying_p);
5483 if (inserted)
5484 *inserted = true;
5487 return set_ssa_val_to (PHI_RESULT (phi), result);
5490 /* Try to simplify RHS using equivalences and constant folding. */
5492 static tree
5493 try_to_simplify (gassign *stmt)
5495 enum tree_code code = gimple_assign_rhs_code (stmt);
5496 tree tem;
5498 /* For stores we can end up simplifying a SSA_NAME rhs. Just return
5499 in this case, there is no point in doing extra work. */
5500 if (code == SSA_NAME)
5501 return NULL_TREE;
5503 /* First try constant folding based on our current lattice. */
5504 mprts_hook = vn_lookup_simplify_result;
5505 tem = gimple_fold_stmt_to_constant_1 (stmt, vn_valueize, vn_valueize);
5506 mprts_hook = NULL;
5507 if (tem
5508 && (TREE_CODE (tem) == SSA_NAME
5509 || is_gimple_min_invariant (tem)))
5510 return tem;
5512 return NULL_TREE;
5515 /* Visit and value number STMT, return true if the value number
5516 changed. */
5518 static bool
5519 visit_stmt (gimple *stmt, bool backedges_varying_p = false)
5521 bool changed = false;
5523 if (dump_file && (dump_flags & TDF_DETAILS))
5525 fprintf (dump_file, "Value numbering stmt = ");
5526 print_gimple_stmt (dump_file, stmt, 0);
5529 if (gimple_code (stmt) == GIMPLE_PHI)
5530 changed = visit_phi (stmt, NULL, backedges_varying_p);
5531 else if (gimple_has_volatile_ops (stmt))
5532 changed = defs_to_varying (stmt);
5533 else if (gassign *ass = dyn_cast <gassign *> (stmt))
5535 enum tree_code code = gimple_assign_rhs_code (ass);
5536 tree lhs = gimple_assign_lhs (ass);
5537 tree rhs1 = gimple_assign_rhs1 (ass);
5538 tree simplified;
5540 /* Shortcut for copies. Simplifying copies is pointless,
5541 since we copy the expression and value they represent. */
5542 if (code == SSA_NAME
5543 && TREE_CODE (lhs) == SSA_NAME)
5545 changed = visit_copy (lhs, rhs1);
5546 goto done;
5548 simplified = try_to_simplify (ass);
5549 if (simplified)
5551 if (dump_file && (dump_flags & TDF_DETAILS))
5553 fprintf (dump_file, "RHS ");
5554 print_gimple_expr (dump_file, ass, 0);
5555 fprintf (dump_file, " simplified to ");
5556 print_generic_expr (dump_file, simplified);
5557 fprintf (dump_file, "\n");
5560 /* Setting value numbers to constants will occasionally
5561 screw up phi congruence because constants are not
5562 uniquely associated with a single ssa name that can be
5563 looked up. */
5564 if (simplified
5565 && is_gimple_min_invariant (simplified)
5566 && TREE_CODE (lhs) == SSA_NAME)
5568 changed = set_ssa_val_to (lhs, simplified);
5569 goto done;
5571 else if (simplified
5572 && TREE_CODE (simplified) == SSA_NAME
5573 && TREE_CODE (lhs) == SSA_NAME)
5575 changed = visit_copy (lhs, simplified);
5576 goto done;
5579 if ((TREE_CODE (lhs) == SSA_NAME
5580 /* We can substitute SSA_NAMEs that are live over
5581 abnormal edges with their constant value. */
5582 && !(gimple_assign_copy_p (ass)
5583 && is_gimple_min_invariant (rhs1))
5584 && !(simplified
5585 && is_gimple_min_invariant (simplified))
5586 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
5587 /* Stores or copies from SSA_NAMEs that are live over
5588 abnormal edges are a problem. */
5589 || (code == SSA_NAME
5590 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (rhs1)))
5591 changed = defs_to_varying (ass);
5592 else if (REFERENCE_CLASS_P (lhs)
5593 || DECL_P (lhs))
5594 changed = visit_reference_op_store (lhs, rhs1, ass);
5595 else if (TREE_CODE (lhs) == SSA_NAME)
5597 if ((gimple_assign_copy_p (ass)
5598 && is_gimple_min_invariant (rhs1))
5599 || (simplified
5600 && is_gimple_min_invariant (simplified)))
5602 if (simplified)
5603 changed = set_ssa_val_to (lhs, simplified);
5604 else
5605 changed = set_ssa_val_to (lhs, rhs1);
5607 else
5609 /* Visit the original statement. */
5610 switch (vn_get_stmt_kind (ass))
5612 case VN_NARY:
5613 changed = visit_nary_op (lhs, ass);
5614 break;
5615 case VN_REFERENCE:
5616 changed = visit_reference_op_load (lhs, rhs1, ass);
5617 break;
5618 default:
5619 changed = defs_to_varying (ass);
5620 break;
5624 else
5625 changed = defs_to_varying (ass);
5627 else if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
5629 tree lhs = gimple_call_lhs (call_stmt);
5630 if (lhs && TREE_CODE (lhs) == SSA_NAME)
5632 /* Try constant folding based on our current lattice. */
5633 tree simplified = gimple_fold_stmt_to_constant_1 (call_stmt,
5634 vn_valueize);
5635 if (simplified)
5637 if (dump_file && (dump_flags & TDF_DETAILS))
5639 fprintf (dump_file, "call ");
5640 print_gimple_expr (dump_file, call_stmt, 0);
5641 fprintf (dump_file, " simplified to ");
5642 print_generic_expr (dump_file, simplified);
5643 fprintf (dump_file, "\n");
5646 /* Setting value numbers to constants will occasionally
5647 screw up phi congruence because constants are not
5648 uniquely associated with a single ssa name that can be
5649 looked up. */
5650 if (simplified
5651 && is_gimple_min_invariant (simplified))
5653 changed = set_ssa_val_to (lhs, simplified);
5654 if (gimple_vdef (call_stmt))
5655 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
5656 SSA_VAL (gimple_vuse (call_stmt)));
5657 goto done;
5659 else if (simplified
5660 && TREE_CODE (simplified) == SSA_NAME)
5662 changed = visit_copy (lhs, simplified);
5663 if (gimple_vdef (call_stmt))
5664 changed |= set_ssa_val_to (gimple_vdef (call_stmt),
5665 SSA_VAL (gimple_vuse (call_stmt)));
5666 goto done;
5668 else if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
5670 changed = defs_to_varying (call_stmt);
5671 goto done;
5675 /* Pick up flags from a devirtualization target. */
5676 tree fn = gimple_call_fn (stmt);
5677 int extra_fnflags = 0;
5678 if (fn && TREE_CODE (fn) == SSA_NAME)
5680 fn = SSA_VAL (fn);
5681 if (TREE_CODE (fn) == ADDR_EXPR
5682 && TREE_CODE (TREE_OPERAND (fn, 0)) == FUNCTION_DECL)
5683 extra_fnflags = flags_from_decl_or_type (TREE_OPERAND (fn, 0));
5685 if ((/* Calls to the same function with the same vuse
5686 and the same operands do not necessarily return the same
5687 value, unless they're pure or const. */
5688 ((gimple_call_flags (call_stmt) | extra_fnflags)
5689 & (ECF_PURE | ECF_CONST))
5690 /* If calls have a vdef, subsequent calls won't have
5691 the same incoming vuse. So, if 2 calls with vdef have the
5692 same vuse, we know they're not subsequent.
5693 We can value number 2 calls to the same function with the
5694 same vuse and the same operands which are not subsequent
5695 the same, because there is no code in the program that can
5696 compare the 2 values... */
5697 || (gimple_vdef (call_stmt)
5698 /* ... unless the call returns a pointer which does
5699 not alias with anything else. In which case the
5700 information that the values are distinct are encoded
5701 in the IL. */
5702 && !(gimple_call_return_flags (call_stmt) & ERF_NOALIAS)
5703 /* Only perform the following when being called from PRE
5704 which embeds tail merging. */
5705 && default_vn_walk_kind == VN_WALK))
5706 /* Do not process .DEFERRED_INIT since that confuses uninit
5707 analysis. */
5708 && !gimple_call_internal_p (call_stmt, IFN_DEFERRED_INIT))
5709 changed = visit_reference_op_call (lhs, call_stmt);
5710 else
5711 changed = defs_to_varying (call_stmt);
5713 else
5714 changed = defs_to_varying (stmt);
5715 done:
5716 return changed;
5720 /* Allocate a value number table. */
5722 static void
5723 allocate_vn_table (vn_tables_t table, unsigned size)
5725 table->phis = new vn_phi_table_type (size);
5726 table->nary = new vn_nary_op_table_type (size);
5727 table->references = new vn_reference_table_type (size);
5730 /* Free a value number table. */
5732 static void
5733 free_vn_table (vn_tables_t table)
5735 /* Walk over elements and release vectors. */
5736 vn_reference_iterator_type hir;
5737 vn_reference_t vr;
5738 FOR_EACH_HASH_TABLE_ELEMENT (*table->references, vr, vn_reference_t, hir)
5739 vr->operands.release ();
5740 delete table->phis;
5741 table->phis = NULL;
5742 delete table->nary;
5743 table->nary = NULL;
5744 delete table->references;
5745 table->references = NULL;
5748 /* Set *ID according to RESULT. */
5750 static void
5751 set_value_id_for_result (tree result, unsigned int *id)
5753 if (result && TREE_CODE (result) == SSA_NAME)
5754 *id = VN_INFO (result)->value_id;
5755 else if (result && is_gimple_min_invariant (result))
5756 *id = get_or_alloc_constant_value_id (result);
5757 else
5758 *id = get_next_value_id ();
5761 /* Set the value ids in the valid hash tables. */
5763 static void
5764 set_hashtable_value_ids (void)
5766 vn_nary_op_iterator_type hin;
5767 vn_phi_iterator_type hip;
5768 vn_reference_iterator_type hir;
5769 vn_nary_op_t vno;
5770 vn_reference_t vr;
5771 vn_phi_t vp;
5773 /* Now set the value ids of the things we had put in the hash
5774 table. */
5776 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->nary, vno, vn_nary_op_t, hin)
5777 if (! vno->predicated_values)
5778 set_value_id_for_result (vno->u.result, &vno->value_id);
5780 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->phis, vp, vn_phi_t, hip)
5781 set_value_id_for_result (vp->result, &vp->value_id);
5783 FOR_EACH_HASH_TABLE_ELEMENT (*valid_info->references, vr, vn_reference_t,
5784 hir)
5785 set_value_id_for_result (vr->result, &vr->value_id);
5788 /* Return the maximum value id we have ever seen. */
5790 unsigned int
5791 get_max_value_id (void)
5793 return next_value_id;
5796 /* Return the maximum constant value id we have ever seen. */
5798 unsigned int
5799 get_max_constant_value_id (void)
5801 return -next_constant_value_id;
5804 /* Return the next unique value id. */
5806 unsigned int
5807 get_next_value_id (void)
5809 gcc_checking_assert ((int)next_value_id > 0);
5810 return next_value_id++;
5813 /* Return the next unique value id for constants. */
5815 unsigned int
5816 get_next_constant_value_id (void)
5818 gcc_checking_assert (next_constant_value_id < 0);
5819 return next_constant_value_id--;
5823 /* Compare two expressions E1 and E2 and return true if they are equal.
5824 If match_vn_top_optimistically is true then VN_TOP is equal to anything,
5825 otherwise VN_TOP only matches VN_TOP. */
5827 bool
5828 expressions_equal_p (tree e1, tree e2, bool match_vn_top_optimistically)
5830 /* The obvious case. */
5831 if (e1 == e2)
5832 return true;
5834 /* If either one is VN_TOP consider them equal. */
5835 if (match_vn_top_optimistically
5836 && (e1 == VN_TOP || e2 == VN_TOP))
5837 return true;
5839 /* SSA_NAME compare pointer equal. */
5840 if (TREE_CODE (e1) == SSA_NAME || TREE_CODE (e2) == SSA_NAME)
5841 return false;
5843 /* Now perform the actual comparison. */
5844 if (TREE_CODE (e1) == TREE_CODE (e2)
5845 && operand_equal_p (e1, e2, OEP_PURE_SAME))
5846 return true;
5848 return false;
5852 /* Return true if the nary operation NARY may trap. This is a copy
5853 of stmt_could_throw_1_p adjusted to the SCCVN IL. */
5855 bool
5856 vn_nary_may_trap (vn_nary_op_t nary)
5858 tree type;
5859 tree rhs2 = NULL_TREE;
5860 bool honor_nans = false;
5861 bool honor_snans = false;
5862 bool fp_operation = false;
5863 bool honor_trapv = false;
5864 bool handled, ret;
5865 unsigned i;
5867 if (TREE_CODE_CLASS (nary->opcode) == tcc_comparison
5868 || TREE_CODE_CLASS (nary->opcode) == tcc_unary
5869 || TREE_CODE_CLASS (nary->opcode) == tcc_binary)
5871 type = nary->type;
5872 fp_operation = FLOAT_TYPE_P (type);
5873 if (fp_operation)
5875 honor_nans = flag_trapping_math && !flag_finite_math_only;
5876 honor_snans = flag_signaling_nans != 0;
5878 else if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_TRAPS (type))
5879 honor_trapv = true;
5881 if (nary->length >= 2)
5882 rhs2 = nary->op[1];
5883 ret = operation_could_trap_helper_p (nary->opcode, fp_operation,
5884 honor_trapv, honor_nans, honor_snans,
5885 rhs2, &handled);
5886 if (handled && ret)
5887 return true;
5889 for (i = 0; i < nary->length; ++i)
5890 if (tree_could_trap_p (nary->op[i]))
5891 return true;
5893 return false;
5896 /* Return true if the reference operation REF may trap. */
5898 bool
5899 vn_reference_may_trap (vn_reference_t ref)
5901 switch (ref->operands[0].opcode)
5903 case MODIFY_EXPR:
5904 case CALL_EXPR:
5905 /* We do not handle calls. */
5906 return true;
5907 case ADDR_EXPR:
5908 /* And toplevel address computations never trap. */
5909 return false;
5910 default:;
5913 vn_reference_op_t op;
5914 unsigned i;
5915 FOR_EACH_VEC_ELT (ref->operands, i, op)
5917 switch (op->opcode)
5919 case WITH_SIZE_EXPR:
5920 case TARGET_MEM_REF:
5921 /* Always variable. */
5922 return true;
5923 case COMPONENT_REF:
5924 if (op->op1 && TREE_CODE (op->op1) == SSA_NAME)
5925 return true;
5926 break;
5927 case ARRAY_RANGE_REF:
5928 if (TREE_CODE (op->op0) == SSA_NAME)
5929 return true;
5930 break;
5931 case ARRAY_REF:
5933 if (TREE_CODE (op->op0) != INTEGER_CST)
5934 return true;
5936 /* !in_array_bounds */
5937 tree domain_type = TYPE_DOMAIN (ref->operands[i+1].type);
5938 if (!domain_type)
5939 return true;
5941 tree min = op->op1;
5942 tree max = TYPE_MAX_VALUE (domain_type);
5943 if (!min
5944 || !max
5945 || TREE_CODE (min) != INTEGER_CST
5946 || TREE_CODE (max) != INTEGER_CST)
5947 return true;
5949 if (tree_int_cst_lt (op->op0, min)
5950 || tree_int_cst_lt (max, op->op0))
5951 return true;
5953 break;
5955 case MEM_REF:
5956 /* Nothing interesting in itself, the base is separate. */
5957 break;
5958 /* The following are the address bases. */
5959 case SSA_NAME:
5960 return true;
5961 case ADDR_EXPR:
5962 if (op->op0)
5963 return tree_could_trap_p (TREE_OPERAND (op->op0, 0));
5964 return false;
5965 default:;
5968 return false;
5971 eliminate_dom_walker::eliminate_dom_walker (cdi_direction direction,
5972 bitmap inserted_exprs_)
5973 : dom_walker (direction), do_pre (inserted_exprs_ != NULL),
5974 el_todo (0), eliminations (0), insertions (0),
5975 inserted_exprs (inserted_exprs_)
5977 need_eh_cleanup = BITMAP_ALLOC (NULL);
5978 need_ab_cleanup = BITMAP_ALLOC (NULL);
5981 eliminate_dom_walker::~eliminate_dom_walker ()
5983 BITMAP_FREE (need_eh_cleanup);
5984 BITMAP_FREE (need_ab_cleanup);
5987 /* Return a leader for OP that is available at the current point of the
5988 eliminate domwalk. */
5990 tree
5991 eliminate_dom_walker::eliminate_avail (basic_block, tree op)
5993 tree valnum = VN_INFO (op)->valnum;
5994 if (TREE_CODE (valnum) == SSA_NAME)
5996 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
5997 return valnum;
5998 if (avail.length () > SSA_NAME_VERSION (valnum))
5999 return avail[SSA_NAME_VERSION (valnum)];
6001 else if (is_gimple_min_invariant (valnum))
6002 return valnum;
6003 return NULL_TREE;
6006 /* At the current point of the eliminate domwalk make OP available. */
6008 void
6009 eliminate_dom_walker::eliminate_push_avail (basic_block, tree op)
6011 tree valnum = VN_INFO (op)->valnum;
6012 if (TREE_CODE (valnum) == SSA_NAME)
6014 if (avail.length () <= SSA_NAME_VERSION (valnum))
6015 avail.safe_grow_cleared (SSA_NAME_VERSION (valnum) + 1, true);
6016 tree pushop = op;
6017 if (avail[SSA_NAME_VERSION (valnum)])
6018 pushop = avail[SSA_NAME_VERSION (valnum)];
6019 avail_stack.safe_push (pushop);
6020 avail[SSA_NAME_VERSION (valnum)] = op;
6024 /* Insert the expression recorded by SCCVN for VAL at *GSI. Returns
6025 the leader for the expression if insertion was successful. */
6027 tree
6028 eliminate_dom_walker::eliminate_insert (basic_block bb,
6029 gimple_stmt_iterator *gsi, tree val)
6031 /* We can insert a sequence with a single assignment only. */
6032 gimple_seq stmts = VN_INFO (val)->expr;
6033 if (!gimple_seq_singleton_p (stmts))
6034 return NULL_TREE;
6035 gassign *stmt = dyn_cast <gassign *> (gimple_seq_first_stmt (stmts));
6036 if (!stmt
6037 || (!CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
6038 && gimple_assign_rhs_code (stmt) != VIEW_CONVERT_EXPR
6039 && gimple_assign_rhs_code (stmt) != NEGATE_EXPR
6040 && gimple_assign_rhs_code (stmt) != BIT_FIELD_REF
6041 && (gimple_assign_rhs_code (stmt) != BIT_AND_EXPR
6042 || TREE_CODE (gimple_assign_rhs2 (stmt)) != INTEGER_CST)))
6043 return NULL_TREE;
6045 tree op = gimple_assign_rhs1 (stmt);
6046 if (gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR
6047 || gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
6048 op = TREE_OPERAND (op, 0);
6049 tree leader = TREE_CODE (op) == SSA_NAME ? eliminate_avail (bb, op) : op;
6050 if (!leader)
6051 return NULL_TREE;
6053 tree res;
6054 stmts = NULL;
6055 if (gimple_assign_rhs_code (stmt) == BIT_FIELD_REF)
6056 res = gimple_build (&stmts, BIT_FIELD_REF,
6057 TREE_TYPE (val), leader,
6058 TREE_OPERAND (gimple_assign_rhs1 (stmt), 1),
6059 TREE_OPERAND (gimple_assign_rhs1 (stmt), 2));
6060 else if (gimple_assign_rhs_code (stmt) == BIT_AND_EXPR)
6061 res = gimple_build (&stmts, BIT_AND_EXPR,
6062 TREE_TYPE (val), leader, gimple_assign_rhs2 (stmt));
6063 else
6064 res = gimple_build (&stmts, gimple_assign_rhs_code (stmt),
6065 TREE_TYPE (val), leader);
6066 if (TREE_CODE (res) != SSA_NAME
6067 || SSA_NAME_IS_DEFAULT_DEF (res)
6068 || gimple_bb (SSA_NAME_DEF_STMT (res)))
6070 gimple_seq_discard (stmts);
6072 /* During propagation we have to treat SSA info conservatively
6073 and thus we can end up simplifying the inserted expression
6074 at elimination time to sth not defined in stmts. */
6075 /* But then this is a redundancy we failed to detect. Which means
6076 res now has two values. That doesn't play well with how
6077 we track availability here, so give up. */
6078 if (dump_file && (dump_flags & TDF_DETAILS))
6080 if (TREE_CODE (res) == SSA_NAME)
6081 res = eliminate_avail (bb, res);
6082 if (res)
6084 fprintf (dump_file, "Failed to insert expression for value ");
6085 print_generic_expr (dump_file, val);
6086 fprintf (dump_file, " which is really fully redundant to ");
6087 print_generic_expr (dump_file, res);
6088 fprintf (dump_file, "\n");
6092 return NULL_TREE;
6094 else
6096 gsi_insert_seq_before (gsi, stmts, GSI_SAME_STMT);
6097 vn_ssa_aux_t vn_info = VN_INFO (res);
6098 vn_info->valnum = val;
6099 vn_info->visited = true;
6102 insertions++;
6103 if (dump_file && (dump_flags & TDF_DETAILS))
6105 fprintf (dump_file, "Inserted ");
6106 print_gimple_stmt (dump_file, SSA_NAME_DEF_STMT (res), 0);
6109 return res;
6112 void
6113 eliminate_dom_walker::eliminate_stmt (basic_block b, gimple_stmt_iterator *gsi)
6115 tree sprime = NULL_TREE;
6116 gimple *stmt = gsi_stmt (*gsi);
6117 tree lhs = gimple_get_lhs (stmt);
6118 if (lhs && TREE_CODE (lhs) == SSA_NAME
6119 && !gimple_has_volatile_ops (stmt)
6120 /* See PR43491. Do not replace a global register variable when
6121 it is a the RHS of an assignment. Do replace local register
6122 variables since gcc does not guarantee a local variable will
6123 be allocated in register.
6124 ??? The fix isn't effective here. This should instead
6125 be ensured by not value-numbering them the same but treating
6126 them like volatiles? */
6127 && !(gimple_assign_single_p (stmt)
6128 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == VAR_DECL
6129 && DECL_HARD_REGISTER (gimple_assign_rhs1 (stmt))
6130 && is_global_var (gimple_assign_rhs1 (stmt)))))
6132 sprime = eliminate_avail (b, lhs);
6133 if (!sprime)
6135 /* If there is no existing usable leader but SCCVN thinks
6136 it has an expression it wants to use as replacement,
6137 insert that. */
6138 tree val = VN_INFO (lhs)->valnum;
6139 vn_ssa_aux_t vn_info;
6140 if (val != VN_TOP
6141 && TREE_CODE (val) == SSA_NAME
6142 && (vn_info = VN_INFO (val), true)
6143 && vn_info->needs_insertion
6144 && vn_info->expr != NULL
6145 && (sprime = eliminate_insert (b, gsi, val)) != NULL_TREE)
6146 eliminate_push_avail (b, sprime);
6149 /* If this now constitutes a copy duplicate points-to
6150 and range info appropriately. This is especially
6151 important for inserted code. See tree-ssa-copy.c
6152 for similar code. */
6153 if (sprime
6154 && TREE_CODE (sprime) == SSA_NAME)
6156 basic_block sprime_b = gimple_bb (SSA_NAME_DEF_STMT (sprime));
6157 if (POINTER_TYPE_P (TREE_TYPE (lhs))
6158 && SSA_NAME_PTR_INFO (lhs)
6159 && ! SSA_NAME_PTR_INFO (sprime))
6161 duplicate_ssa_name_ptr_info (sprime,
6162 SSA_NAME_PTR_INFO (lhs));
6163 if (b != sprime_b)
6164 reset_flow_sensitive_info (sprime);
6166 else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
6167 && SSA_NAME_RANGE_INFO (lhs)
6168 && ! SSA_NAME_RANGE_INFO (sprime)
6169 && b == sprime_b)
6170 duplicate_ssa_name_range_info (sprime,
6171 SSA_NAME_RANGE_TYPE (lhs),
6172 SSA_NAME_RANGE_INFO (lhs));
6175 /* Inhibit the use of an inserted PHI on a loop header when
6176 the address of the memory reference is a simple induction
6177 variable. In other cases the vectorizer won't do anything
6178 anyway (either it's loop invariant or a complicated
6179 expression). */
6180 if (sprime
6181 && TREE_CODE (sprime) == SSA_NAME
6182 && do_pre
6183 && (flag_tree_loop_vectorize || flag_tree_parallelize_loops > 1)
6184 && loop_outer (b->loop_father)
6185 && has_zero_uses (sprime)
6186 && bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))
6187 && gimple_assign_load_p (stmt))
6189 gimple *def_stmt = SSA_NAME_DEF_STMT (sprime);
6190 basic_block def_bb = gimple_bb (def_stmt);
6191 if (gimple_code (def_stmt) == GIMPLE_PHI
6192 && def_bb->loop_father->header == def_bb)
6194 loop_p loop = def_bb->loop_father;
6195 ssa_op_iter iter;
6196 tree op;
6197 bool found = false;
6198 FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
6200 affine_iv iv;
6201 def_bb = gimple_bb (SSA_NAME_DEF_STMT (op));
6202 if (def_bb
6203 && flow_bb_inside_loop_p (loop, def_bb)
6204 && simple_iv (loop, loop, op, &iv, true))
6206 found = true;
6207 break;
6210 if (found)
6212 if (dump_file && (dump_flags & TDF_DETAILS))
6214 fprintf (dump_file, "Not replacing ");
6215 print_gimple_expr (dump_file, stmt, 0);
6216 fprintf (dump_file, " with ");
6217 print_generic_expr (dump_file, sprime);
6218 fprintf (dump_file, " which would add a loop"
6219 " carried dependence to loop %d\n",
6220 loop->num);
6222 /* Don't keep sprime available. */
6223 sprime = NULL_TREE;
6228 if (sprime)
6230 /* If we can propagate the value computed for LHS into
6231 all uses don't bother doing anything with this stmt. */
6232 if (may_propagate_copy (lhs, sprime))
6234 /* Mark it for removal. */
6235 to_remove.safe_push (stmt);
6237 /* ??? Don't count copy/constant propagations. */
6238 if (gimple_assign_single_p (stmt)
6239 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
6240 || gimple_assign_rhs1 (stmt) == sprime))
6241 return;
6243 if (dump_file && (dump_flags & TDF_DETAILS))
6245 fprintf (dump_file, "Replaced ");
6246 print_gimple_expr (dump_file, stmt, 0);
6247 fprintf (dump_file, " with ");
6248 print_generic_expr (dump_file, sprime);
6249 fprintf (dump_file, " in all uses of ");
6250 print_gimple_stmt (dump_file, stmt, 0);
6253 eliminations++;
6254 return;
6257 /* If this is an assignment from our leader (which
6258 happens in the case the value-number is a constant)
6259 then there is nothing to do. Likewise if we run into
6260 inserted code that needed a conversion because of
6261 our type-agnostic value-numbering of loads. */
6262 if ((gimple_assign_single_p (stmt)
6263 || (is_gimple_assign (stmt)
6264 && (CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt))
6265 || gimple_assign_rhs_code (stmt) == VIEW_CONVERT_EXPR)))
6266 && sprime == gimple_assign_rhs1 (stmt))
6267 return;
6269 /* Else replace its RHS. */
6270 if (dump_file && (dump_flags & TDF_DETAILS))
6272 fprintf (dump_file, "Replaced ");
6273 print_gimple_expr (dump_file, stmt, 0);
6274 fprintf (dump_file, " with ");
6275 print_generic_expr (dump_file, sprime);
6276 fprintf (dump_file, " in ");
6277 print_gimple_stmt (dump_file, stmt, 0);
6279 eliminations++;
6281 bool can_make_abnormal_goto = (is_gimple_call (stmt)
6282 && stmt_can_make_abnormal_goto (stmt));
6283 gimple *orig_stmt = stmt;
6284 if (!useless_type_conversion_p (TREE_TYPE (lhs),
6285 TREE_TYPE (sprime)))
6287 /* We preserve conversions to but not from function or method
6288 types. This asymmetry makes it necessary to re-instantiate
6289 conversions here. */
6290 if (POINTER_TYPE_P (TREE_TYPE (lhs))
6291 && FUNC_OR_METHOD_TYPE_P (TREE_TYPE (TREE_TYPE (lhs))))
6292 sprime = fold_convert (TREE_TYPE (lhs), sprime);
6293 else
6294 gcc_unreachable ();
6296 tree vdef = gimple_vdef (stmt);
6297 tree vuse = gimple_vuse (stmt);
6298 propagate_tree_value_into_stmt (gsi, sprime);
6299 stmt = gsi_stmt (*gsi);
6300 update_stmt (stmt);
6301 /* In case the VDEF on the original stmt was released, value-number
6302 it to the VUSE. This is to make vuse_ssa_val able to skip
6303 released virtual operands. */
6304 if (vdef != gimple_vdef (stmt))
6306 gcc_assert (SSA_NAME_IN_FREE_LIST (vdef));
6307 VN_INFO (vdef)->valnum = vuse;
6310 /* If we removed EH side-effects from the statement, clean
6311 its EH information. */
6312 if (maybe_clean_or_replace_eh_stmt (orig_stmt, stmt))
6314 bitmap_set_bit (need_eh_cleanup,
6315 gimple_bb (stmt)->index);
6316 if (dump_file && (dump_flags & TDF_DETAILS))
6317 fprintf (dump_file, " Removed EH side-effects.\n");
6320 /* Likewise for AB side-effects. */
6321 if (can_make_abnormal_goto
6322 && !stmt_can_make_abnormal_goto (stmt))
6324 bitmap_set_bit (need_ab_cleanup,
6325 gimple_bb (stmt)->index);
6326 if (dump_file && (dump_flags & TDF_DETAILS))
6327 fprintf (dump_file, " Removed AB side-effects.\n");
6330 return;
6334 /* If the statement is a scalar store, see if the expression
6335 has the same value number as its rhs. If so, the store is
6336 dead. */
6337 if (gimple_assign_single_p (stmt)
6338 && !gimple_has_volatile_ops (stmt)
6339 && !is_gimple_reg (gimple_assign_lhs (stmt))
6340 && (TREE_CODE (gimple_assign_rhs1 (stmt)) == SSA_NAME
6341 || is_gimple_min_invariant (gimple_assign_rhs1 (stmt))))
6343 tree rhs = gimple_assign_rhs1 (stmt);
6344 vn_reference_t vnresult;
6345 /* ??? gcc.dg/torture/pr91445.c shows that we lookup a boolean
6346 typed load of a byte known to be 0x11 as 1 so a store of
6347 a boolean 1 is detected as redundant. Because of this we
6348 have to make sure to lookup with a ref where its size
6349 matches the precision. */
6350 tree lookup_lhs = lhs;
6351 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs))
6352 && (TREE_CODE (lhs) != COMPONENT_REF
6353 || !DECL_BIT_FIELD_TYPE (TREE_OPERAND (lhs, 1)))
6354 && !type_has_mode_precision_p (TREE_TYPE (lhs)))
6356 if (TREE_CODE (lhs) == COMPONENT_REF
6357 || TREE_CODE (lhs) == MEM_REF)
6359 tree ltype = build_nonstandard_integer_type
6360 (TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (lhs))),
6361 TYPE_UNSIGNED (TREE_TYPE (lhs)));
6362 if (TREE_CODE (lhs) == COMPONENT_REF)
6364 tree foff = component_ref_field_offset (lhs);
6365 tree f = TREE_OPERAND (lhs, 1);
6366 if (!poly_int_tree_p (foff))
6367 lookup_lhs = NULL_TREE;
6368 else
6369 lookup_lhs = build3 (BIT_FIELD_REF, ltype,
6370 TREE_OPERAND (lhs, 0),
6371 TYPE_SIZE (TREE_TYPE (lhs)),
6372 bit_from_pos
6373 (foff, DECL_FIELD_BIT_OFFSET (f)));
6375 else
6376 lookup_lhs = build2 (MEM_REF, ltype,
6377 TREE_OPERAND (lhs, 0),
6378 TREE_OPERAND (lhs, 1));
6380 else
6381 lookup_lhs = NULL_TREE;
6383 tree val = NULL_TREE;
6384 if (lookup_lhs)
6385 val = vn_reference_lookup (lookup_lhs, gimple_vuse (stmt),
6386 VN_WALKREWRITE, &vnresult, false);
6387 if (TREE_CODE (rhs) == SSA_NAME)
6388 rhs = VN_INFO (rhs)->valnum;
6389 if (val
6390 && (operand_equal_p (val, rhs, 0)
6391 /* Due to the bitfield lookups above we can get bit
6392 interpretations of the same RHS as values here. Those
6393 are redundant as well. */
6394 || (TREE_CODE (val) == SSA_NAME
6395 && gimple_assign_single_p (SSA_NAME_DEF_STMT (val))
6396 && (val = gimple_assign_rhs1 (SSA_NAME_DEF_STMT (val)))
6397 && TREE_CODE (val) == VIEW_CONVERT_EXPR
6398 && TREE_OPERAND (val, 0) == rhs)))
6400 /* We can only remove the later store if the former aliases
6401 at least all accesses the later one does or if the store
6402 was to readonly memory storing the same value. */
6403 ao_ref lhs_ref;
6404 ao_ref_init (&lhs_ref, lhs);
6405 alias_set_type set = ao_ref_alias_set (&lhs_ref);
6406 alias_set_type base_set = ao_ref_base_alias_set (&lhs_ref);
6407 if (! vnresult
6408 || ((vnresult->set == set
6409 || alias_set_subset_of (set, vnresult->set))
6410 && (vnresult->base_set == base_set
6411 || alias_set_subset_of (base_set, vnresult->base_set))))
6413 if (dump_file && (dump_flags & TDF_DETAILS))
6415 fprintf (dump_file, "Deleted redundant store ");
6416 print_gimple_stmt (dump_file, stmt, 0);
6419 /* Queue stmt for removal. */
6420 to_remove.safe_push (stmt);
6421 return;
6426 /* If this is a control statement value numbering left edges
6427 unexecuted on force the condition in a way consistent with
6428 that. */
6429 if (gcond *cond = dyn_cast <gcond *> (stmt))
6431 if ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE)
6432 ^ (EDGE_SUCC (b, 1)->flags & EDGE_EXECUTABLE))
6434 if (dump_file && (dump_flags & TDF_DETAILS))
6436 fprintf (dump_file, "Removing unexecutable edge from ");
6437 print_gimple_stmt (dump_file, stmt, 0);
6439 if (((EDGE_SUCC (b, 0)->flags & EDGE_TRUE_VALUE) != 0)
6440 == ((EDGE_SUCC (b, 0)->flags & EDGE_EXECUTABLE) != 0))
6441 gimple_cond_make_true (cond);
6442 else
6443 gimple_cond_make_false (cond);
6444 update_stmt (cond);
6445 el_todo |= TODO_cleanup_cfg;
6446 return;
6450 bool can_make_abnormal_goto = stmt_can_make_abnormal_goto (stmt);
6451 bool was_noreturn = (is_gimple_call (stmt)
6452 && gimple_call_noreturn_p (stmt));
6453 tree vdef = gimple_vdef (stmt);
6454 tree vuse = gimple_vuse (stmt);
6456 /* If we didn't replace the whole stmt (or propagate the result
6457 into all uses), replace all uses on this stmt with their
6458 leaders. */
6459 bool modified = false;
6460 use_operand_p use_p;
6461 ssa_op_iter iter;
6462 FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
6464 tree use = USE_FROM_PTR (use_p);
6465 /* ??? The call code above leaves stmt operands un-updated. */
6466 if (TREE_CODE (use) != SSA_NAME)
6467 continue;
6468 tree sprime;
6469 if (SSA_NAME_IS_DEFAULT_DEF (use))
6470 /* ??? For default defs BB shouldn't matter, but we have to
6471 solve the inconsistency between rpo eliminate and
6472 dom eliminate avail valueization first. */
6473 sprime = eliminate_avail (b, use);
6474 else
6475 /* Look for sth available at the definition block of the argument.
6476 This avoids inconsistencies between availability there which
6477 decides if the stmt can be removed and availability at the
6478 use site. The SSA property ensures that things available
6479 at the definition are also available at uses. */
6480 sprime = eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (use)), use);
6481 if (sprime && sprime != use
6482 && may_propagate_copy (use, sprime)
6483 /* We substitute into debug stmts to avoid excessive
6484 debug temporaries created by removed stmts, but we need
6485 to avoid doing so for inserted sprimes as we never want
6486 to create debug temporaries for them. */
6487 && (!inserted_exprs
6488 || TREE_CODE (sprime) != SSA_NAME
6489 || !is_gimple_debug (stmt)
6490 || !bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (sprime))))
6492 propagate_value (use_p, sprime);
6493 modified = true;
6497 /* Fold the stmt if modified, this canonicalizes MEM_REFs we propagated
6498 into which is a requirement for the IPA devirt machinery. */
6499 gimple *old_stmt = stmt;
6500 if (modified)
6502 /* If a formerly non-invariant ADDR_EXPR is turned into an
6503 invariant one it was on a separate stmt. */
6504 if (gimple_assign_single_p (stmt)
6505 && TREE_CODE (gimple_assign_rhs1 (stmt)) == ADDR_EXPR)
6506 recompute_tree_invariant_for_addr_expr (gimple_assign_rhs1 (stmt));
6507 gimple_stmt_iterator prev = *gsi;
6508 gsi_prev (&prev);
6509 if (fold_stmt (gsi, follow_all_ssa_edges))
6511 /* fold_stmt may have created new stmts inbetween
6512 the previous stmt and the folded stmt. Mark
6513 all defs created there as varying to not confuse
6514 the SCCVN machinery as we're using that even during
6515 elimination. */
6516 if (gsi_end_p (prev))
6517 prev = gsi_start_bb (b);
6518 else
6519 gsi_next (&prev);
6520 if (gsi_stmt (prev) != gsi_stmt (*gsi))
6523 tree def;
6524 ssa_op_iter dit;
6525 FOR_EACH_SSA_TREE_OPERAND (def, gsi_stmt (prev),
6526 dit, SSA_OP_ALL_DEFS)
6527 /* As existing DEFs may move between stmts
6528 only process new ones. */
6529 if (! has_VN_INFO (def))
6531 vn_ssa_aux_t vn_info = VN_INFO (def);
6532 vn_info->valnum = def;
6533 vn_info->visited = true;
6535 if (gsi_stmt (prev) == gsi_stmt (*gsi))
6536 break;
6537 gsi_next (&prev);
6539 while (1);
6541 stmt = gsi_stmt (*gsi);
6542 /* In case we folded the stmt away schedule the NOP for removal. */
6543 if (gimple_nop_p (stmt))
6544 to_remove.safe_push (stmt);
6547 /* Visit indirect calls and turn them into direct calls if
6548 possible using the devirtualization machinery. Do this before
6549 checking for required EH/abnormal/noreturn cleanup as devird
6550 may expose more of those. */
6551 if (gcall *call_stmt = dyn_cast <gcall *> (stmt))
6553 tree fn = gimple_call_fn (call_stmt);
6554 if (fn
6555 && flag_devirtualize
6556 && virtual_method_call_p (fn))
6558 tree otr_type = obj_type_ref_class (fn);
6559 unsigned HOST_WIDE_INT otr_tok
6560 = tree_to_uhwi (OBJ_TYPE_REF_TOKEN (fn));
6561 tree instance;
6562 ipa_polymorphic_call_context context (current_function_decl,
6563 fn, stmt, &instance);
6564 context.get_dynamic_type (instance, OBJ_TYPE_REF_OBJECT (fn),
6565 otr_type, stmt, NULL);
6566 bool final;
6567 vec <cgraph_node *> targets
6568 = possible_polymorphic_call_targets (obj_type_ref_class (fn),
6569 otr_tok, context, &final);
6570 if (dump_file)
6571 dump_possible_polymorphic_call_targets (dump_file,
6572 obj_type_ref_class (fn),
6573 otr_tok, context);
6574 if (final && targets.length () <= 1 && dbg_cnt (devirt))
6576 tree fn;
6577 if (targets.length () == 1)
6578 fn = targets[0]->decl;
6579 else
6580 fn = builtin_decl_implicit (BUILT_IN_UNREACHABLE);
6581 if (dump_enabled_p ())
6583 dump_printf_loc (MSG_OPTIMIZED_LOCATIONS, stmt,
6584 "converting indirect call to "
6585 "function %s\n",
6586 lang_hooks.decl_printable_name (fn, 2));
6588 gimple_call_set_fndecl (call_stmt, fn);
6589 /* If changing the call to __builtin_unreachable
6590 or similar noreturn function, adjust gimple_call_fntype
6591 too. */
6592 if (gimple_call_noreturn_p (call_stmt)
6593 && VOID_TYPE_P (TREE_TYPE (TREE_TYPE (fn)))
6594 && TYPE_ARG_TYPES (TREE_TYPE (fn))
6595 && (TREE_VALUE (TYPE_ARG_TYPES (TREE_TYPE (fn)))
6596 == void_type_node))
6597 gimple_call_set_fntype (call_stmt, TREE_TYPE (fn));
6598 maybe_remove_unused_call_args (cfun, call_stmt);
6599 modified = true;
6604 if (modified)
6606 /* When changing a call into a noreturn call, cfg cleanup
6607 is needed to fix up the noreturn call. */
6608 if (!was_noreturn
6609 && is_gimple_call (stmt) && gimple_call_noreturn_p (stmt))
6610 to_fixup.safe_push (stmt);
6611 /* When changing a condition or switch into one we know what
6612 edge will be executed, schedule a cfg cleanup. */
6613 if ((gimple_code (stmt) == GIMPLE_COND
6614 && (gimple_cond_true_p (as_a <gcond *> (stmt))
6615 || gimple_cond_false_p (as_a <gcond *> (stmt))))
6616 || (gimple_code (stmt) == GIMPLE_SWITCH
6617 && TREE_CODE (gimple_switch_index
6618 (as_a <gswitch *> (stmt))) == INTEGER_CST))
6619 el_todo |= TODO_cleanup_cfg;
6620 /* If we removed EH side-effects from the statement, clean
6621 its EH information. */
6622 if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
6624 bitmap_set_bit (need_eh_cleanup,
6625 gimple_bb (stmt)->index);
6626 if (dump_file && (dump_flags & TDF_DETAILS))
6627 fprintf (dump_file, " Removed EH side-effects.\n");
6629 /* Likewise for AB side-effects. */
6630 if (can_make_abnormal_goto
6631 && !stmt_can_make_abnormal_goto (stmt))
6633 bitmap_set_bit (need_ab_cleanup,
6634 gimple_bb (stmt)->index);
6635 if (dump_file && (dump_flags & TDF_DETAILS))
6636 fprintf (dump_file, " Removed AB side-effects.\n");
6638 update_stmt (stmt);
6639 /* In case the VDEF on the original stmt was released, value-number
6640 it to the VUSE. This is to make vuse_ssa_val able to skip
6641 released virtual operands. */
6642 if (vdef && SSA_NAME_IN_FREE_LIST (vdef))
6643 VN_INFO (vdef)->valnum = vuse;
6646 /* Make new values available - for fully redundant LHS we
6647 continue with the next stmt above and skip this. */
6648 def_operand_p defp;
6649 FOR_EACH_SSA_DEF_OPERAND (defp, stmt, iter, SSA_OP_DEF)
6650 eliminate_push_avail (b, DEF_FROM_PTR (defp));
6653 /* Perform elimination for the basic-block B during the domwalk. */
6655 edge
6656 eliminate_dom_walker::before_dom_children (basic_block b)
6658 /* Mark new bb. */
6659 avail_stack.safe_push (NULL_TREE);
6661 /* Skip unreachable blocks marked unreachable during the SCCVN domwalk. */
6662 if (!(b->flags & BB_EXECUTABLE))
6663 return NULL;
6665 vn_context_bb = b;
6667 for (gphi_iterator gsi = gsi_start_phis (b); !gsi_end_p (gsi);)
6669 gphi *phi = gsi.phi ();
6670 tree res = PHI_RESULT (phi);
6672 if (virtual_operand_p (res))
6674 gsi_next (&gsi);
6675 continue;
6678 tree sprime = eliminate_avail (b, res);
6679 if (sprime
6680 && sprime != res)
6682 if (dump_file && (dump_flags & TDF_DETAILS))
6684 fprintf (dump_file, "Replaced redundant PHI node defining ");
6685 print_generic_expr (dump_file, res);
6686 fprintf (dump_file, " with ");
6687 print_generic_expr (dump_file, sprime);
6688 fprintf (dump_file, "\n");
6691 /* If we inserted this PHI node ourself, it's not an elimination. */
6692 if (! inserted_exprs
6693 || ! bitmap_bit_p (inserted_exprs, SSA_NAME_VERSION (res)))
6694 eliminations++;
6696 /* If we will propagate into all uses don't bother to do
6697 anything. */
6698 if (may_propagate_copy (res, sprime))
6700 /* Mark the PHI for removal. */
6701 to_remove.safe_push (phi);
6702 gsi_next (&gsi);
6703 continue;
6706 remove_phi_node (&gsi, false);
6708 if (!useless_type_conversion_p (TREE_TYPE (res), TREE_TYPE (sprime)))
6709 sprime = fold_convert (TREE_TYPE (res), sprime);
6710 gimple *stmt = gimple_build_assign (res, sprime);
6711 gimple_stmt_iterator gsi2 = gsi_after_labels (b);
6712 gsi_insert_before (&gsi2, stmt, GSI_NEW_STMT);
6713 continue;
6716 eliminate_push_avail (b, res);
6717 gsi_next (&gsi);
6720 for (gimple_stmt_iterator gsi = gsi_start_bb (b);
6721 !gsi_end_p (gsi);
6722 gsi_next (&gsi))
6723 eliminate_stmt (b, &gsi);
6725 /* Replace destination PHI arguments. */
6726 edge_iterator ei;
6727 edge e;
6728 FOR_EACH_EDGE (e, ei, b->succs)
6729 if (e->flags & EDGE_EXECUTABLE)
6730 for (gphi_iterator gsi = gsi_start_phis (e->dest);
6731 !gsi_end_p (gsi);
6732 gsi_next (&gsi))
6734 gphi *phi = gsi.phi ();
6735 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
6736 tree arg = USE_FROM_PTR (use_p);
6737 if (TREE_CODE (arg) != SSA_NAME
6738 || virtual_operand_p (arg))
6739 continue;
6740 tree sprime = eliminate_avail (b, arg);
6741 if (sprime && may_propagate_copy (arg, sprime))
6742 propagate_value (use_p, sprime);
6745 vn_context_bb = NULL;
6747 return NULL;
6750 /* Make no longer available leaders no longer available. */
6752 void
6753 eliminate_dom_walker::after_dom_children (basic_block)
6755 tree entry;
6756 while ((entry = avail_stack.pop ()) != NULL_TREE)
6758 tree valnum = VN_INFO (entry)->valnum;
6759 tree old = avail[SSA_NAME_VERSION (valnum)];
6760 if (old == entry)
6761 avail[SSA_NAME_VERSION (valnum)] = NULL_TREE;
6762 else
6763 avail[SSA_NAME_VERSION (valnum)] = entry;
6767 /* Remove queued stmts and perform delayed cleanups. */
6769 unsigned
6770 eliminate_dom_walker::eliminate_cleanup (bool region_p)
6772 statistics_counter_event (cfun, "Eliminated", eliminations);
6773 statistics_counter_event (cfun, "Insertions", insertions);
6775 /* We cannot remove stmts during BB walk, especially not release SSA
6776 names there as this confuses the VN machinery. The stmts ending
6777 up in to_remove are either stores or simple copies.
6778 Remove stmts in reverse order to make debug stmt creation possible. */
6779 while (!to_remove.is_empty ())
6781 bool do_release_defs = true;
6782 gimple *stmt = to_remove.pop ();
6784 /* When we are value-numbering a region we do not require exit PHIs to
6785 be present so we have to make sure to deal with uses outside of the
6786 region of stmts that we thought are eliminated.
6787 ??? Note we may be confused by uses in dead regions we didn't run
6788 elimination on. Rather than checking individual uses we accept
6789 dead copies to be generated here (gcc.c-torture/execute/20060905-1.c
6790 contains such example). */
6791 if (region_p)
6793 if (gphi *phi = dyn_cast <gphi *> (stmt))
6795 tree lhs = gimple_phi_result (phi);
6796 if (!has_zero_uses (lhs))
6798 if (dump_file && (dump_flags & TDF_DETAILS))
6799 fprintf (dump_file, "Keeping eliminated stmt live "
6800 "as copy because of out-of-region uses\n");
6801 tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
6802 gimple *copy = gimple_build_assign (lhs, sprime);
6803 gimple_stmt_iterator gsi
6804 = gsi_after_labels (gimple_bb (stmt));
6805 gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
6806 do_release_defs = false;
6809 else if (tree lhs = gimple_get_lhs (stmt))
6810 if (TREE_CODE (lhs) == SSA_NAME
6811 && !has_zero_uses (lhs))
6813 if (dump_file && (dump_flags & TDF_DETAILS))
6814 fprintf (dump_file, "Keeping eliminated stmt live "
6815 "as copy because of out-of-region uses\n");
6816 tree sprime = eliminate_avail (gimple_bb (stmt), lhs);
6817 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
6818 if (is_gimple_assign (stmt))
6820 gimple_assign_set_rhs_from_tree (&gsi, sprime);
6821 stmt = gsi_stmt (gsi);
6822 update_stmt (stmt);
6823 if (maybe_clean_or_replace_eh_stmt (stmt, stmt))
6824 bitmap_set_bit (need_eh_cleanup, gimple_bb (stmt)->index);
6825 continue;
6827 else
6829 gimple *copy = gimple_build_assign (lhs, sprime);
6830 gsi_insert_before (&gsi, copy, GSI_SAME_STMT);
6831 do_release_defs = false;
6836 if (dump_file && (dump_flags & TDF_DETAILS))
6838 fprintf (dump_file, "Removing dead stmt ");
6839 print_gimple_stmt (dump_file, stmt, 0, TDF_NONE);
6842 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
6843 if (gimple_code (stmt) == GIMPLE_PHI)
6844 remove_phi_node (&gsi, do_release_defs);
6845 else
6847 basic_block bb = gimple_bb (stmt);
6848 unlink_stmt_vdef (stmt);
6849 if (gsi_remove (&gsi, true))
6850 bitmap_set_bit (need_eh_cleanup, bb->index);
6851 if (is_gimple_call (stmt) && stmt_can_make_abnormal_goto (stmt))
6852 bitmap_set_bit (need_ab_cleanup, bb->index);
6853 if (do_release_defs)
6854 release_defs (stmt);
6857 /* Removing a stmt may expose a forwarder block. */
6858 el_todo |= TODO_cleanup_cfg;
6861 /* Fixup stmts that became noreturn calls. This may require splitting
6862 blocks and thus isn't possible during the dominator walk. Do this
6863 in reverse order so we don't inadvertedly remove a stmt we want to
6864 fixup by visiting a dominating now noreturn call first. */
6865 while (!to_fixup.is_empty ())
6867 gimple *stmt = to_fixup.pop ();
6869 if (dump_file && (dump_flags & TDF_DETAILS))
6871 fprintf (dump_file, "Fixing up noreturn call ");
6872 print_gimple_stmt (dump_file, stmt, 0);
6875 if (fixup_noreturn_call (stmt))
6876 el_todo |= TODO_cleanup_cfg;
6879 bool do_eh_cleanup = !bitmap_empty_p (need_eh_cleanup);
6880 bool do_ab_cleanup = !bitmap_empty_p (need_ab_cleanup);
6882 if (do_eh_cleanup)
6883 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
6885 if (do_ab_cleanup)
6886 gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
6888 if (do_eh_cleanup || do_ab_cleanup)
6889 el_todo |= TODO_cleanup_cfg;
6891 return el_todo;
6894 /* Eliminate fully redundant computations. */
6896 unsigned
6897 eliminate_with_rpo_vn (bitmap inserted_exprs)
6899 eliminate_dom_walker walker (CDI_DOMINATORS, inserted_exprs);
6901 eliminate_dom_walker *saved_rpo_avail = rpo_avail;
6902 rpo_avail = &walker;
6903 walker.walk (cfun->cfg->x_entry_block_ptr);
6904 rpo_avail = saved_rpo_avail;
6906 return walker.eliminate_cleanup ();
6909 static unsigned
6910 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
6911 bool iterate, bool eliminate);
6913 void
6914 run_rpo_vn (vn_lookup_kind kind)
6916 default_vn_walk_kind = kind;
6917 do_rpo_vn (cfun, NULL, NULL, true, false);
6919 /* ??? Prune requirement of these. */
6920 constant_to_value_id = new hash_table<vn_constant_hasher> (23);
6922 /* Initialize the value ids and prune out remaining VN_TOPs
6923 from dead code. */
6924 tree name;
6925 unsigned i;
6926 FOR_EACH_SSA_NAME (i, name, cfun)
6928 vn_ssa_aux_t info = VN_INFO (name);
6929 if (!info->visited
6930 || info->valnum == VN_TOP)
6931 info->valnum = name;
6932 if (info->valnum == name)
6933 info->value_id = get_next_value_id ();
6934 else if (is_gimple_min_invariant (info->valnum))
6935 info->value_id = get_or_alloc_constant_value_id (info->valnum);
6938 /* Propagate. */
6939 FOR_EACH_SSA_NAME (i, name, cfun)
6941 vn_ssa_aux_t info = VN_INFO (name);
6942 if (TREE_CODE (info->valnum) == SSA_NAME
6943 && info->valnum != name
6944 && info->value_id != VN_INFO (info->valnum)->value_id)
6945 info->value_id = VN_INFO (info->valnum)->value_id;
6948 set_hashtable_value_ids ();
6950 if (dump_file && (dump_flags & TDF_DETAILS))
6952 fprintf (dump_file, "Value numbers:\n");
6953 FOR_EACH_SSA_NAME (i, name, cfun)
6955 if (VN_INFO (name)->visited
6956 && SSA_VAL (name) != name)
6958 print_generic_expr (dump_file, name);
6959 fprintf (dump_file, " = ");
6960 print_generic_expr (dump_file, SSA_VAL (name));
6961 fprintf (dump_file, " (%04d)\n", VN_INFO (name)->value_id);
6967 /* Free VN associated data structures. */
6969 void
6970 free_rpo_vn (void)
6972 free_vn_table (valid_info);
6973 XDELETE (valid_info);
6974 obstack_free (&vn_tables_obstack, NULL);
6975 obstack_free (&vn_tables_insert_obstack, NULL);
6977 vn_ssa_aux_iterator_type it;
6978 vn_ssa_aux_t info;
6979 FOR_EACH_HASH_TABLE_ELEMENT (*vn_ssa_aux_hash, info, vn_ssa_aux_t, it)
6980 if (info->needs_insertion)
6981 release_ssa_name (info->name);
6982 obstack_free (&vn_ssa_aux_obstack, NULL);
6983 delete vn_ssa_aux_hash;
6985 delete constant_to_value_id;
6986 constant_to_value_id = NULL;
6989 /* Hook for maybe_push_res_to_seq, lookup the expression in the VN tables. */
6991 static tree
6992 vn_lookup_simplify_result (gimple_match_op *res_op)
6994 if (!res_op->code.is_tree_code ())
6995 return NULL_TREE;
6996 tree *ops = res_op->ops;
6997 unsigned int length = res_op->num_ops;
6998 if (res_op->code == CONSTRUCTOR
6999 /* ??? We're arriving here with SCCVNs view, decomposed CONSTRUCTOR
7000 and GIMPLEs / match-and-simplifies, CONSTRUCTOR as GENERIC tree. */
7001 && TREE_CODE (res_op->ops[0]) == CONSTRUCTOR)
7003 length = CONSTRUCTOR_NELTS (res_op->ops[0]);
7004 ops = XALLOCAVEC (tree, length);
7005 for (unsigned i = 0; i < length; ++i)
7006 ops[i] = CONSTRUCTOR_ELT (res_op->ops[0], i)->value;
7008 vn_nary_op_t vnresult = NULL;
7009 tree res = vn_nary_op_lookup_pieces (length, (tree_code) res_op->code,
7010 res_op->type, ops, &vnresult);
7011 /* If this is used from expression simplification make sure to
7012 return an available expression. */
7013 if (res && TREE_CODE (res) == SSA_NAME && mprts_hook && rpo_avail)
7014 res = rpo_avail->eliminate_avail (vn_context_bb, res);
7015 return res;
7018 /* Return a leader for OPs value that is valid at BB. */
7020 tree
7021 rpo_elim::eliminate_avail (basic_block bb, tree op)
7023 bool visited;
7024 tree valnum = SSA_VAL (op, &visited);
7025 /* If we didn't visit OP then it must be defined outside of the
7026 region we process and also dominate it. So it is available. */
7027 if (!visited)
7028 return op;
7029 if (TREE_CODE (valnum) == SSA_NAME)
7031 if (SSA_NAME_IS_DEFAULT_DEF (valnum))
7032 return valnum;
7033 vn_avail *av = VN_INFO (valnum)->avail;
7034 if (!av)
7035 return NULL_TREE;
7036 if (av->location == bb->index)
7037 /* On tramp3d 90% of the cases are here. */
7038 return ssa_name (av->leader);
7041 basic_block abb = BASIC_BLOCK_FOR_FN (cfun, av->location);
7042 /* ??? During elimination we have to use availability at the
7043 definition site of a use we try to replace. This
7044 is required to not run into inconsistencies because
7045 of dominated_by_p_w_unex behavior and removing a definition
7046 while not replacing all uses.
7047 ??? We could try to consistently walk dominators
7048 ignoring non-executable regions. The nearest common
7049 dominator of bb and abb is where we can stop walking. We
7050 may also be able to "pre-compute" (bits of) the next immediate
7051 (non-)dominator during the RPO walk when marking edges as
7052 executable. */
7053 if (dominated_by_p_w_unex (bb, abb, true))
7055 tree leader = ssa_name (av->leader);
7056 /* Prevent eliminations that break loop-closed SSA. */
7057 if (loops_state_satisfies_p (LOOP_CLOSED_SSA)
7058 && ! SSA_NAME_IS_DEFAULT_DEF (leader)
7059 && ! flow_bb_inside_loop_p (gimple_bb (SSA_NAME_DEF_STMT
7060 (leader))->loop_father,
7061 bb))
7062 return NULL_TREE;
7063 if (dump_file && (dump_flags & TDF_DETAILS))
7065 print_generic_expr (dump_file, leader);
7066 fprintf (dump_file, " is available for ");
7067 print_generic_expr (dump_file, valnum);
7068 fprintf (dump_file, "\n");
7070 /* On tramp3d 99% of the _remaining_ cases succeed at
7071 the first enty. */
7072 return leader;
7074 /* ??? Can we somehow skip to the immediate dominator
7075 RPO index (bb_to_rpo)? Again, maybe not worth, on
7076 tramp3d the worst number of elements in the vector is 9. */
7077 av = av->next;
7079 while (av);
7081 else if (valnum != VN_TOP)
7082 /* valnum is is_gimple_min_invariant. */
7083 return valnum;
7084 return NULL_TREE;
7087 /* Make LEADER a leader for its value at BB. */
7089 void
7090 rpo_elim::eliminate_push_avail (basic_block bb, tree leader)
7092 tree valnum = VN_INFO (leader)->valnum;
7093 if (valnum == VN_TOP
7094 || is_gimple_min_invariant (valnum))
7095 return;
7096 if (dump_file && (dump_flags & TDF_DETAILS))
7098 fprintf (dump_file, "Making available beyond BB%d ", bb->index);
7099 print_generic_expr (dump_file, leader);
7100 fprintf (dump_file, " for value ");
7101 print_generic_expr (dump_file, valnum);
7102 fprintf (dump_file, "\n");
7104 vn_ssa_aux_t value = VN_INFO (valnum);
7105 vn_avail *av;
7106 if (m_avail_freelist)
7108 av = m_avail_freelist;
7109 m_avail_freelist = m_avail_freelist->next;
7111 else
7112 av = XOBNEW (&vn_ssa_aux_obstack, vn_avail);
7113 av->location = bb->index;
7114 av->leader = SSA_NAME_VERSION (leader);
7115 av->next = value->avail;
7116 av->next_undo = last_pushed_avail;
7117 last_pushed_avail = value;
7118 value->avail = av;
7121 /* Valueization hook for RPO VN plus required state. */
7123 tree
7124 rpo_vn_valueize (tree name)
7126 if (TREE_CODE (name) == SSA_NAME)
7128 vn_ssa_aux_t val = VN_INFO (name);
7129 if (val)
7131 tree tem = val->valnum;
7132 if (tem != VN_TOP && tem != name)
7134 if (TREE_CODE (tem) != SSA_NAME)
7135 return tem;
7136 /* For all values we only valueize to an available leader
7137 which means we can use SSA name info without restriction. */
7138 tem = rpo_avail->eliminate_avail (vn_context_bb, tem);
7139 if (tem)
7140 return tem;
7144 return name;
7147 /* Insert on PRED_E predicates derived from CODE OPS being true besides the
7148 inverted condition. */
7150 static void
7151 insert_related_predicates_on_edge (enum tree_code code, tree *ops, edge pred_e)
7153 switch (code)
7155 case LT_EXPR:
7156 /* a < b -> a {!,<}= b */
7157 vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
7158 ops, boolean_true_node, 0, pred_e);
7159 vn_nary_op_insert_pieces_predicated (2, LE_EXPR, boolean_type_node,
7160 ops, boolean_true_node, 0, pred_e);
7161 /* a < b -> ! a {>,=} b */
7162 vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
7163 ops, boolean_false_node, 0, pred_e);
7164 vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
7165 ops, boolean_false_node, 0, pred_e);
7166 break;
7167 case GT_EXPR:
7168 /* a > b -> a {!,>}= b */
7169 vn_nary_op_insert_pieces_predicated (2, NE_EXPR, boolean_type_node,
7170 ops, boolean_true_node, 0, pred_e);
7171 vn_nary_op_insert_pieces_predicated (2, GE_EXPR, boolean_type_node,
7172 ops, boolean_true_node, 0, pred_e);
7173 /* a > b -> ! a {<,=} b */
7174 vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
7175 ops, boolean_false_node, 0, pred_e);
7176 vn_nary_op_insert_pieces_predicated (2, EQ_EXPR, boolean_type_node,
7177 ops, boolean_false_node, 0, pred_e);
7178 break;
7179 case EQ_EXPR:
7180 /* a == b -> ! a {<,>} b */
7181 vn_nary_op_insert_pieces_predicated (2, LT_EXPR, boolean_type_node,
7182 ops, boolean_false_node, 0, pred_e);
7183 vn_nary_op_insert_pieces_predicated (2, GT_EXPR, boolean_type_node,
7184 ops, boolean_false_node, 0, pred_e);
7185 break;
7186 case LE_EXPR:
7187 case GE_EXPR:
7188 case NE_EXPR:
7189 /* Nothing besides inverted condition. */
7190 break;
7191 default:;
7195 /* Main stmt worker for RPO VN, process BB. */
7197 static unsigned
7198 process_bb (rpo_elim &avail, basic_block bb,
7199 bool bb_visited, bool iterate_phis, bool iterate, bool eliminate,
7200 bool do_region, bitmap exit_bbs, bool skip_phis)
7202 unsigned todo = 0;
7203 edge_iterator ei;
7204 edge e;
7206 vn_context_bb = bb;
7208 /* If we are in loop-closed SSA preserve this state. This is
7209 relevant when called on regions from outside of FRE/PRE. */
7210 bool lc_phi_nodes = false;
7211 if (!skip_phis
7212 && loops_state_satisfies_p (LOOP_CLOSED_SSA))
7213 FOR_EACH_EDGE (e, ei, bb->preds)
7214 if (e->src->loop_father != e->dest->loop_father
7215 && flow_loop_nested_p (e->dest->loop_father,
7216 e->src->loop_father))
7218 lc_phi_nodes = true;
7219 break;
7222 /* When we visit a loop header substitute into loop info. */
7223 if (!iterate && eliminate && bb->loop_father->header == bb)
7225 /* Keep fields in sync with substitute_in_loop_info. */
7226 if (bb->loop_father->nb_iterations)
7227 bb->loop_father->nb_iterations
7228 = simplify_replace_tree (bb->loop_father->nb_iterations,
7229 NULL_TREE, NULL_TREE, &vn_valueize_for_srt);
7232 /* Value-number all defs in the basic-block. */
7233 if (!skip_phis)
7234 for (gphi_iterator gsi = gsi_start_phis (bb); !gsi_end_p (gsi);
7235 gsi_next (&gsi))
7237 gphi *phi = gsi.phi ();
7238 tree res = PHI_RESULT (phi);
7239 vn_ssa_aux_t res_info = VN_INFO (res);
7240 if (!bb_visited)
7242 gcc_assert (!res_info->visited);
7243 res_info->valnum = VN_TOP;
7244 res_info->visited = true;
7247 /* When not iterating force backedge values to varying. */
7248 visit_stmt (phi, !iterate_phis);
7249 if (virtual_operand_p (res))
7250 continue;
7252 /* Eliminate */
7253 /* The interesting case is gcc.dg/tree-ssa/pr22230.c for correctness
7254 how we handle backedges and availability.
7255 And gcc.dg/tree-ssa/ssa-sccvn-2.c for optimization. */
7256 tree val = res_info->valnum;
7257 if (res != val && !iterate && eliminate)
7259 if (tree leader = avail.eliminate_avail (bb, res))
7261 if (leader != res
7262 /* Preserve loop-closed SSA form. */
7263 && (! lc_phi_nodes
7264 || is_gimple_min_invariant (leader)))
7266 if (dump_file && (dump_flags & TDF_DETAILS))
7268 fprintf (dump_file, "Replaced redundant PHI node "
7269 "defining ");
7270 print_generic_expr (dump_file, res);
7271 fprintf (dump_file, " with ");
7272 print_generic_expr (dump_file, leader);
7273 fprintf (dump_file, "\n");
7275 avail.eliminations++;
7277 if (may_propagate_copy (res, leader))
7279 /* Schedule for removal. */
7280 avail.to_remove.safe_push (phi);
7281 continue;
7283 /* ??? Else generate a copy stmt. */
7287 /* Only make defs available that not already are. But make
7288 sure loop-closed SSA PHI node defs are picked up for
7289 downstream uses. */
7290 if (lc_phi_nodes
7291 || res == val
7292 || ! avail.eliminate_avail (bb, res))
7293 avail.eliminate_push_avail (bb, res);
7296 /* For empty BBs mark outgoing edges executable. For non-empty BBs
7297 we do this when processing the last stmt as we have to do this
7298 before elimination which otherwise forces GIMPLE_CONDs to
7299 if (1 != 0) style when seeing non-executable edges. */
7300 if (gsi_end_p (gsi_start_bb (bb)))
7302 FOR_EACH_EDGE (e, ei, bb->succs)
7304 if (!(e->flags & EDGE_EXECUTABLE))
7306 if (dump_file && (dump_flags & TDF_DETAILS))
7307 fprintf (dump_file,
7308 "marking outgoing edge %d -> %d executable\n",
7309 e->src->index, e->dest->index);
7310 e->flags |= EDGE_EXECUTABLE;
7311 e->dest->flags |= BB_EXECUTABLE;
7313 else if (!(e->dest->flags & BB_EXECUTABLE))
7315 if (dump_file && (dump_flags & TDF_DETAILS))
7316 fprintf (dump_file,
7317 "marking destination block %d reachable\n",
7318 e->dest->index);
7319 e->dest->flags |= BB_EXECUTABLE;
7323 for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
7324 !gsi_end_p (gsi); gsi_next (&gsi))
7326 ssa_op_iter i;
7327 tree op;
7328 if (!bb_visited)
7330 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_ALL_DEFS)
7332 vn_ssa_aux_t op_info = VN_INFO (op);
7333 gcc_assert (!op_info->visited);
7334 op_info->valnum = VN_TOP;
7335 op_info->visited = true;
7338 /* We somehow have to deal with uses that are not defined
7339 in the processed region. Forcing unvisited uses to
7340 varying here doesn't play well with def-use following during
7341 expression simplification, so we deal with this by checking
7342 the visited flag in SSA_VAL. */
7345 visit_stmt (gsi_stmt (gsi));
7347 gimple *last = gsi_stmt (gsi);
7348 e = NULL;
7349 switch (gimple_code (last))
7351 case GIMPLE_SWITCH:
7352 e = find_taken_edge (bb, vn_valueize (gimple_switch_index
7353 (as_a <gswitch *> (last))));
7354 break;
7355 case GIMPLE_COND:
7357 tree lhs = vn_valueize (gimple_cond_lhs (last));
7358 tree rhs = vn_valueize (gimple_cond_rhs (last));
7359 tree val = gimple_simplify (gimple_cond_code (last),
7360 boolean_type_node, lhs, rhs,
7361 NULL, vn_valueize);
7362 /* If the condition didn't simplfy see if we have recorded
7363 an expression from sofar taken edges. */
7364 if (! val || TREE_CODE (val) != INTEGER_CST)
7366 vn_nary_op_t vnresult;
7367 tree ops[2];
7368 ops[0] = lhs;
7369 ops[1] = rhs;
7370 val = vn_nary_op_lookup_pieces (2, gimple_cond_code (last),
7371 boolean_type_node, ops,
7372 &vnresult);
7373 /* Did we get a predicated value? */
7374 if (! val && vnresult && vnresult->predicated_values)
7376 val = vn_nary_op_get_predicated_value (vnresult, bb);
7377 if (val && dump_file && (dump_flags & TDF_DETAILS))
7379 fprintf (dump_file, "Got predicated value ");
7380 print_generic_expr (dump_file, val, TDF_NONE);
7381 fprintf (dump_file, " for ");
7382 print_gimple_stmt (dump_file, last, TDF_SLIM);
7386 if (val)
7387 e = find_taken_edge (bb, val);
7388 if (! e)
7390 /* If we didn't manage to compute the taken edge then
7391 push predicated expressions for the condition itself
7392 and related conditions to the hashtables. This allows
7393 simplification of redundant conditions which is
7394 important as early cleanup. */
7395 edge true_e, false_e;
7396 extract_true_false_edges_from_block (bb, &true_e, &false_e);
7397 enum tree_code code = gimple_cond_code (last);
7398 enum tree_code icode
7399 = invert_tree_comparison (code, HONOR_NANS (lhs));
7400 tree ops[2];
7401 ops[0] = lhs;
7402 ops[1] = rhs;
7403 if (do_region
7404 && bitmap_bit_p (exit_bbs, true_e->dest->index))
7405 true_e = NULL;
7406 if (do_region
7407 && bitmap_bit_p (exit_bbs, false_e->dest->index))
7408 false_e = NULL;
7409 if (true_e)
7410 vn_nary_op_insert_pieces_predicated
7411 (2, code, boolean_type_node, ops,
7412 boolean_true_node, 0, true_e);
7413 if (false_e)
7414 vn_nary_op_insert_pieces_predicated
7415 (2, code, boolean_type_node, ops,
7416 boolean_false_node, 0, false_e);
7417 if (icode != ERROR_MARK)
7419 if (true_e)
7420 vn_nary_op_insert_pieces_predicated
7421 (2, icode, boolean_type_node, ops,
7422 boolean_false_node, 0, true_e);
7423 if (false_e)
7424 vn_nary_op_insert_pieces_predicated
7425 (2, icode, boolean_type_node, ops,
7426 boolean_true_node, 0, false_e);
7428 /* Relax for non-integers, inverted condition handled
7429 above. */
7430 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
7432 if (true_e)
7433 insert_related_predicates_on_edge (code, ops, true_e);
7434 if (false_e)
7435 insert_related_predicates_on_edge (icode, ops, false_e);
7438 break;
7440 case GIMPLE_GOTO:
7441 e = find_taken_edge (bb, vn_valueize (gimple_goto_dest (last)));
7442 break;
7443 default:
7444 e = NULL;
7446 if (e)
7448 todo = TODO_cleanup_cfg;
7449 if (!(e->flags & EDGE_EXECUTABLE))
7451 if (dump_file && (dump_flags & TDF_DETAILS))
7452 fprintf (dump_file,
7453 "marking known outgoing %sedge %d -> %d executable\n",
7454 e->flags & EDGE_DFS_BACK ? "back-" : "",
7455 e->src->index, e->dest->index);
7456 e->flags |= EDGE_EXECUTABLE;
7457 e->dest->flags |= BB_EXECUTABLE;
7459 else if (!(e->dest->flags & BB_EXECUTABLE))
7461 if (dump_file && (dump_flags & TDF_DETAILS))
7462 fprintf (dump_file,
7463 "marking destination block %d reachable\n",
7464 e->dest->index);
7465 e->dest->flags |= BB_EXECUTABLE;
7468 else if (gsi_one_before_end_p (gsi))
7470 FOR_EACH_EDGE (e, ei, bb->succs)
7472 if (!(e->flags & EDGE_EXECUTABLE))
7474 if (dump_file && (dump_flags & TDF_DETAILS))
7475 fprintf (dump_file,
7476 "marking outgoing edge %d -> %d executable\n",
7477 e->src->index, e->dest->index);
7478 e->flags |= EDGE_EXECUTABLE;
7479 e->dest->flags |= BB_EXECUTABLE;
7481 else if (!(e->dest->flags & BB_EXECUTABLE))
7483 if (dump_file && (dump_flags & TDF_DETAILS))
7484 fprintf (dump_file,
7485 "marking destination block %d reachable\n",
7486 e->dest->index);
7487 e->dest->flags |= BB_EXECUTABLE;
7492 /* Eliminate. That also pushes to avail. */
7493 if (eliminate && ! iterate)
7494 avail.eliminate_stmt (bb, &gsi);
7495 else
7496 /* If not eliminating, make all not already available defs
7497 available. */
7498 FOR_EACH_SSA_TREE_OPERAND (op, gsi_stmt (gsi), i, SSA_OP_DEF)
7499 if (! avail.eliminate_avail (bb, op))
7500 avail.eliminate_push_avail (bb, op);
7503 /* Eliminate in destination PHI arguments. Always substitute in dest
7504 PHIs, even for non-executable edges. This handles region
7505 exits PHIs. */
7506 if (!iterate && eliminate)
7507 FOR_EACH_EDGE (e, ei, bb->succs)
7508 for (gphi_iterator gsi = gsi_start_phis (e->dest);
7509 !gsi_end_p (gsi); gsi_next (&gsi))
7511 gphi *phi = gsi.phi ();
7512 use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
7513 tree arg = USE_FROM_PTR (use_p);
7514 if (TREE_CODE (arg) != SSA_NAME
7515 || virtual_operand_p (arg))
7516 continue;
7517 tree sprime;
7518 if (SSA_NAME_IS_DEFAULT_DEF (arg))
7520 sprime = SSA_VAL (arg);
7521 gcc_assert (TREE_CODE (sprime) != SSA_NAME
7522 || SSA_NAME_IS_DEFAULT_DEF (sprime));
7524 else
7525 /* Look for sth available at the definition block of the argument.
7526 This avoids inconsistencies between availability there which
7527 decides if the stmt can be removed and availability at the
7528 use site. The SSA property ensures that things available
7529 at the definition are also available at uses. */
7530 sprime = avail.eliminate_avail (gimple_bb (SSA_NAME_DEF_STMT (arg)),
7531 arg);
7532 if (sprime
7533 && sprime != arg
7534 && may_propagate_copy (arg, sprime))
7535 propagate_value (use_p, sprime);
7538 vn_context_bb = NULL;
7539 return todo;
7542 /* Unwind state per basic-block. */
7544 struct unwind_state
7546 /* Times this block has been visited. */
7547 unsigned visited;
7548 /* Whether to handle this as iteration point or whether to treat
7549 incoming backedge PHI values as varying. */
7550 bool iterate;
7551 /* Maximum RPO index this block is reachable from. */
7552 int max_rpo;
7553 /* Unwind state. */
7554 void *ob_top;
7555 vn_reference_t ref_top;
7556 vn_phi_t phi_top;
7557 vn_nary_op_t nary_top;
7558 vn_avail *avail_top;
7561 /* Unwind the RPO VN state for iteration. */
7563 static void
7564 do_unwind (unwind_state *to, rpo_elim &avail)
7566 gcc_assert (to->iterate);
7567 for (; last_inserted_nary != to->nary_top;
7568 last_inserted_nary = last_inserted_nary->next)
7570 vn_nary_op_t *slot;
7571 slot = valid_info->nary->find_slot_with_hash
7572 (last_inserted_nary, last_inserted_nary->hashcode, NO_INSERT);
7573 /* Predication causes the need to restore previous state. */
7574 if ((*slot)->unwind_to)
7575 *slot = (*slot)->unwind_to;
7576 else
7577 valid_info->nary->clear_slot (slot);
7579 for (; last_inserted_phi != to->phi_top;
7580 last_inserted_phi = last_inserted_phi->next)
7582 vn_phi_t *slot;
7583 slot = valid_info->phis->find_slot_with_hash
7584 (last_inserted_phi, last_inserted_phi->hashcode, NO_INSERT);
7585 valid_info->phis->clear_slot (slot);
7587 for (; last_inserted_ref != to->ref_top;
7588 last_inserted_ref = last_inserted_ref->next)
7590 vn_reference_t *slot;
7591 slot = valid_info->references->find_slot_with_hash
7592 (last_inserted_ref, last_inserted_ref->hashcode, NO_INSERT);
7593 (*slot)->operands.release ();
7594 valid_info->references->clear_slot (slot);
7596 obstack_free (&vn_tables_obstack, to->ob_top);
7598 /* Prune [rpo_idx, ] from avail. */
7599 for (; last_pushed_avail && last_pushed_avail->avail != to->avail_top;)
7601 vn_ssa_aux_t val = last_pushed_avail;
7602 vn_avail *av = val->avail;
7603 val->avail = av->next;
7604 last_pushed_avail = av->next_undo;
7605 av->next = avail.m_avail_freelist;
7606 avail.m_avail_freelist = av;
7610 /* Do VN on a SEME region specified by ENTRY and EXIT_BBS in FN.
7611 If ITERATE is true then treat backedges optimistically as not
7612 executed and iterate. If ELIMINATE is true then perform
7613 elimination, otherwise leave that to the caller. */
7615 static unsigned
7616 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs,
7617 bool iterate, bool eliminate)
7619 unsigned todo = 0;
7621 /* We currently do not support region-based iteration when
7622 elimination is requested. */
7623 gcc_assert (!entry || !iterate || !eliminate);
7624 /* When iterating we need loop info up-to-date. */
7625 gcc_assert (!iterate || !loops_state_satisfies_p (LOOPS_NEED_FIXUP));
7627 bool do_region = entry != NULL;
7628 if (!do_region)
7630 entry = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fn));
7631 exit_bbs = BITMAP_ALLOC (NULL);
7632 bitmap_set_bit (exit_bbs, EXIT_BLOCK);
7635 /* Clear EDGE_DFS_BACK on "all" entry edges, RPO order compute will
7636 re-mark those that are contained in the region. */
7637 edge_iterator ei;
7638 edge e;
7639 FOR_EACH_EDGE (e, ei, entry->dest->preds)
7640 e->flags &= ~EDGE_DFS_BACK;
7642 int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS);
7643 auto_vec<std::pair<int, int> > toplevel_scc_extents;
7644 int n = rev_post_order_and_mark_dfs_back_seme
7645 (fn, entry, exit_bbs, true, rpo, !iterate ? &toplevel_scc_extents : NULL);
7647 if (!do_region)
7648 BITMAP_FREE (exit_bbs);
7650 /* If there are any non-DFS_BACK edges into entry->dest skip
7651 processing PHI nodes for that block. This supports
7652 value-numbering loop bodies w/o the actual loop. */
7653 FOR_EACH_EDGE (e, ei, entry->dest->preds)
7654 if (e != entry
7655 && !(e->flags & EDGE_DFS_BACK))
7656 break;
7657 bool skip_entry_phis = e != NULL;
7658 if (skip_entry_phis && dump_file && (dump_flags & TDF_DETAILS))
7659 fprintf (dump_file, "Region does not contain all edges into "
7660 "the entry block, skipping its PHIs.\n");
7662 int *bb_to_rpo = XNEWVEC (int, last_basic_block_for_fn (fn));
7663 for (int i = 0; i < n; ++i)
7664 bb_to_rpo[rpo[i]] = i;
7666 unwind_state *rpo_state = XNEWVEC (unwind_state, n);
7668 rpo_elim avail (entry->dest);
7669 rpo_avail = &avail;
7671 /* Verify we have no extra entries into the region. */
7672 if (flag_checking && do_region)
7674 auto_bb_flag bb_in_region (fn);
7675 for (int i = 0; i < n; ++i)
7677 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7678 bb->flags |= bb_in_region;
7680 /* We can't merge the first two loops because we cannot rely
7681 on EDGE_DFS_BACK for edges not within the region. But if
7682 we decide to always have the bb_in_region flag we can
7683 do the checking during the RPO walk itself (but then it's
7684 also easy to handle MEME conservatively). */
7685 for (int i = 0; i < n; ++i)
7687 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7688 edge e;
7689 edge_iterator ei;
7690 FOR_EACH_EDGE (e, ei, bb->preds)
7691 gcc_assert (e == entry
7692 || (skip_entry_phis && bb == entry->dest)
7693 || (e->src->flags & bb_in_region));
7695 for (int i = 0; i < n; ++i)
7697 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7698 bb->flags &= ~bb_in_region;
7702 /* Create the VN state. For the initial size of the various hashtables
7703 use a heuristic based on region size and number of SSA names. */
7704 unsigned region_size = (((unsigned HOST_WIDE_INT)n * num_ssa_names)
7705 / (n_basic_blocks_for_fn (fn) - NUM_FIXED_BLOCKS));
7706 VN_TOP = create_tmp_var_raw (void_type_node, "vn_top");
7707 next_value_id = 1;
7708 next_constant_value_id = -1;
7710 vn_ssa_aux_hash = new hash_table <vn_ssa_aux_hasher> (region_size * 2);
7711 gcc_obstack_init (&vn_ssa_aux_obstack);
7713 gcc_obstack_init (&vn_tables_obstack);
7714 gcc_obstack_init (&vn_tables_insert_obstack);
7715 valid_info = XCNEW (struct vn_tables_s);
7716 allocate_vn_table (valid_info, region_size);
7717 last_inserted_ref = NULL;
7718 last_inserted_phi = NULL;
7719 last_inserted_nary = NULL;
7720 last_pushed_avail = NULL;
7722 vn_valueize = rpo_vn_valueize;
7724 /* Initialize the unwind state and edge/BB executable state. */
7725 unsigned curr_scc = 0;
7726 for (int i = 0; i < n; ++i)
7728 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7729 rpo_state[i].visited = 0;
7730 rpo_state[i].max_rpo = i;
7731 if (!iterate && curr_scc < toplevel_scc_extents.length ())
7733 if (i >= toplevel_scc_extents[curr_scc].first
7734 && i <= toplevel_scc_extents[curr_scc].second)
7735 rpo_state[i].max_rpo = toplevel_scc_extents[curr_scc].second;
7736 if (i == toplevel_scc_extents[curr_scc].second)
7737 curr_scc++;
7739 bb->flags &= ~BB_EXECUTABLE;
7740 bool has_backedges = false;
7741 edge e;
7742 edge_iterator ei;
7743 FOR_EACH_EDGE (e, ei, bb->preds)
7745 if (e->flags & EDGE_DFS_BACK)
7746 has_backedges = true;
7747 e->flags &= ~EDGE_EXECUTABLE;
7748 if (iterate || e == entry || (skip_entry_phis && bb == entry->dest))
7749 continue;
7751 rpo_state[i].iterate = iterate && has_backedges;
7753 entry->flags |= EDGE_EXECUTABLE;
7754 entry->dest->flags |= BB_EXECUTABLE;
7756 /* As heuristic to improve compile-time we handle only the N innermost
7757 loops and the outermost one optimistically. */
7758 if (iterate)
7760 unsigned max_depth = param_rpo_vn_max_loop_depth;
7761 for (auto loop : loops_list (cfun, LI_ONLY_INNERMOST))
7762 if (loop_depth (loop) > max_depth)
7763 for (unsigned i = 2;
7764 i < loop_depth (loop) - max_depth; ++i)
7766 basic_block header = superloop_at_depth (loop, i)->header;
7767 bool non_latch_backedge = false;
7768 edge e;
7769 edge_iterator ei;
7770 FOR_EACH_EDGE (e, ei, header->preds)
7771 if (e->flags & EDGE_DFS_BACK)
7773 /* There can be a non-latch backedge into the header
7774 which is part of an outer irreducible region. We
7775 cannot avoid iterating this block then. */
7776 if (!dominated_by_p (CDI_DOMINATORS,
7777 e->src, e->dest))
7779 if (dump_file && (dump_flags & TDF_DETAILS))
7780 fprintf (dump_file, "non-latch backedge %d -> %d "
7781 "forces iteration of loop %d\n",
7782 e->src->index, e->dest->index, loop->num);
7783 non_latch_backedge = true;
7785 else
7786 e->flags |= EDGE_EXECUTABLE;
7788 rpo_state[bb_to_rpo[header->index]].iterate = non_latch_backedge;
7792 uint64_t nblk = 0;
7793 int idx = 0;
7794 if (iterate)
7795 /* Go and process all blocks, iterating as necessary. */
7798 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
7800 /* If the block has incoming backedges remember unwind state. This
7801 is required even for non-executable blocks since in irreducible
7802 regions we might reach them via the backedge and re-start iterating
7803 from there.
7804 Note we can individually mark blocks with incoming backedges to
7805 not iterate where we then handle PHIs conservatively. We do that
7806 heuristically to reduce compile-time for degenerate cases. */
7807 if (rpo_state[idx].iterate)
7809 rpo_state[idx].ob_top = obstack_alloc (&vn_tables_obstack, 0);
7810 rpo_state[idx].ref_top = last_inserted_ref;
7811 rpo_state[idx].phi_top = last_inserted_phi;
7812 rpo_state[idx].nary_top = last_inserted_nary;
7813 rpo_state[idx].avail_top
7814 = last_pushed_avail ? last_pushed_avail->avail : NULL;
7817 if (!(bb->flags & BB_EXECUTABLE))
7819 if (dump_file && (dump_flags & TDF_DETAILS))
7820 fprintf (dump_file, "Block %d: BB%d found not executable\n",
7821 idx, bb->index);
7822 idx++;
7823 continue;
7826 if (dump_file && (dump_flags & TDF_DETAILS))
7827 fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
7828 nblk++;
7829 todo |= process_bb (avail, bb,
7830 rpo_state[idx].visited != 0,
7831 rpo_state[idx].iterate,
7832 iterate, eliminate, do_region, exit_bbs, false);
7833 rpo_state[idx].visited++;
7835 /* Verify if changed values flow over executable outgoing backedges
7836 and those change destination PHI values (that's the thing we
7837 can easily verify). Reduce over all such edges to the farthest
7838 away PHI. */
7839 int iterate_to = -1;
7840 edge_iterator ei;
7841 edge e;
7842 FOR_EACH_EDGE (e, ei, bb->succs)
7843 if ((e->flags & (EDGE_DFS_BACK|EDGE_EXECUTABLE))
7844 == (EDGE_DFS_BACK|EDGE_EXECUTABLE)
7845 && rpo_state[bb_to_rpo[e->dest->index]].iterate)
7847 int destidx = bb_to_rpo[e->dest->index];
7848 if (!rpo_state[destidx].visited)
7850 if (dump_file && (dump_flags & TDF_DETAILS))
7851 fprintf (dump_file, "Unvisited destination %d\n",
7852 e->dest->index);
7853 if (iterate_to == -1 || destidx < iterate_to)
7854 iterate_to = destidx;
7855 continue;
7857 if (dump_file && (dump_flags & TDF_DETAILS))
7858 fprintf (dump_file, "Looking for changed values of backedge"
7859 " %d->%d destination PHIs\n",
7860 e->src->index, e->dest->index);
7861 vn_context_bb = e->dest;
7862 gphi_iterator gsi;
7863 for (gsi = gsi_start_phis (e->dest);
7864 !gsi_end_p (gsi); gsi_next (&gsi))
7866 bool inserted = false;
7867 /* While we'd ideally just iterate on value changes
7868 we CSE PHIs and do that even across basic-block
7869 boundaries. So even hashtable state changes can
7870 be important (which is roughly equivalent to
7871 PHI argument value changes). To not excessively
7872 iterate because of that we track whether a PHI
7873 was CSEd to with GF_PLF_1. */
7874 bool phival_changed;
7875 if ((phival_changed = visit_phi (gsi.phi (),
7876 &inserted, false))
7877 || (inserted && gimple_plf (gsi.phi (), GF_PLF_1)))
7879 if (!phival_changed
7880 && dump_file && (dump_flags & TDF_DETAILS))
7881 fprintf (dump_file, "PHI was CSEd and hashtable "
7882 "state (changed)\n");
7883 if (iterate_to == -1 || destidx < iterate_to)
7884 iterate_to = destidx;
7885 break;
7888 vn_context_bb = NULL;
7890 if (iterate_to != -1)
7892 do_unwind (&rpo_state[iterate_to], avail);
7893 idx = iterate_to;
7894 if (dump_file && (dump_flags & TDF_DETAILS))
7895 fprintf (dump_file, "Iterating to %d BB%d\n",
7896 iterate_to, rpo[iterate_to]);
7897 continue;
7900 idx++;
7902 while (idx < n);
7904 else /* !iterate */
7906 /* Process all blocks greedily with a worklist that enforces RPO
7907 processing of reachable blocks. */
7908 auto_bitmap worklist;
7909 bitmap_set_bit (worklist, 0);
7910 while (!bitmap_empty_p (worklist))
7912 int idx = bitmap_first_set_bit (worklist);
7913 bitmap_clear_bit (worklist, idx);
7914 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[idx]);
7915 gcc_assert ((bb->flags & BB_EXECUTABLE)
7916 && !rpo_state[idx].visited);
7918 if (dump_file && (dump_flags & TDF_DETAILS))
7919 fprintf (dump_file, "Processing block %d: BB%d\n", idx, bb->index);
7921 /* When we run into predecessor edges where we cannot trust its
7922 executable state mark them executable so PHI processing will
7923 be conservative.
7924 ??? Do we need to force arguments flowing over that edge
7925 to be varying or will they even always be? */
7926 edge_iterator ei;
7927 edge e;
7928 FOR_EACH_EDGE (e, ei, bb->preds)
7929 if (!(e->flags & EDGE_EXECUTABLE)
7930 && (bb == entry->dest
7931 || (!rpo_state[bb_to_rpo[e->src->index]].visited
7932 && (rpo_state[bb_to_rpo[e->src->index]].max_rpo
7933 >= (int)idx))))
7935 if (dump_file && (dump_flags & TDF_DETAILS))
7936 fprintf (dump_file, "Cannot trust state of predecessor "
7937 "edge %d -> %d, marking executable\n",
7938 e->src->index, e->dest->index);
7939 e->flags |= EDGE_EXECUTABLE;
7942 nblk++;
7943 todo |= process_bb (avail, bb, false, false, false, eliminate,
7944 do_region, exit_bbs,
7945 skip_entry_phis && bb == entry->dest);
7946 rpo_state[idx].visited++;
7948 FOR_EACH_EDGE (e, ei, bb->succs)
7949 if ((e->flags & EDGE_EXECUTABLE)
7950 && e->dest->index != EXIT_BLOCK
7951 && (!do_region || !bitmap_bit_p (exit_bbs, e->dest->index))
7952 && !rpo_state[bb_to_rpo[e->dest->index]].visited)
7953 bitmap_set_bit (worklist, bb_to_rpo[e->dest->index]);
7957 /* If statistics or dump file active. */
7958 int nex = 0;
7959 unsigned max_visited = 1;
7960 for (int i = 0; i < n; ++i)
7962 basic_block bb = BASIC_BLOCK_FOR_FN (fn, rpo[i]);
7963 if (bb->flags & BB_EXECUTABLE)
7964 nex++;
7965 statistics_histogram_event (cfun, "RPO block visited times",
7966 rpo_state[i].visited);
7967 if (rpo_state[i].visited > max_visited)
7968 max_visited = rpo_state[i].visited;
7970 unsigned nvalues = 0, navail = 0;
7971 for (hash_table<vn_ssa_aux_hasher>::iterator i = vn_ssa_aux_hash->begin ();
7972 i != vn_ssa_aux_hash->end (); ++i)
7974 nvalues++;
7975 vn_avail *av = (*i)->avail;
7976 while (av)
7978 navail++;
7979 av = av->next;
7982 statistics_counter_event (cfun, "RPO blocks", n);
7983 statistics_counter_event (cfun, "RPO blocks visited", nblk);
7984 statistics_counter_event (cfun, "RPO blocks executable", nex);
7985 statistics_histogram_event (cfun, "RPO iterations", 10*nblk / nex);
7986 statistics_histogram_event (cfun, "RPO num values", nvalues);
7987 statistics_histogram_event (cfun, "RPO num avail", navail);
7988 statistics_histogram_event (cfun, "RPO num lattice",
7989 vn_ssa_aux_hash->elements ());
7990 if (dump_file && (dump_flags & (TDF_DETAILS|TDF_STATS)))
7992 fprintf (dump_file, "RPO iteration over %d blocks visited %" PRIu64
7993 " blocks in total discovering %d executable blocks iterating "
7994 "%d.%d times, a block was visited max. %u times\n",
7995 n, nblk, nex,
7996 (int)((10*nblk / nex)/10), (int)((10*nblk / nex)%10),
7997 max_visited);
7998 fprintf (dump_file, "RPO tracked %d values available at %d locations "
7999 "and %" PRIu64 " lattice elements\n",
8000 nvalues, navail, (uint64_t) vn_ssa_aux_hash->elements ());
8003 if (eliminate)
8005 /* When !iterate we already performed elimination during the RPO
8006 walk. */
8007 if (iterate)
8009 /* Elimination for region-based VN needs to be done within the
8010 RPO walk. */
8011 gcc_assert (! do_region);
8012 /* Note we can't use avail.walk here because that gets confused
8013 by the existing availability and it will be less efficient
8014 as well. */
8015 todo |= eliminate_with_rpo_vn (NULL);
8017 else
8018 todo |= avail.eliminate_cleanup (do_region);
8021 vn_valueize = NULL;
8022 rpo_avail = NULL;
8024 XDELETEVEC (bb_to_rpo);
8025 XDELETEVEC (rpo);
8026 XDELETEVEC (rpo_state);
8028 return todo;
8031 /* Region-based entry for RPO VN. Performs value-numbering and elimination
8032 on the SEME region specified by ENTRY and EXIT_BBS. If ENTRY is not
8033 the only edge into the region at ENTRY->dest PHI nodes in ENTRY->dest
8034 are not considered. */
8036 unsigned
8037 do_rpo_vn (function *fn, edge entry, bitmap exit_bbs)
8039 default_vn_walk_kind = VN_WALKREWRITE;
8040 unsigned todo = do_rpo_vn (fn, entry, exit_bbs, false, true);
8041 free_rpo_vn ();
8042 return todo;
8046 namespace {
8048 const pass_data pass_data_fre =
8050 GIMPLE_PASS, /* type */
8051 "fre", /* name */
8052 OPTGROUP_NONE, /* optinfo_flags */
8053 TV_TREE_FRE, /* tv_id */
8054 ( PROP_cfg | PROP_ssa ), /* properties_required */
8055 0, /* properties_provided */
8056 0, /* properties_destroyed */
8057 0, /* todo_flags_start */
8058 0, /* todo_flags_finish */
8061 class pass_fre : public gimple_opt_pass
8063 public:
8064 pass_fre (gcc::context *ctxt)
8065 : gimple_opt_pass (pass_data_fre, ctxt), may_iterate (true)
8068 /* opt_pass methods: */
8069 opt_pass * clone () { return new pass_fre (m_ctxt); }
8070 void set_pass_param (unsigned int n, bool param)
8072 gcc_assert (n == 0);
8073 may_iterate = param;
8075 virtual bool gate (function *)
8077 return flag_tree_fre != 0 && (may_iterate || optimize > 1);
8079 virtual unsigned int execute (function *);
8081 private:
8082 bool may_iterate;
8083 }; // class pass_fre
8085 unsigned int
8086 pass_fre::execute (function *fun)
8088 unsigned todo = 0;
8090 /* At -O[1g] use the cheap non-iterating mode. */
8091 bool iterate_p = may_iterate && (optimize > 1);
8092 calculate_dominance_info (CDI_DOMINATORS);
8093 if (iterate_p)
8094 loop_optimizer_init (AVOID_CFG_MODIFICATIONS);
8096 default_vn_walk_kind = VN_WALKREWRITE;
8097 todo = do_rpo_vn (fun, NULL, NULL, iterate_p, true);
8098 free_rpo_vn ();
8100 if (iterate_p)
8101 loop_optimizer_finalize ();
8103 if (scev_initialized_p ())
8104 scev_reset_htab ();
8106 /* For late FRE after IVOPTs and unrolling, see if we can
8107 remove some TREE_ADDRESSABLE and rewrite stuff into SSA. */
8108 if (!may_iterate)
8109 todo |= TODO_update_address_taken;
8111 return todo;
8114 } // anon namespace
8116 gimple_opt_pass *
8117 make_pass_fre (gcc::context *ctxt)
8119 return new pass_fre (ctxt);
8122 #undef BB_EXECUTABLE