1 /* Basic block path solver.
2 Copyright (C) 2021-2022 Free Software Foundation, Inc.
3 Contributed by Aldy Hernandez <aldyh@redhat.com>.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
28 #include "value-range.h"
29 #include "gimple-range.h"
30 #include "tree-pretty-print.h"
31 #include "gimple-range-path.h"
34 #include "gimple-iterator.h"
36 // Internal construct to help facilitate debugging of solver.
37 #define DEBUG_SOLVER (dump_file && (param_threader_debug == THREADER_DEBUG_ALL))
// Construct a path solver.  RESOLVE selects the heavier resolution
// strategy; when RANGER is NULL a private gimple_ranger is allocated
// (recorded in m_alloced_ranger so the destructor knows to free it).
// NOTE(review): this extraction appears truncated -- the m_resolve
// initializer and the body braces are not visible here.
39 path_range_query::path_range_query (bool resolve
, gimple_ranger
*ranger
)
40 : m_cache (new ssa_global_cache
),
41 m_has_cache_entry (BITMAP_ALLOC (NULL
)),
43 m_alloced_ranger (!ranger
)
// Lazily allocate a ranger when the caller did not supply one.
46 m_ranger
= new gimple_ranger
;
// Wrap the ranger's oracle in a path_oracle so relations registered
// along the path stay path-local.
50 m_oracle
= new path_oracle (m_ranger
->oracle ());
// With checking enabled, verify back edges are marked; the relation
// invalidation logic depends on EDGE_DFS_BACK being accurate.
52 if (m_resolve
&& flag_checking
)
53 verify_marked_backedges (cfun
);
// Destructor: release the cache-entry bitmap.
// NOTE(review): extraction appears truncated -- freeing of m_cache and
// the conditionally-allocated ranger/oracle is not visible here.
56 path_range_query::~path_range_query ()
61 BITMAP_FREE (m_has_cache_entry
);
65 // Return TRUE if NAME is in the import bitmap.
68 path_range_query::import_p (tree name
)
// Only SSA names can be imports; membership is tracked by SSA version.
70 return (TREE_CODE (name
) == SSA_NAME
71 && bitmap_bit_p (m_imports
, SSA_NAME_VERSION (name
)));
74 // Mark cache entry for NAME as unused.
77 path_range_query::clear_cache (tree name
)
// The stale range is left in m_cache; clearing the bit in
// m_has_cache_entry is what invalidates it.
79 unsigned v
= SSA_NAME_VERSION (name
);
80 bitmap_clear_bit (m_has_cache_entry
, v
);
83 // If NAME has a cache entry, return it in R, and return TRUE.
86 path_range_query::get_cache (vrange
&r
, tree name
)
// Non-SSA operands (constants, etc.) are answered by the global query.
88 if (!gimple_range_ssa_p (name
))
89 return get_global_range_query ()->range_of_expr (r
, name
);
91 unsigned v
= SSA_NAME_VERSION (name
);
// Only trust m_cache when the "has entry" bit is set for this version.
92 if (bitmap_bit_p (m_has_cache_entry
, v
))
93 return m_cache
->get_global_range (r
, name
);
// NOTE(review): the FALSE return for a cache miss is not visible in
// this extraction.
98 // Set the cache entry for NAME to R.
101 path_range_query::set_cache (const vrange
&r
, tree name
)
// Mark the entry live first, then store the range in the cache.
103 unsigned v
= SSA_NAME_VERSION (name
);
104 bitmap_set_bit (m_has_cache_entry
, v
);
105 m_cache
->set_global_range (name
, r
);
// Dump the current path, its imports, and the cache to DUMP_FILE.
109 path_range_query::dump (FILE *dump_file
)
// Temporarily redirect dump output, stripping TDF_DETAILS for brevity;
// push_dump_file restores the previous state on scope exit.
111 push_dump_file
save (dump_file
, dump_flags
& ~TDF_DETAILS
);
// Nothing to dump before a path has been set.
113 if (m_path
.is_empty ())
119 dump_ranger (dump_file
, m_path
);
121 fprintf (dump_file
, "Imports:\n");
// Print each import by SSA version.
122 EXECUTE_IF_SET_IN_BITMAP (m_imports
, 0, i
, bi
)
124 tree name
= ssa_name (i
);
125 print_generic_expr (dump_file
, name
, TDF_SLIM
);
126 fprintf (dump_file
, "\n");
129 m_cache
->dump (dump_file
);
// Debugger convenience entry point.
// NOTE(review): the body is not visible in this extraction.
133 path_range_query::debug ()
138 // Return TRUE if NAME is defined outside the current path.
141 path_range_query::defined_outside_path (tree name
)
143 gimple
*def
= SSA_NAME_DEF_STMT (name
);
144 basic_block bb
= gimple_bb (def
);
// A NULL defining block (e.g. a default definition) also counts as
// outside the path.
146 return !bb
|| !m_path
.contains (bb
);
149 // Return the range of NAME on entry to the path.
152 path_range_query::range_on_path_entry (vrange
&r
, tree name
)
// Callers must only ask about names defined outside the path.
154 gcc_checking_assert (defined_outside_path (name
));
155 basic_block entry
= entry_bb ();
157 // Prefer to use range_of_expr if we have a statement to look at,
158 // since it has better caching than range_on_edge.
159 gimple
*last
= last_stmt (entry
);
// NOTE(review): a guard for LAST being non-NULL and the early return
// are not visible here; the extraction appears truncated.
162 if (m_ranger
->range_of_expr (r
, name
, last
))
167 // If we have no statement, look at all the incoming ranges to the
168 // block. This can happen when we're querying a block with only an
169 // outgoing edge (no statement but the fall through edge), but for
170 // which we can determine a range on entry to the block.
171 Value_Range
tmp (TREE_TYPE (name
));
172 bool changed
= false;
174 for (unsigned i
= 0; i
< EDGE_COUNT (entry
->preds
); ++i
)
176 edge e
= EDGE_PRED (entry
, i
);
// Skip the edge from the function's ENTRY block; otherwise union in
// each predecessor's edge range (union not visible -- truncated).
177 if (e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)
178 && m_ranger
->range_on_edge (tmp
, e
, name
))
185 // Make sure we don't return UNDEFINED by mistake.
187 r
.set_varying (TREE_TYPE (name
));
190 // Return the range of NAME at the end of the path being analyzed.
193 path_range_query::internal_range_of_expr (vrange
&r
, tree name
, gimple
*stmt
)
// Types unsupported by the range machinery cannot be answered.
195 if (!r
.supports_type_p (TREE_TYPE (name
)))
// A cached value takes priority over recomputation.
198 if (get_cache (r
, name
))
// Names defined outside the path resolve to their range on path entry.
201 if (m_resolve
&& defined_outside_path (name
))
203 range_on_path_entry (r
, name
);
// NOTE(review): the left operand of this && (and surrounding control
// flow) is not visible here; the extraction appears truncated.
209 && range_defined_in_block (r
, name
, gimple_bb (stmt
)))
// Otherwise fall back to the globally-known range for the name.
211 if (TREE_CODE (name
) == SSA_NAME
)
213 Value_Range
glob (TREE_TYPE (name
));
214 gimple_range_global (glob
, name
);
222 gimple_range_global (r
, name
);
// Public range query: resolve NAME at STMT along the current path.
227 path_range_query::range_of_expr (vrange
&r
, tree name
, gimple
*stmt
)
229 if (internal_range_of_expr (r
, name
, stmt
))
// An UNDEFINED result means this combination of ranges cannot happen,
// i.e. the path is unreachable; latch that for unreachable_path_p.
231 if (r
.undefined_p ())
232 m_undefined_path
= true;
// Return TRUE if a prior query along this path produced UNDEFINED,
// meaning the path is unreachable.
240 path_range_query::unreachable_path_p ()
242 return m_undefined_path
;
245 // Initialize the current path to PATH. The current block is set to
246 // the entry block to the path.
248 // Note that the blocks are in reverse order, so the exit block is
252 path_range_query::set_path (const vec
<basic_block
> &path
)
// A path of one block would have nothing to solve.
254 gcc_checking_assert (path
.length () > 1);
255 m_path
= path
.copy ();
// Blocks are stored in reverse order: the entry block is at the end,
// so start the cursor there.
256 m_pos
= m_path
.length () - 1;
// Invalidate all cache entries left over from any previous path.
257 bitmap_clear (m_has_cache_entry
);
// Return TRUE if NAME is an SSA name whose defining statement is in BB.
261 path_range_query::ssa_defined_in_bb (tree name
, basic_block bb
)
263 return (TREE_CODE (name
) == SSA_NAME
264 && SSA_NAME_DEF_STMT (name
)
265 && gimple_bb (SSA_NAME_DEF_STMT (name
)) == bb
);
268 // Return the range of the result of PHI in R.
270 // Since PHIs are calculated in parallel at the beginning of the
271 // block, we must be careful to never save anything to the cache here.
272 // It is the caller's responsibility to adjust the cache. Also,
273 // calculating the PHI's range must not trigger additional lookups.
276 path_range_query::ssa_range_in_phi (vrange
&r
, gphi
*phi
)
278 tree name
= gimple_phi_result (phi
);
// When resolving, first let the ranger fold the PHI with no context.
282 if (m_resolve
&& m_ranger
->range_of_expr (r
, name
, phi
))
285 // Try to fold the phi exclusively with global or cached values.
286 // This will get things like PHI <5(99), 6(88)>. We do this by
287 // calling range_of_expr with no context.
288 unsigned nargs
= gimple_phi_num_args (phi
);
289 Value_Range
arg_range (TREE_TYPE (name
));
// Union the context-free ranges of all PHI arguments into R.
291 for (size_t i
= 0; i
< nargs
; ++i
)
293 tree arg
= gimple_phi_arg_def (phi
, i
);
294 if (range_of_expr (arg_range
, arg
, /*stmt=*/NULL
))
295 r
.union_ (arg_range
);
// NOTE(review): the initialization of R before the union loop and the
// failure path are not visible here; the extraction appears truncated.
298 r
.set_varying (TREE_TYPE (name
));
// Past the path entry block: pick the PHI argument flowing in on the
// edge from the previous block in the path.
305 basic_block bb
= gimple_bb (phi
);
306 basic_block prev
= prev_bb ();
307 edge e_in
= find_edge (prev
, bb
);
308 tree arg
= PHI_ARG_DEF_FROM_EDGE (phi
, e_in
);
309 // Avoid using the cache for ARGs defined in this block, as
310 // that could create an ordering problem.
311 if (ssa_defined_in_bb (arg
, bb
) || !get_cache (r
, arg
))
315 Value_Range
tmp (TREE_TYPE (name
));
316 // Using both the range on entry to the path, and the
317 // range on this edge yields significantly better
319 if (TREE_CODE (arg
) == SSA_NAME
320 && defined_outside_path (arg
))
321 range_on_path_entry (r
, arg
);
323 r
.set_varying (TREE_TYPE (name
));
// Refine with the range implied by the incoming edge itself.
324 m_ranger
->range_on_edge (tmp
, e_in
, arg
);
328 r
.set_varying (TREE_TYPE (name
));
332 // If NAME is defined in BB, set R to the range of NAME, and return
333 // TRUE. Otherwise, return FALSE.
336 path_range_query::range_defined_in_block (vrange
&r
, tree name
, basic_block bb
)
338 gimple
*def_stmt
= SSA_NAME_DEF_STMT (name
);
339 basic_block def_bb
= gimple_bb (def_stmt
);
// A cached range short-circuits any recomputation.
344 if (get_cache (r
, name
))
// PHIs are handled specially: ssa_range_in_phi must not touch the cache.
347 if (gimple_code (def_stmt
) == GIMPLE_PHI
)
348 ssa_range_in_phi (r
, as_a
<gphi
*> (def_stmt
));
// Record a killing definition so relations involving the old NAME
// do not leak past this definition.
352 get_path_oracle ()->killing_def (name
);
354 if (!range_of_stmt (r
, def_stmt
, name
))
355 r
.set_varying (TREE_TYPE (name
));
// Pointers can be refined with non-null knowledge on exit from BB.
358 if (bb
&& POINTER_TYPE_P (TREE_TYPE (name
)))
359 m_ranger
->m_cache
.m_exit
.maybe_adjust_range (r
, name
, bb
);
// Trace only interesting results: a real BB or a non-varying range.
361 if (DEBUG_SOLVER
&& (bb
|| !r
.varying_p ()))
363 fprintf (dump_file
, "range_defined_in_block (BB%d) for ", bb
? bb
->index
: -1);
364 print_generic_expr (dump_file
, name
, TDF_SLIM
);
365 fprintf (dump_file
, " is ");
367 fprintf (dump_file
, "\n");
373 // Compute ranges defined in the PHIs in this block.
376 path_range_query::compute_ranges_in_phis (basic_block bb
)
380 // PHIs must be resolved simultaneously on entry to the block
381 // because any dependencies must be satisfied with values on entry.
382 // Thus, we calculate all PHIs first, and then update the cache at
385 for (auto iter
= gsi_start_phis (bb
); !gsi_end_p (iter
); gsi_next (&iter
))
387 gphi
*phi
= iter
.phi ()
// NOTE(review): the stray formatting above follows the original
// extraction; statement boundaries here may be garbled.
;
388 tree name
= gimple_phi_result (phi
);
// Only imports are of interest to the solver.
390 if (!import_p (name
))
393 Value_Range
r (TREE_TYPE (name
));
394 if (range_defined_in_block (r
, name
, bb
))
396 unsigned v
= SSA_NAME_VERSION (name
);
398 bitmap_set_bit (phi_set
, v
);
399 // Pretend we don't have a cache entry for this name until
400 // we're done with all PHIs.
401 bitmap_clear_bit (m_has_cache_entry
, v
);
// All PHIs computed: flip their cache entries live in one step.
404 bitmap_ior_into (m_has_cache_entry
, phi_set
);
407 // Return TRUE if relations may be invalidated after crossing edge E.
// Relies on back edges being marked with EDGE_DFS_BACK (checked via
// verify_marked_backedges in the constructor when flag_checking).
410 path_range_query::relations_may_be_invalidated (edge e
)
412 // As soon as the path crosses a back edge, we can encounter
413 // definitions of SSA_NAMEs that may have had a use in the path
414 // already, so this will then be a new definition. The relation
415 // code is all designed around seeing things in dominator order, and
416 // crossing a back edge in the path violates this assumption.
417 return (e
->flags
& EDGE_DFS_BACK
);
420 // Compute ranges defined in the current block, or exported to the
424 path_range_query::compute_ranges_in_block (basic_block bb
)
// Register PHI-induced relations on blocks past the path entry.
429 if (m_resolve
&& !at_entry ())
430 compute_phi_relations (bb
, prev_bb ());
432 // Force recalculation of any names in the cache that are defined in
433 // this block. This can happen on interdependent SSA/phis in loops.
434 EXECUTE_IF_SET_IN_BITMAP (m_imports
, 0, i
, bi
)
436 tree name
= ssa_name (i
);
437 if (ssa_defined_in_bb (name
, bb
))
441 // Solve imports defined in this block, starting with the PHIs...
442 compute_ranges_in_phis (bb
);
443 // ...and then the rest of the imports.
444 EXECUTE_IF_SET_IN_BITMAP (m_imports
, 0, i
, bi
)
446 tree name
= ssa_name (i
);
447 Value_Range
r (TREE_TYPE (name
));
// PHIs were handled above; only non-PHI definitions are solved here.
449 if (gimple_code (SSA_NAME_DEF_STMT (name
)) != GIMPLE_PHI
450 && range_defined_in_block (r
, name
, bb
))
457 // Solve imports that are exported to the next block.
458 basic_block next
= next_bb ();
459 edge e
= find_edge (bb
, next
);
// Crossing a back edge invalidates the relation machinery; drop the
// root oracle for the remainder of the path.
461 if (m_resolve
&& relations_may_be_invalidated (e
))
465 "Resetting relations as they may be invalidated in %d->%d.\n",
466 e
->src
->index
, e
->dest
->index
);
468 path_oracle
*p
= get_path_oracle ();
470 // ?? Instead of nuking the root oracle altogether, we could
471 // reset the path oracle to search for relations from the top of
472 // the loop with the root oracle. Something for future development.
473 p
->set_root_oracle (nullptr);
// For names that are both imports and GORI exports of BB, refine the
// cached range with what the outgoing edge implies.
476 gori_compute
&g
= m_ranger
->gori ();
477 bitmap exports
= g
.exports (bb
);
478 EXECUTE_IF_AND_IN_BITMAP (m_imports
, exports
, 0, i
, bi
)
480 tree name
= ssa_name (i
);
481 Value_Range
r (TREE_TYPE (name
));
482 if (g
.outgoing_edge_range_p (r
, e
, name
, *this))
484 Value_Range
cached_range (TREE_TYPE (name
));
// Intersect the edge range with any previously cached range.
485 if (get_cache (cached_range
, name
))
486 r
.intersect (cached_range
);
// Debug trace of the outgoing-edge refinement.
491 fprintf (dump_file
, "outgoing_edge_range_p for ");
492 print_generic_expr (dump_file
, name
, TDF_SLIM
);
493 fprintf (dump_file
, " on edge %d->%d ",
494 e
->src
->index
, e
->dest
->index
);
495 fprintf (dump_file
, "is ");
497 fprintf (dump_file
, "\n");
// Finally, register relations implied by the conditional exit to NEXT.
503 compute_outgoing_relations (bb
, next
);
506 // Adjust all pointer imports in BB with non-null information.
509 path_range_query::adjust_for_non_null_uses (basic_block bb
)
515 EXECUTE_IF_SET_IN_BITMAP (m_imports
, 0, i
, bi
)
517 tree name
= ssa_name (i
);
// Non-null adjustment only applies to pointers.
519 if (!POINTER_TYPE_P (TREE_TYPE (name
)))
// Start from the cached range if there is one...
522 if (get_cache (r
, name
))
// ...otherwise start from VARYING.
528 r
.set_varying (TREE_TYPE (name
));
// If a dereference in BB proves the pointer non-null, refine R
// (cache update not visible in this extraction).
530 if (m_ranger
->m_cache
.m_exit
.maybe_adjust_range (r
, name
, bb
))
535 // If NAME is a supported SSA_NAME, add it the bitmap in IMPORTS.
538 path_range_query::add_to_imports (tree name
, bitmap imports
)
// Only SSA names whose type the range machinery supports are tracked;
// bitmap_set_bit returns TRUE only if the bit was newly set.
540 if (TREE_CODE (name
) == SSA_NAME
541 && Value_Range::supports_type_p (TREE_TYPE (name
)))
542 return bitmap_set_bit (imports
, SSA_NAME_VERSION (name
));
546 // Compute the imports to PATH. These are
547 // essentially the SSA names used to calculate the final conditional
550 // They are hints for the solver. Adding more elements doesn't slow
551 // us down, because we don't solve anything that doesn't appear in the
552 // path. On the other hand, not having enough imports will limit what
556 path_range_query::compute_imports (bitmap imports
, const vec
<basic_block
> &path
)
558 // Start with the imports from the exit block...
559 basic_block exit
= path
[0];
560 gori_compute
&gori
= m_ranger
->gori ();
561 bitmap r_imports
= gori
.imports (exit
);
562 bitmap_copy (imports
, r_imports
);
// Seed the worklist with the initial import set.
564 auto_vec
<tree
> worklist (bitmap_count_bits (imports
));
567 EXECUTE_IF_SET_IN_BITMAP (imports
, 0, i
, bi
)
569 tree name
= ssa_name (i
);
570 worklist
.quick_push (name
);
573 // ...and add any operands used to define these imports.
574 while (!worklist
.is_empty ())
576 tree name
= worklist
.pop ();
577 gimple
*def_stmt
= SSA_NAME_DEF_STMT (name
);
// Definitions outside the path contribute no further operands.
578 if (SSA_NAME_IS_DEFAULT_DEF (name
)
579 || !path
.contains (gimple_bb (def_stmt
)))
// For a PHI, pull in arguments flowing along edges inside the path.
582 if (gphi
*phi
= dyn_cast
<gphi
*> (def_stmt
))
584 for (size_t i
= 0; i
< gimple_phi_num_args (phi
); ++i
)
586 edge e
= gimple_phi_arg_edge (phi
, i
);
587 tree arg
= gimple_phi_arg (phi
, i
)->def
;
// Only newly-seen SSA args from in-path predecessors are queued.
589 if (TREE_CODE (arg
) == SSA_NAME
590 && path
.contains (e
->src
)
591 && bitmap_set_bit (imports
, SSA_NAME_VERSION (arg
)))
592 worklist
.safe_push (arg
);
// For an assignment, collect the SSA operands of its RHS.
595 else if (gassign
*ass
= dyn_cast
<gassign
*> (def_stmt
))
// Range-op-handled statements have up to two range operands.
598 if (range_op_handler (ass
))
600 ssa
[0] = gimple_range_ssa_p (gimple_range_operand1 (ass
));
601 ssa
[1] = gimple_range_ssa_p (gimple_range_operand2 (ass
));
// COND_EXPR has three operands of interest.
604 else if (gimple_assign_rhs_code (ass
) == COND_EXPR
)
606 ssa
[0] = gimple_range_ssa_p (gimple_assign_rhs1 (ass
));
607 ssa
[1] = gimple_range_ssa_p (gimple_assign_rhs2 (ass
));
608 ssa
[2] = gimple_range_ssa_p (gimple_assign_rhs3 (ass
));
// Queue each collected operand that is new to the import set.
// NOTE(review): the declaration of `ssa` and the binding of `rhs`
// are not visible here; the extraction appears truncated.
612 for (unsigned j
= 0; j
< 3; ++j
)
615 if (rhs
&& add_to_imports (rhs
, imports
))
616 worklist
.safe_push (rhs
);
620 // Exported booleans along the path, may help conditionals.
622 for (i
= 0; i
< path
.length (); ++i
)
624 basic_block bb
= path
[i
];
626 FOR_EACH_GORI_EXPORT_NAME (gori
, bb
, name
)
627 if (TREE_CODE (TREE_TYPE (name
)) == BOOLEAN_TYPE
)
628 bitmap_set_bit (imports
, SSA_NAME_VERSION (name
));
632 // Compute the ranges for IMPORTS along PATH.
634 // IMPORTS are the set of SSA names, any of which could potentially
635 // change the value of the final conditional in PATH. Default to the
636 // imports of the last block in the path if none is given.
639 path_range_query::compute_ranges (const vec
<basic_block
> &path
,
640 const bitmap_head
*imports
)
643 fprintf (dump_file
, "\n==============================================\n");
// Reset unreachability state from any previous path.
646 m_undefined_path
= false;
// Use the caller-provided imports if given...
649 bitmap_copy (m_imports
, imports
);
// ...otherwise compute them from the path itself.
651 compute_imports (m_imports
, m_path
);
// Clear path-local relations from the previous computation.
654 get_path_oracle ()->reset_path ();
// Debug dump: the path is stored reversed, so walk it back to front.
658 fprintf (dump_file
, "path_range_query: compute_ranges for path: ");
659 for (unsigned i
= path
.length (); i
> 0; --i
)
661 basic_block bb
= path
[i
- 1];
662 fprintf (dump_file
, "%d", bb
->index
);
664 fprintf (dump_file
, "->");
666 fprintf (dump_file
, "\n");
// Walk the path, solving each block and applying non-null refinement.
// NOTE(review): the loop construct enclosing these calls is not
// visible here; the extraction appears truncated.
671 basic_block bb
= curr_bb ();
673 compute_ranges_in_block (bb
);
674 adjust_for_non_null_uses (bb
);
684 get_path_oracle ()->dump (dump_file
);
689 // Convenience function to compute ranges along a path consisting of
690 // E->SRC and E->DEST.
693 path_range_query::compute_ranges (edge e
)
// Paths are stored in reverse order, so push the destination first.
695 auto_vec
<basic_block
> bbs (2);
696 bbs
.quick_push (e
->dest
);
697 bbs
.quick_push (e
->src
);
698 compute_ranges (bbs
);
701 // A folding aid used to register and query relations along a path.
702 // When queried, it returns relations as they would appear on exit to
705 // Relations are registered on entry so the path_oracle knows which
706 // block to query the root oracle at when a relation lies outside the
707 // path. However, when queried we return the relation on exit to the
708 // path, since the root_oracle ignores the registered.
710 class jt_fur_source
: public fur_depend
// NOTE(review): access specifiers and member declarations (e.g.
// m_entry) are not visible here; the extraction appears truncated.
713 jt_fur_source (gimple
*s
, path_range_query
*, gori_compute
*,
714 const vec
<basic_block
> &);
715 relation_kind
query_relation (tree op1
, tree op2
) override
;
716 void register_relation (gimple
*, relation_kind
, tree op1
, tree op2
) override
;
717 void register_relation (edge
, relation_kind
, tree op1
, tree op2
) override
;
// Construct the folding aid for statement S over PATH, delegating
// range lookups to QUERY via the fur_depend base.
722 jt_fur_source::jt_fur_source (gimple
*s
,
723 path_range_query
*query
,
725 const vec
<basic_block
> &path
)
726 : fur_depend (s
, gori
, query
)
728 gcc_checking_assert (!path
.is_empty ());
// Relations are registered at the path entry block (last element,
// since the path is stored in reverse).
730 m_entry
= path
[path
.length () - 1];
// Use the query's oracle only when dominators are available.
732 if (dom_info_available_p (CDI_DOMINATORS
))
733 m_oracle
= query
->oracle ();
738 // Ignore statement and register relation on entry to path.
741 jt_fur_source::register_relation (gimple
*, relation_kind k
, tree op1
, tree op2
)
744 m_oracle
->register_relation (m_entry
, k
, op1
, op2
);
747 // Ignore edge and register relation on entry to path.
750 jt_fur_source::register_relation (edge
, relation_kind k
, tree op1
, tree op2
)
753 m_oracle
->register_relation (m_entry
, k
, op1
, op2
);
// Query the relation between OP1 and OP2 as seen at the path entry.
757 jt_fur_source::query_relation (tree op1
, tree op2
)
// Only SSA name pairs can have a recorded relation.
762 if (TREE_CODE (op1
) != SSA_NAME
|| TREE_CODE (op2
) != SSA_NAME
)
765 return m_oracle
->query_relation (m_entry
, op1
, op2
);
768 // Return the range of STMT at the end of the path being analyzed.
771 path_range_query::range_of_stmt (vrange
&r
, gimple
*stmt
, tree
)
773 tree type
= gimple_range_type (stmt
);
// Statements with no rangeable type cannot be folded.
775 if (!type
|| !r
.supports_type_p (type
))
778 // If resolving unknowns, fold the statement making use of any
779 // relations along the path.
// NOTE(review): the m_resolve branch and the declaration of the
// folder `f` are not visible here; the extraction appears truncated.
783 jt_fur_source
src (stmt
, this, &m_ranger
->gori (), m_path
);
784 if (!f
.fold_stmt (r
, stmt
, src
))
785 r
.set_varying (type
);
787 // Otherwise, fold without relations.
788 else if (!fold_range (r
, stmt
, this))
789 r
.set_varying (type
);
794 // If possible, register the relation on the incoming edge E into PHI.
797 path_range_query::maybe_register_phi_relation (gphi
*phi
, edge e
)
799 tree arg
= gimple_phi_arg_def (phi
, e
->dest_idx
);
// Only SSA arguments can participate in a relation.
801 if (!gimple_range_ssa_p (arg
))
// Back edges invalidate the dominator-order assumption; bail out.
804 if (relations_may_be_invalidated (e
))
807 basic_block bb
= gimple_bb (phi
);
808 tree result
= gimple_phi_result (phi
);
810 // Avoid recording the equivalence if the arg is defined in this
811 // block, as that could create an ordering problem.
812 if (ssa_defined_in_bb (arg
, bb
))
815 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
816 fprintf (dump_file
, "maybe_register_phi_relation in bb%d:", bb
->index
);
// Kill any stale relations on the result, then record result == arg
// at the path entry block.
818 get_path_oracle ()->killing_def (result
);
819 m_oracle
->register_relation (entry_bb (), VREL_EQ
, arg
, result
);
822 // Compute relations for each PHI in BB. For example:
824 // x_5 = PHI<y_9(5),...>
826 // If the path flows through BB5, we can register that x_5 == y_9.
829 path_range_query::compute_phi_relations (basic_block bb
, basic_block prev
)
// The edge from the previous path block selects which PHI argument
// is live along this path.
834 edge e_in
= find_edge (prev
, bb
)
;
836 for (gphi_iterator iter
= gsi_start_phis (bb
); !gsi_end_p (iter
);
839 gphi
*phi
= iter
.phi ();
840 tree result
= gimple_phi_result (phi
);
841 unsigned nargs
= gimple_phi_num_args (phi
);
// Only PHI results that are imports matter to the solver.
843 if (!import_p (result
))
// Find the argument on E_IN and try to register the equivalence.
846 for (size_t i
= 0; i
< nargs
; ++i
)
847 if (e_in
== gimple_phi_arg_edge (phi
, i
))
849 maybe_register_phi_relation (phi
, e_in
);
855 // Compute outgoing relations from BB to NEXT.
858 path_range_query::compute_outgoing_relations (basic_block bb
, basic_block next
)
860 if (gcond
*cond
= safe_dyn_cast
<gcond
*> (last_stmt (bb
)))
863 edge e0
= EDGE_SUCC (bb
, 0);
864 edge e1
= EDGE_SUCC (bb
, 1);
866 if (e0
->dest
== next
)
867 gcond_edge_range (r
, e0
);
868 else if (e1
->dest
== next
)
869 gcond_edge_range (r
, e1
);
873 jt_fur_source
src (NULL
, this, &m_ranger
->gori (), m_path
);
874 src
.register_outgoing_edges (cond
, r
, e0
, e1
);