/* Basic block path solver.
   Copyright (C) 2021 Free Software Foundation, Inc.
   Contributed by Aldy Hernandez <aldyh@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
23 #include "coretypes.h"
28 #include "value-range.h"
29 #include "gimple-range.h"
30 #include "tree-pretty-print.h"
31 #include "gimple-range-path.h"
34 #include "gimple-iterator.h"
36 // Internal construct to help facilitate debugging of solver.
37 #define DEBUG_SOLVER (dump_file && dump_flags & TDF_THREADING)
39 path_range_query::path_range_query (gimple_ranger
&ranger
, bool resolve
)
42 m_cache
= new ssa_global_cache
;
43 m_has_cache_entry
= BITMAP_ALLOC (NULL
);
46 m_oracle
= new path_oracle (ranger
.oracle ());
49 path_range_query::~path_range_query ()
51 BITMAP_FREE (m_has_cache_entry
);
56 // Mark cache entry for NAME as unused.
59 path_range_query::clear_cache (tree name
)
61 unsigned v
= SSA_NAME_VERSION (name
);
62 bitmap_clear_bit (m_has_cache_entry
, v
);
65 // If NAME has a cache entry, return it in R, and return TRUE.
68 path_range_query::get_cache (irange
&r
, tree name
)
70 if (!gimple_range_ssa_p (name
))
71 return get_global_range_query ()->range_of_expr (r
, name
);
73 unsigned v
= SSA_NAME_VERSION (name
);
74 if (bitmap_bit_p (m_has_cache_entry
, v
))
75 return m_cache
->get_global_range (r
, name
);
80 // Set the cache entry for NAME to R.
83 path_range_query::set_cache (const irange
&r
, tree name
)
85 unsigned v
= SSA_NAME_VERSION (name
);
86 bitmap_set_bit (m_has_cache_entry
, v
);
87 m_cache
->set_global_range (name
, r
);
91 path_range_query::dump (FILE *dump_file
)
93 push_dump_file
save (dump_file
, dump_flags
& ~TDF_DETAILS
);
95 if (m_path
->is_empty ())
101 fprintf (dump_file
, "\nPath is (length=%d):\n", m_path
->length ());
102 dump_ranger (dump_file
, *m_path
);
104 fprintf (dump_file
, "Imports:\n");
105 EXECUTE_IF_SET_IN_BITMAP (m_imports
, 0, i
, bi
)
107 tree name
= ssa_name (i
);
108 print_generic_expr (dump_file
, name
, TDF_SLIM
);
109 fprintf (dump_file
, "\n");
112 m_cache
->dump (dump_file
);
116 path_range_query::debug ()
121 // Return TRUE if NAME is defined outside the current path.
124 path_range_query::defined_outside_path (tree name
)
126 gimple
*def
= SSA_NAME_DEF_STMT (name
);
127 basic_block bb
= gimple_bb (def
);
129 return !bb
|| !m_path
->contains (bb
);
132 // Return the range of NAME on entry to the path.
135 path_range_query::range_on_path_entry (irange
&r
, tree name
)
137 gcc_checking_assert (defined_outside_path (name
));
139 basic_block entry
= entry_bb ();
140 bool changed
= false;
143 for (unsigned i
= 0; i
< EDGE_COUNT (entry
->preds
); ++i
)
145 edge e
= EDGE_PRED (entry
, i
);
146 if (e
->src
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)
147 && m_ranger
.range_on_edge (tmp
, e
, name
))
154 // Make sure we don't return UNDEFINED by mistake.
156 r
.set_varying (TREE_TYPE (name
));
159 // Return the range of NAME at the end of the path being analyzed.
162 path_range_query::internal_range_of_expr (irange
&r
, tree name
, gimple
*stmt
)
164 if (!irange::supports_type_p (TREE_TYPE (name
)))
167 if (get_cache (r
, name
))
170 if (m_resolve
&& defined_outside_path (name
))
172 range_on_path_entry (r
, name
);
177 basic_block bb
= stmt
? gimple_bb (stmt
) : exit_bb ();
178 if (stmt
&& range_defined_in_block (r
, name
, bb
))
180 if (TREE_CODE (name
) == SSA_NAME
)
181 r
.intersect (gimple_range_global (name
));
187 r
.set_varying (TREE_TYPE (name
));
192 path_range_query::range_of_expr (irange
&r
, tree name
, gimple
*stmt
)
194 if (internal_range_of_expr (r
, name
, stmt
))
196 if (r
.undefined_p ())
197 m_undefined_path
= true;
205 path_range_query::unreachable_path_p ()
207 return m_undefined_path
;
210 // Initialize the current path to PATH. The current block is set to
211 // the entry block to the path.
213 // Note that the blocks are in reverse order, so the exit block is
217 path_range_query::set_path (const vec
<basic_block
> &path
)
219 gcc_checking_assert (path
.length () > 1);
221 m_pos
= m_path
->length () - 1;
222 bitmap_clear (m_has_cache_entry
);
225 // Return the range of the result of PHI in R.
228 path_range_query::ssa_range_in_phi (irange
&r
, gphi
*phi
)
230 tree name
= gimple_phi_result (phi
);
231 basic_block bb
= gimple_bb (phi
);
235 if (m_resolve
&& m_ranger
.range_of_expr (r
, name
, phi
))
238 // Try fold just in case we can resolve simple things like PHI <5(99), 6(88)>.
239 if (!fold_range (r
, phi
, this))
240 r
.set_varying (TREE_TYPE (name
));
245 basic_block prev
= prev_bb ();
246 edge e_in
= find_edge (prev
, bb
);
247 unsigned nargs
= gimple_phi_num_args (phi
);
249 for (size_t i
= 0; i
< nargs
; ++i
)
250 if (e_in
== gimple_phi_arg_edge (phi
, i
))
252 tree arg
= gimple_phi_arg_def (phi
, i
);
254 if (!get_cache (r
, arg
))
259 // Using both the range on entry to the path, and the
260 // range on this edge yields significantly better
262 if (defined_outside_path (arg
))
263 range_on_path_entry (r
, arg
);
265 r
.set_varying (TREE_TYPE (name
));
266 m_ranger
.range_on_edge (tmp
, e_in
, arg
);
270 r
.set_varying (TREE_TYPE (name
));
277 // If NAME is defined in BB, set R to the range of NAME, and return
278 // TRUE. Otherwise, return FALSE.
281 path_range_query::range_defined_in_block (irange
&r
, tree name
, basic_block bb
)
283 gimple
*def_stmt
= SSA_NAME_DEF_STMT (name
);
284 basic_block def_bb
= gimple_bb (def_stmt
);
289 if (gimple_code (def_stmt
) == GIMPLE_PHI
)
290 ssa_range_in_phi (r
, as_a
<gphi
*> (def_stmt
));
291 else if (!range_of_stmt (r
, def_stmt
, name
))
292 r
.set_varying (TREE_TYPE (name
));
295 m_non_null
.adjust_range (r
, name
, bb
);
297 if (DEBUG_SOLVER
&& (bb
|| !r
.varying_p ()))
299 fprintf (dump_file
, "range_defined_in_block (BB%d) for ", bb
? bb
->index
: -1);
300 print_generic_expr (dump_file
, name
, TDF_SLIM
);
301 fprintf (dump_file
, " is ");
303 fprintf (dump_file
, "\n");
309 // Compute ranges defined in the current block, or exported to the
313 path_range_query::compute_ranges_in_block (basic_block bb
)
316 int_range_max r
, cached_range
;
319 // Force recalculation of any names in the cache that are defined in
320 // this block. This can happen on interdependent SSA/phis in loops.
321 EXECUTE_IF_SET_IN_BITMAP (m_imports
, 0, i
, bi
)
323 tree name
= ssa_name (i
);
324 gimple
*def_stmt
= SSA_NAME_DEF_STMT (name
);
325 basic_block def_bb
= gimple_bb (def_stmt
);
331 // Solve imports defined in this block.
332 EXECUTE_IF_SET_IN_BITMAP (m_imports
, 0, i
, bi
)
334 tree name
= ssa_name (i
);
336 if (range_defined_in_block (r
, name
, bb
))
343 // Solve imports that are exported to the next block.
344 edge e
= find_edge (bb
, next_bb ());
345 EXECUTE_IF_SET_IN_BITMAP (m_imports
, 0, i
, bi
)
347 tree name
= ssa_name (i
);
348 gori_compute
&g
= m_ranger
.gori ();
349 bitmap exports
= g
.exports (bb
);
351 if (bitmap_bit_p (exports
, i
))
353 if (g
.outgoing_edge_range_p (r
, e
, name
, *this))
355 if (get_cache (cached_range
, name
))
356 r
.intersect (cached_range
);
361 fprintf (dump_file
, "outgoing_edge_range_p for ");
362 print_generic_expr (dump_file
, name
, TDF_SLIM
);
363 fprintf (dump_file
, " on edge %d->%d ",
364 e
->src
->index
, e
->dest
->index
);
365 fprintf (dump_file
, "is ");
367 fprintf (dump_file
, "\n");
374 // Adjust all pointer imports in BB with non-null information.
377 path_range_query::adjust_for_non_null_uses (basic_block bb
)
383 EXECUTE_IF_SET_IN_BITMAP (m_imports
, 0, i
, bi
)
385 tree name
= ssa_name (i
);
387 if (!POINTER_TYPE_P (TREE_TYPE (name
)))
390 if (get_cache (r
, name
))
396 r
.set_varying (TREE_TYPE (name
));
398 if (m_non_null
.adjust_range (r
, name
, bb
))
403 // If NAME is a supported SSA_NAME, add it the bitmap in IMPORTS.
406 path_range_query::add_to_imports (tree name
, bitmap imports
)
408 if (TREE_CODE (name
) == SSA_NAME
409 && irange::supports_type_p (TREE_TYPE (name
)))
410 return bitmap_set_bit (imports
, SSA_NAME_VERSION (name
));
414 // Add the copies of any SSA names in IMPORTS to IMPORTS.
416 // These are hints for the solver. Adding more elements (within
417 // reason) doesn't slow us down, because we don't solve anything that
418 // doesn't appear in the path. On the other hand, not having enough
419 // imports will limit what we can solve.
422 path_range_query::add_copies_to_imports ()
424 auto_vec
<tree
> worklist (bitmap_count_bits (m_imports
));
428 EXECUTE_IF_SET_IN_BITMAP (m_imports
, 0, i
, bi
)
430 tree name
= ssa_name (i
);
431 worklist
.quick_push (name
);
434 while (!worklist
.is_empty ())
436 tree name
= worklist
.pop ();
437 gimple
*def_stmt
= SSA_NAME_DEF_STMT (name
);
439 if (is_gimple_assign (def_stmt
))
441 // ?? Adding assignment copies doesn't get us much. At the
442 // time of writing, we got 63 more threaded paths across the
443 // .ii files from a bootstrap.
444 add_to_imports (gimple_assign_rhs1 (def_stmt
), m_imports
);
445 tree rhs
= gimple_assign_rhs2 (def_stmt
);
446 if (rhs
&& add_to_imports (rhs
, m_imports
))
447 worklist
.safe_push (rhs
);
448 rhs
= gimple_assign_rhs3 (def_stmt
);
449 if (rhs
&& add_to_imports (rhs
, m_imports
))
450 worklist
.safe_push (rhs
);
452 else if (gphi
*phi
= dyn_cast
<gphi
*> (def_stmt
))
454 for (size_t i
= 0; i
< gimple_phi_num_args (phi
); ++i
)
456 edge e
= gimple_phi_arg_edge (phi
, i
);
457 tree arg
= gimple_phi_arg (phi
, i
)->def
;
459 if (TREE_CODE (arg
) == SSA_NAME
460 && m_path
->contains (e
->src
)
461 && bitmap_set_bit (m_imports
, SSA_NAME_VERSION (arg
)))
462 worklist
.safe_push (arg
);
468 // Compute the ranges for IMPORTS along PATH.
470 // IMPORTS are the set of SSA names, any of which could potentially
471 // change the value of the final conditional in PATH.
474 path_range_query::compute_ranges (const vec
<basic_block
> &path
,
475 const bitmap_head
*imports
)
478 fprintf (dump_file
, "\n*********** path_range_query ******************\n");
481 bitmap_copy (m_imports
, imports
);
482 m_undefined_path
= false;
486 add_copies_to_imports ();
487 get_path_oracle ()->reset_path ();
488 compute_relations (path
);
493 fprintf (dump_file
, "\npath_range_query: compute_ranges for path: ");
494 for (unsigned i
= path
.length (); i
> 0; --i
)
496 basic_block bb
= path
[i
- 1];
497 fprintf (dump_file
, "BB %d", bb
->index
);
499 fprintf (dump_file
, ", ");
501 fprintf (dump_file
, "\n");
506 basic_block bb
= curr_bb ();
510 gori_compute
&gori
= m_ranger
.gori ();
513 // Exported booleans along the path, may help conditionals.
514 // Add them as interesting imports.
515 FOR_EACH_GORI_EXPORT_NAME (gori
, bb
, name
)
516 if (TREE_CODE (TREE_TYPE (name
)) == BOOLEAN_TYPE
)
517 bitmap_set_bit (m_imports
, SSA_NAME_VERSION (name
));
520 compute_ranges_in_block (bb
);
521 adjust_for_non_null_uses (bb
);
533 // A folding aid used to register and query relations along a path.
534 // When queried, it returns relations as they would appear on exit to
537 // Relations are registered on entry so the path_oracle knows which
538 // block to query the root oracle at when a relation lies outside the
539 // path. However, when queried we return the relation on exit to the
540 // path, since the root_oracle ignores the registered.
542 class jt_fur_source
: public fur_depend
545 jt_fur_source (gimple
*s
, path_range_query
*, gori_compute
*,
546 const vec
<basic_block
> &);
547 relation_kind
query_relation (tree op1
, tree op2
) override
;
548 void register_relation (gimple
*, relation_kind
, tree op1
, tree op2
) override
;
549 void register_relation (edge
, relation_kind
, tree op1
, tree op2
) override
;
554 jt_fur_source::jt_fur_source (gimple
*s
,
555 path_range_query
*query
,
557 const vec
<basic_block
> &path
)
558 : fur_depend (s
, gori
, query
)
560 gcc_checking_assert (!path
.is_empty ());
562 m_entry
= path
[path
.length () - 1];
564 if (dom_info_available_p (CDI_DOMINATORS
))
565 m_oracle
= query
->oracle ();
570 // Ignore statement and register relation on entry to path.
573 jt_fur_source::register_relation (gimple
*, relation_kind k
, tree op1
, tree op2
)
576 m_oracle
->register_relation (m_entry
, k
, op1
, op2
);
579 // Ignore edge and register relation on entry to path.
582 jt_fur_source::register_relation (edge
, relation_kind k
, tree op1
, tree op2
)
585 m_oracle
->register_relation (m_entry
, k
, op1
, op2
);
589 jt_fur_source::query_relation (tree op1
, tree op2
)
594 if (TREE_CODE (op1
) != SSA_NAME
|| TREE_CODE (op2
) != SSA_NAME
)
597 return m_oracle
->query_relation (m_entry
, op1
, op2
);
600 // Return the range of STMT at the end of the path being analyzed.
603 path_range_query::range_of_stmt (irange
&r
, gimple
*stmt
, tree
)
605 tree type
= gimple_range_type (stmt
);
607 if (!irange::supports_type_p (type
))
610 // If resolving unknowns, fold the statement as it would have
611 // appeared at the end of the path.
615 jt_fur_source
src (stmt
, this, &m_ranger
.gori (), *m_path
);
616 if (!f
.fold_stmt (r
, stmt
, src
))
617 r
.set_varying (type
);
619 // Otherwise, use the global ranger to fold it as it would appear in
620 // the original IL. This is more conservative, but faster.
621 else if (!fold_range (r
, stmt
, this))
622 r
.set_varying (type
);
627 // Compute relations on a path. This involves two parts: relations
628 // along the conditionals joining a path, and relations determined by
632 path_range_query::compute_relations (const vec
<basic_block
> &path
)
634 if (!dom_info_available_p (CDI_DOMINATORS
))
637 jt_fur_source
src (NULL
, this, &m_ranger
.gori (), path
);
638 basic_block prev
= NULL
;
639 for (unsigned i
= path
.length (); i
> 0; --i
)
641 basic_block bb
= path
[i
- 1];
642 gimple
*stmt
= last_stmt (bb
);
644 compute_phi_relations (bb
, prev
);
646 // Compute relations in outgoing edges along the path. Skip the
647 // final conditional which we don't know yet.
650 && gimple_code (stmt
) == GIMPLE_COND
651 && irange::supports_type_p (TREE_TYPE (gimple_cond_lhs (stmt
))))
653 basic_block next
= path
[i
- 2];
655 gcond
*cond
= as_a
<gcond
*> (stmt
);
656 edge e0
= EDGE_SUCC (bb
, 0);
657 edge e1
= EDGE_SUCC (bb
, 1);
659 if (e0
->dest
== next
)
660 gcond_edge_range (r
, e0
);
661 else if (e1
->dest
== next
)
662 gcond_edge_range (r
, e1
);
666 src
.register_outgoing_edges (cond
, r
, e0
, e1
);
672 // Compute relations for each PHI in BB. For example:
674 // x_5 = PHI<y_9(5),...>
676 // If the path flows through BB5, we can register that x_5 == y_9.
679 path_range_query::compute_phi_relations (basic_block bb
, basic_block prev
)
684 basic_block entry
= entry_bb ();
685 edge e_in
= find_edge (prev
, bb
);
686 gcc_checking_assert (e_in
);
688 for (gphi_iterator iter
= gsi_start_phis (bb
); !gsi_end_p (iter
);
691 gphi
*phi
= iter
.phi ();
692 tree result
= gimple_phi_result (phi
);
693 unsigned nargs
= gimple_phi_num_args (phi
);
695 for (size_t i
= 0; i
< nargs
; ++i
)
696 if (e_in
== gimple_phi_arg_edge (phi
, i
))
698 tree arg
= gimple_phi_arg_def (phi
, i
);
700 if (gimple_range_ssa_p (arg
))
701 m_oracle
->register_relation (entry
, EQ_EXPR
, arg
, result
);