gcc/gimple-range-path.cc
/* Basic block path solver.
   Copyright (C) 2021 Free Software Foundation, Inc.
   Contributed by Aldy Hernandez <aldyh@redhat.com>.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "cfganal.h"
#include "value-range.h"
#include "gimple-range.h"
#include "tree-pretty-print.h"
#include "gimple-range-path.h"
#include "ssa.h"
#include "tree-cfg.h"
#include "gimple-iterator.h"

// Internal construct to help facilitate debugging of solver.
#define DEBUG_SOLVER (0 && dump_file)
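
// Construct a path solver that uses RANGER to answer queries.  If
// RESOLVE is true, ranges for names defined outside the path are
// resolved on entry to the path (more precise, but slower).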

path_range_query::path_range_query (gimple_ranger &ranger, bool resolve)
  : m_ranger (ranger)
{
  if (DEBUG_SOLVER)
    fprintf (dump_file, "\n*********** path_range_query ******************\n");

  m_cache = new ssa_global_cache;
  m_has_cache_entry = BITMAP_ALLOC (NULL);
  m_path = NULL;
  m_resolve = resolve;
  m_oracle = new path_oracle (ranger.oracle ());
}
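
// Release the range cache, its entry bitmap, and the path relation
// oracle.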

path_range_query::~path_range_query ()
{
  BITMAP_FREE (m_has_cache_entry);
  delete m_cache;
  delete m_oracle;
}

// Mark cache entry for NAME as unused.

void
path_range_query::clear_cache (tree name)
{
  unsigned v = SSA_NAME_VERSION (name);
  bitmap_clear_bit (m_has_cache_entry, v);
}

// If NAME has a cache entry, return it in R, and return TRUE.

inline bool
path_range_query::get_cache (irange &r, tree name)
{
  if (!gimple_range_ssa_p (name))
    return get_global_range_query ()->range_of_expr (r, name);

  unsigned v = SSA_NAME_VERSION (name);
  if (bitmap_bit_p (m_has_cache_entry, v))
    return m_cache->get_global_range (r, name);

  return false;
}

// Set the cache entry for NAME to R.

void
path_range_query::set_cache (const irange &r, tree name)
{
  unsigned v = SSA_NAME_VERSION (name);
  bitmap_set_bit (m_has_cache_entry, v);
  m_cache->set_global_range (name, r);
}
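
// Dump the current path, the imports, and the cached ranges to
// DUMP_FILE.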

void
path_range_query::dump (FILE *dump_file)
{
  push_dump_file save (dump_file, dump_flags & ~TDF_DETAILS);

  if (m_path->is_empty ())
    return;

  unsigned i;
  bitmap_iterator bi;

  fprintf (dump_file, "\nPath is (length=%d):\n", m_path->length ());
  dump_ranger (dump_file, *m_path);

  fprintf (dump_file, "Imports:\n");
  EXECUTE_IF_SET_IN_BITMAP (m_imports, 0, i, bi)
    {
      tree name = ssa_name (i);
      print_generic_expr (dump_file, name, TDF_SLIM);
      fprintf (dump_file, "\n");
    }

  m_cache->dump (dump_file);
}
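
// Dump the solver state to stderr, for use from a debugger.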

void
path_range_query::debug ()
{
  dump (stderr);
}

// Return TRUE if NAME is defined outside the current path.

bool
path_range_query::defined_outside_path (tree name)
{
  gimple *def = SSA_NAME_DEF_STMT (name);
  basic_block bb = gimple_bb (def);

  return !bb || !m_path->contains (bb);
}

// Return the range of NAME on entry to the path.

void
path_range_query::range_on_path_entry (irange &r, tree name)
{
  int_range_max tmp;
  basic_block entry = entry_bb ();
  r.set_undefined ();
  for (unsigned i = 0; i < EDGE_COUNT (entry->preds); ++i)
    {
      edge e = EDGE_PRED (entry, i);
      if (e->src != ENTRY_BLOCK_PTR_FOR_FN (cfun)
          && m_ranger.range_on_edge (tmp, e, name))
        r.union_ (tmp);
    }
}

// Return the range of NAME at the end of the path being analyzed.

bool
path_range_query::internal_range_of_expr (irange &r, tree name, gimple *stmt)
{
  if (!irange::supports_type_p (TREE_TYPE (name)))
    return false;

  if (get_cache (r, name))
    return true;

  if (m_resolve && defined_outside_path (name))
    {
      range_on_path_entry (r, name);
      set_cache (r, name);
      return true;
    }

  basic_block bb = stmt ? gimple_bb (stmt) : exit_bb ();
  if (stmt && range_defined_in_block (r, name, bb))
    {
      if (TREE_CODE (name) == SSA_NAME)
        r.intersect (gimple_range_global (name));

      if (m_resolve && r.varying_p ())
        range_on_path_entry (r, name);

      set_cache (r, name);
      return true;
    }

  r.set_varying (TREE_TYPE (name));
  return true;
}
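
// Return the range of NAME at the end of the path.  If an undefined
// range is computed, note it, since that indicates the path itself is
// unreachable.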

bool
path_range_query::range_of_expr (irange &r, tree name, gimple *stmt)
{
  if (internal_range_of_expr (r, name, stmt))
    {
      if (r.undefined_p ())
        m_undefined_path = true;

      return true;
    }
  return false;
}
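
// Return TRUE if the current path has been determined to be
// unreachable.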

bool
path_range_query::unreachable_path_p ()
{
  return m_undefined_path;
}

// Initialize the current path to PATH.  The current block is set to
// the entry block to the path.
//
// Note that the blocks are in reverse order, so the exit block is
// path[0].

void
path_range_query::set_path (const vec<basic_block> &path)
{
  gcc_checking_assert (path.length () > 1);
  m_path = &path;
  m_pos = m_path->length () - 1;
  bitmap_clear (m_has_cache_entry);
}

// Return the range of the result of PHI in R.

void
path_range_query::ssa_range_in_phi (irange &r, gphi *phi)
{
  tree name = gimple_phi_result (phi);
  basic_block bb = gimple_bb (phi);

  if (at_entry ())
    {
      if (m_resolve && m_ranger.range_of_expr (r, name, phi))
        return;

      // Try to fold the PHI, in case we can resolve simple things
      // like PHI <5(99), 6(88)>.
      if (!fold_range (r, phi, this))
        r.set_varying (TREE_TYPE (name));

      return;
    }

  basic_block prev = prev_bb ();
  edge e_in = find_edge (prev, bb);
  unsigned nargs = gimple_phi_num_args (phi);

  for (size_t i = 0; i < nargs; ++i)
    if (e_in == gimple_phi_arg_edge (phi, i))
      {
        tree arg = gimple_phi_arg_def (phi, i);

        if (!get_cache (r, arg))
          {
            if (m_resolve)
              {
                int_range_max tmp;
                // Using both the range on entry to the path, and the
                // range on this edge yields significantly better
                // results.
                range_on_path_entry (r, arg);
                m_ranger.range_on_edge (tmp, e_in, arg);
                r.intersect (tmp);
                return;
              }
            r.set_varying (TREE_TYPE (name));
          }
        return;
      }
  gcc_unreachable ();
}

// If NAME is defined in BB, set R to the range of NAME, and return
// TRUE.  Otherwise, return FALSE.

bool
path_range_query::range_defined_in_block (irange &r, tree name, basic_block bb)
{
  gimple *def_stmt = SSA_NAME_DEF_STMT (name);
  basic_block def_bb = gimple_bb (def_stmt);

  if (def_bb != bb)
    return false;

  if (gimple_code (def_stmt) == GIMPLE_PHI)
    ssa_range_in_phi (r, as_a<gphi *> (def_stmt));
  else if (!range_of_stmt (r, def_stmt, name))
    r.set_varying (TREE_TYPE (name));

  if (bb)
    m_non_null.adjust_range (r, name, bb);

  if (DEBUG_SOLVER && (bb || !r.varying_p ()))
    {
      fprintf (dump_file, "range_defined_in_block (BB%d) for ",
               bb ? bb->index : -1);
      print_generic_expr (dump_file, name, TDF_SLIM);
      fprintf (dump_file, " is ");
      r.dump (dump_file);
      fprintf (dump_file, "\n");
    }

  return true;
}

// Precompute ranges defined in the current block, or ranges
// that are exported on an edge to the next block.

void
path_range_query::precompute_ranges_in_block (basic_block bb)
{
  bitmap_iterator bi;
  int_range_max r, cached_range;
  unsigned i;

  // Force recalculation of any names in the cache that are defined in
  // this block.  This can happen on interdependent SSA/phis in loops.
  EXECUTE_IF_SET_IN_BITMAP (m_imports, 0, i, bi)
    {
      tree name = ssa_name (i);
      gimple *def_stmt = SSA_NAME_DEF_STMT (name);
      basic_block def_bb = gimple_bb (def_stmt);

      if (def_bb == bb)
        clear_cache (name);
    }

  // Solve imports defined in this block.
  EXECUTE_IF_SET_IN_BITMAP (m_imports, 0, i, bi)
    {
      tree name = ssa_name (i);

      if (range_defined_in_block (r, name, bb))
        set_cache (r, name);
    }

  if (at_exit ())
    return;

  // Solve imports that are exported to the next block.
  edge e = find_edge (bb, next_bb ());
  EXECUTE_IF_SET_IN_BITMAP (m_imports, 0, i, bi)
    {
      tree name = ssa_name (i);
      gori_compute &g = m_ranger.gori ();
      bitmap exports = g.exports (bb);

      if (bitmap_bit_p (exports, i))
        {
          if (g.outgoing_edge_range_p (r, e, name, *this))
            {
              if (get_cache (cached_range, name))
                r.intersect (cached_range);

              set_cache (r, name);
              if (DEBUG_SOLVER)
                {
                  fprintf (dump_file, "outgoing_edge_range_p for ");
                  print_generic_expr (dump_file, name, TDF_SLIM);
                  fprintf (dump_file, " on edge %d->%d ",
                           e->src->index, e->dest->index);
                  fprintf (dump_file, "is ");
                  r.dump (dump_file);
                  fprintf (dump_file, "\n");
                }
            }
        }
    }
}

// Adjust all pointer imports in BB with non-null information.

void
path_range_query::adjust_for_non_null_uses (basic_block bb)
{
  int_range_max r;
  bitmap_iterator bi;
  unsigned i;

  EXECUTE_IF_SET_IN_BITMAP (m_imports, 0, i, bi)
    {
      tree name = ssa_name (i);

      if (!POINTER_TYPE_P (TREE_TYPE (name)))
        continue;

      if (get_cache (r, name))
        {
          if (r.nonzero_p ())
            continue;
        }
      else
        r.set_varying (TREE_TYPE (name));

      if (m_non_null.adjust_range (r, name, bb))
        set_cache (r, name);
    }
}

// If NAME is a supported SSA_NAME, add it to the bitmap IMPORTS.

bool
path_range_query::add_to_imports (tree name, bitmap imports)
{
  if (TREE_CODE (name) == SSA_NAME
      && irange::supports_type_p (TREE_TYPE (name)))
    return bitmap_set_bit (imports, SSA_NAME_VERSION (name));
  return false;
}

// For each SSA name in IMPORTS, add to IMPORTS the names it is a copy
// of (through assignments and PHIs).
//
// These are hints for the solver.  Adding more elements (within
// reason) doesn't slow us down, because we don't solve anything that
// doesn't appear in the path.  On the other hand, not having enough
// imports will limit what we can solve.

void
path_range_query::add_copies_to_imports ()
{
  auto_vec<tree> worklist (bitmap_count_bits (m_imports));
  bitmap_iterator bi;
  unsigned i;

  EXECUTE_IF_SET_IN_BITMAP (m_imports, 0, i, bi)
    {
      tree name = ssa_name (i);
      worklist.quick_push (name);
    }
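
  // Walk the worklist, adding the operands of each name's defining
  // assignment or PHI (for PHI arguments on edges within the path) as
  // further imports.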

  while (!worklist.is_empty ())
    {
      tree name = worklist.pop ();
      gimple *def_stmt = SSA_NAME_DEF_STMT (name);

      if (is_gimple_assign (def_stmt))
        {
          // ?? Adding assignment copies doesn't get us much.  At the
          // time of writing, we got 63 more threaded paths across the
          // .ii files from a bootstrap.
          add_to_imports (gimple_assign_rhs1 (def_stmt), m_imports);
          tree rhs = gimple_assign_rhs2 (def_stmt);
          if (rhs && add_to_imports (rhs, m_imports))
            worklist.safe_push (rhs);
          rhs = gimple_assign_rhs3 (def_stmt);
          if (rhs && add_to_imports (rhs, m_imports))
            worklist.safe_push (rhs);
        }
      else if (gphi *phi = dyn_cast <gphi *> (def_stmt))
        {
          for (size_t i = 0; i < gimple_phi_num_args (phi); ++i)
            {
              edge e = gimple_phi_arg_edge (phi, i);
              tree arg = gimple_phi_arg (phi, i)->def;

              if (TREE_CODE (arg) == SSA_NAME
                  && m_path->contains (e->src)
                  && bitmap_set_bit (m_imports, SSA_NAME_VERSION (arg)))
                worklist.safe_push (arg);
            }
        }
    }
}

// Precompute the ranges for IMPORTS along PATH.
//
// IMPORTS are the set of SSA names, any of which could potentially
// change the value of the final conditional in PATH.

void
path_range_query::precompute_ranges (const vec<basic_block> &path,
                                     const bitmap_head *imports)
{
  set_path (path);
  bitmap_copy (m_imports, imports);
  m_undefined_path = false;

  if (m_resolve)
    {
      add_copies_to_imports ();
      m_oracle->reset_path ();
      precompute_relations (path);
    }

  if (DEBUG_SOLVER)
    {
      fprintf (dump_file, "\npath_range_query: precompute_ranges for path: ");
      for (unsigned i = path.length (); i > 0; --i)
        {
          basic_block bb = path[i - 1];
          fprintf (dump_file, "BB %d", bb->index);
          if (i > 1)
            fprintf (dump_file, ", ");
        }
      fprintf (dump_file, "\n");
    }
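
  // Walk the path from the entry block to the exit block, solving
  // ranges in each block along the way.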

  while (1)
    {
      basic_block bb = curr_bb ();

      if (m_resolve)
        {
          gori_compute &gori = m_ranger.gori ();
          tree name;

          // Exported booleans along the path may help conditionals.
          // Add them as interesting imports.
          FOR_EACH_GORI_EXPORT_NAME (gori, bb, name)
            if (TREE_CODE (TREE_TYPE (name)) == BOOLEAN_TYPE)
              bitmap_set_bit (m_imports, SSA_NAME_VERSION (name));
        }

      precompute_ranges_in_block (bb);
      adjust_for_non_null_uses (bb);

      if (at_exit ())
        break;

      move_next ();
    }

  if (DEBUG_SOLVER)
    dump (dump_file);
}

// A folding aid used to register and query relations along a path.
// When queried, it returns relations as they would appear on exit
// from the path.
//
// Relations are registered on entry so the path_oracle knows which
// block to query the root oracle at when a relation lies outside the
// path.  However, when queried we return the relation on exit from
// the path, since the root_oracle ignores the registered relations.

class jt_fur_source : public fur_depend
{
public:
  jt_fur_source (gimple *s, path_range_query *, gori_compute *,
                 const vec<basic_block> &);
  relation_kind query_relation (tree op1, tree op2) override;
  void register_relation (gimple *, relation_kind, tree op1, tree op2) override;
  void register_relation (edge, relation_kind, tree op1, tree op2) override;
private:
  basic_block m_entry;
};
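
// Construct a jt_fur_source for folding along PATH with QUERY.
// Relations are registered and queried at the path entry block; the
// relation oracle is only used when dominator info is available.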

jt_fur_source::jt_fur_source (gimple *s,
                              path_range_query *query,
                              gori_compute *gori,
                              const vec<basic_block> &path)
  : fur_depend (s, gori, query)
{
  gcc_checking_assert (!path.is_empty ());

  m_entry = path[path.length () - 1];

  if (dom_info_available_p (CDI_DOMINATORS))
    m_oracle = query->oracle ();
  else
    m_oracle = NULL;
}

// Ignore statement and register relation on entry to path.

void
jt_fur_source::register_relation (gimple *, relation_kind k, tree op1, tree op2)
{
  if (m_oracle)
    m_oracle->register_relation (m_entry, k, op1, op2);
}

// Ignore edge and register relation on entry to path.

void
jt_fur_source::register_relation (edge, relation_kind k, tree op1, tree op2)
{
  if (m_oracle)
    m_oracle->register_relation (m_entry, k, op1, op2);
}
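
// Return the relation between OP1 and OP2 at the path entry block, or
// VREL_NONE if there is no oracle or the operands are not SSA names.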

relation_kind
jt_fur_source::query_relation (tree op1, tree op2)
{
  if (!m_oracle)
    return VREL_NONE;

  if (TREE_CODE (op1) != SSA_NAME || TREE_CODE (op2) != SSA_NAME)
    return VREL_NONE;

  return m_oracle->query_relation (m_entry, op1, op2);
}

// Return the range of STMT at the end of the path being analyzed.

bool
path_range_query::range_of_stmt (irange &r, gimple *stmt, tree)
{
  tree type = gimple_range_type (stmt);

  if (!irange::supports_type_p (type))
    return false;

  // If resolving unknowns, fold the statement as it would have
  // appeared at the end of the path.
  if (m_resolve)
    {
      fold_using_range f;
      jt_fur_source src (stmt, this, &m_ranger.gori (), *m_path);
      if (!f.fold_stmt (r, stmt, src))
        r.set_varying (type);
    }
  // Otherwise, use the global ranger to fold it as it would appear in
  // the original IL.  This is more conservative, but faster.
  else if (!fold_range (r, stmt, this))
    r.set_varying (type);

  return true;
}

// Precompute relations on a path.  This involves two parts: relations
// along the conditionals joining a path, and relations determined by
// examining PHIs.

void
path_range_query::precompute_relations (const vec<basic_block> &path)
{
  if (!dom_info_available_p (CDI_DOMINATORS))
    return;

  jt_fur_source src (NULL, this, &m_ranger.gori (), path);
  basic_block prev = NULL;
  for (unsigned i = path.length (); i > 0; --i)
    {
      basic_block bb = path[i - 1];
      gimple *stmt = last_stmt (bb);

      precompute_phi_relations (bb, prev);

      // Compute relations in outgoing edges along the path.  Skip the
      // final conditional which we don't know yet.
      if (i > 1
          && stmt
          && gimple_code (stmt) == GIMPLE_COND
          && irange::supports_type_p (TREE_TYPE (gimple_cond_lhs (stmt))))
        {
          basic_block next = path[i - 2];
          int_range<2> r;
          gcond *cond = as_a<gcond *> (stmt);

          if (EDGE_SUCC (bb, 0)->dest == next)
            gcond_edge_range (r, EDGE_SUCC (bb, 0));
          else if (EDGE_SUCC (bb, 1)->dest == next)
            gcond_edge_range (r, EDGE_SUCC (bb, 1));
          else
            gcc_unreachable ();

          edge e0 = EDGE_SUCC (bb, 0);
          edge e1 = EDGE_SUCC (bb, 1);
          src.register_outgoing_edges (cond, r, e0, e1);
        }
      prev = bb;
    }
}

// Precompute relations for each PHI in BB.  For example:
//
//   x_5 = PHI <y_9(5),...>
//
// If the path flows through BB5, we can register that x_5 == y_9.

void
path_range_query::precompute_phi_relations (basic_block bb, basic_block prev)
{
  if (prev == NULL)
    return;

  basic_block entry = entry_bb ();
  edge e_in = find_edge (prev, bb);
  gcc_checking_assert (e_in);

  for (gphi_iterator iter = gsi_start_phis (bb); !gsi_end_p (iter);
       gsi_next (&iter))
    {
      gphi *phi = iter.phi ();
      tree result = gimple_phi_result (phi);
      unsigned nargs = gimple_phi_num_args (phi);

      for (size_t i = 0; i < nargs; ++i)
        if (e_in == gimple_phi_arg_edge (phi, i))
          {
            tree arg = gimple_phi_arg_def (phi, i);

            if (gimple_range_ssa_p (arg))
              m_oracle->register_relation (entry, EQ_EXPR, arg, result);

            break;
          }
    }
}