/* Support routines for Value Range Propagation (VRP).
   Copyright (C) 2005-2021 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "tree-ssa-propagate.h"
#include "alloc-pool.h"
#include "domwalk.h"
#include "tree-cfgcleanup.h"
#include "vr-values.h"
#include "gimple-ssa-evrp-analyze.h"
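
/* Construct the analyzer: clear BB_VISITED on every basic block and mark
   all incoming edges executable, the initial state expected by enter ()
   and record_ranges_from_phis () as blocks are visited.  */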

evrp_range_analyzer::evrp_range_analyzer (bool update_global_ranges)
  : stack (10), m_update_global_ranges (update_global_ranges)
{
  edge e;
  edge_iterator ei;
  basic_block bb;
  FOR_EACH_BB_FN (bb, cfun)
    {
      bb->flags &= ~BB_VISITED;
      FOR_EACH_EDGE (e, ei, bb->preds)
        e->flags |= EDGE_EXECUTABLE;
    }
}
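
/* The unwind stack stores (SSA name, previous range) pairs; a pair of
   (NULL_TREE, NULL) acts as a marker so that pop_to_marker () can restore
   every range pushed since the matching push_marker () call.  */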

/* Push an unwinding marker onto the unwinding stack.  */

void
evrp_range_analyzer::push_marker ()
{
  stack.safe_push (std::make_pair (NULL_TREE, (value_range_equiv *)NULL));
}

/* Analyze ranges as we enter basic block BB.  */

void
evrp_range_analyzer::enter (basic_block bb)
{
  if (!optimize)
    return;
  push_marker ();
  record_ranges_from_incoming_edge (bb);
  record_ranges_from_phis (bb);
  bb->flags |= BB_VISITED;
}

/* Find new range for NAME such that (OP CODE LIMIT) is true.  */
value_range_equiv *
evrp_range_analyzer::try_find_new_range (tree name,
                                         tree op, tree_code code, tree limit)
{
  value_range_equiv vr;
  const value_range_equiv *old_vr = get_value_range (name);

  /* Discover VR when condition is true.  */
  extract_range_for_var_from_comparison_expr (name, code, op, limit, &vr);
  /* If we found a usable VR, return a freshly allocated copy for the
     caller to install (pushing the old VR onto the unwind stack);
     return NULL if the new VR adds no information.  */
  if (!vr.undefined_p () && !vr.varying_p ())
    {
      if (old_vr->equal_p (vr, /*ignore_equivs=*/true))
        return NULL;
      value_range_equiv *new_vr = allocate_value_range_equiv ();
      new_vr->move (&vr);
      return new_vr;
    }
  return NULL;
}

/* For LHS record VR in the SSA info.  */
void
evrp_range_analyzer::set_ssa_range_info (tree lhs, value_range_equiv *vr)
{
  gcc_assert (m_update_global_ranges);

  /* Set the SSA with the value range.  */
  if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
    {
      if (!vr->varying_p () && vr->constant_p ())
        set_range_info (lhs, vr->kind (),
                        wi::to_wide (vr->min ()),
                        wi::to_wide (vr->max ()));
    }
  else if (POINTER_TYPE_P (TREE_TYPE (lhs))
           && range_includes_zero_p (vr) == 0)
    set_ptr_nonnull (lhs);
}

/* Return true if all uses of NAME are dominated by STMT or feed STMT
   via a chain of single immediate uses.  */

static bool
all_uses_feed_or_dominated_by_stmt (tree name, gimple *stmt)
{
  use_operand_p use_p, use2_p;
  imm_use_iterator iter;
  basic_block stmt_bb = gimple_bb (stmt);

  FOR_EACH_IMM_USE_FAST (use_p, iter, name)
    {
      gimple *use_stmt = USE_STMT (use_p), *use_stmt2;
      if (use_stmt == stmt
          || is_gimple_debug (use_stmt)
          || (gimple_bb (use_stmt) != stmt_bb
              && dominated_by_p (CDI_DOMINATORS,
                                 gimple_bb (use_stmt), stmt_bb)))
        continue;
      while (use_stmt != stmt
             && is_gimple_assign (use_stmt)
             && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
             && single_imm_use (gimple_assign_lhs (use_stmt),
                                &use2_p, &use_stmt2))
        use_stmt = use_stmt2;
      if (use_stmt != stmt)
        return false;
    }
  return true;
}
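
/* Record ranges implied by the predicate that controls the single incoming
   edge of BB, if there is one.  For example, entering BB via the true edge
   of "if (x_1 > 10)" (x_1 a signed-integer SSA name, used here purely for
   illustration) lets us push the range [11, +INF] for x_1 for the duration
   of BB.  */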

void
evrp_range_analyzer::record_ranges_from_incoming_edge (basic_block bb)
{
  edge pred_e = single_pred_edge_ignoring_loop_edges (bb, false);
  if (pred_e)
    {
      gimple *stmt = last_stmt (pred_e->src);
      tree op0 = NULL_TREE;

      if (stmt
          && gimple_code (stmt) == GIMPLE_COND
          && (op0 = gimple_cond_lhs (stmt))
          && TREE_CODE (op0) == SSA_NAME
          && (INTEGRAL_TYPE_P (TREE_TYPE (gimple_cond_lhs (stmt)))
              || POINTER_TYPE_P (TREE_TYPE (gimple_cond_lhs (stmt)))))
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Visiting controlling predicate ");
              print_gimple_stmt (dump_file, stmt, 0);
            }
          /* Entering a new scope.  Try to see if we can find a VR
             here.  */
          tree op1 = gimple_cond_rhs (stmt);
          if (TREE_OVERFLOW_P (op1))
            op1 = drop_tree_overflow (op1);
          tree_code code = gimple_cond_code (stmt);

          auto_vec<assert_info, 8> asserts;
          register_edge_assert_for (op0, pred_e, code, op0, op1, asserts);
          if (TREE_CODE (op1) == SSA_NAME)
            register_edge_assert_for (op1, pred_e, code, op0, op1, asserts);

          auto_vec<std::pair<tree, value_range_equiv *>, 8> vrs;
          for (unsigned i = 0; i < asserts.length (); ++i)
            {
              value_range_equiv *vr
                = try_find_new_range (asserts[i].name,
                                      asserts[i].expr,
                                      asserts[i].comp_code,
                                      asserts[i].val);
              if (vr)
                vrs.safe_push (std::make_pair (asserts[i].name, vr));
            }

          /* If pred_e is really a fallthru we can record value ranges
             in SSA names as well.  */
          bool is_fallthru = assert_unreachable_fallthru_edge_p (pred_e);

          /* Push updated ranges only after finding all of them to avoid
             ordering issues that can lead to worse ranges.  */
          for (unsigned i = 0; i < vrs.length (); ++i)
            {
              /* But make sure we do not weaken ranges like when
                 getting first [64, +INF] and then ~[0, 0] from
                 conditions like (s & 0x3cc0) == 0.  */
              const value_range_equiv *old_vr
                = get_value_range (vrs[i].first);
              value_range tem (*old_vr);
              tem.intersect (vrs[i].second);
              if (tem.equal_p (*old_vr))
                {
                  free_value_range (vrs[i].second);
                  continue;
                }
              push_value_range (vrs[i].first, vrs[i].second);
              if (is_fallthru
                  && m_update_global_ranges
                  && all_uses_feed_or_dominated_by_stmt (vrs[i].first, stmt)
                  /* The condition must post-dominate the definition point.  */
                  && (SSA_NAME_IS_DEFAULT_DEF (vrs[i].first)
                      || (gimple_bb (SSA_NAME_DEF_STMT (vrs[i].first))
                          == pred_e->src)))
                {
                  set_ssa_range_info (vrs[i].first, vrs[i].second);
                  maybe_set_nonzero_bits (pred_e, vrs[i].first);
                }
            }
        }
    }
}
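
/* Compute ranges for the PHI results in BB.  When every executable
   predecessor has already been visited, the PHI argument ranges can be
   combined; otherwise we have to fall back to VARYING, possibly refined
   by SCEV for loop-header PHIs.  */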

void
evrp_range_analyzer::record_ranges_from_phis (basic_block bb)
{
  /* Visit PHI stmts and discover any new VRs possible.  */
  bool has_unvisited_preds = false;
  edge_iterator ei;
  edge e;
  FOR_EACH_EDGE (e, ei, bb->preds)
    if (e->flags & EDGE_EXECUTABLE
        && !(e->src->flags & BB_VISITED))
      {
        has_unvisited_preds = true;
        break;
      }

  for (gphi_iterator gpi = gsi_start_phis (bb);
       !gsi_end_p (gpi); gsi_next (&gpi))
    {
      gphi *phi = gpi.phi ();
      tree lhs = PHI_RESULT (phi);
      if (virtual_operand_p (lhs))
        continue;

      /* Skip floats and other things we can't represent in a
         range.  */
      if (!value_range::supports_type_p (TREE_TYPE (lhs)))
        continue;

      value_range_equiv vr_result;
      bool interesting = stmt_interesting_for_vrp (phi);
      if (!has_unvisited_preds && interesting)
        extract_range_from_phi_node (phi, &vr_result);
      else
        {
          vr_result.set_varying (TREE_TYPE (lhs));
          /* When we have an unvisited executable predecessor we can't
             use PHI arg ranges which may still be UNDEFINED but have
             to use VARYING for them.  But we can still resort to
             SCEV for loop header PHIs.  */
          class loop *l;
          if (scev_initialized_p ()
              && interesting
              && (l = loop_containing_stmt (phi))
              && l->header == gimple_bb (phi))
            adjust_range_with_scev (&vr_result, l, phi, lhs);
        }
      update_value_range (lhs, &vr_result);

      /* Set the SSA with the value range.  */
      if (m_update_global_ranges)
        set_ssa_range_info (lhs, &vr_result);
    }
}

/* Record ranges from STMT into our VR_VALUES class.  If TEMPORARY is
   true, then this is a temporary equivalence and should be recorded
   into the unwind table.  Otherwise record the equivalence into the
   global table.  */

void
evrp_range_analyzer::record_ranges_from_stmt (gimple *stmt, bool temporary)
{
  tree output = NULL_TREE;

  if (!optimize)
    return;

  if (dyn_cast <gcond *> (stmt))
    ;
  else if (stmt_interesting_for_vrp (stmt))
    {
      edge taken_edge;
      value_range_equiv vr;
      extract_range_from_stmt (stmt, &taken_edge, &output, &vr);
      if (output)
        {
          /* Set the SSA with the value range.  There are two cases to
             consider.  First (the most common) is we are processing
             STMT in a context where its resulting range globally holds
             and thus it can be reflected into the global ranges and need
             not be unwound as we leave scope.

             The second case occurs if we are processing a statement in
             a context where the resulting range must not be reflected
             into the global tables and must be unwound as we leave
             the current context.  This happens in jump threading for
             example.  */
          if (!temporary)
            {
              /* Case one.  We can just update the underlying range
                 information as well as the global information.  */
              update_value_range (output, &vr);
              if (m_update_global_ranges)
                set_ssa_range_info (output, &vr);
            }
          else
            {
              /* We're going to need to unwind this range.  We cannot
                 use VR as that's a stack object.  We have to allocate
                 a new range and push the old range onto the stack.  We
                 also have to be very careful about sharing the underlying
                 bitmaps.  Ugh.  */
              value_range_equiv *new_vr = allocate_value_range_equiv ();
              new_vr->set (vr.min (), vr.max (), NULL, vr.kind ());
              vr.equiv_clear ();
              push_value_range (output, new_vr);
            }
        }
      else
        set_defs_to_varying (stmt);
    }
  else
    set_defs_to_varying (stmt);

  /* See if we can derive a range for any of STMT's operands.  */
  tree op;
  ssa_op_iter i;
  FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_USE)
    {
      tree value;
      enum tree_code comp_code;

      /* If OP is used in such a way that we can infer a value
         range for it, record the new range on the unwind stack.  */
      if (infer_value_range (stmt, op, &comp_code, &value))
        {
          /* If we are able to infer a nonzero value range for OP,
             then walk backwards through the use-def chain to see if OP
             was set via a typecast.
             If so, then we can also infer a nonzero value range
             for the operand of the NOP_EXPR.  */
          if (comp_code == NE_EXPR && integer_zerop (value))
            {
              tree t = op;
              gimple *def_stmt = SSA_NAME_DEF_STMT (t);
              while (is_gimple_assign (def_stmt)
                     && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))
                     && TREE_CODE
                          (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
                     && POINTER_TYPE_P
                          (TREE_TYPE (gimple_assign_rhs1 (def_stmt))))
                {
                  t = gimple_assign_rhs1 (def_stmt);
                  def_stmt = SSA_NAME_DEF_STMT (t);

                  /* Add VR when (T COMP_CODE value) condition is
                     true.  */
                  value_range_equiv *op_range
                    = try_find_new_range (t, t, comp_code, value);
                  if (op_range)
                    push_value_range (t, op_range);
                }
            }
          /* Add VR when (OP COMP_CODE value) condition is true.  */
          value_range_equiv *op_range = try_find_new_range (op, op,
                                                            comp_code, value);
          if (op_range)
            push_value_range (op, op_range);
        }
    }
}

/* Unwind recorded ranges to their most recent state.  */

void
evrp_range_analyzer::pop_to_marker (void)
{
  gcc_checking_assert (!stack.is_empty ());
  while (stack.last ().first != NULL_TREE)
    pop_value_range ();
  stack.pop ();
}

/* Restore/pop VRs valid only for BB when we leave BB.  */

void
evrp_range_analyzer::leave (basic_block bb ATTRIBUTE_UNUSED)
{
  if (!optimize)
    return;
  pop_to_marker ();
}

/* Push VAR's current range onto the unwind stack and install VR as its
   new range.  */

void
evrp_range_analyzer::push_value_range (tree var, value_range_equiv *vr)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "pushing new range for ");
      print_generic_expr (dump_file, var);
      fprintf (dump_file, ": ");
      dump_value_range (dump_file, vr);
      fprintf (dump_file, "\n");
    }
  value_range_equiv *old_vr = swap_vr_value (var, vr);
  stack.safe_push (std::make_pair (var, old_vr));
}

/* Pop a Value Range from the unwind stack, restoring the previously
   saved range.  */

void
evrp_range_analyzer::pop_value_range ()
{
  std::pair<tree, value_range_equiv *> e = stack.pop ();
  tree var = e.first;
  value_range_equiv *vr = e.second;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "popping range for ");
      print_generic_expr (dump_file, var);
      fprintf (dump_file, ", restoring ");
      dump_value_range (dump_file, vr);
      fprintf (dump_file, "\n");
    }
  /* We saved off a lattice entry, now give it back and release
     the one we popped.  */
  value_range_equiv *popped_vr = swap_vr_value (var, vr);
  if (popped_vr)
    free_value_range (popped_vr);
}