c++: new-expression is potentially constant in C++20
[official-gcc.git] / gcc / gimple-ssa-evrp-analyze.cc
blob16e5a75b1db4ca28be524f7c27a47c6fa4f4021f
1 /* Support routines for Value Range Propagation (VRP).
2 Copyright (C) 2005-2022 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "tree.h"
25 #include "gimple.h"
26 #include "tree-pass.h"
27 #include "ssa.h"
28 #include "gimple-pretty-print.h"
29 #include "cfganal.h"
30 #include "gimple-iterator.h"
31 #include "gimple-fold.h"
32 #include "tree-eh.h"
33 #include "tree-cfg.h"
34 #include "tree-ssa-loop-manip.h"
35 #include "tree-ssa-loop.h"
36 #include "cfgloop.h"
37 #include "tree-scalar-evolution.h"
38 #include "tree-ssa-propagate.h"
39 #include "alloc-pool.h"
40 #include "domwalk.h"
41 #include "tree-cfgcleanup.h"
42 #include "vr-values.h"
43 #include "gimple-ssa-evrp-analyze.h"
45 evrp_range_analyzer::evrp_range_analyzer (bool update_global_ranges)
46 : stack (10), m_update_global_ranges (update_global_ranges)
48 edge e;
49 edge_iterator ei;
50 basic_block bb;
51 FOR_EACH_BB_FN (bb, cfun)
53 bb->flags &= ~BB_VISITED;
54 FOR_EACH_EDGE (e, ei, bb->preds)
55 e->flags |= EDGE_EXECUTABLE;
59 /* Push an unwinding marker onto the unwinding stack. */
61 void
62 evrp_range_analyzer::push_marker ()
64 stack.safe_push (std::make_pair (NULL_TREE, (value_range_equiv *)NULL));
67 /* Analyze ranges as we enter basic block BB. */
69 void
70 evrp_range_analyzer::enter (basic_block bb)
72 if (!optimize)
73 return;
74 push_marker ();
75 record_ranges_from_incoming_edge (bb);
76 record_ranges_from_phis (bb);
77 bb->flags |= BB_VISITED;
80 /* Find new range for NAME such that (OP CODE LIMIT) is true. */
81 value_range_equiv *
82 evrp_range_analyzer::try_find_new_range (tree name,
83 tree op, tree_code code, tree limit)
85 value_range_equiv vr;
86 const value_range_equiv *old_vr = get_value_range (name);
88 /* Discover VR when condition is true. */
89 extract_range_for_var_from_comparison_expr (name, code, op, limit, &vr);
90 /* If we found any usable VR, set the VR to ssa_name and create a
91 PUSH old value in the stack with the old VR. */
92 if (!vr.undefined_p () && !vr.varying_p ())
94 if (old_vr->equal_p (vr, /*ignore_equivs=*/true))
95 return NULL;
96 value_range_equiv *new_vr = allocate_value_range_equiv ();
97 new_vr->move (&vr);
98 return new_vr;
100 return NULL;
103 /* For LHS record VR in the SSA info. */
104 void
105 evrp_range_analyzer::set_ssa_range_info (tree lhs, value_range_equiv *vr)
107 gcc_assert (m_update_global_ranges);
109 /* Set the SSA with the value range. */
110 if (INTEGRAL_TYPE_P (TREE_TYPE (lhs)))
112 if (!vr->varying_p () && vr->constant_p ())
113 set_range_info (lhs, *vr);
115 else if (POINTER_TYPE_P (TREE_TYPE (lhs))
116 && range_includes_zero_p (vr) == 0)
117 set_ptr_nonnull (lhs);
120 /* Return true if all uses of NAME are dominated by STMT or feed STMT
121 via a chain of single immediate uses. */
123 static bool
124 all_uses_feed_or_dominated_by_stmt (tree name, gimple *stmt)
126 use_operand_p use_p, use2_p;
127 imm_use_iterator iter;
128 basic_block stmt_bb = gimple_bb (stmt);
130 FOR_EACH_IMM_USE_FAST (use_p, iter, name)
132 gimple *use_stmt = USE_STMT (use_p), *use_stmt2;
/* A use at STMT itself, a debug-only use, or a use in a block
   strictly dominated by STMT's block is trivially acceptable.  */
133 if (use_stmt == stmt
134 || is_gimple_debug (use_stmt)
135 || (gimple_bb (use_stmt) != stmt_bb
136 && dominated_by_p (CDI_DOMINATORS,
137 gimple_bb (use_stmt), stmt_bb)))
138 continue;
/* Otherwise, walk forward through single-immediate-use chains of
   SSA assignments; the chain must terminate at STMT for the use
   to qualify.  */
139 while (use_stmt != stmt
140 && is_gimple_assign (use_stmt)
141 && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
142 && single_imm_use (gimple_assign_lhs (use_stmt),
143 &use2_p, &use_stmt2))
144 use_stmt = use_stmt2;
145 if (use_stmt != stmt)
146 return false;
148 return true;
/* Record ranges implied by the controlling predicate on the single
   incoming edge of BB (ignoring loop back edges), pushing them onto
   the unwind stack and optionally into the global SSA range info.  */
151 void
152 evrp_range_analyzer::record_ranges_from_incoming_edge (basic_block bb)
154 edge pred_e = single_pred_edge_ignoring_loop_edges (bb, false);
155 if (pred_e)
157 gimple *stmt = last_stmt (pred_e->src);
158 tree op0 = NULL_TREE;
/* Only a GIMPLE_COND on an SSA name of integral or pointer type can
   yield usable ranges.  */
160 if (stmt
161 && gimple_code (stmt) == GIMPLE_COND
162 && (op0 = gimple_cond_lhs (stmt))
163 && TREE_CODE (op0) == SSA_NAME
164 && (INTEGRAL_TYPE_P (TREE_TYPE (gimple_cond_lhs (stmt)))
165 || POINTER_TYPE_P (TREE_TYPE (gimple_cond_lhs (stmt)))))
167 if (dump_file && (dump_flags & TDF_DETAILS))
169 fprintf (dump_file, "Visiting controlling predicate ");
170 print_gimple_stmt (dump_file, stmt, 0);
172 /* Entering a new scope. Try to see if we can find a VR
173 here. */
174 tree op1 = gimple_cond_rhs (stmt);
175 if (TREE_OVERFLOW_P (op1))
176 op1 = drop_tree_overflow (op1);
177 tree_code code = gimple_cond_code (stmt);
/* Collect the assertions implied by the condition for both operands,
   then turn each into a candidate range.  */
179 auto_vec<assert_info, 8> asserts;
180 register_edge_assert_for (op0, pred_e, code, op0, op1, asserts);
181 if (TREE_CODE (op1) == SSA_NAME)
182 register_edge_assert_for (op1, pred_e, code, op0, op1, asserts);
184 auto_vec<std::pair<tree, value_range_equiv *>, 8> vrs;
185 for (unsigned i = 0; i < asserts.length (); ++i)
187 value_range_equiv *vr
188 = try_find_new_range (asserts[i].name,
189 asserts[i].expr,
190 asserts[i].comp_code,
191 asserts[i].val);
192 if (vr)
193 vrs.safe_push (std::make_pair (asserts[i].name, vr));
196 /* If pred_e is really a fallthru we can record value ranges
197 in SSA names as well. */
198 bool is_fallthru = assert_unreachable_fallthru_edge_p (pred_e);
200 /* Push updated ranges only after finding all of them to avoid
201 ordering issues that can lead to worse ranges. */
202 for (unsigned i = 0; i < vrs.length (); ++i)
204 /* But make sure we do not weaken ranges like when
205 getting first [64, +INF] and then ~[0, 0] from
206 conditions like (s & 0x3cc0) == 0). */
207 const value_range_equiv *old_vr
208 = get_value_range (vrs[i].first);
209 value_range tem (*old_vr);
210 tem.legacy_verbose_intersect (vrs[i].second);
/* If intersecting does not narrow the known range, the candidate is
   redundant; release it instead of pushing it.  */
211 if (tem.equal_p (*old_vr))
213 free_value_range (vrs[i].second);
214 continue;
216 push_value_range (vrs[i].first, vrs[i].second);
/* On a fallthru edge the range holds globally, so it may also be
   reflected into the SSA range info, provided every use of the name
   is controlled by this condition.  */
217 if (is_fallthru
218 && m_update_global_ranges
219 && all_uses_feed_or_dominated_by_stmt (vrs[i].first, stmt)
220 /* The condition must post-dominate the definition point. */
221 && (SSA_NAME_IS_DEFAULT_DEF (vrs[i].first)
222 || (gimple_bb (SSA_NAME_DEF_STMT (vrs[i].first))
223 == pred_e->src)))
225 set_ssa_range_info (vrs[i].first, vrs[i].second);
226 maybe_set_nonzero_bits (pred_e, vrs[i].first);
/* Compute and record value ranges for the results of BB's PHI
   nodes.  */
233 void
234 evrp_range_analyzer::record_ranges_from_phis (basic_block bb)
236 /* Visit PHI stmts and discover any new VRs possible. */
237 bool has_unvisited_preds = false;
238 edge_iterator ei;
239 edge e;
/* Check whether any executable predecessor has not been visited yet;
   if so, PHI argument ranges cannot be trusted below.  */
240 FOR_EACH_EDGE (e, ei, bb->preds)
241 if (e->flags & EDGE_EXECUTABLE
242 && !(e->src->flags & BB_VISITED))
244 has_unvisited_preds = true;
245 break;
248 for (gphi_iterator gpi = gsi_start_phis (bb);
249 !gsi_end_p (gpi); gsi_next (&gpi))
251 gphi *phi = gpi.phi ();
252 tree lhs = PHI_RESULT (phi);
253 if (virtual_operand_p (lhs))
254 continue;
256 /* Skips floats and other things we can't represent in a
257 range. */
258 if (!value_range::supports_type_p (TREE_TYPE (lhs)))
259 continue;
261 value_range_equiv vr_result;
262 bool interesting = stmt_interesting_for_vrp (phi);
263 if (!has_unvisited_preds && interesting)
264 extract_range_from_phi_node (phi, &vr_result);
265 else
267 vr_result.set_varying (TREE_TYPE (lhs));
268 /* When we have an unvisited executable predecessor we can't
269 use PHI arg ranges which may be still UNDEFINED but have
270 to use VARYING for them. But we can still resort to
271 SCEV for loop header PHIs. */
272 class loop *l;
273 if (scev_initialized_p ()
274 && interesting
275 && (l = loop_containing_stmt (phi))
276 && l->header == gimple_bb (phi))
277 adjust_range_with_scev (&vr_result, l, phi, lhs);
279 update_value_range (lhs, &vr_result);
281 /* Set the SSA with the value range. */
282 if (m_update_global_ranges)
283 set_ssa_range_info (lhs, &vr_result);
287 /* Record ranges from STMT into our VR_VALUES class. If TEMPORARY is
288 true, then this is a temporary equivalence and should be recorded
289 into the unwind table. Otherwise record the equivalence into the
290 global table. */
292 void
293 evrp_range_analyzer::record_ranges_from_stmt (gimple *stmt, bool temporary)
295 tree output = NULL_TREE;
297 if (!optimize)
298 return;
/* Conditions are handled when entering the successor block (see
   record_ranges_from_incoming_edge), so do nothing for them here.  */
300 if (dyn_cast <gcond *> (stmt))
302 else if (stmt_interesting_for_vrp (stmt))
304 edge taken_edge;
305 value_range_equiv vr;
306 extract_range_from_stmt (stmt, &taken_edge, &output, &vr);
307 if (output)
309 /* Set the SSA with the value range. There are two cases to
310 consider. First (the most common) is we are processing
311 STMT in a context where its resulting range globally holds
312 and thus it can be reflected into the global ranges and need
313 not be unwound as we leave scope.
315 The second case occurs if we are processing a statement in
316 a context where the resulting range must not be reflected
317 into the global tables and must be unwound as we leave
318 the current context. This happens in jump threading for
319 example. */
320 if (!temporary)
322 /* Case one. We can just update the underlying range
323 information as well as the global information. */
324 update_value_range (output, &vr);
325 if (m_update_global_ranges)
326 set_ssa_range_info (output, &vr);
328 else
330 /* We're going to need to unwind this range. We cannot
331 use VR as that's a stack object. We have to allocate
332 a new range and push the old range onto the stack. We
333 also have to be very careful about sharing the underlying
334 bitmaps. Ugh. */
335 value_range_equiv *new_vr = allocate_value_range_equiv ();
336 new_vr->set (vr.min (), vr.max (), NULL, vr.kind ());
337 vr.equiv_clear ();
338 push_value_range (output, new_vr);
341 else
342 set_defs_to_varying (stmt);
344 else
345 set_defs_to_varying (stmt);
347 /* See if we can derive a range for any of STMT's operands. */
348 tree op;
349 ssa_op_iter i;
350 FOR_EACH_SSA_TREE_OPERAND (op, stmt, i, SSA_OP_USE)
352 tree value;
353 enum tree_code comp_code;
355 /* If OP is used in such a way that we can infer a value
356 range for it, and we don't find a previous assertion for
357 it, create a new assertion location node for OP. */
358 if (infer_value_range (stmt, op, &comp_code, &value))
360 /* If we are able to infer a nonzero value range for OP,
361 then walk backwards through the use-def chain to see if OP
362 was set via a typecast.
363 If so, then we can also infer a nonzero value range
364 for the operand of the NOP_EXPR. */
365 if (comp_code == NE_EXPR && integer_zerop (value))
367 tree t = op;
368 gimple *def_stmt = SSA_NAME_DEF_STMT (t);
/* Strip pointer-to-pointer conversions so the nonzero range also
   applies to the original (pre-cast) SSA name.  */
369 while (is_gimple_assign (def_stmt)
370 && CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))
371 && TREE_CODE
372 (gimple_assign_rhs1 (def_stmt)) == SSA_NAME
373 && POINTER_TYPE_P
374 (TREE_TYPE (gimple_assign_rhs1 (def_stmt))))
376 t = gimple_assign_rhs1 (def_stmt);
377 def_stmt = SSA_NAME_DEF_STMT (t);
379 /* Add VR when (T COMP_CODE value) condition is
380 true. */
381 value_range_equiv *op_range
382 = try_find_new_range (t, t, comp_code, value);
383 if (op_range)
384 push_value_range (t, op_range);
387 /* Add VR when (OP COMP_CODE value) condition is true. */
388 value_range_equiv *op_range = try_find_new_range (op, op,
389 comp_code, value);
390 if (op_range)
391 push_value_range (op, op_range);
396 /* Unwind recorded ranges to their most recent state. */
398 void
399 evrp_range_analyzer::pop_to_marker (void)
401 gcc_checking_assert (!stack.is_empty ());
402 while (stack.last ().first != NULL_TREE)
403 pop_value_range ();
404 stack.pop ();
407 /* Restore/pop VRs valid only for BB when we leave BB. */
409 void
410 evrp_range_analyzer::leave (basic_block bb ATTRIBUTE_UNUSED)
412 if (!optimize)
413 return;
414 pop_to_marker ();
418 /* Push the Value Range of VAR to the stack and update it with new VR. */
420 void
421 evrp_range_analyzer::push_value_range (tree var, value_range_equiv *vr)
423 if (dump_file && (dump_flags & TDF_DETAILS))
425 fprintf (dump_file, "pushing new range for ");
426 print_generic_expr (dump_file, var);
427 fprintf (dump_file, ": ");
428 dump_value_range (dump_file, vr);
429 fprintf (dump_file, "\n");
431 value_range_equiv *old_vr = swap_vr_value (var, vr);
432 stack.safe_push (std::make_pair (var, old_vr));
435 /* Pop a Value Range from the vrp_stack. */
437 void
438 evrp_range_analyzer::pop_value_range ()
440 std::pair<tree, value_range_equiv *> e = stack.pop ();
441 tree var = e.first;
442 value_range_equiv *vr = e.second;
443 if (dump_file && (dump_flags & TDF_DETAILS))
445 fprintf (dump_file, "popping range for ");
446 print_generic_expr (dump_file, var);
447 fprintf (dump_file, ", restoring ");
448 dump_value_range (dump_file, vr);
449 fprintf (dump_file, "\n");
451 /* We saved off a lattice entry, now give it back and release
452 the one we popped. */
453 value_range_equiv *popped_vr = swap_vr_value (var, vr);
454 if (popped_vr)
455 free_value_range (popped_vr);