/* Support routines for Value Range Propagation (VRP).
   Copyright (C) 2005-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "gimple-walk.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "tree-ssa-propagate.h"
#include "alloc-pool.h"
#include "domwalk.h"
#include "tree-cfgcleanup.h"
#include "vr-values.h"
#include "gimple-ssa-evrp-analyze.h"
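/* The folder below feeds the value ranges computed by the EVRP analyzer
   into the generic substitute_and_fold_engine: get_value reports the
   constant an SSA name is known to equal (if any), and statement
   simplification is delegated to the shared vr_values instance.  */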
class evrp_folder : public substitute_and_fold_engine
{
 public:
  tree get_value (tree) FINAL OVERRIDE;
  evrp_folder (class vr_values *vr_values_) : vr_values (vr_values_) { }
  bool simplify_stmt_using_ranges (gimple_stmt_iterator *gsi)
    { return vr_values->simplify_stmt_using_ranges (gsi); }
  class vr_values *vr_values;

 private:
  DISABLE_COPY_AND_ASSIGN (evrp_folder);
};

tree
evrp_folder::get_value (tree op)
{
  return vr_values->op_with_constant_singleton_value_range (op);
}
/* evrp_dom_walker visits the basic blocks in dominance order and sets
   the Value Ranges (VR) for SSA_NAMEs in the scope.  Use this VR to
   discover more VRs.  */

class evrp_dom_walker : public dom_walker
{
 public:
  evrp_dom_walker ()
    : dom_walker (CDI_DOMINATORS),
      evrp_range_analyzer (true),
      evrp_folder (evrp_range_analyzer.get_vr_values ())
    {
      need_eh_cleanup = BITMAP_ALLOC (NULL);
    }
  ~evrp_dom_walker ()
    {
      BITMAP_FREE (need_eh_cleanup);
    }
  virtual edge before_dom_children (basic_block);
  virtual void after_dom_children (basic_block);
  void cleanup (void);

 private:
  DISABLE_COPY_AND_ASSIGN (evrp_dom_walker);
  bitmap need_eh_cleanup;
  auto_vec<gimple *> stmts_to_fixup;
  auto_vec<gimple *> stmts_to_remove;

  class evrp_range_analyzer evrp_range_analyzer;
  class evrp_folder evrp_folder;
};
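/* Record ranges for BB's PHIs and statements, fold what the recorded
   ranges already prove, and queue fully-propagated defs for removal in
   cleanup ().  Returns the edge proved to be taken by a conditional in
   BB, or NULL.  */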
edge
evrp_dom_walker::before_dom_children (basic_block bb)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Visiting BB%d\n", bb->index);

  evrp_range_analyzer.enter (bb);

  for (gphi_iterator gpi = gsi_start_phis (bb);
       !gsi_end_p (gpi); gsi_next (&gpi))
    {
      gphi *phi = gpi.phi ();
      tree lhs = PHI_RESULT (phi);
      if (virtual_operand_p (lhs))
        continue;

      value_range *vr = evrp_range_analyzer.get_value_range (lhs);
      /* Mark PHIs whose lhs we fully propagate for removal.  */
      tree val;
      if (vr->singleton_p (&val) && may_propagate_copy (lhs, val))
        {
          stmts_to_remove.safe_push (phi);
          continue;
        }
    }

  edge taken_edge = NULL;
  /* Visit all other stmts and discover any new VRs possible.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      tree output = NULL_TREE;
      gimple *old_stmt = stmt;
      bool was_noreturn = (is_gimple_call (stmt)
                           && gimple_call_noreturn_p (stmt));

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Visiting stmt ");
          print_gimple_stmt (dump_file, stmt, 0);
        }

      evrp_range_analyzer.record_ranges_from_stmt (stmt, false);
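      /* If the recorded ranges prove which way a conditional goes,
         rewrite it to a constant condition; the edges and blocks made
         dead by this are removed by the CFG cleanup scheduled after
         the pass.  */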
      if (gcond *cond = dyn_cast <gcond *> (stmt))
        {
          evrp_range_analyzer.vrp_visit_cond_stmt (cond, &taken_edge);
          if (taken_edge)
            {
              if (taken_edge->flags & EDGE_TRUE_VALUE)
                gimple_cond_make_true (cond);
              else if (taken_edge->flags & EDGE_FALSE_VALUE)
                gimple_cond_make_false (cond);
              else
                gcc_unreachable ();
              update_stmt (stmt);
            }
        }
      else if (stmt_interesting_for_vrp (stmt))
        {
          output = get_output_for_vrp (stmt);
          if (output)
            {
              tree val;
              value_range *vr = evrp_range_analyzer.get_value_range (output);

              /* Mark stmts whose output we fully propagate for removal.  */
              if (vr->singleton_p (&val)
                  && may_propagate_copy (output, val)
                  && !stmt_could_throw_p (cfun, stmt)
                  && !gimple_has_side_effects (stmt))
                {
                  stmts_to_remove.safe_push (stmt);
                  continue;
                }
            }
        }
      /* Try folding stmts with the VR discovered.  */
      bool did_replace = evrp_folder.replace_uses_in (stmt);
      gimple_stmt_iterator prev_gsi = gsi;
      gsi_prev (&prev_gsi);
      if (fold_stmt (&gsi, follow_single_use_edges)
          || did_replace)
        {
          stmt = gsi_stmt (gsi);
          update_stmt (stmt);
          did_replace = true;
        }
      if (evrp_folder.simplify_stmt_using_ranges (&gsi))
        {
          stmt = gsi_stmt (gsi);
          update_stmt (stmt);
          did_replace = true;
        }
      if (did_replace)
        {
          /* If we wound up generating new stmts during folding
             drop all their defs to VARYING.  We can't easily
             process them because we've already instantiated
             ranges on uses on STMT that only hold after it.  */
          if (gsi_end_p (prev_gsi))
            prev_gsi = gsi_start_bb (bb);
          else
            gsi_next (&prev_gsi);
          while (gsi_stmt (prev_gsi) != gsi_stmt (gsi))
            {
              evrp_range_analyzer.get_vr_values ()
                ->set_defs_to_varying (gsi_stmt (prev_gsi));
              gsi_next (&prev_gsi);
            }

          /* If we cleaned up EH information from the statement,
             remove EH edges.  */
          if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
            bitmap_set_bit (need_eh_cleanup, bb->index);

          /* If we turned a non-noreturn call into a noreturn one,
             schedule it for fixup.  */
          if (!was_noreturn
              && is_gimple_call (stmt)
              && gimple_call_noreturn_p (stmt))
            stmts_to_fixup.safe_push (stmt);

          if (gimple_assign_single_p (stmt))
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              if (TREE_CODE (rhs) == ADDR_EXPR)
                recompute_tree_invariant_for_addr_expr (rhs);
            }
        }
    }
  /* Visit BB successor PHI nodes and replace PHI args.  */
  edge e;
  edge_iterator ei;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      for (gphi_iterator gpi = gsi_start_phis (e->dest);
           !gsi_end_p (gpi); gsi_next (&gpi))
        {
          gphi *phi = gpi.phi ();
          use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
          tree arg = USE_FROM_PTR (use_p);
          if (TREE_CODE (arg) != SSA_NAME
              || virtual_operand_p (arg))
            continue;
          value_range *vr = evrp_range_analyzer.get_value_range (arg);
          tree val;
          if (vr->singleton_p (&val) && may_propagate_copy (arg, val))
            propagate_value (use_p, val);
        }
    }

  return taken_edge;
}
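/* Pop the range state recorded for BB once all blocks dominated by BB
   have been visited.  */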
void
evrp_dom_walker::after_dom_children (basic_block bb)
{
  evrp_range_analyzer.leave (bb);
}
/* Perform any cleanups after the main phase of EVRP has completed.  */

void
evrp_dom_walker::cleanup (void)
{
  if (dump_file)
    {
      fprintf (dump_file, "\nValue ranges after Early VRP:\n\n");
      evrp_range_analyzer.dump_all_value_ranges (dump_file);
      fprintf (dump_file, "\n");
    }

  /* Remove stmts in reverse order to make debug stmt creation possible.  */
  while (! stmts_to_remove.is_empty ())
    {
      gimple *stmt = stmts_to_remove.pop ();
      if (dump_file && dump_flags & TDF_DETAILS)
        {
          fprintf (dump_file, "Removing dead stmt ");
          print_gimple_stmt (dump_file, stmt, 0);
          fprintf (dump_file, "\n");
        }
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      if (gimple_code (stmt) == GIMPLE_PHI)
        remove_phi_node (&gsi, true);
      else
        {
          unlink_stmt_vdef (stmt);
          gsi_remove (&gsi, true);
          release_defs (stmt);
        }
    }

  if (!bitmap_empty_p (need_eh_cleanup))
    gimple_purge_all_dead_eh_edges (need_eh_cleanup);

  /* Fixup stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the dominator walk.  Do this
     in reverse order so we don't inadvertently remove a stmt we want to
     fixup by visiting a dominating now-noreturn call first.  */
  while (!stmts_to_fixup.is_empty ())
    {
      gimple *stmt = stmts_to_fixup.pop ();
      fixup_noreturn_call (stmt);
    }

  evrp_folder.vr_values->cleanup_edges_and_switches ();
}
/* Main entry point for the early VRP pass, which is a simplified,
   non-iterative version of VRP where basic blocks are visited in
   dominance order.  Value ranges discovered by early VRP are also
   used by ipa-vrp.  */
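/* As a rough illustration (a sketch, not actual pass output): given

       x_1 = 10;
       if (x_1 > 4)
         ...

   the walker records the singleton range [10, 10] for x_1, propagates
   the constant into remaining uses of x_1, folds the condition to
   constant true, and queues the now-dead definition of x_1 for removal
   in cleanup ().  */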
static unsigned int
execute_early_vrp ()
{
  /* Ideally this setup code would move into the ctor for the dominator
     walk.  However, this setup can change the number of blocks, which
     invalidates the internal arrays that are set up by the dominator
     walker in evrp_range_analyzer.  */
  loop_optimizer_init (LOOPS_NORMAL | LOOPS_HAVE_RECORDED_EXITS);
  rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
  scev_initialize ();
  calculate_dominance_info (CDI_DOMINATORS);

  /* Walk stmts in dominance order and propagate VRP.  */
  evrp_dom_walker walker;
  walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  walker.cleanup ();

  scev_finalize ();
  loop_optimizer_finalize ();
  return 0;
}
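/* Pass registration boilerplate follows.  Note that TODO_cleanup_cfg in
   todo_flags_finish is what removes the edges and blocks made
   unreachable by the condition folding done above.  */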
namespace {

const pass_data pass_data_early_vrp =
{
  GIMPLE_PASS, /* type */
  "evrp", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_EARLY_VRP, /* tv_id */
  PROP_ssa, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_cleanup_cfg | TODO_update_ssa | TODO_verify_all ), /* todo_flags_finish */
};
class pass_early_vrp : public gimple_opt_pass
{
public:
  pass_early_vrp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_early_vrp, ctxt)
    {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_early_vrp (m_ctxt); }
  virtual bool gate (function *)
    {
      return flag_tree_vrp != 0;
    }
  virtual unsigned int execute (function *)
    { return execute_early_vrp (); }

}; // class pass_early_vrp

} // anon namespace

gimple_opt_pass *
make_pass_early_vrp (gcc::context *ctxt)
{
  return new pass_early_vrp (ctxt);
}