/* Support routines for Value Range Propagation (VRP).
   Copyright (C) 2005-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "cfganal.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-ssa-loop-manip.h"
#include "tree-ssa-loop.h"
#include "cfgloop.h"
#include "tree-scalar-evolution.h"
#include "tree-ssa-propagate.h"
#include "alloc-pool.h"
#include "domwalk.h"
#include "tree-cfgcleanup.h"
#include "vr-values.h"
#include "gimple-ssa-evrp-analyze.h"
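
/* A substitute-and-fold engine backed by the value ranges computed by the
   EVRP range analyzer: get_value returns a constant when the range of an
   SSA name is a singleton, and statement simplification is delegated to
   the underlying vr_values instance.  */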
class evrp_folder : public substitute_and_fold_engine
{
 public:
  tree get_value (tree) FINAL OVERRIDE;
  evrp_folder (class vr_values *vr_values_) : vr_values (vr_values_) { }
  bool simplify_stmt_using_ranges (gimple_stmt_iterator *gsi)
    { return vr_values->simplify_stmt_using_ranges (gsi); }
  class vr_values *vr_values;

 private:
  DISABLE_COPY_AND_ASSIGN (evrp_folder);
};

tree
evrp_folder::get_value (tree op)
{
  return vr_values->op_with_constant_singleton_value_range (op);
}

/* evrp_dom_walker visits the basic blocks in the dominance order and sets
   the Value Ranges (VR) for SSA_NAMEs in the scope.  Use this VR to
   discover more VRs.  */

class evrp_dom_walker : public dom_walker
{
 public:
  evrp_dom_walker ()
    : dom_walker (CDI_DOMINATORS),
      evrp_range_analyzer (true),
      evrp_folder (evrp_range_analyzer.get_vr_values ())
    {
      need_eh_cleanup = BITMAP_ALLOC (NULL);
    }
  ~evrp_dom_walker ()
    {
      BITMAP_FREE (need_eh_cleanup);
    }
  virtual edge before_dom_children (basic_block);
  virtual void after_dom_children (basic_block);
  void cleanup (void);

 private:
  DISABLE_COPY_AND_ASSIGN (evrp_dom_walker);
  bitmap need_eh_cleanup;
  auto_vec<gimple *> stmts_to_fixup;
  auto_vec<gimple *> stmts_to_remove;

  class evrp_range_analyzer evrp_range_analyzer;
  class evrp_folder evrp_folder;
};
edge
evrp_dom_walker::before_dom_children (basic_block bb)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Visiting BB%d\n", bb->index);

  evrp_range_analyzer.enter (bb);

  for (gphi_iterator gpi = gsi_start_phis (bb);
       !gsi_end_p (gpi); gsi_next (&gpi))
    {
      gphi *phi = gpi.phi ();
      tree lhs = PHI_RESULT (phi);
      if (virtual_operand_p (lhs))
        continue;

      const value_range *vr = evrp_range_analyzer.get_value_range (lhs);
      /* Mark PHIs whose lhs we fully propagate for removal.  */
      tree val;
      if (vr->singleton_p (&val) && may_propagate_copy (lhs, val))
        {
          stmts_to_remove.safe_push (phi);
          continue;
        }
    }

  edge taken_edge = NULL;

  /* Visit all other stmts and discover any new VRs possible.  */
  for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
       !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      tree output = NULL_TREE;
      gimple *old_stmt = stmt;
      bool was_noreturn = (is_gimple_call (stmt)
                           && gimple_call_noreturn_p (stmt));

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Visiting stmt ");
          print_gimple_stmt (dump_file, stmt, 0);
        }

      evrp_range_analyzer.record_ranges_from_stmt (stmt, false);

      if (gcond *cond = dyn_cast <gcond *> (stmt))
        {
          evrp_range_analyzer.vrp_visit_cond_stmt (cond, &taken_edge);
          if (taken_edge)
            {
              if (taken_edge->flags & EDGE_TRUE_VALUE)
                gimple_cond_make_true (cond);
              else if (taken_edge->flags & EDGE_FALSE_VALUE)
                gimple_cond_make_false (cond);
              else
                gcc_unreachable ();
              update_stmt (stmt);
            }
        }
      else if (stmt_interesting_for_vrp (stmt))
        {
          output = get_output_for_vrp (stmt);
          if (output)
            {
              tree val;
              const value_range *vr
                = evrp_range_analyzer.get_value_range (output);

              /* Mark stmts whose output we fully propagate for removal.  */
              if (vr->singleton_p (&val)
                  && may_propagate_copy (output, val)
                  && !stmt_could_throw_p (cfun, stmt)
                  && !gimple_has_side_effects (stmt))
                {
                  stmts_to_remove.safe_push (stmt);
                  continue;
                }
            }
        }

      /* Try folding stmts with the VR discovered.  */
      bool did_replace = evrp_folder.replace_uses_in (stmt);
      gimple_stmt_iterator prev_gsi = gsi;
      gsi_prev (&prev_gsi);
      if (fold_stmt (&gsi, follow_single_use_edges)
          || did_replace)
        {
          stmt = gsi_stmt (gsi);
          update_stmt (stmt);
          did_replace = true;
        }
      if (evrp_folder.simplify_stmt_using_ranges (&gsi))
        {
          stmt = gsi_stmt (gsi);
          update_stmt (stmt);
          did_replace = true;
        }

      if (did_replace)
        {
          /* If we wound up generating new stmts during folding
             drop all their defs to VARYING.  We can't easily
             process them because we've already instantiated
             ranges on uses on STMT that only hold after it.  */
          if (gsi_end_p (prev_gsi))
            prev_gsi = gsi_start_bb (bb);
          else
            gsi_next (&prev_gsi);
          while (gsi_stmt (prev_gsi) != gsi_stmt (gsi))
            {
              evrp_range_analyzer.get_vr_values ()
                ->set_defs_to_varying (gsi_stmt (prev_gsi));
              gsi_next (&prev_gsi);
            }

          /* If we cleaned up EH information from the statement,
             remove EH edges.  */
          if (maybe_clean_or_replace_eh_stmt (old_stmt, stmt))
            bitmap_set_bit (need_eh_cleanup, bb->index);

          /* If we turned a not noreturn call into a noreturn one
             schedule it for fixup.  */
          if (!was_noreturn
              && is_gimple_call (stmt)
              && gimple_call_noreturn_p (stmt))
            stmts_to_fixup.safe_push (stmt);

          if (gimple_assign_single_p (stmt))
            {
              tree rhs = gimple_assign_rhs1 (stmt);
              if (TREE_CODE (rhs) == ADDR_EXPR)
                recompute_tree_invariant_for_addr_expr (rhs);
            }
        }
    }

  /* Visit BB successor PHI nodes and replace PHI args.  */
  edge e;
  edge_iterator ei;
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      for (gphi_iterator gpi = gsi_start_phis (e->dest);
           !gsi_end_p (gpi); gsi_next (&gpi))
        {
          gphi *phi = gpi.phi ();
          use_operand_p use_p = PHI_ARG_DEF_PTR_FROM_EDGE (phi, e);
          tree arg = USE_FROM_PTR (use_p);
          if (TREE_CODE (arg) != SSA_NAME
              || virtual_operand_p (arg))
            continue;
          const value_range *vr = evrp_range_analyzer.get_value_range (arg);
          tree val;
          if (vr->singleton_p (&val) && may_propagate_copy (arg, val))
            propagate_value (use_p, val);
        }
    }

  return taken_edge;
}
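
/* Pop the ranges that were valid only while BB was being visited.  */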
void
evrp_dom_walker::after_dom_children (basic_block bb)
{
  evrp_range_analyzer.leave (bb);
}

/* Perform any cleanups after the main phase of EVRP has completed.  */

void
evrp_dom_walker::cleanup (void)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\nValue ranges after Early VRP:\n\n");
      evrp_range_analyzer.dump_all_value_ranges (dump_file);
      fprintf (dump_file, "\n");
    }

  /* Remove stmts in reverse order to make debug stmt creation possible.  */
  while (! stmts_to_remove.is_empty ())
    {
      gimple *stmt = stmts_to_remove.pop ();
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Removing dead stmt ");
          print_gimple_stmt (dump_file, stmt, 0);
          fprintf (dump_file, "\n");
        }
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      if (gimple_code (stmt) == GIMPLE_PHI)
        remove_phi_node (&gsi, true);
      else
        {
          unlink_stmt_vdef (stmt);
          gsi_remove (&gsi, true);
          release_defs (stmt);
        }
    }

  if (!bitmap_empty_p (need_eh_cleanup))
    gimple_purge_all_dead_eh_edges (need_eh_cleanup);

  /* Fixup stmts that became noreturn calls.  This may require splitting
     blocks and thus isn't possible during the dominator walk.  Do this
     in reverse order so we don't inadvertently remove a stmt we want to
     fixup by visiting a dominating now noreturn call first.  */
  while (!stmts_to_fixup.is_empty ())
    {
      gimple *stmt = stmts_to_fixup.pop ();
      fixup_noreturn_call (stmt);
    }

  evrp_folder.vr_values->cleanup_edges_and_switches ();
}

/* Main entry point for the early vrp pass which is a simplified non-iterative
   version of vrp where basic blocks are visited in dominance order.  Value
   ranges discovered in early vrp will also be used by ipa-vrp.  */
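
/* Illustrative sketch only (not taken from the pass itself): given input
   like

     void
     f (int *p)
     {
       if (p == 0)
	 return;
       if (p != 0)
	 g (p);
     }

   where g () stands for an arbitrary call, the walk records on the path
   past the first test that P is non-null, so the second condition is
   folded to a constant and the dead branch is removed by the scheduled
   CFG cleanup.  */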
static unsigned int
execute_early_vrp ()
{
  /* Ideally this setup code would move into the ctor for the dominator
     walk.  However, this setup can change the number of blocks which
     invalidates the internal arrays that are set up by the dominator
     walker in evrp_range_analyzer.  */
  loop_optimizer_init (LOOPS_NORMAL | LOOPS_HAVE_RECORDED_EXITS);
  rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
  scev_initialize ();
  calculate_dominance_info (CDI_DOMINATORS);

  /* Walk stmts in dominance order and propagate VRP.  */
  evrp_dom_walker walker;
  walker.walk (ENTRY_BLOCK_PTR_FOR_FN (cfun));
  walker.cleanup ();

  scev_finalize ();
  loop_optimizer_finalize ();
  return 0;
}

namespace {

const pass_data pass_data_early_vrp =
{
  GIMPLE_PASS, /* type */
  "evrp", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_EARLY_VRP, /* tv_id */
  PROP_ssa, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  ( TODO_cleanup_cfg | TODO_update_ssa | TODO_verify_all ), /* todo_flags_finish */
};

class pass_early_vrp : public gimple_opt_pass
{
public:
  pass_early_vrp (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_early_vrp, ctxt)
    {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_early_vrp (m_ctxt); }
  virtual bool gate (function *)
    {
      return flag_tree_vrp != 0;
    }
  virtual unsigned int execute (function *)
    { return execute_early_vrp (); }

}; // class pass_early_vrp

} // anon namespace

gimple_opt_pass *
make_pass_early_vrp (gcc::context *ctxt)
{
  return new pass_early_vrp (ctxt);
}