1 /* Backward propagation of indirect loads through PHIs.
2 Copyright (C) 2007-2023 Free Software Foundation, Inc.
3 Contributed by Richard Guenther <rguenther@suse.de>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify
8 it under the terms of the GNU General Public License as published by
9 the Free Software Foundation; either version 3, or (at your option)
12 GCC is distributed in the hope that it will be useful,
13 but WITHOUT ANY WARRANTY; without even the implied warranty of
14 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
23 #include "coretypes.h"
27 #include "tree-pass.h"
29 #include "gimple-pretty-print.h"
30 #include "fold-const.h"
33 #include "gimple-iterator.h"
34 #include "stor-layout.h"
35 #include "tree-ssa-loop.h"
38 /* This pass propagates indirect loads through the PHI node for its
39 address to make the load source possibly non-addressable and to
40 allow for PHI optimization to trigger.
42 For example the pass changes
44 # addr_1 = PHI <&a, &b>
51 but also handles more complex scenarios like
53 D.2077_2 = &this_1(D)->a1;
56 # b_12 = PHI <&c(2), D.2077_2(3)>
60 # b_15 = PHI <b_12(4), &b(5)>
61 D.2080_5 = &this_1(D)->a0;
64 # b_18 = PHI <D.2080_5(6), &c(7)>
67 # b_21 = PHI <b_15(8), b_18(9)>
70 where the addresses loaded are defined by PHIs themselves.
73 std::max(std::min(a0, c), std::min(std::max(a1, c), b))
75 where this pass transforms it to a form later PHI optimization
76 recognizes and transforms it to the simple
78 D.2109_10 = this_1(D)->a1;
80 D.2114_31 = MAX_EXPR <D.2109_10, D.2110_11>;
82 D.2125_17 = MIN_EXPR <D.2115_14, D.2114_31>;
83 D.2119_16 = this_1(D)->a0;
84 D.2124_32 = MIN_EXPR <D.2110_11, D.2119_16>;
85 D.2076_33 = MAX_EXPR <D.2125_17, D.2124_32>;
87 The pass does a dominator walk processing loads using a basic-block
88 local analysis and stores the result for use by transformations on
89 dominated basic-blocks. */
92 /* Structure to keep track of the value of a dereferenced PHI result
93 and the virtual operand used for that dereference. */
101 /* Verify if the value recorded for NAME in PHIVN is still valid at
102 the start of basic block BB. */
105 phivn_valid_p (struct phiprop_d
*phivn
, tree name
, basic_block bb
)
107 tree vuse
= phivn
[SSA_NAME_VERSION (name
)].vuse
;
109 imm_use_iterator ui2
;
112 /* The def stmts of the virtual uses need to be dominated by bb. */
113 gcc_assert (vuse
!= NULL_TREE
);
115 FOR_EACH_IMM_USE_STMT (use_stmt
, ui2
, vuse
)
117 /* If BB does not dominate a VDEF, the value is invalid. */
118 if ((gimple_vdef (use_stmt
) != NULL_TREE
119 || gimple_code (use_stmt
) == GIMPLE_PHI
)
120 && !dominated_by_p (CDI_DOMINATORS
, gimple_bb (use_stmt
), bb
))
130 /* Insert a new phi node for the dereference of PHI at basic_block
131 BB with the virtual operands from USE_STMT. */
134 phiprop_insert_phi (basic_block bb
, gphi
*phi
, gimple
*use_stmt
,
135 struct phiprop_d
*phivn
, size_t n
)
138 gphi
*new_phi
= NULL
;
142 gcc_assert (is_gimple_assign (use_stmt
)
143 && gimple_assign_rhs_code (use_stmt
) == MEM_REF
);
145 /* Build a new PHI node to replace the definition of
146 the indirect reference lhs. */
147 res
= gimple_assign_lhs (use_stmt
);
148 if (TREE_CODE (res
) == SSA_NAME
)
149 new_phi
= create_phi_node (res
, bb
);
151 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
153 fprintf (dump_file
, "Inserting PHI for result of load ");
154 print_gimple_stmt (dump_file
, use_stmt
, 0);
157 gphi
*vphi
= get_virtual_phi (bb
);
159 /* Add PHI arguments for each edge inserting loads of the
160 addressable operands. */
161 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
163 tree old_arg
, new_var
;
167 old_arg
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
168 locus
= gimple_phi_arg_location_from_edge (phi
, e
);
169 while (TREE_CODE (old_arg
) == SSA_NAME
170 && (SSA_NAME_VERSION (old_arg
) >= n
171 || phivn
[SSA_NAME_VERSION (old_arg
)].value
== NULL_TREE
))
173 gimple
*def_stmt
= SSA_NAME_DEF_STMT (old_arg
);
174 old_arg
= gimple_assign_rhs1 (def_stmt
);
175 locus
= gimple_location (def_stmt
);
178 if (TREE_CODE (old_arg
) == SSA_NAME
)
180 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
182 fprintf (dump_file
, " for edge defining ");
183 print_generic_expr (dump_file
, PHI_ARG_DEF_FROM_EDGE (phi
, e
));
184 fprintf (dump_file
, " reusing PHI result ");
185 print_generic_expr (dump_file
,
186 phivn
[SSA_NAME_VERSION (old_arg
)].value
);
187 fprintf (dump_file
, "\n");
189 /* Reuse a formerly created dereference. */
190 new_var
= phivn
[SSA_NAME_VERSION (old_arg
)].value
;
194 tree rhs
= gimple_assign_rhs1 (use_stmt
);
195 gcc_assert (TREE_CODE (old_arg
) == ADDR_EXPR
);
196 tree vuse
= NULL_TREE
;
197 if (TREE_CODE (res
) == SSA_NAME
)
199 new_var
= make_ssa_name (TREE_TYPE (rhs
));
201 vuse
= PHI_ARG_DEF_FROM_EDGE (vphi
, e
);
203 vuse
= gimple_vuse (use_stmt
);
206 /* For the aggregate copy case updating virtual operands
207 we'd have to possibly insert a virtual PHI and we have
208 to split the existing VUSE lifetime. Leave that to
209 the generic SSA updating. */
210 new_var
= unshare_expr (res
);
211 if (!is_gimple_min_invariant (old_arg
))
212 old_arg
= PHI_ARG_DEF_FROM_EDGE (phi
, e
);
214 old_arg
= unshare_expr (old_arg
);
215 tmp
= gimple_build_assign (new_var
,
216 fold_build2 (MEM_REF
, TREE_TYPE (rhs
),
218 TREE_OPERAND (rhs
, 1)));
219 gimple_set_location (tmp
, locus
);
221 gimple_set_vuse (tmp
, vuse
);
223 gsi_insert_on_edge (e
, tmp
);
226 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
228 fprintf (dump_file
, " for edge defining ");
229 print_generic_expr (dump_file
, PHI_ARG_DEF_FROM_EDGE (phi
, e
));
230 fprintf (dump_file
, " inserting load ");
231 print_gimple_stmt (dump_file
, tmp
, 0);
236 add_phi_arg (new_phi
, new_var
, e
, locus
);
241 update_stmt (new_phi
);
243 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
244 print_gimple_stmt (dump_file
, new_phi
, 0);
250 /* Verify if *idx is available at *DATA. */
253 chk_uses (tree
, tree
*idx
, void *data
)
255 basic_block dom
= (basic_block
) data
;
256 if (TREE_CODE (*idx
) == SSA_NAME
)
257 return (SSA_NAME_IS_DEFAULT_DEF (*idx
)
258 || ! dominated_by_p (CDI_DOMINATORS
,
259 gimple_bb (SSA_NAME_DEF_STMT (*idx
)), dom
));
263 /* Propagate between the phi node arguments of PHI in BB and phi result
264 users. For now this matches
272 Returns true if a transformation was done and edge insertions
273 need to be committed. Global data PHIVN and N is used to track
274 past transformation results. We need to be especially careful here
275 with aliasing issues as we are moving memory reads. */
278 propagate_with_phi (basic_block bb
, gphi
*phi
, struct phiprop_d
*phivn
,
281 tree ptr
= PHI_RESULT (phi
);
283 tree res
= NULL_TREE
;
284 gimple_stmt_iterator gsi
;
286 use_operand_p arg_p
, use
;
290 tree type
= NULL_TREE
;
292 if (!POINTER_TYPE_P (TREE_TYPE (ptr
))
293 || (!is_gimple_reg_type (TREE_TYPE (TREE_TYPE (ptr
)))
294 && TYPE_MODE (TREE_TYPE (TREE_TYPE (ptr
))) == BLKmode
))
297 /* Check if we can "cheaply" dereference all phi arguments. */
298 FOR_EACH_PHI_ARG (arg_p
, phi
, i
, SSA_OP_USE
)
300 tree arg
= USE_FROM_PTR (arg_p
);
301 /* Walk the ssa chain until we reach a ssa name we already
302 created a value for or we reach a definition of the form
303 ssa_name_n = &var; */
304 while (TREE_CODE (arg
) == SSA_NAME
305 && !SSA_NAME_IS_DEFAULT_DEF (arg
)
306 && (SSA_NAME_VERSION (arg
) >= n
307 || phivn
[SSA_NAME_VERSION (arg
)].value
== NULL_TREE
))
309 gimple
*def_stmt
= SSA_NAME_DEF_STMT (arg
);
310 if (!gimple_assign_single_p (def_stmt
))
312 arg
= gimple_assign_rhs1 (def_stmt
);
314 if (TREE_CODE (arg
) != ADDR_EXPR
315 && !(TREE_CODE (arg
) == SSA_NAME
316 && SSA_NAME_VERSION (arg
) < n
317 && phivn
[SSA_NAME_VERSION (arg
)].value
!= NULL_TREE
319 || types_compatible_p
320 (type
, TREE_TYPE (phivn
[SSA_NAME_VERSION (arg
)].value
)))
321 && phivn_valid_p (phivn
, arg
, bb
)))
324 && TREE_CODE (arg
) == SSA_NAME
)
325 type
= TREE_TYPE (phivn
[SSA_NAME_VERSION (arg
)].value
);
328 /* Find a dereferencing use. First follow (single use) ssa
329 copy chains for ptr. */
330 while (single_imm_use (ptr
, &use
, &use_stmt
)
331 && gimple_assign_ssa_name_copy_p (use_stmt
))
332 ptr
= gimple_assign_lhs (use_stmt
);
334 /* Replace the first dereference of *ptr if there is one and if we
335 can move the loads to the place of the ptr phi node. */
336 phi_inserted
= false;
338 FOR_EACH_IMM_USE_STMT (use_stmt
, ui
, ptr
)
343 if (!dom_info_available_p (cfun
, CDI_POST_DOMINATORS
))
344 calculate_dominance_info (CDI_POST_DOMINATORS
);
346 /* Only replace loads in blocks that post-dominate the PHI node. That
347 makes sure we don't end up speculating loads. */
348 if (!dominated_by_p (CDI_POST_DOMINATORS
,
349 bb
, gimple_bb (use_stmt
)))
352 /* Check whether this is a load of *ptr. */
353 if (!(is_gimple_assign (use_stmt
)
354 && gimple_assign_rhs_code (use_stmt
) == MEM_REF
355 && TREE_OPERAND (gimple_assign_rhs1 (use_stmt
), 0) == ptr
356 && integer_zerop (TREE_OPERAND (gimple_assign_rhs1 (use_stmt
), 1))
358 || types_compatible_p
359 (TREE_TYPE (gimple_assign_lhs (use_stmt
)), type
))
360 /* We cannot replace a load that may throw or is volatile.
361 For volatiles the transform can change the number of
362 executions if the load is inside a loop but the address
363 computations outside (PR91812). We could relax this
364 if we guard against that appropriately. For loads that can
365 throw we could relax things if the moved loads all are
366 known to not throw. */
367 && !stmt_can_throw_internal (cfun
, use_stmt
)
368 && !gimple_has_volatile_ops (use_stmt
)))
371 /* Check if we can move the loads. The def stmt of the virtual use
372 needs to be in a different basic block dominating bb. When the
373 def is an edge-inserted one we know it dominates us. */
374 vuse
= gimple_vuse (use_stmt
);
375 def_stmt
= SSA_NAME_DEF_STMT (vuse
);
376 if (!SSA_NAME_IS_DEFAULT_DEF (vuse
)
377 && (gimple_bb (def_stmt
) == bb
378 || (gimple_bb (def_stmt
)
379 && !dominated_by_p (CDI_DOMINATORS
,
380 bb
, gimple_bb (def_stmt
)))))
383 /* Found a proper dereference with an aggregate copy. Just
384 insert aggregate copies on the edges instead. */
385 if (!is_gimple_reg_type (TREE_TYPE (gimple_assign_lhs (use_stmt
))))
387 if (!gimple_vdef (use_stmt
))
390 /* As we replicate the lhs on each incoming edge all
391 used SSA names have to be available there. */
392 if (! for_each_index (gimple_assign_lhs_ptr (use_stmt
),
394 get_immediate_dominator (CDI_DOMINATORS
,
399 imm_use_iterator vui
;
400 use_operand_p vuse_p
;
401 /* In order to move the aggregate copies earlier, make sure
402 there are no statements that could read from memory
403 aliasing the lhs in between the start of bb and use_stmt.
404 As we require use_stmt to have a VDEF above, loads after
405 use_stmt will use a different virtual SSA_NAME. When
406 we reach an edge inserted load the constraints we place
407 on processing guarantees that program order is preserved
408 so we can avoid checking those. */
409 FOR_EACH_IMM_USE_FAST (vuse_p
, vui
, vuse
)
411 vuse_stmt
= USE_STMT (vuse_p
);
412 if (vuse_stmt
== use_stmt
)
414 if (!gimple_bb (vuse_stmt
)
415 || !dominated_by_p (CDI_DOMINATORS
,
416 gimple_bb (vuse_stmt
), bb
))
418 if (ref_maybe_used_by_stmt_p (vuse_stmt
,
419 gimple_assign_lhs (use_stmt
)))
423 phiprop_insert_phi (bb
, phi
, use_stmt
, phivn
, n
);
425 /* Remove old stmt. The phi is taken care of by DCE. */
426 gsi
= gsi_for_stmt (use_stmt
);
427 /* Unlinking the VDEF here is fine as we are sure that we process
428 stmts in execution order due to aggregate copies having VDEFs
429 and we emit loads on the edges in the very same order.
430 We get multiple copies (or intermediate register loads) handled
431 only by walking PHIs or immediate uses in a lucky order though,
432 so we could signal the caller to re-start iterating over PHIs
433 when we come here which would make it quadratic in the number
435 unlink_stmt_vdef (use_stmt
);
436 gsi_remove (&gsi
, true);
441 /* Found a proper dereference. Insert a phi node if this
442 is the first load transformation. */
443 else if (!phi_inserted
)
445 res
= phiprop_insert_phi (bb
, phi
, use_stmt
, phivn
, n
);
446 type
= TREE_TYPE (res
);
448 /* Remember the value we created for *ptr. */
449 phivn
[SSA_NAME_VERSION (ptr
)].value
= res
;
450 phivn
[SSA_NAME_VERSION (ptr
)].vuse
= vuse
;
452 /* Remove old stmt. The phi is taken care of by DCE, if we
453 want to delete it here we also have to delete all intermediate
455 gsi
= gsi_for_stmt (use_stmt
);
456 gsi_remove (&gsi
, true);
463 /* Further replacements are easy, just make a copy out of the
465 gimple_assign_set_rhs1 (use_stmt
, res
);
466 update_stmt (use_stmt
);
471 /* Continue searching for a proper dereference. */
477 /* Main entry for phiprop pass. */
481 const pass_data pass_data_phiprop
=
483 GIMPLE_PASS
, /* type */
484 "phiprop", /* name */
485 OPTGROUP_NONE
, /* optinfo_flags */
486 TV_TREE_PHIPROP
, /* tv_id */
487 ( PROP_cfg
| PROP_ssa
), /* properties_required */
488 0, /* properties_provided */
489 0, /* properties_destroyed */
490 0, /* todo_flags_start */
491 0, /* todo_flags_finish */
494 class pass_phiprop
: public gimple_opt_pass
497 pass_phiprop (gcc::context
*ctxt
)
498 : gimple_opt_pass (pass_data_phiprop
, ctxt
)
501 /* opt_pass methods: */
502 opt_pass
* clone () final override
{ return new pass_phiprop (m_ctxt
); }
503 bool gate (function
*) final override
{ return flag_tree_phiprop
; }
504 unsigned int execute (function
*) final override
;
506 }; // class pass_phiprop
509 pass_phiprop::execute (function
*fun
)
511 struct phiprop_d
*phivn
;
512 bool did_something
= false;
518 calculate_dominance_info (CDI_DOMINATORS
);
521 phivn
= XCNEWVEC (struct phiprop_d
, n
);
523 /* Walk the dominator tree in preorder. */
524 auto_vec
<basic_block
> bbs
525 = get_all_dominated_blocks (CDI_DOMINATORS
,
526 single_succ (ENTRY_BLOCK_PTR_FOR_FN (fun
)));
527 FOR_EACH_VEC_ELT (bbs
, i
, bb
)
529 /* Since we're going to move dereferences across predecessor
530 edges avoid blocks with abnormal predecessors. */
531 if (bb_has_abnormal_pred (bb
))
533 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
534 did_something
|= propagate_with_phi (bb
, gsi
.phi (), phivn
, n
);
538 gsi_commit_edge_inserts ();
542 free_dominance_info (CDI_POST_DOMINATORS
);
544 return did_something
? TODO_update_ssa_only_virtuals
: 0;
550 make_pass_phiprop (gcc::context
*ctxt
)
552 return new pass_phiprop (ctxt
);