1 /* If-conversion for vectorizer.
2 Copyright (C) 2004-2021 Free Software Foundation, Inc.
3 Contributed by Devang Patel <dpatel@apple.com>
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* This pass implements a tree level if-conversion of loops. Its
22 initial goal is to help the vectorizer to vectorize loops with
23 conditions.
25 A short description of if-conversion:
27 o Decide if a loop is if-convertible or not.
28 o Walk all loop basic blocks in breadth first order (BFS order).
29 o Remove conditional statements (at the end of basic block)
30 and propagate condition into destination basic blocks'
31 predicate list.
32 o Replace modify expression with conditional modify expression
33 using current basic block's condition.
34 o Merge all basic blocks
35 o Replace phi nodes with conditional modify expr
36 o Merge all basic blocks into header
38   Sample transformation:

     INPUT
     -----

43   # i_23 = PHI <0(0), i_18(10)>;
     ...
46   if (j_15 > 41) goto <L1>; else goto <L17>;
     ...
53   # iftmp.2_4 = PHI <0(8), 42(2)>;
     ...
57   if (i_18 <= 15) goto <L19>; else goto <L18>;
     ...

     OUTPUT
     ------

67   # i_23 = PHI <0(0), i_18(10)>;
     ...
72   iftmp.2_4 = j_15 > 41 ? 42 : 0;
     ...
75   if (i_18 <= 15) goto <L19>; else goto <L18>;
     ...  */
85 #include "coretypes.h"
91 #include "tree-pass.h"
94 #include "optabs-query.h"
95 #include "gimple-pretty-print.h"
97 #include "fold-const.h"
98 #include "stor-layout.h"
99 #include "gimple-fold.h"
100 #include "gimplify.h"
101 #include "gimple-iterator.h"
102 #include "gimplify-me.h"
103 #include "tree-cfg.h"
104 #include "tree-into-ssa.h"
105 #include "tree-ssa.h"
107 #include "tree-data-ref.h"
108 #include "tree-scalar-evolution.h"
109 #include "tree-ssa-loop.h"
110 #include "tree-ssa-loop-niter.h"
111 #include "tree-ssa-loop-ivopts.h"
112 #include "tree-ssa-address.h"
114 #include "tree-hash-traits.h"
116 #include "builtins.h"
118 #include "internal-fn.h"
119 #include "fold-const.h"
120 #include "tree-ssa-sccvn.h"
121 #include "tree-cfgcleanup.h"
122 #include "tree-ssa-dse.h"
124 /* Only handle PHIs with no more than this many arguments, unless we are
125    asked to by the simd directive.  */
126 #define MAX_PHI_ARG_NUM \
127 ((unsigned) param_max_tree_if_conversion_phi_args)
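/* As an illustrative note (not from the original sources): the define above
   simply wraps the current value of the max-tree-if-conversion-phi-args
   parameter, so the limit can be raised for experiments with something like
   "--param max-tree-if-conversion-phi-args=6" on the command line.  */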
129 /* True if we've converted a statement that was only executed when some
130 condition C was true, and if for correctness we need to predicate the
131 statement to ensure that it is a no-op when C is false. See
132 predicate_statements for the kinds of predication we support. */
133 static bool need_to_predicate;
135 /* True if we have to rewrite stmts that may invoke undefined behavior
136    when a condition C is false, so that they no longer do so once they are
137    executed unconditionally.  See predicate_statements for the kinds of
       predication we support.  */
138 static bool need_to_rewrite_undefined;
140 /* Indicates if there are any complicated PHIs that need to be handled in
141    if-conversion.  A complicated PHI has more than two arguments and can't
142    be degenerated to a two-argument PHI.  See more information in the
143    comment before phi_convertible_by_degenerating_args.  */
144 static bool any_complicated_phi;
146 /* Hash for struct innermost_loop_behavior.  It depends on the user to
147    free the memory.  */

149 struct innermost_loop_behavior_hash : nofree_ptr_hash <innermost_loop_behavior>
151   static inline hashval_t hash (const value_type &);
152   static inline bool equal (const value_type &,
153                             const compare_type &);
157 innermost_loop_behavior_hash::hash (const value_type &e)
159   hashval_t hash;
161   hash = iterative_hash_expr (e->base_address, 0);
162   hash = iterative_hash_expr (e->offset, hash);
163   hash = iterative_hash_expr (e->init, hash);
164   return iterative_hash_expr (e->step, hash);
168 innermost_loop_behavior_hash::equal (const value_type &e1,
169                                      const compare_type &e2)
171   if ((e1->base_address && !e2->base_address)
172       || (!e1->base_address && e2->base_address)
173       || (!e1->offset && e2->offset)
174       || (e1->offset && !e2->offset)
175       || (!e1->init && e2->init)
176       || (e1->init && !e2->init)
177       || (!e1->step && e2->step)
178       || (e1->step && !e2->step))
179     return false;

181   if (e1->base_address && e2->base_address
182       && !operand_equal_p (e1->base_address, e2->base_address, 0))
183     return false;
184   if (e1->offset && e2->offset
185       && !operand_equal_p (e1->offset, e2->offset, 0))
186     return false;
187   if (e1->init && e2->init
188       && !operand_equal_p (e1->init, e2->init, 0))
189     return false;
190   if (e1->step && e2->step
191       && !operand_equal_p (e1->step, e2->step, 0))
192     return false;

194   return true;
197 /* List of basic blocks in if-conversion-suitable order. */
198 static basic_block *ifc_bbs;

200 /* Hash table to store <DR's innermost loop behavior, DR> pairs.  */
201 static hash_map <innermost_loop_behavior_hash,
202                  data_reference_p> *innermost_DR_map;

204 /* Hash table to store <base reference, DR> pairs.  */
205 static hash_map <tree_operand_hash, data_reference_p> *baseref_DR_map;

207 /* List of redundant SSA names: the first should be replaced by the second.  */
208 static vec < std::pair <tree, tree> > redundant_ssa_names;
210 /* Structure used to predicate basic blocks. This is attached to the
211 ->aux field of the BBs in the loop to be if-converted. */
212 struct bb_predicate {

214   /* The condition under which this basic block is executed.  */
215   tree predicate;

217   /* PREDICATE is gimplified, and the sequence of statements is
218      recorded here, in order to avoid the duplication of computations
219      that occur in previous conditions.  See PR44483.  */
220   gimple_seq predicate_gimplified_stmts;
223 /* Returns true when the basic block BB has a predicate. */
226 bb_has_predicate (basic_block bb)
228   return bb->aux != NULL;

231 /* Returns the gimplified predicate for basic block BB.  */
234 bb_predicate (basic_block bb)
236   return ((struct bb_predicate *) bb->aux)->predicate;

239 /* Sets the gimplified predicate COND for basic block BB.  */
242 set_bb_predicate (basic_block bb, tree cond)
244   gcc_assert ((TREE_CODE (cond) == TRUTH_NOT_EXPR
245                && is_gimple_condexpr (TREE_OPERAND (cond, 0)))
246               || is_gimple_condexpr (cond));
247   ((struct bb_predicate *) bb->aux)->predicate = cond;
250 /* Returns the sequence of statements of the gimplification of the
251 predicate for basic block BB. */
253 static inline gimple_seq
254 bb_predicate_gimplified_stmts (basic_block bb
)
256 return ((struct bb_predicate
*) bb
->aux
)->predicate_gimplified_stmts
;
259 /* Sets the sequence of statements STMTS of the gimplification of the
260 predicate for basic block BB. */
263 set_bb_predicate_gimplified_stmts (basic_block bb
, gimple_seq stmts
)
265 ((struct bb_predicate
*) bb
->aux
)->predicate_gimplified_stmts
= stmts
;
268 /* Adds the sequence of statements STMTS to the sequence of statements
269 of the predicate for basic block BB. */
272 add_bb_predicate_gimplified_stmts (basic_block bb
, gimple_seq stmts
)
274   /* We might have updated some stmts in STMTS via force_gimple_operand
275      calling fold_stmt, and that may produce multiple stmts.  Delink immediate
276      uses so update_ssa after loop versioning doesn't get confused for
277      the not yet inserted predicates.
278      ???  This should go away once we reliably avoid updating stmts
279      we do not insert.  */
280 for (gimple_stmt_iterator gsi
= gsi_start (stmts
);
281 !gsi_end_p (gsi
); gsi_next (&gsi
))
283 gimple
*stmt
= gsi_stmt (gsi
);
284 delink_stmt_imm_use (stmt
);
285 gimple_set_modified (stmt
, true);
287 gimple_seq_add_seq_without_update
288 (&(((struct bb_predicate
*) bb
->aux
)->predicate_gimplified_stmts
), stmts
);
291 /* Initializes to TRUE the predicate of basic block BB. */
294 init_bb_predicate (basic_block bb
)
296 bb
->aux
= XNEW (struct bb_predicate
);
297 set_bb_predicate_gimplified_stmts (bb
, NULL
);
298 set_bb_predicate (bb
, boolean_true_node
);
301 /* Release the SSA_NAMEs associated with the predicate of basic block BB. */
304 release_bb_predicate (basic_block bb
)
306 gimple_seq stmts
= bb_predicate_gimplified_stmts (bb
);
309 /* Ensure that these stmts haven't yet been added to a bb. */
311 for (gimple_stmt_iterator i
= gsi_start (stmts
);
312 !gsi_end_p (i
); gsi_next (&i
))
313 gcc_assert (! gimple_bb (gsi_stmt (i
)));
316 gimple_seq_discard (stmts
);
317 set_bb_predicate_gimplified_stmts (bb
, NULL
);
321 /* Free the predicate of basic block BB. */
324 free_bb_predicate (basic_block bb
)
326 if (!bb_has_predicate (bb
))
329 release_bb_predicate (bb
);
334 /* Reinitialize predicate of BB with the true predicate. */
337 reset_bb_predicate (basic_block bb
)
339 if (!bb_has_predicate (bb
))
340 init_bb_predicate (bb
);
343 release_bb_predicate (bb
);
344 set_bb_predicate (bb
, boolean_true_node
);
348 /* Returns a new SSA_NAME of type TYPE that is assigned the value of
349    the expression EXPR.  Inserts the statement created for this
350    computation before GSI and leaves the iterator GSI at the same
351    statement.  */
354 ifc_temp_var (tree type, tree expr, gimple_stmt_iterator *gsi)
356   tree new_name = make_temp_ssa_name (type, NULL, "_ifc_");
357   gimple *stmt = gimple_build_assign (new_name, expr);
358   gimple_set_vuse (stmt, gimple_vuse (gsi_stmt (*gsi)));
359   gsi_insert_before (gsi, stmt, GSI_SAME_STMT);
361   return new_name;
363 /* Return true when COND is a false predicate. */
366 is_false_predicate (tree cond)
368   return (cond != NULL_TREE
369           && (cond == boolean_false_node
370               || integer_zerop (cond)));

373 /* Return true when COND is a true predicate.  */
376 is_true_predicate (tree cond)
378   return (cond == NULL_TREE
379           || cond == boolean_true_node
380           || integer_onep (cond));
383 /* Returns true when BB has a predicate that is not trivial: true or
384    NULL_TREE.  */
387 is_predicated (basic_block bb
)
389 return !is_true_predicate (bb_predicate (bb
));
392 /* Parses the predicate COND and returns its comparison code and
393 operands OP0 and OP1. */
395 static enum tree_code
396 parse_predicate (tree cond
, tree
*op0
, tree
*op1
)
400 if (TREE_CODE (cond
) == SSA_NAME
401 && is_gimple_assign (s
= SSA_NAME_DEF_STMT (cond
)))
403 if (TREE_CODE_CLASS (gimple_assign_rhs_code (s
)) == tcc_comparison
)
405 *op0
= gimple_assign_rhs1 (s
);
406 *op1
= gimple_assign_rhs2 (s
);
407 return gimple_assign_rhs_code (s
);
410 else if (gimple_assign_rhs_code (s
) == TRUTH_NOT_EXPR
)
412 tree op
= gimple_assign_rhs1 (s
);
413 tree type
= TREE_TYPE (op
);
414 enum tree_code code
= parse_predicate (op
, op0
, op1
);
416 return code
== ERROR_MARK
? ERROR_MARK
417 : invert_tree_comparison (code
, HONOR_NANS (type
));
423 if (COMPARISON_CLASS_P (cond
))
425 *op0
= TREE_OPERAND (cond
, 0);
426 *op1
= TREE_OPERAND (cond
, 1);
427 return TREE_CODE (cond
);
433 /* Returns the fold of predicate C1 OR C2 at location LOC. */
436 fold_or_predicates (location_t loc
, tree c1
, tree c2
)
438 tree op1a
, op1b
, op2a
, op2b
;
439 enum tree_code code1
= parse_predicate (c1
, &op1a
, &op1b
);
440 enum tree_code code2
= parse_predicate (c2
, &op2a
, &op2b
);
442 if (code1
!= ERROR_MARK
&& code2
!= ERROR_MARK
)
444 tree t
= maybe_fold_or_comparisons (boolean_type_node
, code1
, op1a
, op1b
,
450 return fold_build2_loc (loc
, TRUTH_OR_EXPR
, boolean_type_node
, c1
, c2
);
453 /* Returns either a COND_EXPR or the folded expression if the folded
454 expression is a MIN_EXPR, a MAX_EXPR, an ABS_EXPR,
455 a constant or a SSA_NAME. */
458 fold_build_cond_expr (tree type
, tree cond
, tree rhs
, tree lhs
)
460 tree rhs1
, lhs1
, cond_expr
;
462   /* If COND is a comparison r != 0 and r has boolean type, convert COND
463      to an SSA_NAME so that it is accepted by the vect bool pattern.  */
464 if (TREE_CODE (cond
) == NE_EXPR
)
466 tree op0
= TREE_OPERAND (cond
, 0);
467 tree op1
= TREE_OPERAND (cond
, 1);
468 if (TREE_CODE (op0
) == SSA_NAME
469 && TREE_CODE (TREE_TYPE (op0
)) == BOOLEAN_TYPE
470 && (integer_zerop (op1
)))
473 cond_expr
= fold_ternary (COND_EXPR
, type
, cond
, rhs
, lhs
);
475 if (cond_expr
== NULL_TREE
)
476 return build3 (COND_EXPR
, type
, cond
, rhs
, lhs
);
478 STRIP_USELESS_TYPE_CONVERSION (cond_expr
);
480 if (is_gimple_val (cond_expr
))
483 if (TREE_CODE (cond_expr
) == ABS_EXPR
)
485 rhs1
= TREE_OPERAND (cond_expr
, 1);
486 STRIP_USELESS_TYPE_CONVERSION (rhs1
);
487 if (is_gimple_val (rhs1
))
488 return build1 (ABS_EXPR
, type
, rhs1
);
491 if (TREE_CODE (cond_expr
) == MIN_EXPR
492 || TREE_CODE (cond_expr
) == MAX_EXPR
)
494 lhs1
= TREE_OPERAND (cond_expr
, 0);
495 STRIP_USELESS_TYPE_CONVERSION (lhs1
);
496 rhs1
= TREE_OPERAND (cond_expr
, 1);
497 STRIP_USELESS_TYPE_CONVERSION (rhs1
);
498 if (is_gimple_val (rhs1
) && is_gimple_val (lhs1
))
499 return build2 (TREE_CODE (cond_expr
), type
, lhs1
, rhs1
);
501 return build3 (COND_EXPR
, type
, cond
, rhs
, lhs
);
504 /* Add condition NC to the predicate list of basic block BB. LOOP is
505 the loop to be if-converted. Use predicate of cd-equivalent block
506 for join bb if it exists: we call basic blocks bb1 and bb2
507 cd-equivalent if they are executed under the same condition. */
510 add_to_predicate_list (class loop
*loop
, basic_block bb
, tree nc
)
515 if (is_true_predicate (nc
))
518 /* If dominance tells us this basic block is always executed,
519 don't record any predicates for it. */
520 if (dominated_by_p (CDI_DOMINATORS
, loop
->latch
, bb
))
523 dom_bb
= get_immediate_dominator (CDI_DOMINATORS
, bb
);
524 /* We use notion of cd equivalence to get simpler predicate for
525 join block, e.g. if join block has 2 predecessors with predicates
526 p1 & p2 and p1 & !p2, we'd like to get p1 for it instead of
527 p1 & p2 | p1 & !p2. */
528 if (dom_bb
!= loop
->header
529 && get_immediate_dominator (CDI_POST_DOMINATORS
, dom_bb
) == bb
)
531 gcc_assert (flow_bb_inside_loop_p (loop
, dom_bb
));
532 bc
= bb_predicate (dom_bb
);
533 if (!is_true_predicate (bc
))
534 set_bb_predicate (bb
, bc
);
536 gcc_assert (is_true_predicate (bb_predicate (bb
)));
537 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
538 fprintf (dump_file
, "Use predicate of bb#%d for bb#%d\n",
539 dom_bb
->index
, bb
->index
);
543 if (!is_predicated (bb
))
547 bc
= bb_predicate (bb
);
548 bc
= fold_or_predicates (EXPR_LOCATION (bc
), nc
, bc
);
549 if (is_true_predicate (bc
))
551 reset_bb_predicate (bb
);
556 /* Allow a TRUTH_NOT_EXPR around the main predicate. */
557 if (TREE_CODE (bc
) == TRUTH_NOT_EXPR
)
558 tp
= &TREE_OPERAND (bc
, 0);
561 if (!is_gimple_condexpr (*tp
))
564 *tp
= force_gimple_operand_1 (*tp
, &stmts
, is_gimple_condexpr
, NULL_TREE
);
565 add_bb_predicate_gimplified_stmts (bb
, stmts
);
567 set_bb_predicate (bb
, bc
);
570 /* Add the condition COND to the previous condition PREV_COND, and add
571 this to the predicate list of the destination of edge E. LOOP is
572 the loop to be if-converted. */
575 add_to_dst_predicate_list (class loop
*loop
, edge e
,
576 tree prev_cond
, tree cond
)
578 if (!flow_bb_inside_loop_p (loop
, e
->dest
))
581 if (!is_true_predicate (prev_cond
))
582 cond
= fold_build2 (TRUTH_AND_EXPR
, boolean_type_node
,
585 if (!dominated_by_p (CDI_DOMINATORS
, loop
->latch
, e
->dest
))
586 add_to_predicate_list (loop
, e
->dest
, cond
);
589 /* Return true if one of the successor edges of BB exits LOOP. */
592 bb_with_exit_edge_p (class loop
*loop
, basic_block bb
)
597 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
598 if (loop_exit_edge_p (loop
, e
))
604 /* Given PHI which has more than two arguments, this function checks if
605    it's if-convertible by degenerating its arguments.  Specifically, if
606    the two conditions below are satisfied:

608    1) The number of distinct PHI argument values equals 2, and one of the
609       values has exactly one occurrence.
610    2) The edge corresponding to the unique argument isn't a critical edge.

612    Such a PHI can be handled as if it had only two arguments.  For example,

615      res = PHI <A_1(e1), A_1(e2), A_2(e3)>;

617    can be transformed into:

619      res = (predicate of e3) ? A_2 : A_1;

621    Return TRUE if it is the case, FALSE otherwise.  */
624 phi_convertible_by_degenerating_args (gphi
*phi
)
627 tree arg
, t1
= NULL
, t2
= NULL
;
628 unsigned int i
, i1
= 0, i2
= 0, n1
= 0, n2
= 0;
629 unsigned int num_args
= gimple_phi_num_args (phi
);
631 gcc_assert (num_args
> 2);
633 for (i
= 0; i
< num_args
; i
++)
635 arg
= gimple_phi_arg_def (phi
, i
);
636 if (t1
== NULL
|| operand_equal_p (t1
, arg
, 0))
642 else if (t2
== NULL
|| operand_equal_p (t2
, arg
, 0))
652 if (n1
!= 1 && n2
!= 1)
655 /* Check if the edge corresponding to the unique arg is critical. */
656 e
= gimple_phi_arg_edge (phi
, (n1
== 1) ? i1
: i2
);
657 if (EDGE_COUNT (e
->src
->succs
) > 1)
663 /* Return true when PHI is if-convertible.  PHI is part of loop LOOP
664    and it belongs to basic block BB.  Note that at this point it is
665    already known that PHI is if-convertible.  This function updates the
666    global variable ANY_COMPLICATED_PHI if PHI is complicated.  */
669 if_convertible_phi_p (class loop
*loop
, basic_block bb
, gphi
*phi
)
671 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
673 fprintf (dump_file
, "-------------------------\n");
674 print_gimple_stmt (dump_file
, phi
, 0, TDF_SLIM
);
677 if (bb
!= loop
->header
678 && gimple_phi_num_args (phi
) > 2
679 && !phi_convertible_by_degenerating_args (phi
))
680 any_complicated_phi
= true;
685 /* Records the status of a data reference. This struct is attached to
686 each DR->aux field. */
689 bool rw_unconditionally
;
690 bool w_unconditionally
;
691 bool written_at_least_once
;
695 tree base_w_predicate
;
698 #define IFC_DR(DR) ((struct ifc_dr *) (DR)->aux)
699 #define DR_BASE_W_UNCONDITIONALLY(DR) (IFC_DR (DR)->written_at_least_once)
700 #define DR_RW_UNCONDITIONALLY(DR) (IFC_DR (DR)->rw_unconditionally)
701 #define DR_W_UNCONDITIONALLY(DR) (IFC_DR (DR)->w_unconditionally)
703 /* Iterates over DRs and stores refs, DR and base refs, DR pairs in
704    HASH tables.  While storing them in the HASH tables, it checks if the
705    reference is unconditionally read or written and stores that as flag
706    information.  For base references it checks if they are written at least
707    once unconditionally and stores that as flag information along with DR.
708    In other words, for every data reference A in STMT there exist other
709    accesses to a data reference with the same base with predicates that
710    add up (OR-up) to the true predicate: this ensures that the data
711    reference A is touched (read or written) on every iteration of the
712    if-converted loop.  */
714 hash_memrefs_baserefs_and_store_DRs_read_written_info (data_reference_p a
)
717 data_reference_p
*master_dr
, *base_master_dr
;
718 tree base_ref
= DR_BASE_OBJECT (a
);
719 innermost_loop_behavior
*innermost
= &DR_INNERMOST (a
);
720 tree ca
= bb_predicate (gimple_bb (DR_STMT (a
)));
723 master_dr
= &innermost_DR_map
->get_or_insert (innermost
, &exist1
);
729 IFC_DR (*master_dr
)->w_predicate
730 = fold_or_predicates (UNKNOWN_LOCATION
, ca
,
731 IFC_DR (*master_dr
)->w_predicate
);
732 if (is_true_predicate (IFC_DR (*master_dr
)->w_predicate
))
733 DR_W_UNCONDITIONALLY (*master_dr
) = true;
735 IFC_DR (*master_dr
)->rw_predicate
736 = fold_or_predicates (UNKNOWN_LOCATION
, ca
,
737 IFC_DR (*master_dr
)->rw_predicate
);
738 if (is_true_predicate (IFC_DR (*master_dr
)->rw_predicate
))
739 DR_RW_UNCONDITIONALLY (*master_dr
) = true;
743 base_master_dr
= &baseref_DR_map
->get_or_insert (base_ref
, &exist2
);
746 IFC_DR (*base_master_dr
)->base_w_predicate
747 = fold_or_predicates (UNKNOWN_LOCATION
, ca
,
748 IFC_DR (*base_master_dr
)->base_w_predicate
);
749 if (is_true_predicate (IFC_DR (*base_master_dr
)->base_w_predicate
))
750 DR_BASE_W_UNCONDITIONALLY (*base_master_dr
) = true;
754 /* Return TRUE if we can prove that the index IDX of an array reference
755    REF is within the array bound.  Return false otherwise.  */
758 idx_within_array_bound (tree ref
, tree
*idx
, void *dta
)
760 wi::overflow_type overflow
;
761 widest_int niter
, valid_niter
, delta
, wi_step
;
764 class loop
*loop
= (class loop
*) dta
;
766 /* Only support within-bound access for array references. */
767 if (TREE_CODE (ref
) != ARRAY_REF
)
770 /* For arrays at the end of the structure, we are not guaranteed that they
771 do not really extend over their declared size. However, for arrays of
772 size greater than one, this is unlikely to be intended. */
773 if (array_at_struct_end_p (ref
))
776 ev
= analyze_scalar_evolution (loop
, *idx
);
777 ev
= instantiate_parameters (loop
, ev
);
778 init
= initial_condition (ev
);
779 step
= evolution_part_in_loop_num (ev
, loop
->num
);
781 if (!init
|| TREE_CODE (init
) != INTEGER_CST
782 || (step
&& TREE_CODE (step
) != INTEGER_CST
))
785 low
= array_ref_low_bound (ref
);
786 high
= array_ref_up_bound (ref
);
788   /* The case of nonconstant bounds could be handled, but it would be
789      complicated.  */
790 if (TREE_CODE (low
) != INTEGER_CST
791 || !high
|| TREE_CODE (high
) != INTEGER_CST
)
794   /* Check if the initial idx is within bound.  */
795 if (wi::to_widest (init
) < wi::to_widest (low
)
796 || wi::to_widest (init
) > wi::to_widest (high
))
799 /* The idx is always within bound. */
800 if (!step
|| integer_zerop (step
))
803 if (!max_loop_iterations (loop
, &niter
))
806 if (wi::to_widest (step
) < 0)
808 delta
= wi::to_widest (init
) - wi::to_widest (low
);
809 wi_step
= -wi::to_widest (step
);
813 delta
= wi::to_widest (high
) - wi::to_widest (init
);
814 wi_step
= wi::to_widest (step
);
817 valid_niter
= wi::div_floor (delta
, wi_step
, SIGNED
, &overflow
);
818 /* The iteration space of idx is within array bound. */
819 if (!overflow
&& niter
<= valid_niter
)
825 /* Return TRUE if REF is a within-bound array reference.  */
828 ref_within_array_bound (gimple
*stmt
, tree ref
)
830 class loop
*loop
= loop_containing_stmt (stmt
);
832 gcc_assert (loop
!= NULL
);
833 return for_each_index (&ref
, idx_within_array_bound
, loop
);
837 /* Given a memory reference expression T, return TRUE if the base object
838    it refers to is writable.  The base object of a memory reference
839    is the main object being referenced, which is returned by function
840    get_base_address.  */
843 base_object_writable (tree ref
)
845 tree base_tree
= get_base_address (ref
);
848 && DECL_P (base_tree
)
849 && decl_binds_to_current_def_p (base_tree
)
850 && !TREE_READONLY (base_tree
));
853 /* Return true when the memory references of STMT won't trap in the
854 if-converted code. There are two things that we have to check for:
856 - writes to memory occur to writable memory: if-conversion of
857 memory writes transforms the conditional memory writes into
858 unconditional writes, i.e. "if (cond) A[i] = foo" is transformed
859 into "A[i] = cond ? foo : A[i]", and as the write to memory may not
860 be executed at all in the original code, it may be a readonly
861 memory. To check that A is not const-qualified, we check that
862      there exists at least one unconditional write to A in the current
863      function.
865 - reads or writes to memory are valid memory accesses for every
866 iteration. To check that the memory accesses are correctly formed
867 and that we are allowed to read and write in these locations, we
868 check that the memory accesses to be if-converted occur at every
869 iteration unconditionally.
871    Returns true for the memory reference in STMT if the same memory
872    reference is read or written unconditionally at least once and the base
873    memory reference is written unconditionally once.  This checks that the
874    reference cannot fault when written.  Also returns true if the memory
875    reference is unconditionally read once and we are conditionally writing
876    to memory that is defined as both read and written and is bound to the
877    definition we are seeing.  */
879 ifcvt_memrefs_wont_trap (gimple
*stmt
, vec
<data_reference_p
> drs
)
881 /* If DR didn't see a reference here we can't use it to tell
882 whether the ref traps or not. */
883 if (gimple_uid (stmt
) == 0)
886 data_reference_p
*master_dr
, *base_master_dr
;
887 data_reference_p a
= drs
[gimple_uid (stmt
) - 1];
889 tree base
= DR_BASE_OBJECT (a
);
890 innermost_loop_behavior
*innermost
= &DR_INNERMOST (a
);
892 gcc_assert (DR_STMT (a
) == stmt
);
893 gcc_assert (DR_BASE_ADDRESS (a
) || DR_OFFSET (a
)
894 || DR_INIT (a
) || DR_STEP (a
));
896 master_dr
= innermost_DR_map
->get (innermost
);
897 gcc_assert (master_dr
!= NULL
);
899 base_master_dr
= baseref_DR_map
->get (base
);
901 /* If a is unconditionally written to it doesn't trap. */
902 if (DR_W_UNCONDITIONALLY (*master_dr
))
905   /* If a is unconditionally accessed then ...

907      Even if A is a conditional access, we can treat it as an unconditional
908      one if it's an array reference and all its indices are within the
909      array bound.  */
910 if (DR_RW_UNCONDITIONALLY (*master_dr
)
911 || ref_within_array_bound (stmt
, DR_REF (a
)))
913       /* An unconditional read won't trap.  */

917       /* An unconditional write won't trap if the base is written
918          to unconditionally.  */
920 && DR_BASE_W_UNCONDITIONALLY (*base_master_dr
))
921 return flag_store_data_races
;
922 /* or the base is known to be not readonly. */
923 else if (base_object_writable (DR_REF (a
)))
924 return flag_store_data_races
;
930 /* Return true if STMT could be converted into a masked load or store
931 (conditional load or store based on a mask computed from bb predicate). */
934 ifcvt_can_use_mask_load_store (gimple
*stmt
)
936 /* Check whether this is a load or store. */
937 tree lhs
= gimple_assign_lhs (stmt
);
940 if (gimple_store_p (stmt
))
942 if (!is_gimple_val (gimple_assign_rhs1 (stmt
)))
947 else if (gimple_assign_load_p (stmt
))
950 ref
= gimple_assign_rhs1 (stmt
);
955 if (may_be_nonaddressable_p (ref
))
958   /* Mask should be integer mode of the same size as the load/store
959      mode.  */
960 machine_mode mode
= TYPE_MODE (TREE_TYPE (lhs
));
961 if (!int_mode_for_mode (mode
).exists () || VECTOR_MODE_P (mode
))
964 if (can_vec_mask_load_store_p (mode
, VOIDmode
, is_load
))
970 /* Return true if STMT could be converted from an operation that is
971 unconditional to one that is conditional on a bb predicate mask. */
974 ifcvt_can_predicate (gimple
*stmt
)
976 basic_block bb
= gimple_bb (stmt
);
978 if (!(flag_tree_loop_vectorize
|| bb
->loop_father
->force_vectorize
)
979 || bb
->loop_father
->dont_vectorize
980 || gimple_has_volatile_ops (stmt
))
983 if (gimple_assign_single_p (stmt
))
984 return ifcvt_can_use_mask_load_store (stmt
);
986 tree_code code
= gimple_assign_rhs_code (stmt
);
987 tree lhs_type
= TREE_TYPE (gimple_assign_lhs (stmt
));
988 tree rhs_type
= TREE_TYPE (gimple_assign_rhs1 (stmt
));
989 if (!types_compatible_p (lhs_type
, rhs_type
))
991 internal_fn cond_fn
= get_conditional_internal_fn (code
);
992 return (cond_fn
!= IFN_LAST
993 && vectorized_internal_fn_supported_p (cond_fn
, lhs_type
));
996 /* Return true when STMT is if-convertible.

998    A GIMPLE_ASSIGN statement is not if-convertible if,
999    - it is not movable,
1000   - it could trap,
1001   - the LHS is not a var decl.  */
1004 if_convertible_gimple_assign_stmt_p (gimple
*stmt
,
1005 vec
<data_reference_p
> refs
)
1007 tree lhs
= gimple_assign_lhs (stmt
);
1009 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1011 fprintf (dump_file
, "-------------------------\n");
1012 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
1015 if (!is_gimple_reg_type (TREE_TYPE (lhs
)))
1018   /* Some of these constraints might be too conservative.  */
1019 if (stmt_ends_bb_p (stmt
)
1020 || gimple_has_volatile_ops (stmt
)
1021 || (TREE_CODE (lhs
) == SSA_NAME
1022 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs
))
1023 || gimple_has_side_effects (stmt
))
1025 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1026 fprintf (dump_file
, "stmt not suitable for ifcvt\n");
1030 /* tree-into-ssa.c uses GF_PLF_1, so avoid it, because
1031 in between if_convertible_loop_p and combine_blocks
1032 we can perform loop versioning. */
1033 gimple_set_plf (stmt
, GF_PLF_2
, false);
1035 if ((! gimple_vuse (stmt
)
1036 || gimple_could_trap_p_1 (stmt
, false, false)
1037 || ! ifcvt_memrefs_wont_trap (stmt
, refs
))
1038 && gimple_could_trap_p (stmt
))
1040 if (ifcvt_can_predicate (stmt
))
1042 gimple_set_plf (stmt
, GF_PLF_2
, true);
1043 need_to_predicate
= true;
1046 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1047 fprintf (dump_file
, "tree could trap...\n");
1050 else if (INTEGRAL_TYPE_P (TREE_TYPE (lhs
))
1051 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (lhs
))
1052 && arith_code_with_undefined_signed_overflow
1053 (gimple_assign_rhs_code (stmt
)))
1054 /* We have to rewrite stmts with undefined overflow. */
1055 need_to_rewrite_undefined
= true;
1057   /* When if-converting stores, force versioning; likewise if we
1058      ended up generating store data races.  */
1059 if (gimple_vdef (stmt
))
1060 need_to_predicate
= true;
1065 /* Return true when STMT is if-convertible.

1067    A statement is if-convertible if:
1068    - it is an if-convertible GIMPLE_ASSIGN,
1069    - it is a GIMPLE_LABEL or a GIMPLE_COND,
1070    - it is a call to a builtin function.  */
1073 if_convertible_stmt_p (gimple
*stmt
, vec
<data_reference_p
> refs
)
1075 switch (gimple_code (stmt
))
1083 return if_convertible_gimple_assign_stmt_p (stmt
, refs
);
1087 tree fndecl
= gimple_call_fndecl (stmt
);
1090 int flags
= gimple_call_flags (stmt
);
1091 if ((flags
& ECF_CONST
)
1092 && !(flags
& ECF_LOOPING_CONST_OR_PURE
)
1093 /* We can only vectorize some builtins at the moment,
1094 so restrict if-conversion to those. */
1095 && fndecl_built_in_p (fndecl
))
1102 /* Don't know what to do with 'em so don't do anything. */
1103 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1105 fprintf (dump_file
, "don't know what to do\n");
1106 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
1114 /* Assumes that BB has more than one predecessor.
1115    Returns false if at least one incoming edge is not a critical edge,
1116    and true otherwise.  */
1119 all_preds_critical_p (basic_block bb
)
1124 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
1125 if (EDGE_COUNT (e
->src
->succs
) == 1)
1130 /* Return true when BB is if-convertible. This routine does not check
1131 basic block's statements and phis.
1133 A basic block is not if-convertible if:
1134 - it is non-empty and it is after the exit block (in BFS order),
1135 - it is after the exit block but before the latch,
1136 - its edges are not normal.
1138    EXIT_BB is the basic block containing the exit of the LOOP.  BB is
1139    inside LOOP.  */
1142 if_convertible_bb_p (class loop
*loop
, basic_block bb
, basic_block exit_bb
)
1147 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1148 fprintf (dump_file
, "----------[%d]-------------\n", bb
->index
);
1150 if (EDGE_COUNT (bb
->succs
) > 2)
1153 gimple
*last
= last_stmt (bb
);
1154 if (gcall
*call
= safe_dyn_cast
<gcall
*> (last
))
1155 if (gimple_call_ctrl_altering_p (call
))
1160 if (bb
!= loop
->latch
)
1162 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1163 fprintf (dump_file
, "basic block after exit bb but before latch\n");
1166 else if (!empty_block_p (bb
))
1168 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1169 fprintf (dump_file
, "non empty basic block after exit bb\n");
1172 else if (bb
== loop
->latch
1174 && !dominated_by_p (CDI_DOMINATORS
, bb
, exit_bb
))
1176 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1177 fprintf (dump_file
, "latch is not dominated by exit_block\n");
1182 /* Be less adventurous and handle only normal edges. */
1183 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
1184 if (e
->flags
& (EDGE_EH
| EDGE_ABNORMAL
| EDGE_IRREDUCIBLE_LOOP
))
1186 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1187 fprintf (dump_file
, "Difficult to handle edges\n");
1194 /* Return true when all predecessor blocks of BB are visited. The
1195 VISITED bitmap keeps track of the visited blocks. */
1198 pred_blocks_visited_p (basic_block bb
, bitmap
*visited
)
1202 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
1203 if (!bitmap_bit_p (*visited
, e
->src
->index
))
1209 /* Get body of a LOOP in suitable order for if-conversion.  It is the
1210    caller's responsibility to deallocate the basic block list.
1211    If-conversion-suitable order is breadth-first search (BFS) order
1212    with an additional constraint: select a block only if all its
1213    predecessors are already selected.  */
1215 static basic_block
*
1216 get_loop_body_in_if_conv_order (const class loop
*loop
)
1218 basic_block
*blocks
, *blocks_in_bfs_order
;
1221 unsigned int index
= 0;
1222 unsigned int visited_count
= 0;
1224 gcc_assert (loop
->num_nodes
);
1225 gcc_assert (loop
->latch
!= EXIT_BLOCK_PTR_FOR_FN (cfun
));
1227 blocks
= XCNEWVEC (basic_block
, loop
->num_nodes
);
1228 visited
= BITMAP_ALLOC (NULL
);
1230 blocks_in_bfs_order
= get_loop_body_in_bfs_order (loop
);
1233 while (index
< loop
->num_nodes
)
1235 bb
= blocks_in_bfs_order
[index
];
1237 if (bb
->flags
& BB_IRREDUCIBLE_LOOP
)
1239 free (blocks_in_bfs_order
);
1240 BITMAP_FREE (visited
);
1245 if (!bitmap_bit_p (visited
, bb
->index
))
1247 if (pred_blocks_visited_p (bb
, &visited
)
1248 || bb
== loop
->header
)
1250 /* This block is now visited. */
1251 bitmap_set_bit (visited
, bb
->index
);
1252 blocks
[visited_count
++] = bb
;
1258 if (index
== loop
->num_nodes
1259 && visited_count
!= loop
->num_nodes
)
1263 free (blocks_in_bfs_order
);
1264 BITMAP_FREE (visited
);
1268 /* Returns true when the analysis of the predicates for all the basic
1269 blocks in LOOP succeeded.
1271 predicate_bbs first allocates the predicates of the basic blocks.
1272 These fields are then initialized with the tree expressions
1273 representing the predicates under which a basic block is executed
1274 in the LOOP. As the loop->header is executed at each iteration, it
1275 has the "true" predicate. Other statements executed under a
1276    condition are predicated with that condition, for example

1278    | if (x)
1279    |   S1;
1280    | else
1281    |   S2;

1283    S1 will be predicated with "x", and
1284    S2 will be predicated with "!x".  */
1287 predicate_bbs (loop_p loop
)
1291 for (i
= 0; i
< loop
->num_nodes
; i
++)
1292 init_bb_predicate (ifc_bbs
[i
]);
1294 for (i
= 0; i
< loop
->num_nodes
; i
++)
1296 basic_block bb
= ifc_bbs
[i
];
1300 /* The loop latch and loop exit block are always executed and
1301 have no extra conditions to be processed: skip them. */
1302 if (bb
== loop
->latch
1303 || bb_with_exit_edge_p (loop
, bb
))
1305 reset_bb_predicate (bb
);
1309 cond
= bb_predicate (bb
);
1310 stmt
= last_stmt (bb
);
1311 if (stmt
&& gimple_code (stmt
) == GIMPLE_COND
)
1314 edge true_edge
, false_edge
;
1315 location_t loc
= gimple_location (stmt
);
1316 tree c
= build2_loc (loc
, gimple_cond_code (stmt
),
1318 gimple_cond_lhs (stmt
),
1319 gimple_cond_rhs (stmt
));
1321 /* Add new condition into destination's predicate list. */
1322 extract_true_false_edges_from_block (gimple_bb (stmt
),
1323 &true_edge
, &false_edge
);
1325 /* If C is true, then TRUE_EDGE is taken. */
1326 add_to_dst_predicate_list (loop
, true_edge
, unshare_expr (cond
),
1329 /* If C is false, then FALSE_EDGE is taken. */
1330 c2
= build1_loc (loc
, TRUTH_NOT_EXPR
, boolean_type_node
,
1332 add_to_dst_predicate_list (loop
, false_edge
,
1333 unshare_expr (cond
), c2
);
1338 /* If current bb has only one successor, then consider it as an
1339 unconditional goto. */
1340 if (single_succ_p (bb
))
1342 basic_block bb_n
= single_succ (bb
);
1344 /* The successor bb inherits the predicate of its
1345 predecessor. If there is no predicate in the predecessor
1346 bb, then consider the successor bb as always executed. */
1347 if (cond
== NULL_TREE
)
1348 cond
= boolean_true_node
;
1350 add_to_predicate_list (loop
, bb_n
, cond
);
1354 /* The loop header is always executed. */
1355 reset_bb_predicate (loop
->header
);
1356 gcc_assert (bb_predicate_gimplified_stmts (loop
->header
) == NULL
1357 && bb_predicate_gimplified_stmts (loop
->latch
) == NULL
);
1360 /* Build region by adding loop pre-header and post-header blocks. */
1362 static vec
<basic_block
>
1363 build_region (class loop
*loop
)
1365 vec
<basic_block
> region
= vNULL
;
1366 basic_block exit_bb
= NULL
;
1368 gcc_assert (ifc_bbs
);
1369 /* The first element is loop pre-header. */
1370 region
.safe_push (loop_preheader_edge (loop
)->src
);
1372 for (unsigned int i
= 0; i
< loop
->num_nodes
; i
++)
1374 basic_block bb
= ifc_bbs
[i
];
1375 region
.safe_push (bb
);
1376 /* Find loop postheader. */
1379 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
1380 if (loop_exit_edge_p (loop
, e
))
1386 /* The last element is loop post-header. */
1387 gcc_assert (exit_bb
);
1388 region
.safe_push (exit_bb
);
1392 /* Return true when LOOP is if-convertible. This is a helper function
1393 for if_convertible_loop_p. REFS and DDRS are initialized and freed
1394 in if_convertible_loop_p. */
1397 if_convertible_loop_p_1 (class loop
*loop
, vec
<data_reference_p
> *refs
)
1400 basic_block exit_bb
= NULL
;
1401 vec
<basic_block
> region
;
1403 if (find_data_references_in_loop (loop
, refs
) == chrec_dont_know
)
1406 calculate_dominance_info (CDI_DOMINATORS
);
1408 /* Allow statements that can be handled during if-conversion. */
1409 ifc_bbs
= get_loop_body_in_if_conv_order (loop
);
1412 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1413 fprintf (dump_file
, "Irreducible loop\n");
1417 for (i
= 0; i
< loop
->num_nodes
; i
++)
1419 basic_block bb
= ifc_bbs
[i
];
1421 if (!if_convertible_bb_p (loop
, bb
, exit_bb
))
1424 if (bb_with_exit_edge_p (loop
, bb
))
1428 for (i
= 0; i
< loop
->num_nodes
; i
++)
1430 basic_block bb
= ifc_bbs
[i
];
1431 gimple_stmt_iterator gsi
;
1433 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
1434 switch (gimple_code (gsi_stmt (gsi
)))
1441 gimple_set_uid (gsi_stmt (gsi
), 0);
1448 data_reference_p dr
;
1451 = new hash_map
<innermost_loop_behavior_hash
, data_reference_p
>;
1452 baseref_DR_map
= new hash_map
<tree_operand_hash
, data_reference_p
>;
1454 /* Compute post-dominator tree locally. */
1455 region
= build_region (loop
);
1456 calculate_dominance_info_for_region (CDI_POST_DOMINATORS
, region
);
1458 predicate_bbs (loop
);
1460 /* Free post-dominator tree since it is not used after predication. */
1461 free_dominance_info_for_region (cfun
, CDI_POST_DOMINATORS
, region
);
1464 for (i
= 0; refs
->iterate (i
, &dr
); i
++)
1466 tree ref
= DR_REF (dr
);
1468 dr
->aux
= XNEW (struct ifc_dr
);
1469 DR_BASE_W_UNCONDITIONALLY (dr
) = false;
1470 DR_RW_UNCONDITIONALLY (dr
) = false;
1471 DR_W_UNCONDITIONALLY (dr
) = false;
1472 IFC_DR (dr
)->rw_predicate
= boolean_false_node
;
1473 IFC_DR (dr
)->w_predicate
= boolean_false_node
;
1474 IFC_DR (dr
)->base_w_predicate
= boolean_false_node
;
1475 if (gimple_uid (DR_STMT (dr
)) == 0)
1476 gimple_set_uid (DR_STMT (dr
), i
+ 1);
1478 /* If DR doesn't have innermost loop behavior or it's a compound
1479	 memory reference, we synthesize its innermost loop behavior
1480	 for hashing.  */
1481 if (TREE_CODE (ref
) == COMPONENT_REF
1482 || TREE_CODE (ref
) == IMAGPART_EXPR
1483 || TREE_CODE (ref
) == REALPART_EXPR
1484 || !(DR_BASE_ADDRESS (dr
) || DR_OFFSET (dr
)
1485 || DR_INIT (dr
) || DR_STEP (dr
)))
1487 while (TREE_CODE (ref
) == COMPONENT_REF
1488 || TREE_CODE (ref
) == IMAGPART_EXPR
1489 || TREE_CODE (ref
) == REALPART_EXPR
)
1490 ref
= TREE_OPERAND (ref
, 0);
1492 memset (&DR_INNERMOST (dr
), 0, sizeof (DR_INNERMOST (dr
)));
1493 DR_BASE_ADDRESS (dr
) = ref
;
1495 hash_memrefs_baserefs_and_store_DRs_read_written_info (dr
);
1498 for (i
= 0; i
< loop
->num_nodes
; i
++)
1500 basic_block bb
= ifc_bbs
[i
];
1501 gimple_stmt_iterator itr
;
1503 /* Check the if-convertibility of statements in predicated BBs. */
1504 if (!dominated_by_p (CDI_DOMINATORS
, loop
->latch
, bb
))
1505 for (itr
= gsi_start_bb (bb
); !gsi_end_p (itr
); gsi_next (&itr
))
1506 if (!if_convertible_stmt_p (gsi_stmt (itr
), *refs
))
1510 /* Checking PHIs needs to be done after stmts, as the fact whether there
1511 are any masked loads or stores affects the tests. */
1512 for (i
= 0; i
< loop
->num_nodes
; i
++)
1514 basic_block bb
= ifc_bbs
[i
];
1517 for (itr
= gsi_start_phis (bb
); !gsi_end_p (itr
); gsi_next (&itr
))
1518 if (!if_convertible_phi_p (loop
, bb
, itr
.phi ()))
1523 fprintf (dump_file
, "Applying if-conversion\n");
1528 /* Return true when LOOP is if-convertible.
1529 LOOP is if-convertible if:
1531 - it has two or more basic blocks,
1532 - it has only one exit,
1533 - loop header is not the exit edge,
1534    - its basic blocks and phi nodes are if-convertible.  */
1537 if_convertible_loop_p (class loop
*loop
)
1542 vec
<data_reference_p
> refs
;
1544 /* Handle only innermost loop. */
1545 if (!loop
|| loop
->inner
)
1547 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1548 fprintf (dump_file
, "not innermost loop\n");
1552 /* If only one block, no need for if-conversion. */
1553 if (loop
->num_nodes
<= 2)
1555 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1556 fprintf (dump_file
, "less than 2 basic blocks\n");
1560 /* More than one loop exit is too much to handle. */
1561 if (!single_exit (loop
))
1563 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1564 fprintf (dump_file
, "multiple exits\n");
1568 /* If one of the loop header's edge is an exit edge then do not
1569 apply if-conversion. */
1570 FOR_EACH_EDGE (e
, ei
, loop
->header
->succs
)
1571 if (loop_exit_edge_p (loop
, e
))
1575 res
= if_convertible_loop_p_1 (loop
, &refs
);
1577 data_reference_p dr
;
1579 for (i
= 0; refs
.iterate (i
, &dr
); i
++)
1582 free_data_refs (refs
);
1584 delete innermost_DR_map
;
1585 innermost_DR_map
= NULL
;
1587 delete baseref_DR_map
;
1588 baseref_DR_map
= NULL
;
1593 /* Return reduc_1 if has_nop.

1595    if (...)
1596      tmp1 = (unsigned type) reduc_1;
1597      tmp2 = tmp1 + rhs2;
1598      reduc_3 = (signed type) tmp2.  */
1600 strip_nop_cond_scalar_reduction (bool has_nop
, tree op
)
1605 if (TREE_CODE (op
) != SSA_NAME
)
1608 gassign
*stmt
= safe_dyn_cast
<gassign
*> (SSA_NAME_DEF_STMT (op
));
1610 || !CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (stmt
))
1611 || !tree_nop_conversion_p (TREE_TYPE (op
), TREE_TYPE
1612 (gimple_assign_rhs1 (stmt
))))
1615 return gimple_assign_rhs1 (stmt
);
1618 /* Returns true if def-stmt for phi argument ARG is simple increment/decrement
1619 which is in predicated basic block.
1620    In fact, the following PHI pattern is searched for:
1621      loop-header:
1622        reduc_1 = PHI <..., reduc_2>
1623      ...
1624        if (...)
1625          reduc_3 = ...
1626        reduc_2 = PHI <reduc_1, reduc_3>

1628    ARG_0 and ARG_1 are the corresponding PHI arguments.
1629 REDUC, OP0 and OP1 contain reduction stmt and its operands.
1630 EXTENDED is true if PHI has > 2 arguments. */
1633 is_cond_scalar_reduction (gimple
*phi
, gimple
**reduc
, tree arg_0
, tree arg_1
,
1634 tree
*op0
, tree
*op1
, bool extended
, bool* has_nop
,
1637 tree lhs
, r_op1
, r_op2
, r_nop1
, r_nop2
;
1639 gimple
*header_phi
= NULL
;
1640 enum tree_code reduction_op
;
1641 basic_block bb
= gimple_bb (phi
);
1642 class loop
*loop
= bb
->loop_father
;
1643 edge latch_e
= loop_latch_edge (loop
);
1644 imm_use_iterator imm_iter
;
1645 use_operand_p use_p
;
1648 bool result
= *has_nop
= false;
1649 if (TREE_CODE (arg_0
) != SSA_NAME
|| TREE_CODE (arg_1
) != SSA_NAME
)
1652 if (!extended
&& gimple_code (SSA_NAME_DEF_STMT (arg_0
)) == GIMPLE_PHI
)
1655 header_phi
= SSA_NAME_DEF_STMT (arg_0
);
1656 stmt
= SSA_NAME_DEF_STMT (arg_1
);
1658 else if (gimple_code (SSA_NAME_DEF_STMT (arg_1
)) == GIMPLE_PHI
)
1661 header_phi
= SSA_NAME_DEF_STMT (arg_1
);
1662 stmt
= SSA_NAME_DEF_STMT (arg_0
);
1666 if (gimple_bb (header_phi
) != loop
->header
)
1669 if (PHI_ARG_DEF_FROM_EDGE (header_phi
, latch_e
) != PHI_RESULT (phi
))
1672 if (gimple_code (stmt
) != GIMPLE_ASSIGN
1673 || gimple_has_volatile_ops (stmt
))
1676 if (!flow_bb_inside_loop_p (loop
, gimple_bb (stmt
)))
1679 if (!is_predicated (gimple_bb (stmt
)))
1682 /* Check that stmt-block is predecessor of phi-block. */
1683 FOR_EACH_EDGE (e
, ei
, gimple_bb (stmt
)->succs
)
1692 if (!has_single_use (lhs
))
1695 reduction_op
= gimple_assign_rhs_code (stmt
);
1697 /* Catch something like below
1700 reduc_1 = PHI <..., reduc_2>
1703 tmp1 = (unsigned type) reduc_1;
1705 reduc_3 = (signed type) tmp2;
1707 reduc_2 = PHI <reduc_1, reduc_3>
1711 reduc_2 = PHI <0, reduc_3>
1712 tmp1 = (unsigned type)reduce_1;
1713 ifcvt = cond_expr ? rhs2 : 0
1714 tmp2 = tmp1 +/- ifcvt;
1715 reduce_1 = (signed type)tmp2; */
1717 if (CONVERT_EXPR_CODE_P (reduction_op
))
1719 lhs
= gimple_assign_rhs1 (stmt
);
1720 if (TREE_CODE (lhs
) != SSA_NAME
1721 || !has_single_use (lhs
))
1725 stmt
= SSA_NAME_DEF_STMT (lhs
);
1726 if (gimple_bb (stmt
) != gimple_bb (*nop_reduc
)
1727 || !is_gimple_assign (stmt
))
1731 reduction_op
= gimple_assign_rhs_code (stmt
);
1734 if (reduction_op
!= PLUS_EXPR
&& reduction_op
!= MINUS_EXPR
)
1736 r_op1
= gimple_assign_rhs1 (stmt
);
1737 r_op2
= gimple_assign_rhs2 (stmt
);
1739 r_nop1
= strip_nop_cond_scalar_reduction (*has_nop
, r_op1
);
1740 r_nop2
= strip_nop_cond_scalar_reduction (*has_nop
, r_op2
);
1742   /* Make R_OP1 hold the reduction variable.  */
1743 if (r_nop2
== PHI_RESULT (header_phi
)
1744 && reduction_op
== PLUS_EXPR
)
1746 std::swap (r_op1
, r_op2
);
1747 std::swap (r_nop1
, r_nop2
);
1749 else if (r_nop1
!= PHI_RESULT (header_phi
))
1754 /* Check that R_NOP1 is used in nop_stmt or in PHI only. */
1755 FOR_EACH_IMM_USE_FAST (use_p
, imm_iter
, r_nop1
)
1757 gimple
*use_stmt
= USE_STMT (use_p
);
1758 if (is_gimple_debug (use_stmt
))
1760 if (use_stmt
== SSA_NAME_DEF_STMT (r_op1
))
1762 if (use_stmt
!= phi
)
1767 /* Check that R_OP1 is used in reduction stmt or in PHI only. */
1768 FOR_EACH_IMM_USE_FAST (use_p
, imm_iter
, r_op1
)
1770 gimple
*use_stmt
= USE_STMT (use_p
);
1771 if (is_gimple_debug (use_stmt
))
1773 if (use_stmt
== stmt
)
1775 if (gimple_code (use_stmt
) != GIMPLE_PHI
)
1779 *op0
= r_op1
; *op1
= r_op2
;
1784 /* Converts conditional scalar reduction into unconditional form, e.g.
1786 if (_5 != 0) goto bb_5 else goto bb_6
1792 # res_2 = PHI <res_13(4), res_6(5)>
1795 will be converted into sequence
1796 _ifc__1 = _5 != 0 ? 1 : 0;
1797 res_2 = res_13 + _ifc__1;
1798    Argument SWAP tells that the arguments of the conditional expression
1799    should be swapped.

1800    Returns the rhs of the resulting PHI assignment.  */
1803 convert_scalar_cond_reduction (gimple
*reduc
, gimple_stmt_iterator
*gsi
,
1804 tree cond
, tree op0
, tree op1
, bool swap
,
1805 bool has_nop
, gimple
* nop_reduc
)
1807 gimple_stmt_iterator stmt_it
;
1810 tree rhs1
= gimple_assign_rhs1 (reduc
);
1811 tree tmp
= make_temp_ssa_name (TREE_TYPE (rhs1
), NULL
, "_ifc_");
1813 tree zero
= build_zero_cst (TREE_TYPE (rhs1
));
1814 gimple_seq stmts
= NULL
;
1816 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1818 fprintf (dump_file
, "Found cond scalar reduction.\n");
1819 print_gimple_stmt (dump_file
, reduc
, 0, TDF_SLIM
);
1822 /* Build cond expression using COND and constant operand
1823 of reduction rhs. */
1824 c
= fold_build_cond_expr (TREE_TYPE (rhs1
),
1825 unshare_expr (cond
),
1829 /* Create assignment stmt and insert it at GSI. */
1830 new_assign
= gimple_build_assign (tmp
, c
);
1831 gsi_insert_before (gsi
, new_assign
, GSI_SAME_STMT
);
1832 /* Build rhs for unconditional increment/decrement. */
1833 rhs
= gimple_build (&stmts
, gimple_assign_rhs_code (reduc
),
1834 TREE_TYPE (rhs1
), op0
, tmp
);
1838 rhs
= gimple_convert (&stmts
,
1839 TREE_TYPE (gimple_assign_lhs (nop_reduc
)), rhs
);
1840 stmt_it
= gsi_for_stmt (nop_reduc
);
1841 gsi_remove (&stmt_it
, true);
1842 release_defs (nop_reduc
);
1844 gsi_insert_seq_before (gsi
, stmts
, GSI_SAME_STMT
);
1846 /* Delete original reduction stmt. */
1847 stmt_it
= gsi_for_stmt (reduc
);
1848 gsi_remove (&stmt_it
, true);
1849 release_defs (reduc
);
1853 /* Produce condition for all occurrences of ARG in PHI node. */
1856 gen_phi_arg_condition (gphi
*phi
, vec
<int> *occur
,
1857 gimple_stmt_iterator
*gsi
)
1861 tree cond
= NULL_TREE
;
1865 len
= occur
->length ();
1866 gcc_assert (len
> 0);
1867 for (i
= 0; i
< len
; i
++)
1869 e
= gimple_phi_arg_edge (phi
, (*occur
)[i
]);
1870 c
= bb_predicate (e
->src
);
1871 if (is_true_predicate (c
))
1876 c
= force_gimple_operand_gsi_1 (gsi
, unshare_expr (c
),
1877 is_gimple_condexpr
, NULL_TREE
,
1878 true, GSI_SAME_STMT
);
1879 if (cond
!= NULL_TREE
)
1881 /* Must build OR expression. */
1882 cond
= fold_or_predicates (EXPR_LOCATION (c
), c
, cond
);
1883 cond
= force_gimple_operand_gsi_1 (gsi
, unshare_expr (cond
),
1884 is_gimple_condexpr
, NULL_TREE
,
1885 true, GSI_SAME_STMT
);
1890 gcc_assert (cond
!= NULL_TREE
);
1894 /* Local valueization callback that follows all-use SSA edges. */
1897 ifcvt_follow_ssa_use_edges (tree val
)
1902 /* Replace a scalar PHI node with a COND_EXPR using COND as condition.
1903 This routine can handle PHI nodes with more than two arguments.
1906 S1: A = PHI <x1(1), x2(5)>
1908 S2: A = cond ? x1 : x2;
1910    The generated code is inserted at GSI, which points to the top of the
1911    basic block's statement list.
1912    If the PHI node has more than two arguments, a chain of conditional
1913    expressions is produced.  */
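/* An illustrative example (a sketch, not from the original sources): for
   res = PHI <a_1(bb3), b_2(bb4), c_3(bb5)> with predecessor predicates p_3,
   p_4 and p_5, the replacement is conceptually a chain such as

     _ifc_1 = p_4 ? b_2 : c_3;
     res    = p_3 ? a_1 : _ifc_1;

   i.e. each distinct argument is selected by the OR of the predicates of
   its incoming edges, with one argument left as the fall-through value.  */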
1917 predicate_scalar_phi (gphi
*phi
, gimple_stmt_iterator
*gsi
)
1919 gimple
*new_stmt
= NULL
, *reduc
, *nop_reduc
;
1920 tree rhs
, res
, arg0
, arg1
, op0
, op1
, scev
;
1922 unsigned int index0
;
1923 unsigned int max
, args_len
;
1929 res
= gimple_phi_result (phi
);
1930 if (virtual_operand_p (res
))
1933 if ((rhs
= degenerate_phi_result (phi
))
1934 || ((scev
= analyze_scalar_evolution (gimple_bb (phi
)->loop_father
,
1936 && !chrec_contains_undetermined (scev
)
1938 && (rhs
= gimple_phi_arg_def (phi
, 0))))
1940 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1942 fprintf (dump_file
, "Degenerate phi!\n");
1943 print_gimple_stmt (dump_file
, phi
, 0, TDF_SLIM
);
1945 new_stmt
= gimple_build_assign (res
, rhs
);
1946 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
1947 update_stmt (new_stmt
);
1951 bb
= gimple_bb (phi
);
1952 if (EDGE_COUNT (bb
->preds
) == 2)
1954 /* Predicate ordinary PHI node with 2 arguments. */
1955 edge first_edge
, second_edge
;
1956 basic_block true_bb
;
1957 first_edge
= EDGE_PRED (bb
, 0);
1958 second_edge
= EDGE_PRED (bb
, 1);
1959 cond
= bb_predicate (first_edge
->src
);
1960 if (TREE_CODE (cond
) == TRUTH_NOT_EXPR
)
1961 std::swap (first_edge
, second_edge
);
1962 if (EDGE_COUNT (first_edge
->src
->succs
) > 1)
1964 cond
= bb_predicate (second_edge
->src
);
1965 if (TREE_CODE (cond
) == TRUTH_NOT_EXPR
)
1966 cond
= TREE_OPERAND (cond
, 0);
1968 first_edge
= second_edge
;
1971 cond
= bb_predicate (first_edge
->src
);
1972       /* Gimplify the condition to a valid cond-expr conditional operand.  */
1973 cond
= force_gimple_operand_gsi_1 (gsi
, unshare_expr (cond
),
1974 is_gimple_condexpr
, NULL_TREE
,
1975 true, GSI_SAME_STMT
);
1976 true_bb
= first_edge
->src
;
1977 if (EDGE_PRED (bb
, 1)->src
== true_bb
)
1979 arg0
= gimple_phi_arg_def (phi
, 1);
1980 arg1
= gimple_phi_arg_def (phi
, 0);
1984 arg0
= gimple_phi_arg_def (phi
, 0);
1985 arg1
= gimple_phi_arg_def (phi
, 1);
1987 if (is_cond_scalar_reduction (phi
, &reduc
, arg0
, arg1
,
1988 &op0
, &op1
, false, &has_nop
,
1991 /* Convert reduction stmt into vectorizable form. */
1992 rhs
= convert_scalar_cond_reduction (reduc
, gsi
, cond
, op0
, op1
,
1993 true_bb
!= gimple_bb (reduc
),
1994 has_nop
, nop_reduc
);
1995 redundant_ssa_names
.safe_push (std::make_pair (res
, rhs
));
1998 /* Build new RHS using selected condition and arguments. */
1999 rhs
= fold_build_cond_expr (TREE_TYPE (res
), unshare_expr (cond
),
2001 new_stmt
= gimple_build_assign (res
, rhs
);
2002 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
2003 gimple_stmt_iterator new_gsi
= gsi_for_stmt (new_stmt
);
2004 if (fold_stmt (&new_gsi
, ifcvt_follow_ssa_use_edges
))
2006 new_stmt
= gsi_stmt (new_gsi
);
2007 update_stmt (new_stmt
);
2010 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2012 fprintf (dump_file
, "new phi replacement stmt\n");
2013 print_gimple_stmt (dump_file
, new_stmt
, 0, TDF_SLIM
);
2018   /* Create a hash map for the PHI node which maps each argument value to
2019      the vector of argument indexes having that value.  */
2021 hash_map
<tree_operand_hash
, auto_vec
<int> > phi_arg_map
;
2022 unsigned int num_args
= gimple_phi_num_args (phi
);
2024 /* Vector of different PHI argument values. */
2025 auto_vec
<tree
> args (num_args
);
2027 /* Compute phi_arg_map. */
2028 for (i
= 0; i
< num_args
; i
++)
2032 arg
= gimple_phi_arg_def (phi
, i
);
2033 if (!phi_arg_map
.get (arg
))
2034 args
.quick_push (arg
);
2035 phi_arg_map
.get_or_insert (arg
).safe_push (i
);
2038 /* Determine element with max number of occurrences. */
2041 args_len
= args
.length ();
2042 for (i
= 0; i
< args_len
; i
++)
2045 if ((len
= phi_arg_map
.get (args
[i
])->length ()) > max
)
2052   /* Put the element with the max number of occurrences at the end of ARGS.  */
2053 if (max_ind
!= -1 && max_ind
+1 != (int) args_len
)
2054 std::swap (args
[args_len
- 1], args
[max_ind
]);
2056   /* Handle one special case when the number of arguments with different
2057      values is equal to 2 and one argument has the only occurrence.  Such a
2058      PHI can be handled as if it had only 2 arguments.  */
2059 if (args_len
== 2 && phi_arg_map
.get (args
[0])->length () == 1)
2062 indexes
= phi_arg_map
.get (args
[0]);
2063 index0
= (*indexes
)[0];
2066 e
= gimple_phi_arg_edge (phi
, index0
);
2067 cond
= bb_predicate (e
->src
);
2068 if (TREE_CODE (cond
) == TRUTH_NOT_EXPR
)
2071 cond
= TREE_OPERAND (cond
, 0);
2073       /* Gimplify the condition to a valid cond-expr conditional operand.  */
2074 cond
= force_gimple_operand_gsi_1 (gsi
, unshare_expr (cond
),
2075 is_gimple_condexpr
, NULL_TREE
,
2076 true, GSI_SAME_STMT
);
2077 if (!(is_cond_scalar_reduction (phi
, &reduc
, arg0
, arg1
,
2078 &op0
, &op1
, true, &has_nop
, &nop_reduc
)))
2079 rhs
= fold_build_cond_expr (TREE_TYPE (res
), unshare_expr (cond
),
2084 /* Convert reduction stmt into vectorizable form. */
2085 rhs
= convert_scalar_cond_reduction (reduc
, gsi
, cond
, op0
, op1
,
2086 swap
,has_nop
, nop_reduc
);
2087 redundant_ssa_names
.safe_push (std::make_pair (res
, rhs
));
2089 new_stmt
= gimple_build_assign (res
, rhs
);
2090 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
2091 update_stmt (new_stmt
);
2097 tree type
= TREE_TYPE (gimple_phi_result (phi
));
2100 for (i
= 0; i
< args_len
; i
++)
2103 indexes
= phi_arg_map
.get (args
[i
]);
2104 if (i
!= args_len
- 1)
2105 lhs
= make_temp_ssa_name (type
, NULL
, "_ifc_");
2108 cond
= gen_phi_arg_condition (phi
, indexes
, gsi
);
2109 rhs
= fold_build_cond_expr (type
, unshare_expr (cond
),
2111 new_stmt
= gimple_build_assign (lhs
, rhs
);
2112 gsi_insert_before (gsi
, new_stmt
, GSI_SAME_STMT
);
2113 update_stmt (new_stmt
);
2118 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2120 fprintf (dump_file
, "new extended phi replacement stmt\n");
2121 print_gimple_stmt (dump_file
, new_stmt
, 0, TDF_SLIM
);
2125 /* Replaces in LOOP all the scalar phi nodes other than those in the
2126 LOOP->header block with conditional modify expressions. */
2129 predicate_all_scalar_phis (class loop
*loop
)
2132 unsigned int orig_loop_num_nodes
= loop
->num_nodes
;
2135 for (i
= 1; i
< orig_loop_num_nodes
; i
++)
2138 gimple_stmt_iterator gsi
;
2139 gphi_iterator phi_gsi
;
2142 if (bb
== loop
->header
)
2145 phi_gsi
= gsi_start_phis (bb
);
2146 if (gsi_end_p (phi_gsi
))
2149 gsi
= gsi_after_labels (bb
);
2150 while (!gsi_end_p (phi_gsi
))
2152 phi
= phi_gsi
.phi ();
2153 if (virtual_operand_p (gimple_phi_result (phi
)))
2154 gsi_next (&phi_gsi
);
2157 predicate_scalar_phi (phi
, &gsi
);
2158 remove_phi_node (&phi_gsi
, false);
2164 /* Insert in each basic block of LOOP the statements produced by the
2165 gimplification of the predicates. */
2168 insert_gimplified_predicates (loop_p loop
)
2172 for (i
= 0; i
< loop
->num_nodes
; i
++)
2174 basic_block bb
= ifc_bbs
[i
];
2176 if (!is_predicated (bb
))
2177 gcc_assert (bb_predicate_gimplified_stmts (bb
) == NULL
);
2178 if (!is_predicated (bb
))
2180 /* Do not insert statements for a basic block that is not
2181 predicated. Also make sure that the predicate of the
2182 basic block is set to true. */
2183 reset_bb_predicate (bb
);
2187 stmts
= bb_predicate_gimplified_stmts (bb
);
2190 if (need_to_predicate
)
2192	  /* Insert the predicate of the BB just after the label,
2193	     as the if-conversion of memory writes will use this
2194	     place.  */
2195 gimple_stmt_iterator gsi
= gsi_after_labels (bb
);
2196 gsi_insert_seq_before (&gsi
, stmts
, GSI_SAME_STMT
);
2200 /* Insert the predicate of the BB at the end of the BB
2201 as this would reduce the register pressure: the only
2202 use of this predicate will be in successor BBs. */
2203 gimple_stmt_iterator gsi
= gsi_last_bb (bb
);
2206 || stmt_ends_bb_p (gsi_stmt (gsi
)))
2207 gsi_insert_seq_before (&gsi
, stmts
, GSI_SAME_STMT
);
2209 gsi_insert_seq_after (&gsi
, stmts
, GSI_SAME_STMT
);
2212 /* Once the sequence is code generated, set it to NULL. */
2213 set_bb_predicate_gimplified_stmts (bb
, NULL
);
2218 /* Helper function for predicate_statements.  Returns the index of an
2219    existing mask if one was created for the given SIZE, and -1 otherwise.  */
2222 mask_exists (int size
, const vec
<int> &vec
)
2226 FOR_EACH_VEC_ELT (vec
, ix
, v
)
2232 /* Helper function for predicate_statements. STMT is a memory read or
2233    write and it needs to be predicated by MASK.  Return a statement
2234    that does so.  */
2237 predicate_load_or_store (gimple_stmt_iterator
*gsi
, gassign
*stmt
, tree mask
)
2241 tree lhs
= gimple_assign_lhs (stmt
);
2242 tree rhs
= gimple_assign_rhs1 (stmt
);
2243 tree ref
= TREE_CODE (lhs
) == SSA_NAME
? rhs
: lhs
;
2244 mark_addressable (ref
);
2245 tree addr
= force_gimple_operand_gsi (gsi
, build_fold_addr_expr (ref
),
2246 true, NULL_TREE
, true, GSI_SAME_STMT
);
2247 tree ptr
= build_int_cst (reference_alias_ptr_type (ref
),
2248 get_object_alignment (ref
));
2249 /* Copy points-to info if possible. */
2250 if (TREE_CODE (addr
) == SSA_NAME
&& !SSA_NAME_PTR_INFO (addr
))
2251 copy_ref_info (build2 (MEM_REF
, TREE_TYPE (ref
), addr
, ptr
),
2253 if (TREE_CODE (lhs
) == SSA_NAME
)
2256 = gimple_build_call_internal (IFN_MASK_LOAD
, 3, addr
,
2258 gimple_call_set_lhs (new_stmt
, lhs
);
2259 gimple_set_vuse (new_stmt
, gimple_vuse (stmt
));
2264 = gimple_build_call_internal (IFN_MASK_STORE
, 4, addr
, ptr
,
2266 gimple_move_vops (new_stmt
, stmt
);
2268 gimple_call_set_nothrow (new_stmt
, true);
/* STMT uses OP_LHS.  Check whether it is equivalent to:

     ... = OP_MASK ? OP_LHS : X;

   Return X if so, otherwise return null.  OP_MASK is an SSA_NAME that is
   known to have value OP_COND.  */

static tree
check_redundant_cond_expr (gimple *stmt, tree op_mask, tree op_cond,
			   tree op_lhs)
{
  gassign *assign = dyn_cast <gassign *> (stmt);
  if (!assign || gimple_assign_rhs_code (assign) != COND_EXPR)
    return NULL_TREE;

  tree use_cond = gimple_assign_rhs1 (assign);
  tree if_true = gimple_assign_rhs2 (assign);
  tree if_false = gimple_assign_rhs3 (assign);

  if ((use_cond == op_mask || operand_equal_p (use_cond, op_cond, 0))
      && if_true == op_lhs)
    return if_false;

  if (inverse_conditions_p (use_cond, op_cond) && if_false == op_lhs)
    return if_true;

  return NULL_TREE;
}
/* Return true if VALUE is available for use at STMT.  SSA_NAMES is
   the set of SSA names defined earlier in STMT's block.  */

static bool
value_available_p (gimple *stmt, hash_set<tree_ssa_name_hash> *ssa_names,
		   tree value)
{
  if (is_gimple_min_invariant (value))
    return true;

  if (TREE_CODE (value) == SSA_NAME)
    {
      if (SSA_NAME_IS_DEFAULT_DEF (value))
	return true;

      basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (value));
      basic_block use_bb = gimple_bb (stmt);
      return (def_bb == use_bb
	      ? ssa_names->contains (value)
	      : dominated_by_p (CDI_DOMINATORS, use_bb, def_bb));
    }

  return false;
}
/* Helper function for predicate_statements.  STMT is a potentially-trapping
   arithmetic operation that needs to be predicated by MASK, an SSA_NAME
   that has value COND.  Return a statement that does so.  SSA_NAMES is the
   set of SSA names defined earlier in STMT's block.  */

static gimple *
predicate_rhs_code (gassign *stmt, tree mask, tree cond,
		    hash_set<tree_ssa_name_hash> *ssa_names)
{
  tree lhs = gimple_assign_lhs (stmt);
  tree_code code = gimple_assign_rhs_code (stmt);
  unsigned int nops = gimple_num_ops (stmt);
  internal_fn cond_fn = get_conditional_internal_fn (code);

  /* Construct the arguments to the conditional internal function.  */
  auto_vec<tree, 8> args;
  args.safe_grow (nops + 1, true);
  args[0] = mask;
  for (unsigned int i = 1; i < nops; ++i)
    args[i] = gimple_op (stmt, i);
  args[nops] = NULL_TREE;

  /* Look for uses of the result to see whether they are COND_EXPRs that can
     be folded into the conditional call.  */
  imm_use_iterator imm_iter;
  gimple *use_stmt;
  FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, lhs)
    {
      tree new_else = check_redundant_cond_expr (use_stmt, mask, cond, lhs);
      if (new_else && value_available_p (stmt, ssa_names, new_else))
	{
	  if (!args[nops])
	    args[nops] = new_else;
	  if (operand_equal_p (new_else, args[nops], 0))
	    {
	      /* We have:

		   LHS = IFN_COND (MASK, ..., ELSE);
		   X = MASK ? LHS : ELSE;

		 which makes X equivalent to LHS.  */
	      tree use_lhs = gimple_assign_lhs (use_stmt);
	      redundant_ssa_names.safe_push (std::make_pair (use_lhs, lhs));
	    }
	}
    }
  if (!args[nops])
    args[nops] = targetm.preferred_else_value (cond_fn, TREE_TYPE (lhs),
					       nops - 1, &args[1]);

  /* Create and insert the call.  */
  gcall *new_stmt = gimple_build_call_internal_vec (cond_fn, args);
  gimple_call_set_lhs (new_stmt, lhs);
  gimple_call_set_nothrow (new_stmt, true);

  return new_stmt;
}
2384 /* Predicate each write to memory in LOOP.
2386 This function transforms control flow constructs containing memory
2389 | for (i = 0; i < N; i++)
2393 into the following form that does not contain control flow:
2395 | for (i = 0; i < N; i++)
2396 | A[i] = cond ? expr : A[i];
2398 The original CFG looks like this:
2405 | if (i < N) goto bb_5 else goto bb_2
2409 | cond = some_computation;
2410 | if (cond) goto bb_3 else goto bb_4
2422 insert_gimplified_predicates inserts the computation of the COND
2423 expression at the beginning of the destination basic block:
2430 | if (i < N) goto bb_5 else goto bb_2
2434 | cond = some_computation;
2435 | if (cond) goto bb_3 else goto bb_4
2439 | cond = some_computation;
2448 predicate_statements is then predicating the memory write as follows:
2455 | if (i < N) goto bb_5 else goto bb_2
2459 | if (cond) goto bb_3 else goto bb_4
2463 | cond = some_computation;
2464 | A[i] = cond ? expr : A[i];
2472 and finally combine_blocks removes the basic block boundaries making
2473 the loop vectorizable:
2477 | if (i < N) goto bb_5 else goto bb_1
2481 | cond = some_computation;
2482 | A[i] = cond ? expr : A[i];
2483 | if (i < N) goto bb_5 else goto bb_4
2492 predicate_statements (loop_p loop
)
2494 unsigned int i
, orig_loop_num_nodes
= loop
->num_nodes
;
2495 auto_vec
<int, 1> vect_sizes
;
2496 auto_vec
<tree
, 1> vect_masks
;
2497 hash_set
<tree_ssa_name_hash
> ssa_names
;
2499 for (i
= 1; i
< orig_loop_num_nodes
; i
++)
2501 gimple_stmt_iterator gsi
;
2502 basic_block bb
= ifc_bbs
[i
];
2503 tree cond
= bb_predicate (bb
);
2507 if (is_true_predicate (cond
))
2511 if (TREE_CODE (cond
) == TRUTH_NOT_EXPR
)
2514 cond
= TREE_OPERAND (cond
, 0);
2517 vect_sizes
.truncate (0);
2518 vect_masks
.truncate (0);
2520 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
);)
2522 gassign
*stmt
= dyn_cast
<gassign
*> (gsi_stmt (gsi
));
2525 else if (is_false_predicate (cond
)
2526 && gimple_vdef (stmt
))
2528 unlink_stmt_vdef (stmt
);
2529 gsi_remove (&gsi
, true);
2530 release_defs (stmt
);
2533 else if (gimple_plf (stmt
, GF_PLF_2
))
2535 tree lhs
= gimple_assign_lhs (stmt
);
2538 gimple_seq stmts
= NULL
;
2539 machine_mode mode
= TYPE_MODE (TREE_TYPE (lhs
));
2540 /* We checked before setting GF_PLF_2 that an equivalent
2541 integer mode exists. */
2542 int bitsize
= GET_MODE_BITSIZE (mode
).to_constant ();
2543 if (!vect_sizes
.is_empty ()
2544 && (index
= mask_exists (bitsize
, vect_sizes
)) != -1)
2545 /* Use created mask. */
2546 mask
= vect_masks
[index
];
2549 if (COMPARISON_CLASS_P (cond
))
2550 mask
= gimple_build (&stmts
, TREE_CODE (cond
),
2552 TREE_OPERAND (cond
, 0),
2553 TREE_OPERAND (cond
, 1));
2560 = constant_boolean_node (true, TREE_TYPE (mask
));
2561 mask
= gimple_build (&stmts
, BIT_XOR_EXPR
,
2562 TREE_TYPE (mask
), mask
, true_val
);
2564 gsi_insert_seq_before (&gsi
, stmts
, GSI_SAME_STMT
);
2566 /* Save mask and its size for further use. */
2567 vect_sizes
.safe_push (bitsize
);
2568 vect_masks
.safe_push (mask
);
2570 if (gimple_assign_single_p (stmt
))
2571 new_stmt
= predicate_load_or_store (&gsi
, stmt
, mask
);
2573 new_stmt
= predicate_rhs_code (stmt
, mask
, cond
, &ssa_names
);
2575 gsi_replace (&gsi
, new_stmt
, true);
2577 else if (INTEGRAL_TYPE_P (TREE_TYPE (gimple_assign_lhs (stmt
)))
2578 && TYPE_OVERFLOW_UNDEFINED
2579 (TREE_TYPE (gimple_assign_lhs (stmt
)))
2580 && arith_code_with_undefined_signed_overflow
2581 (gimple_assign_rhs_code (stmt
)))
2583 gsi_remove (&gsi
, true);
2584 gsi_insert_seq_before (&gsi
, rewrite_to_defined_overflow (stmt
),
2587 else if (gimple_vdef (stmt
))
2589 tree lhs
= gimple_assign_lhs (stmt
);
2590 tree rhs
= gimple_assign_rhs1 (stmt
);
2591 tree type
= TREE_TYPE (lhs
);
2593 lhs
= ifc_temp_var (type
, unshare_expr (lhs
), &gsi
);
2594 rhs
= ifc_temp_var (type
, unshare_expr (rhs
), &gsi
);
2596 std::swap (lhs
, rhs
);
2597 cond
= force_gimple_operand_gsi_1 (&gsi
, unshare_expr (cond
),
2598 is_gimple_condexpr
, NULL_TREE
,
2599 true, GSI_SAME_STMT
);
2600 rhs
= fold_build_cond_expr (type
, unshare_expr (cond
), rhs
, lhs
);
2601 gimple_assign_set_rhs1 (stmt
, ifc_temp_var (type
, rhs
, &gsi
));
2604 tree lhs
= gimple_get_lhs (gsi_stmt (gsi
));
2605 if (lhs
&& TREE_CODE (lhs
) == SSA_NAME
)
2606 ssa_names
.add (lhs
);
/* Remove all GIMPLE_CONDs and GIMPLE_LABELs of all the basic blocks
   other than the exit and latch of the LOOP.  Also resets the
   GIMPLE_DEBUG information.  */

static void
remove_conditions_and_labels (loop_p loop)
{
  gimple_stmt_iterator gsi;
  unsigned int i;

  for (i = 0; i < loop->num_nodes; i++)
    {
      basic_block bb = ifc_bbs[i];

      if (bb_with_exit_edge_p (loop, bb)
	  || bb == loop->latch)
	continue;

      for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); )
	switch (gimple_code (gsi_stmt (gsi)))
	  {
	  case GIMPLE_COND:
	  case GIMPLE_LABEL:
	    gsi_remove (&gsi, true);
	    break;

	  case GIMPLE_DEBUG:
	    /* ??? Should there be conditional GIMPLE_DEBUG_BINDs?  */
	    if (gimple_debug_bind_p (gsi_stmt (gsi)))
	      {
		gimple_debug_bind_reset_value (gsi_stmt (gsi));
		update_stmt (gsi_stmt (gsi));
	      }
	    gsi_next (&gsi);
	    break;

	  default:
	    gsi_next (&gsi);
	  }
    }
}
2655 /* Combine all the basic blocks from LOOP into one or two super basic
2656 blocks. Replace PHI nodes with conditional modify expressions. */
2659 combine_blocks (class loop
*loop
)
2661 basic_block bb
, exit_bb
, merge_target_bb
;
2662 unsigned int orig_loop_num_nodes
= loop
->num_nodes
;
2667 remove_conditions_and_labels (loop
);
2668 insert_gimplified_predicates (loop
);
2669 predicate_all_scalar_phis (loop
);
2671 if (need_to_predicate
|| need_to_rewrite_undefined
)
2672 predicate_statements (loop
);
2674 /* Merge basic blocks. */
2676 bool *predicated
= XNEWVEC (bool, orig_loop_num_nodes
);
2677 for (i
= 0; i
< orig_loop_num_nodes
; i
++)
2680 predicated
[i
] = !is_true_predicate (bb_predicate (bb
));
2681 free_bb_predicate (bb
);
2682 if (bb_with_exit_edge_p (loop
, bb
))
2684 gcc_assert (exit_bb
== NULL
);
2688 gcc_assert (exit_bb
!= loop
->latch
);
2690 merge_target_bb
= loop
->header
;
2692 /* Get at the virtual def valid for uses starting at the first block
2693 we merge into the header. Without a virtual PHI the loop has the
2694 same virtual use on all stmts. */
2695 gphi
*vphi
= get_virtual_phi (loop
->header
);
2696 tree last_vdef
= NULL_TREE
;
2699 last_vdef
= gimple_phi_result (vphi
);
2700 for (gimple_stmt_iterator gsi
= gsi_start_bb (loop
->header
);
2701 ! gsi_end_p (gsi
); gsi_next (&gsi
))
2702 if (gimple_vdef (gsi_stmt (gsi
)))
2703 last_vdef
= gimple_vdef (gsi_stmt (gsi
));
2705 for (i
= 1; i
< orig_loop_num_nodes
; i
++)
2707 gimple_stmt_iterator gsi
;
2708 gimple_stmt_iterator last
;
2712 if (bb
== exit_bb
|| bb
== loop
->latch
)
      /* We release virtual PHIs late because we have to propagate them
	 out using the current VUSE.  The def might be the one used
	 after the loop.  */
2718 vphi
= get_virtual_phi (bb
);
	  /* When there's just loads inside the loop a stray virtual
	     PHI merging the uses can appear; update last_vdef from
	     the PHI argument in that case.  */
2725 last_vdef
= gimple_phi_arg_def (vphi
, 0);
2726 imm_use_iterator iter
;
2727 use_operand_p use_p
;
2729 FOR_EACH_IMM_USE_STMT (use_stmt
, iter
, gimple_phi_result (vphi
))
2731 FOR_EACH_IMM_USE_ON_STMT (use_p
, iter
)
2732 SET_USE (use_p
, last_vdef
);
2734 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_phi_result (vphi
)))
2735 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (last_vdef
) = 1;
2736 gsi
= gsi_for_stmt (vphi
);
2737 remove_phi_node (&gsi
, true);
      /* Make the statements members of loop->header and clear range info
	 from all statements in BB, which is now no longer executed
	 conditionally on a predicate we could have derived that range
	 info from.  */
2743 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
2745 gimple
*stmt
= gsi_stmt (gsi
);
2746 gimple_set_bb (stmt
, merge_target_bb
);
2747 /* Update virtual operands. */
2750 use_operand_p use_p
= ssa_vuse_operand (stmt
);
2752 && USE_FROM_PTR (use_p
) != last_vdef
)
2753 SET_USE (use_p
, last_vdef
);
2754 if (gimple_vdef (stmt
))
2755 last_vdef
= gimple_vdef (stmt
);
	  /* If this is the first load we arrive at, update last_vdef
	     so we handle stray PHIs correctly.  */
2760 last_vdef
= gimple_vuse (stmt
);
2765 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, i
, SSA_OP_DEF
)
2766 reset_flow_sensitive_info (op
);
2770 /* Update stmt list. */
2771 last
= gsi_last_bb (merge_target_bb
);
2772 gsi_insert_seq_after_without_update (&last
, bb_seq (bb
), GSI_NEW_STMT
);
2773 set_bb_seq (bb
, NULL
);
2776 /* Fixup virtual operands in the exit block. */
2778 && exit_bb
!= loop
->header
)
      /* We release virtual PHIs late because we have to propagate them
	 out using the current VUSE.  The def might be the one used
	 after the loop.  */
2783 vphi
= get_virtual_phi (exit_bb
);
	  /* When there's just loads inside the loop a stray virtual
	     PHI merging the uses can appear; update last_vdef from
	     the PHI argument in that case.  */
2790 last_vdef
= gimple_phi_arg_def (vphi
, 0);
2791 imm_use_iterator iter
;
2792 use_operand_p use_p
;
2794 FOR_EACH_IMM_USE_STMT (use_stmt
, iter
, gimple_phi_result (vphi
))
2796 FOR_EACH_IMM_USE_ON_STMT (use_p
, iter
)
2797 SET_USE (use_p
, last_vdef
);
2799 if (SSA_NAME_OCCURS_IN_ABNORMAL_PHI (gimple_phi_result (vphi
)))
2800 SSA_NAME_OCCURS_IN_ABNORMAL_PHI (last_vdef
) = 1;
2801 gimple_stmt_iterator gsi
= gsi_for_stmt (vphi
);
2802 remove_phi_node (&gsi
, true);
2806 /* Now remove all the edges in the loop, except for those from the exit
2807 block and delete the blocks we elided. */
2808 for (i
= 1; i
< orig_loop_num_nodes
; i
++)
2812 for (ei
= ei_start (bb
->preds
); (e
= ei_safe_edge (ei
));)
2814 if (e
->src
== exit_bb
)
2820 for (i
= 1; i
< orig_loop_num_nodes
; i
++)
2824 if (bb
== exit_bb
|| bb
== loop
->latch
)
2827 delete_basic_block (bb
);
2830 /* Re-connect the exit block. */
2831 if (exit_bb
!= NULL
)
2833 if (exit_bb
!= loop
->header
)
2835 /* Connect this node to loop header. */
2836 make_single_succ_edge (loop
->header
, exit_bb
, EDGE_FALLTHRU
);
2837 set_immediate_dominator (CDI_DOMINATORS
, exit_bb
, loop
->header
);
2840 /* Redirect non-exit edges to loop->latch. */
2841 FOR_EACH_EDGE (e
, ei
, exit_bb
->succs
)
2843 if (!loop_exit_edge_p (loop
, e
))
2844 redirect_edge_and_branch (e
, loop
->latch
);
2846 set_immediate_dominator (CDI_DOMINATORS
, loop
->latch
, exit_bb
);
2850 /* If the loop does not have an exit, reconnect header and latch. */
2851 make_edge (loop
->header
, loop
->latch
, EDGE_FALLTHRU
);
2852 set_immediate_dominator (CDI_DOMINATORS
, loop
->latch
, loop
->header
);
  /* If possible, merge the loop header into the block with the exit edge.
     This reduces the number of basic blocks to two, to please the
     vectorizer, which handles only loops with two nodes.  */
2859 && exit_bb
!= loop
->header
)
2861 if (can_merge_blocks_p (loop
->header
, exit_bb
))
2862 merge_blocks (loop
->header
, exit_bb
);
/* Version LOOP before if-converting it; the original loop
   will be if-converted, the new copy of the loop will not,
   and the LOOP_VECTORIZED internal call will be guarding which
   loop to execute.  The vectorizer pass will fold this
   internal call into either true or false.

   Note that this function intentionally invalidates profile.  Both edges
   out of LOOP_VECTORIZED must have 100% probability so the profile remains
   consistent after the condition is folded in the vectorizer.  */

static class loop *
version_loop_for_if_conversion (class loop *loop, vec<gimple *> *preds)
{
  basic_block cond_bb;
  tree cond = make_ssa_name (boolean_type_node);
  class loop *new_loop;
  gimple *g;
  gimple_stmt_iterator gsi;
  unsigned int save_length;

  g = gimple_build_call_internal (IFN_LOOP_VECTORIZED, 2,
				  build_int_cst (integer_type_node, loop->num),
				  integer_zero_node);
  gimple_call_set_lhs (g, cond);

  /* Save BB->aux around loop_version as that uses the same field.  */
  save_length = loop->inner ? loop->inner->num_nodes : loop->num_nodes;
  void **saved_preds = XALLOCAVEC (void *, save_length);
  for (unsigned i = 0; i < save_length; i++)
    saved_preds[i] = ifc_bbs[i]->aux;

  initialize_original_copy_tables ();
  /* At this point we invalidate profile consistency until IFN_LOOP_VECTORIZED
     is re-merged in the vectorizer.  */
  new_loop = loop_version (loop, cond, &cond_bb,
			   profile_probability::always (),
			   profile_probability::always (),
			   profile_probability::always (),
			   profile_probability::always (), true);
  free_original_copy_tables ();

  for (unsigned i = 0; i < save_length; i++)
    ifc_bbs[i]->aux = saved_preds[i];

  if (new_loop == NULL)
    return NULL;

  new_loop->dont_vectorize = true;
  new_loop->force_vectorize = false;
  gsi = gsi_last_bb (cond_bb);
  gimple_call_set_arg (g, 1, build_int_cst (integer_type_node, new_loop->num));
  if (preds)
    preds->safe_push (g);
  gsi_insert_before (&gsi, g, GSI_SAME_STMT);
  update_ssa (TODO_update_ssa);
  return new_loop;
}
/* Return true when LOOP satisfies the following conditions that will
   allow it to be recognized by the vectorizer for outer-loop
   vectorization:
    - The loop is not the root node of the loop tree.
    - The loop has exactly one inner loop.
    - The loop has a single exit.
    - The loop header has a single successor, which is the inner
      loop header.
    - Each of the inner and outer loop latches have a single
      predecessor.
    - The loop exit block has a single predecessor, which is the
      inner loop's exit block.  */

static bool
versionable_outer_loop_p (class loop *loop)
{
  if (!loop_outer (loop)
      || loop->dont_vectorize
      || !loop->inner
      || loop->inner->next
      || !single_exit (loop)
      || !single_succ_p (loop->header)
      || single_succ (loop->header) != loop->inner->header
      || !single_pred_p (loop->latch)
      || !single_pred_p (loop->inner->latch))
    return false;

  basic_block outer_exit = single_pred (loop->latch);
  basic_block inner_exit = single_pred (loop->inner->latch);

  if (!single_pred_p (outer_exit) || single_pred (outer_exit) != inner_exit)
    return false;

  if (dump_file)
    fprintf (dump_file, "Found vectorizable outer loop for versioning\n");

  return true;
}
2967 /* Performs splitting of critical edges. Skip splitting and return false
2968 if LOOP will not be converted because:
2970 - LOOP is not well formed.
2971 - LOOP has PHI with more than MAX_PHI_ARG_NUM arguments.
2973 Last restriction is valid only if AGGRESSIVE_IF_CONV is false. */
2976 ifcvt_split_critical_edges (class loop
*loop
, bool aggressive_if_conv
)
2980 unsigned int num
= loop
->num_nodes
;
2985 auto_vec
<edge
> critical_edges
;
2987 /* Loop is not well formed. */
2988 if (num
<= 2 || loop
->inner
|| !single_exit (loop
))
2991 body
= get_loop_body (loop
);
2992 for (i
= 0; i
< num
; i
++)
2995 if (!aggressive_if_conv
2997 && EDGE_COUNT (bb
->preds
) > MAX_PHI_ARG_NUM
)
2999 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3001 "BB %d has complicated PHI with more than %u args.\n",
3002 bb
->index
, MAX_PHI_ARG_NUM
);
3007 if (bb
== loop
->latch
|| bb_with_exit_edge_p (loop
, bb
))
3010 stmt
= last_stmt (bb
);
      /* Skip basic blocks that do not end with a conditional branch.  */
3012 if (!stmt
|| gimple_code (stmt
) != GIMPLE_COND
)
3015 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
3016 if (EDGE_CRITICAL_P (e
) && e
->dest
->loop_father
== loop
)
3017 critical_edges
.safe_push (e
);
3021 while (critical_edges
.length () > 0)
3023 e
= critical_edges
.pop ();
3024 /* Don't split if bb can be predicated along non-critical edge. */
3025 if (EDGE_COUNT (e
->dest
->preds
) > 2 || all_preds_critical_p (e
->dest
))
/* Delete redundant statements produced by predication which would
   prevent loop vectorization.  */
3036 ifcvt_local_dce (class loop
*loop
)
3041 gimple_stmt_iterator gsi
;
3042 auto_vec
<gimple
*> worklist
;
3043 enum gimple_code code
;
3044 use_operand_p use_p
;
3045 imm_use_iterator imm_iter
;
3047 /* The loop has a single BB only. */
3048 basic_block bb
= loop
->header
;
3049 tree latch_vdef
= NULL_TREE
;
3051 worklist
.create (64);
3052 /* Consider all phi as live statements. */
3053 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
3055 phi
= gsi_stmt (gsi
);
3056 gimple_set_plf (phi
, GF_PLF_2
, true);
3057 worklist
.safe_push (phi
);
3058 if (virtual_operand_p (gimple_phi_result (phi
)))
3059 latch_vdef
= PHI_ARG_DEF_FROM_EDGE (phi
, loop_latch_edge (loop
));
3061 /* Consider load/store statements, CALL and COND as live. */
3062 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
3064 stmt
= gsi_stmt (gsi
);
3065 if (is_gimple_debug (stmt
))
3067 gimple_set_plf (stmt
, GF_PLF_2
, true);
3070 if (gimple_store_p (stmt
) || gimple_assign_load_p (stmt
))
3072 gimple_set_plf (stmt
, GF_PLF_2
, true);
3073 worklist
.safe_push (stmt
);
3076 code
= gimple_code (stmt
);
3077 if (code
== GIMPLE_COND
|| code
== GIMPLE_CALL
)
3079 gimple_set_plf (stmt
, GF_PLF_2
, true);
3080 worklist
.safe_push (stmt
);
3083 gimple_set_plf (stmt
, GF_PLF_2
, false);
3085 if (code
== GIMPLE_ASSIGN
)
3087 tree lhs
= gimple_assign_lhs (stmt
);
3088 FOR_EACH_IMM_USE_FAST (use_p
, imm_iter
, lhs
)
3090 stmt1
= USE_STMT (use_p
);
3091 if (!is_gimple_debug (stmt1
) && gimple_bb (stmt1
) != bb
)
3093 gimple_set_plf (stmt
, GF_PLF_2
, true);
3094 worklist
.safe_push (stmt
);
3100 /* Propagate liveness through arguments of live stmt. */
3101 while (worklist
.length () > 0)
3104 use_operand_p use_p
;
3107 stmt
= worklist
.pop ();
3108 FOR_EACH_PHI_OR_STMT_USE (use_p
, stmt
, iter
, SSA_OP_USE
)
3110 use
= USE_FROM_PTR (use_p
);
3111 if (TREE_CODE (use
) != SSA_NAME
)
3113 stmt1
= SSA_NAME_DEF_STMT (use
);
3114 if (gimple_bb (stmt1
) != bb
|| gimple_plf (stmt1
, GF_PLF_2
))
3116 gimple_set_plf (stmt1
, GF_PLF_2
, true);
3117 worklist
.safe_push (stmt1
);
3120 /* Delete dead statements. */
3121 gsi
= gsi_last_bb (bb
);
3122 while (!gsi_end_p (gsi
))
3124 gimple_stmt_iterator gsiprev
= gsi
;
3125 gsi_prev (&gsiprev
);
3126 stmt
= gsi_stmt (gsi
);
3127 if (gimple_store_p (stmt
))
3129 tree lhs
= gimple_get_lhs (stmt
);
3131 ao_ref_init (&write
, lhs
);
3133 if (dse_classify_store (&write
, stmt
, false, NULL
, NULL
, latch_vdef
)
3135 delete_dead_or_redundant_assignment (&gsi
, "dead");
3140 if (gimple_plf (stmt
, GF_PLF_2
))
3145 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
3147 fprintf (dump_file
, "Delete dead stmt in bb#%d\n", bb
->index
);
3148 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
3150 gsi_remove (&gsi
, true);
3151 release_defs (stmt
);
/* If-convert LOOP when it is legal.  For the moment this pass has no
   profitability analysis.  Returns non-zero todo flags when something
   changed.  */
3161 tree_if_conversion (class loop
*loop
, vec
<gimple
*> *preds
)
3163 unsigned int todo
= 0;
3164 bool aggressive_if_conv
;
3171 need_to_predicate
= false;
3172 need_to_rewrite_undefined
= false;
3173 any_complicated_phi
= false;
3175 /* Apply more aggressive if-conversion when loop or its outer loop were
3176 marked with simd pragma. When that's the case, we try to if-convert
3177 loop containing PHIs with more than MAX_PHI_ARG_NUM arguments. */
3178 aggressive_if_conv
= loop
->force_vectorize
;
3179 if (!aggressive_if_conv
)
3181 class loop
*outer_loop
= loop_outer (loop
);
3182 if (outer_loop
&& outer_loop
->force_vectorize
)
3183 aggressive_if_conv
= true;
3186 if (!ifcvt_split_critical_edges (loop
, aggressive_if_conv
))
3189 if (!if_convertible_loop_p (loop
)
3190 || !dbg_cnt (if_conversion_tree
))
3193 if ((need_to_predicate
|| any_complicated_phi
)
3194 && ((!flag_tree_loop_vectorize
&& !loop
->force_vectorize
)
3195 || loop
->dont_vectorize
))
3198 /* Since we have no cost model, always version loops unless the user
3199 specified -ftree-loop-if-convert or unless versioning is required.
3200 Either version this loop, or if the pattern is right for outer-loop
3201 vectorization, version the outer loop. In the latter case we will
3202 still if-convert the original inner loop. */
3203 if (need_to_predicate
3204 || any_complicated_phi
3205 || flag_tree_loop_if_convert
!= 1)
3208 = (versionable_outer_loop_p (loop_outer (loop
))
3209 ? loop_outer (loop
) : loop
);
3210 class loop
*nloop
= version_loop_for_if_conversion (vloop
, preds
);
3215 /* If versionable_outer_loop_p decided to version the
3216 outer loop, version also the inner loop of the non-vectorized
3217 loop copy. So we transform:
3221 if (LOOP_VECTORIZED (1, 3))
3227 loop3 (copy of loop1)
3228 if (LOOP_VECTORIZED (4, 5))
3229 loop4 (copy of loop2)
3231 loop5 (copy of loop4) */
3232 gcc_assert (nloop
->inner
&& nloop
->inner
->next
== NULL
);
3233 rloop
= nloop
->inner
;
3237 /* Now all statements are if-convertible. Combine all the basic
3238 blocks into one huge basic block doing the if-conversion
3240 combine_blocks (loop
);
3242 /* Perform local CSE, this esp. helps the vectorizer analysis if loads
3243 and stores are involved. CSE only the loop body, not the entry
3244 PHIs, those are to be kept in sync with the non-if-converted copy.
3245 ??? We'll still keep dead stores though. */
3246 exit_bbs
= BITMAP_ALLOC (NULL
);
3247 bitmap_set_bit (exit_bbs
, single_exit (loop
)->dest
->index
);
3248 bitmap_set_bit (exit_bbs
, loop
->latch
->index
);
3250 std::pair
<tree
, tree
> *name_pair
;
3251 unsigned ssa_names_idx
;
3252 FOR_EACH_VEC_ELT (redundant_ssa_names
, ssa_names_idx
, name_pair
)
3253 replace_uses_by (name_pair
->first
, name_pair
->second
);
3254 redundant_ssa_names
.release ();
3256 todo
|= do_rpo_vn (cfun
, loop_preheader_edge (loop
), exit_bbs
);
3258 /* Delete dead predicate computations. */
3259 ifcvt_local_dce (loop
);
3260 BITMAP_FREE (exit_bbs
);
3262 todo
|= TODO_cleanup_cfg
;
3269 for (i
= 0; i
< loop
->num_nodes
; i
++)
3270 free_bb_predicate (ifc_bbs
[i
]);
/* Tree if-conversion pass management.  */

namespace {

const pass_data pass_data_if_conversion =
{
  GIMPLE_PASS, /* type */
  "ifcvt", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_LOOP_IFCVT, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_if_conversion : public gimple_opt_pass
{
public:
  pass_if_conversion (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_if_conversion, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *);

}; // class pass_if_conversion

bool
pass_if_conversion::gate (function *fun)
{
  return (((flag_tree_loop_vectorize || fun->has_force_vectorize_loops)
	   && flag_tree_loop_if_convert != 0)
	  || flag_tree_loop_if_convert == 1);
}

unsigned int
pass_if_conversion::execute (function *fun)
{
  unsigned int todo = 0;
*fun
)
3327 if (number_of_loops (fun
) <= 1)
3330 auto_vec
<gimple
*> preds
;
3331 for (auto loop
: loops_list (cfun
, 0))
3332 if (flag_tree_loop_if_convert
== 1
3333 || ((flag_tree_loop_vectorize
|| loop
->force_vectorize
)
3334 && !loop
->dont_vectorize
))
3335 todo
|= tree_if_conversion (loop
, &preds
);
3339 free_numbers_of_iterations_estimates (fun
);
3346 FOR_EACH_BB_FN (bb
, fun
)
3347 gcc_assert (!bb
->aux
);
3350 /* Perform IL update now, it might elide some loops. */
3351 if (todo
& TODO_cleanup_cfg
)
3353 cleanup_tree_cfg ();
3354 if (need_ssa_update_p (fun
))
3355 todo
|= TODO_update_ssa
;
3357 if (todo
& TODO_update_ssa_any
)
3358 update_ssa (todo
& TODO_update_ssa_any
);
  /* If if-conversion elided the loop, fall back to the original one.  */
3361 for (unsigned i
= 0; i
< preds
.length (); ++i
)
3363 gimple
*g
= preds
[i
];
3366 unsigned ifcvt_loop
= tree_to_uhwi (gimple_call_arg (g
, 0));
3367 if (!get_loop (fun
, ifcvt_loop
))
3370 fprintf (dump_file
, "If-converted loop vanished\n");
3371 fold_loop_internal_call (g
, boolean_false_node
);
} // anon namespace

gimple_opt_pass *
make_pass_if_conversion (gcc::context *ctxt)
{
  return new pass_if_conversion (ctxt);
}