/* Loop invariant motion.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008 Free Software
   Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "cfgloop.h"
#include "domwalk.h"
#include "params.h"
#include "tree-pass.h"
#include "flags.h"
#include "real.h"
#include "hashtab.h"
#include "tree-affine.h"
#include "pointer-set.h"
#include "tree-ssa-propagate.h"
/* TODO:  Support for predicated code motion.  I.e.

   while (1)
     {
       if (cond)
	 a = inv;
     }

   where COND and INV are invariants, but evaluating INV may trap or be
   invalid for some other reason if !COND.  This may be transformed to

   if (cond)
     inv = inv;
   while (1)
     {
       if (cond)
	 a = inv;
     }
*/
/* A type for the list of statements that have to be moved in order to be able
   to hoist an invariant computation.  */

struct depend
{
  tree stmt;
  struct depend *next;
};

/* The auxiliary data kept for each statement.  */

struct lim_aux_data
{
  struct loop *max_loop;	/* The outermost loop in that the statement
				   is invariant.  */

  struct loop *tgt_loop;	/* The loop out of that we want to move the
				   computation.  */

  struct loop *always_executed_in;
				/* The outermost loop for that we are sure
				   the statement is executed if the loop
				   is entered.  */

  unsigned cost;		/* Cost of the computation performed by the
				   statement.  */

  struct depend *depends;	/* List of statements that must be also hoisted
				   out of the loop when this statement is
				   hoisted; i.e. those that define the operands
				   of the statement and are inside of the
				   MAX_LOOP loop.  */
};

#define LIM_DATA(STMT) (TREE_CODE (STMT) == PHI_NODE \
			? NULL \
			: (struct lim_aux_data *) (stmt_ann (STMT)->common.aux))
/* Description of a memory reference location.  */

typedef struct mem_ref_loc
{
  tree *ref;			/* The reference itself.  */
  tree stmt;			/* The statement in that it occurs.  */
} *mem_ref_loc_p;

DEF_VEC_P(mem_ref_loc_p);
DEF_VEC_ALLOC_P(mem_ref_loc_p, heap);
/* The list of memory reference locations in a loop.  */

typedef struct mem_ref_locs
{
  VEC (mem_ref_loc_p, heap) *locs;
} *mem_ref_locs_p;

DEF_VEC_P(mem_ref_locs_p);
DEF_VEC_ALLOC_P(mem_ref_locs_p, heap);
/* Description of a memory reference.  */

typedef struct mem_ref
{
  tree mem;			/* The memory itself.  */
  unsigned id;			/* ID assigned to the memory reference
				   (its index in memory_accesses.refs_list)  */
  hashval_t hash;		/* Its hash value.  */
  bitmap stored;		/* The set of loops in that this memory location
				   is stored to.  */
  VEC (mem_ref_locs_p, heap) *accesses_in_loop;
				/* The locations of the accesses.  Vector
				   indexed by the loop number.  */
  bitmap vops;			/* Vops corresponding to this memory
				   location.  */

  /* The following sets are computed on demand.  We keep both set and
     its complement, so that we know whether the information was
     already computed or not.  */
  bitmap indep_loop;		/* The set of loops in that the memory
				   reference is independent, meaning:
				   If it is stored in the loop, this store
				     is independent on all other loads and
				     stores.
				   If it is only loaded, then it is independent
				     on all stores in the loop.  */
  bitmap dep_loop;		/* The complement of INDEP_LOOP.  */

  bitmap indep_ref;		/* The set of memory references on that
				   this reference is independent.  */
  bitmap dep_ref;		/* The complement of DEP_REF.  */
} *mem_ref_p;

DEF_VEC_P(mem_ref_p);
DEF_VEC_ALLOC_P(mem_ref_p, heap);

DEF_VEC_P(bitmap);
DEF_VEC_ALLOC_P(bitmap, heap);

DEF_VEC_P(htab_t);
DEF_VEC_ALLOC_P(htab_t, heap);
/* Description of memory accesses in loops.  */

static struct
{
  /* The hash table of memory references accessed in loops.  */
  htab_t refs;

  /* The list of memory references.  */
  VEC (mem_ref_p, heap) *refs_list;

  /* The set of memory references accessed in each loop.  */
  VEC (bitmap, heap) *refs_in_loop;

  /* The set of memory references accessed in each loop, including
     subloops.  */
  VEC (bitmap, heap) *all_refs_in_loop;

  /* The set of virtual operands clobbered in a given loop.  */
  VEC (bitmap, heap) *clobbered_vops;

  /* Map from the pair (loop, virtual operand) to the set of refs that
     touch the virtual operand in the loop.  */
  VEC (htab_t, heap) *vop_ref_map;

  /* Cache for expanding memory addresses.  */
  struct pointer_map_t *ttae_cache;
} memory_accesses;
static bool ref_indep_loop_p (struct loop *, mem_ref_p);

/* Minimum cost of an expensive expression.  */
#define LIM_EXPENSIVE ((unsigned) PARAM_VALUE (PARAM_LIM_EXPENSIVE))

/* The outermost loop for that execution of the header guarantees that the
   block will be executed.  */
#define ALWAYS_EXECUTED_IN(BB) ((struct loop *) (BB)->aux)
/* Calls CBCK for each index in memory reference ADDR_P.  There are two
   kinds of situations handled; in each of these cases, the memory reference
   and DATA are passed to the callback:

   Access to an array: ARRAY_{RANGE_}REF (base, index).  In this case we also
   pass the pointer to the index to the callback.

   Pointer dereference: INDIRECT_REF (addr).  In this case we also pass the
   pointer to addr to the callback.

   If the callback returns false, the whole search stops and false is returned.
   Otherwise the function returns true after traversing through the whole
   reference *ADDR_P.  */
static bool
for_each_index (tree *addr_p, bool (*cbck) (tree, tree *, void *), void *data)
{
  tree *nxt, *idx;

  for (; ; addr_p = nxt)
    {
      switch (TREE_CODE (*addr_p))
	{
	case SSA_NAME:
	  return cbck (*addr_p, addr_p, data);

	case MISALIGNED_INDIRECT_REF:
	case ALIGN_INDIRECT_REF:
	case INDIRECT_REF:
	  nxt = &TREE_OPERAND (*addr_p, 0);
	  return cbck (*addr_p, nxt, data);

	case BIT_FIELD_REF:
	case VIEW_CONVERT_EXPR:
	case REALPART_EXPR:
	case IMAGPART_EXPR:
	  nxt = &TREE_OPERAND (*addr_p, 0);
	  break;

	case COMPONENT_REF:
	  /* If the component has varying offset, it behaves like index
	     as well.  */
	  idx = &TREE_OPERAND (*addr_p, 2);
	  if (*idx
	      && !cbck (*addr_p, idx, data))
	    return false;

	  nxt = &TREE_OPERAND (*addr_p, 0);
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  nxt = &TREE_OPERAND (*addr_p, 0);
	  if (!cbck (*addr_p, &TREE_OPERAND (*addr_p, 1), data))
	    return false;
	  break;

	case VAR_DECL:
	case PARM_DECL:
	case RESULT_DECL:
	case STRING_CST:
	case INTEGER_CST:
	case REAL_CST:
	case COMPLEX_CST:
	case VECTOR_CST:
	case CONSTRUCTOR:
	  return true;

	case ADDR_EXPR:
	  gcc_assert (is_gimple_min_invariant (*addr_p));
	  return true;

	case TARGET_MEM_REF:
	  idx = &TMR_BASE (*addr_p);
	  if (*idx
	      && !cbck (*addr_p, idx, data))
	    return false;
	  idx = &TMR_INDEX (*addr_p);
	  if (*idx
	      && !cbck (*addr_p, idx, data))
	    return false;
	  return true;

	default:
	  gcc_unreachable ();
	}
    }
}
/* The possibilities of statement movement.  */
enum move_pos
  {
    MOVE_IMPOSSIBLE,		/* No movement -- side effect expression.  */
    MOVE_PRESERVE_EXECUTION,	/* Must not cause the non-executed statement
				   become executed -- memory accesses, ... */
    MOVE_POSSIBLE		/* Unlimited movement.  */
  };

/* If it is possible to hoist the statement STMT unconditionally,
   returns MOVE_POSSIBLE.
   If it is possible to hoist the statement STMT, but we must avoid making
   it executed if it would not be executed in the original program (e.g.
   because it may trap), return MOVE_PRESERVE_EXECUTION.
   Otherwise return MOVE_IMPOSSIBLE.  */

enum move_pos
movement_possibility (tree stmt)
{
  tree lhs, rhs;

  if (flag_unswitch_loops
      && TREE_CODE (stmt) == COND_EXPR)
    {
      /* If we perform unswitching, force the operands of the invariant
	 condition to be moved out of the loop.  */
      return MOVE_POSSIBLE;
    }

  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
    return MOVE_IMPOSSIBLE;

  if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_VIRTUAL_DEFS))
    return MOVE_IMPOSSIBLE;

  if (stmt_ends_bb_p (stmt))
    return MOVE_IMPOSSIBLE;

  if (stmt_ann (stmt)->has_volatile_ops)
    return MOVE_IMPOSSIBLE;

  lhs = GIMPLE_STMT_OPERAND (stmt, 0);
  if (TREE_CODE (lhs) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
    return MOVE_IMPOSSIBLE;

  rhs = GIMPLE_STMT_OPERAND (stmt, 1);

  if (TREE_SIDE_EFFECTS (rhs)
      || tree_could_throw_p (rhs))
    return MOVE_IMPOSSIBLE;

  if (TREE_CODE (lhs) != SSA_NAME
      || tree_could_trap_p (rhs))
    return MOVE_PRESERVE_EXECUTION;

  if (get_call_expr_in (stmt))
    {
      /* While pure or const call is guaranteed to have no side effects, we
	 cannot move it arbitrarily.  Consider code like

	 char *s = something ();

	 while (1)
	   {
	     if (s)
	       t = strlen (s);
	     else
	       t = 0;
	   }

	 Here the strlen call cannot be moved out of the loop, even though
	 s is invariant.  In addition to possibly creating a call with
	 invalid arguments, moving out a function call that is not executed
	 may cause performance regressions in case the call is costly and
	 not executed at all.  */
      return MOVE_PRESERVE_EXECUTION;
    }

  return MOVE_POSSIBLE;
}
/* Suppose that operand DEF is used inside the LOOP.  Returns the outermost
   loop to that we could move the expression using DEF if it did not have
   other operands, i.e. the outermost loop enclosing LOOP in that the value
   of DEF is invariant.  */

static struct loop *
outermost_invariant_loop (tree def, struct loop *loop)
{
  tree def_stmt;
  basic_block def_bb;
  struct loop *max_loop;

  if (TREE_CODE (def) != SSA_NAME)
    return superloop_at_depth (loop, 1);

  def_stmt = SSA_NAME_DEF_STMT (def);
  def_bb = bb_for_stmt (def_stmt);
  if (!def_bb)
    return superloop_at_depth (loop, 1);

  max_loop = find_common_loop (loop, def_bb->loop_father);

  if (LIM_DATA (def_stmt) && LIM_DATA (def_stmt)->max_loop)
    max_loop = find_common_loop (max_loop,
				 loop_outer (LIM_DATA (def_stmt)->max_loop));
  if (max_loop == loop)
    return NULL;
  max_loop = superloop_at_depth (loop, loop_depth (max_loop) + 1);

  return max_loop;
}
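/* Illustration only: for a loop nest

     for (i = ...)	<- loop 1
       for (j = ...)	<- loop 2
	 use (n_1);

   where n_1 is defined before loop 1, outermost_invariant_loop (n_1, loop 2)
   returns loop 1, the outermost loop in which the value of n_1 does not
   change; if n_1 were defined inside loop 2, the function would return
   NULL.  */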
/* Returns the outermost superloop of LOOP in that the expression EXPR is
   invariant.  */

static struct loop *
outermost_invariant_loop_expr (tree expr, struct loop *loop)
{
  enum tree_code_class codeclass = TREE_CODE_CLASS (TREE_CODE (expr));
  unsigned i, nops;
  struct loop *max_loop = superloop_at_depth (loop, 1), *aloop;

  if (TREE_CODE (expr) == SSA_NAME
      || TREE_CODE (expr) == INTEGER_CST
      || is_gimple_min_invariant (expr))
    return outermost_invariant_loop (expr, loop);

  if (codeclass != tcc_unary
      && codeclass != tcc_binary
      && codeclass != tcc_expression
      && codeclass != tcc_vl_exp
      && codeclass != tcc_comparison)
    return NULL;

  nops = TREE_OPERAND_LENGTH (expr);
  for (i = 0; i < nops; i++)
    {
      aloop = outermost_invariant_loop_expr (TREE_OPERAND (expr, i), loop);
      if (!aloop)
	return NULL;

      if (flow_loop_nested_p (max_loop, aloop))
	max_loop = aloop;
    }

  return max_loop;
}
/* DATA is a structure containing information associated with a statement
   inside LOOP.  DEF is one of the operands of this statement.

   Find the outermost loop enclosing LOOP in that value of DEF is invariant
   and record this in DATA->max_loop field.  If DEF itself is defined inside
   this loop as well (i.e. we need to hoist it out of the loop if we want
   to hoist the statement represented by DATA), record the statement in that
   DEF is defined to the DATA->depends list.  Additionally if ADD_COST is true,
   add the cost of the computation of DEF to the DATA->cost.

   If DEF is not invariant in LOOP, return false.  Otherwise return TRUE.  */

static bool
add_dependency (tree def, struct lim_aux_data *data, struct loop *loop,
		bool add_cost)
{
  tree def_stmt = SSA_NAME_DEF_STMT (def);
  basic_block def_bb = bb_for_stmt (def_stmt);
  struct loop *max_loop;
  struct depend *dep;

  if (!def_bb)
    return true;

  max_loop = outermost_invariant_loop (def, loop);
  if (!max_loop)
    return false;

  if (flow_loop_nested_p (data->max_loop, max_loop))
    data->max_loop = max_loop;

  if (!LIM_DATA (def_stmt))
    return true;

  if (add_cost
      /* Only add the cost if the statement defining DEF is inside LOOP,
	 i.e. if it is likely that by moving the invariants dependent
	 on it, we will be able to avoid creating a new register for
	 it (since it will be only used in these dependent invariants).  */
      && def_bb->loop_father == loop)
    data->cost += LIM_DATA (def_stmt)->cost;

  dep = XNEW (struct depend);
  dep->stmt = def_stmt;
  dep->next = data->depends;
  data->depends = dep;

  return true;
}
/* Returns an estimate for a cost of statement STMT.  TODO -- the values here
   are just ad-hoc constants.  The estimates should be based on target-specific
   values.  */

static unsigned
stmt_cost (tree stmt)
{
  tree rhs;
  unsigned cost = 1;

  /* Always try to create possibilities for unswitching.  */
  if (TREE_CODE (stmt) == COND_EXPR)
    return LIM_EXPENSIVE;

  rhs = GENERIC_TREE_OPERAND (stmt, 1);

  /* Hoisting memory references out should almost surely be a win.  */
  if (stmt_references_memory_p (stmt))
    cost += 20;

  switch (TREE_CODE (rhs))
    {
    case CALL_EXPR:
      /* We should be hoisting calls if possible.  */

      /* Unless the call is a builtin_constant_p; this always folds to a
	 constant, so moving it is useless.  */
      rhs = get_callee_fndecl (rhs);
      if (DECL_BUILT_IN_CLASS (rhs) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (rhs) == BUILT_IN_CONSTANT_P)
	return 0;
      cost += 20;
      break;

    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case TRUNC_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case RDIV_EXPR:
      /* Division and multiplication are usually expensive.  */
      cost += 20;
      break;

    default:
      break;
    }

  return cost;
}
/* Finds the outermost loop between OUTER and LOOP in that the memory reference
   REF is independent.  If REF is not independent in LOOP, NULL is returned
   instead.  */

static struct loop *
outermost_indep_loop (struct loop *outer, struct loop *loop, mem_ref_p ref)
{
  struct loop *aloop;

  if (bitmap_bit_p (ref->stored, loop->num))
    return NULL;

  for (aloop = outer;
       aloop != loop;
       aloop = superloop_at_depth (loop, loop_depth (aloop) + 1))
    if (!bitmap_bit_p (ref->stored, aloop->num)
	&& ref_indep_loop_p (aloop, ref))
      return aloop;

  if (ref_indep_loop_p (loop, ref))
    return loop;
  else
    return NULL;
}
/* If there is a simple load or store to a memory reference in STMT, returns
   the location of the memory reference, and sets IS_STORE according to whether
   it is a store or load.  Otherwise, returns NULL.  */

static tree *
simple_mem_ref_in_stmt (tree stmt, bool *is_store)
{
  tree *lhs, *rhs;

  /* Recognize MEM = (SSA_NAME | invariant) and SSA_NAME = MEM patterns.  */
  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
    return NULL;

  lhs = &GIMPLE_STMT_OPERAND (stmt, 0);
  rhs = &GIMPLE_STMT_OPERAND (stmt, 1);

  if (TREE_CODE (*lhs) == SSA_NAME)
    {
      if (!is_gimple_addressable (*rhs))
	return NULL;

      *is_store = false;
      return rhs;
    }
  else if (TREE_CODE (*rhs) == SSA_NAME
	   || is_gimple_min_invariant (*rhs))
    {
      *is_store = true;
      return lhs;
    }
  else
    return NULL;
}
/* Returns the memory reference contained in STMT.  */

static mem_ref_p
mem_ref_in_stmt (tree stmt)
{
  bool store;
  tree *mem = simple_mem_ref_in_stmt (stmt, &store);
  hashval_t hash;
  mem_ref_p ref;

  if (!mem)
    return NULL;
  gcc_assert (!store);

  hash = iterative_hash_expr (*mem, 0);
  ref = (mem_ref_p) htab_find_with_hash (memory_accesses.refs, *mem, hash);

  gcc_assert (ref != NULL);
  return ref;
}
/* Determine the outermost loop to that it is possible to hoist a statement
   STMT and store it to LIM_DATA (STMT)->max_loop.  To do this we determine
   the outermost loop in that the value computed by STMT is invariant.
   If MUST_PRESERVE_EXEC is true, additionally choose such a loop that
   we preserve the fact whether STMT is executed.  It also fills other related
   information to LIM_DATA (STMT).

   The function returns false if STMT cannot be hoisted outside of the loop it
   is defined in, and true otherwise.  */

static bool
determine_max_movement (tree stmt, bool must_preserve_exec)
{
  basic_block bb = bb_for_stmt (stmt);
  struct loop *loop = bb->loop_father;
  struct loop *level;
  struct lim_aux_data *lim_data = LIM_DATA (stmt);
  tree val;
  ssa_op_iter iter;

  if (must_preserve_exec)
    level = ALWAYS_EXECUTED_IN (bb);
  else
    level = superloop_at_depth (loop, 1);
  lim_data->max_loop = level;

  FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_USE)
    if (!add_dependency (val, lim_data, loop, true))
      return false;

  if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_VIRTUAL_USES))
    {
      mem_ref_p ref = mem_ref_in_stmt (stmt);

      if (ref)
	{
	  lim_data->max_loop
	    = outermost_indep_loop (lim_data->max_loop, loop, ref);
	  if (!lim_data->max_loop)
	    return false;
	}
      else
	{
	  FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_VIRTUAL_USES)
	    {
	      if (!add_dependency (val, lim_data, loop, false))
		return false;
	    }
	}
    }

  lim_data->cost += stmt_cost (stmt);

  return true;
}
/* Suppose that some statement in ORIG_LOOP is hoisted to the loop LEVEL,
   and that one of the operands of this statement is computed by STMT.
   Ensure that STMT (together with all the statements that define its
   operands) is hoisted at least out of the loop LEVEL.  */

static void
set_level (tree stmt, struct loop *orig_loop, struct loop *level)
{
  struct loop *stmt_loop = bb_for_stmt (stmt)->loop_father;
  struct depend *dep;

  stmt_loop = find_common_loop (orig_loop, stmt_loop);
  if (LIM_DATA (stmt) && LIM_DATA (stmt)->tgt_loop)
    stmt_loop = find_common_loop (stmt_loop,
				  loop_outer (LIM_DATA (stmt)->tgt_loop));
  if (flow_loop_nested_p (stmt_loop, level))
    return;

  gcc_assert (LIM_DATA (stmt));
  gcc_assert (level == LIM_DATA (stmt)->max_loop
	      || flow_loop_nested_p (LIM_DATA (stmt)->max_loop, level));

  LIM_DATA (stmt)->tgt_loop = level;
  for (dep = LIM_DATA (stmt)->depends; dep; dep = dep->next)
    set_level (dep->stmt, orig_loop, level);
}
/* Determines an outermost loop from that we want to hoist the statement STMT.
   For now we choose the outermost possible loop.  TODO -- use profiling
   information to set it more sanely.  */

static void
set_profitable_level (tree stmt)
{
  set_level (stmt, bb_for_stmt (stmt)->loop_father, LIM_DATA (stmt)->max_loop);
}
/* Returns true if STMT is not a pure call.  */

static bool
nonpure_call_p (tree stmt)
{
  tree call = get_call_expr_in (stmt);

  if (!call)
    return false;

  return TREE_SIDE_EFFECTS (call) != 0;
}
/* Releases the memory occupied by DATA.  */

static void
free_lim_aux_data (struct lim_aux_data *data)
{
  struct depend *dep, *next;

  for (dep = data->depends; dep; dep = next)
    {
      next = dep->next;
      free (dep);
    }
  free (data);
}
/* Rewrite a/b to a*(1/b).  Return the invariant stmt to process.  */

static tree
rewrite_reciprocal (block_stmt_iterator *bsi)
{
  tree stmt, lhs, rhs, stmt1, stmt2, var, name, tmp;

  stmt = bsi_stmt (*bsi);
  lhs = GENERIC_TREE_OPERAND (stmt, 0);
  rhs = GENERIC_TREE_OPERAND (stmt, 1);

  /* stmt must be GIMPLE_MODIFY_STMT.  */
  var = create_tmp_var (TREE_TYPE (rhs), "reciptmp");
  add_referenced_var (var);

  tmp = build2 (RDIV_EXPR, TREE_TYPE (rhs),
		build_real (TREE_TYPE (rhs), dconst1),
		TREE_OPERAND (rhs, 1));
  stmt1 = build_gimple_modify_stmt (var, tmp);
  name = make_ssa_name (var, stmt1);
  GIMPLE_STMT_OPERAND (stmt1, 0) = name;
  tmp = build2 (MULT_EXPR, TREE_TYPE (rhs),
		name, TREE_OPERAND (rhs, 0));
  stmt2 = build_gimple_modify_stmt (lhs, tmp);

  /* Replace division stmt with reciprocal and multiply stmts.
     The multiply stmt is not invariant, so update iterator
     and avoid rescanning.  */
  bsi_replace (bsi, stmt1, true);
  bsi_insert_after (bsi, stmt2, BSI_NEW_STMT);
  SSA_NAME_DEF_STMT (lhs) = stmt2;

  /* Continue processing with invariant reciprocal statement.  */
  return stmt1;
}
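/* Illustration (hypothetical source, not part of the pass): with
   -funsafe-math-optimizations and an invariant divisor C, a loop body
   statement

     a[i] = b[i] / c;

   is rewritten into

     reciptmp = 1.0 / c;	<- invariant, later hoisted out of the loop
     a[i] = b[i] * reciptmp;	<- stays in the loop

   trading a division per iteration for one division outside the loop and a
   multiplication inside it.  */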
/* Check if the pattern at *BSI is a bittest of the form
   (A >> B) & 1 != 0 and in this case rewrite it to A & (1 << B) != 0.  */

static tree
rewrite_bittest (block_stmt_iterator *bsi)
{
  tree stmt, lhs, rhs, var, name, use_stmt, stmt1, stmt2, t;
  use_operand_p use;

  stmt = bsi_stmt (*bsi);
  lhs = GENERIC_TREE_OPERAND (stmt, 0);
  rhs = GENERIC_TREE_OPERAND (stmt, 1);

  /* Verify that the single use of lhs is a comparison against zero.  */
  if (TREE_CODE (lhs) != SSA_NAME
      || !single_imm_use (lhs, &use, &use_stmt)
      || TREE_CODE (use_stmt) != COND_EXPR)
    return stmt;
  t = COND_EXPR_COND (use_stmt);
  if (TREE_OPERAND (t, 0) != lhs
      || (TREE_CODE (t) != NE_EXPR
	  && TREE_CODE (t) != EQ_EXPR)
      || !integer_zerop (TREE_OPERAND (t, 1)))
    return stmt;

  /* Get at the operands of the shift.  The rhs is TMP1 & 1.  */
  stmt1 = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));
  if (TREE_CODE (stmt1) != GIMPLE_MODIFY_STMT)
    return stmt;

  /* There is a conversion in between possibly inserted by fold.  */
  t = GIMPLE_STMT_OPERAND (stmt1, 1);
  if (CONVERT_EXPR_P (t))
    {
      t = TREE_OPERAND (t, 0);
      if (TREE_CODE (t) != SSA_NAME
	  || !has_single_use (t))
	return stmt;
      stmt1 = SSA_NAME_DEF_STMT (t);
      if (TREE_CODE (stmt1) != GIMPLE_MODIFY_STMT)
	return stmt;
      t = GIMPLE_STMT_OPERAND (stmt1, 1);
    }

  /* Verify that B is loop invariant but A is not.  Verify that with
     all the stmt walking we are still in the same loop.  */
  if (TREE_CODE (t) == RSHIFT_EXPR
      && loop_containing_stmt (stmt1) == loop_containing_stmt (stmt)
      && outermost_invariant_loop_expr (TREE_OPERAND (t, 1),
					loop_containing_stmt (stmt1)) != NULL
      && outermost_invariant_loop_expr (TREE_OPERAND (t, 0),
					loop_containing_stmt (stmt1)) == NULL)
    {
      tree a = TREE_OPERAND (t, 0);
      tree b = TREE_OPERAND (t, 1);

      /* 1 << B */
      var = create_tmp_var (TREE_TYPE (a), "shifttmp");
      add_referenced_var (var);
      t = fold_build2 (LSHIFT_EXPR, TREE_TYPE (a),
		       build_int_cst (TREE_TYPE (a), 1), b);
      stmt1 = build_gimple_modify_stmt (var, t);
      name = make_ssa_name (var, stmt1);
      GIMPLE_STMT_OPERAND (stmt1, 0) = name;

      /* A & (1 << B) */
      t = fold_build2 (BIT_AND_EXPR, TREE_TYPE (a), a, name);
      stmt2 = build_gimple_modify_stmt (var, t);
      name = make_ssa_name (var, stmt2);
      GIMPLE_STMT_OPERAND (stmt2, 0) = name;

      /* Replace the SSA_NAME we compare against zero.  Adjust
	 the type of zero accordingly.  */
      SET_USE (use, name);
      TREE_OPERAND (COND_EXPR_COND (use_stmt), 1)
	= build_int_cst_type (TREE_TYPE (name), 0);

      bsi_insert_before (bsi, stmt1, BSI_SAME_STMT);
      bsi_replace (bsi, stmt2, true);

      return stmt1;
    }

  return stmt;
}
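/* Illustration (hypothetical source): for a loop-invariant shift count B and
   a varying A, the test

     if ((a >> b) & 1)

   is rewritten into

     shifttmp = 1 << b;		<- invariant, hoisted out of the loop
     if (a & shifttmp)		<- only a cheap AND remains in the loop

   so the per-iteration shift is replaced by a single hoisted one.  */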
/* Determine the outermost loops in that statements in basic block BB are
   invariant, and record them to the LIM_DATA associated with the statements.
   Callback for walk_dominator_tree.  */

static void
determine_invariantness_stmt (struct dom_walk_data *dw_data ATTRIBUTE_UNUSED,
			      basic_block bb)
{
  enum move_pos pos;
  block_stmt_iterator bsi;
  tree stmt, rhs;
  bool maybe_never = ALWAYS_EXECUTED_IN (bb) == NULL;
  struct loop *outermost = ALWAYS_EXECUTED_IN (bb);

  if (!loop_outer (bb->loop_father))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Basic block %d (loop %d -- depth %d):\n\n",
	     bb->index, bb->loop_father->num, loop_depth (bb->loop_father));

  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
    {
      stmt = bsi_stmt (bsi);

      pos = movement_possibility (stmt);
      if (pos == MOVE_IMPOSSIBLE)
	{
	  if (nonpure_call_p (stmt))
	    {
	      maybe_never = true;
	      outermost = NULL;
	    }
	  /* Make sure to note always_executed_in for stores to make
	     store-motion work.  */
	  else if (stmt_makes_single_store (stmt))
	    {
	      stmt_ann (stmt)->common.aux
		= xcalloc (1, sizeof (struct lim_aux_data));
	      LIM_DATA (stmt)->always_executed_in = outermost;
	    }
	  continue;
	}

      if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
	{
	  rhs = GIMPLE_STMT_OPERAND (stmt, 1);

	  /* If divisor is invariant, convert a/b to a*(1/b), allowing reciprocal
	     to be hoisted out of loop, saving expensive divide.  */
	  if (pos == MOVE_POSSIBLE
	      && TREE_CODE (rhs) == RDIV_EXPR
	      && flag_unsafe_math_optimizations
	      && !flag_trapping_math
	      && outermost_invariant_loop_expr (TREE_OPERAND (rhs, 1),
						loop_containing_stmt (stmt)) != NULL
	      && outermost_invariant_loop_expr (rhs,
						loop_containing_stmt (stmt)) == NULL)
	    stmt = rewrite_reciprocal (&bsi);

	  /* If the shift count is invariant, convert (A >> B) & 1 to
	     A & (1 << B) allowing the bit mask to be hoisted out of the loop
	     saving an expensive shift.  */
	  if (pos == MOVE_POSSIBLE
	      && TREE_CODE (rhs) == BIT_AND_EXPR
	      && integer_onep (TREE_OPERAND (rhs, 1))
	      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
	      && has_single_use (TREE_OPERAND (rhs, 0)))
	    stmt = rewrite_bittest (&bsi);
	}

      stmt_ann (stmt)->common.aux = xcalloc (1, sizeof (struct lim_aux_data));
      LIM_DATA (stmt)->always_executed_in = outermost;

      if (maybe_never && pos == MOVE_PRESERVE_EXECUTION)
	continue;

      if (!determine_max_movement (stmt, pos == MOVE_PRESERVE_EXECUTION))
	{
	  LIM_DATA (stmt)->max_loop = NULL;
	  continue;
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  print_generic_stmt_indented (dump_file, stmt, 0, 2);
	  fprintf (dump_file, "  invariant up to level %d, cost %d.\n\n",
		   loop_depth (LIM_DATA (stmt)->max_loop),
		   LIM_DATA (stmt)->cost);
	}

      if (LIM_DATA (stmt)->cost >= LIM_EXPENSIVE)
	set_profitable_level (stmt);
    }
}
/* For each statement determines the outermost loop in that it is invariant,
   statements on whose motion it depends and the cost of the computation.
   This information is stored to the LIM_DATA structure associated with
   each statement.  */

static void
determine_invariantness (void)
{
  struct dom_walk_data walk_data;

  memset (&walk_data, 0, sizeof (struct dom_walk_data));
  walk_data.dom_direction = CDI_DOMINATORS;
  walk_data.before_dom_children_before_stmts = determine_invariantness_stmt;

  init_walk_dominator_tree (&walk_data);
  walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
  fini_walk_dominator_tree (&walk_data);
}
/* Hoist the statements in basic block BB out of the loops prescribed by
   data stored in LIM_DATA structures associated with each statement.  Callback
   for walk_dominator_tree.  */

static void
move_computations_stmt (struct dom_walk_data *dw_data ATTRIBUTE_UNUSED,
			basic_block bb)
{
  struct loop *level;
  block_stmt_iterator bsi;
  tree stmt;
  unsigned cost = 0;

  if (!loop_outer (bb->loop_father))
    return;

  for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
    {
      stmt = bsi_stmt (bsi);

      if (!LIM_DATA (stmt))
	{
	  bsi_next (&bsi);
	  continue;
	}

      cost = LIM_DATA (stmt)->cost;
      level = LIM_DATA (stmt)->tgt_loop;
      free_lim_aux_data (LIM_DATA (stmt));
      stmt_ann (stmt)->common.aux = NULL;

      if (!level)
	{
	  bsi_next (&bsi);
	  continue;
	}

      /* We do not really want to move conditionals out of the loop; we just
	 placed it here to force its operands to be moved if necessary.  */
      if (TREE_CODE (stmt) == COND_EXPR)
	{
	  bsi_next (&bsi);
	  continue;
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Moving statement\n");
	  print_generic_stmt (dump_file, stmt, 0);
	  fprintf (dump_file, "(cost %u) out of loop %d.\n\n",
		   cost, level->num);
	}

      mark_virtual_ops_for_renaming (stmt);
      bsi_insert_on_edge (loop_preheader_edge (level), stmt);
      bsi_remove (&bsi, false);
    }
}
/* Hoist the statements out of the loops prescribed by data stored in
   LIM_DATA structures associated with each statement.  */

static void
move_computations (void)
{
  struct dom_walk_data walk_data;

  memset (&walk_data, 0, sizeof (struct dom_walk_data));
  walk_data.dom_direction = CDI_DOMINATORS;
  walk_data.before_dom_children_before_stmts = move_computations_stmt;

  init_walk_dominator_tree (&walk_data);
  walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
  fini_walk_dominator_tree (&walk_data);

  bsi_commit_edge_inserts ();
  if (need_ssa_update_p ())
    rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
}
/* Checks whether the statement defining variable *INDEX can be hoisted
   out of the loop passed in DATA.  Callback for for_each_index.  */

static bool
may_move_till (tree ref, tree *index, void *data)
{
  struct loop *loop = (struct loop *) data, *max_loop;

  /* If REF is an array reference, check also that the step and the lower
     bound is invariant in LOOP.  */
  if (TREE_CODE (ref) == ARRAY_REF)
    {
      tree step = array_ref_element_size (ref);
      tree lbound = array_ref_low_bound (ref);

      max_loop = outermost_invariant_loop_expr (step, loop);
      if (!max_loop)
	return false;

      max_loop = outermost_invariant_loop_expr (lbound, loop);
      if (!max_loop)
	return false;
    }

  max_loop = outermost_invariant_loop (*index, loop);
  if (!max_loop)
    return false;

  return true;
}
/* Forces statements defining (invariant) SSA names in expression EXPR to be
   moved out of the LOOP.  ORIG_LOOP is the loop in that EXPR is used.  */

static void
force_move_till_expr (tree expr, struct loop *orig_loop, struct loop *loop)
{
  enum tree_code_class codeclass = TREE_CODE_CLASS (TREE_CODE (expr));
  unsigned i, nops;

  if (TREE_CODE (expr) == SSA_NAME)
    {
      tree stmt = SSA_NAME_DEF_STMT (expr);
      if (IS_EMPTY_STMT (stmt))
	return;

      set_level (stmt, orig_loop, loop);
      return;
    }

  if (codeclass != tcc_unary
      && codeclass != tcc_binary
      && codeclass != tcc_expression
      && codeclass != tcc_vl_exp
      && codeclass != tcc_comparison)
    return;

  nops = TREE_OPERAND_LENGTH (expr);
  for (i = 0; i < nops; i++)
    force_move_till_expr (TREE_OPERAND (expr, i), orig_loop, loop);
}
/* Forces statement defining invariants in REF (and *INDEX) to be moved out of
   the LOOP.  The reference REF is used in the loop ORIG_LOOP.  Callback for
   for_each_index.  */

struct fmt_data
{
  struct loop *loop;
  struct loop *orig_loop;
};

static bool
force_move_till (tree ref, tree *index, void *data)
{
  tree stmt;
  struct fmt_data *fmt_data = (struct fmt_data *) data;

  if (TREE_CODE (ref) == ARRAY_REF)
    {
      tree step = array_ref_element_size (ref);
      tree lbound = array_ref_low_bound (ref);

      force_move_till_expr (step, fmt_data->orig_loop, fmt_data->loop);
      force_move_till_expr (lbound, fmt_data->orig_loop, fmt_data->loop);
    }

  if (TREE_CODE (*index) != SSA_NAME)
    return true;

  stmt = SSA_NAME_DEF_STMT (*index);
  if (IS_EMPTY_STMT (stmt))
    return true;

  set_level (stmt, fmt_data->orig_loop, fmt_data->loop);

  return true;
}
/* A hash function for struct mem_ref object OBJ.  */

static hashval_t
memref_hash (const void *obj)
{
  const struct mem_ref *const mem = (const struct mem_ref *) obj;

  return mem->hash;
}
/* An equality function for struct mem_ref object OBJ1 with
   memory reference OBJ2.  */

static int
memref_eq (const void *obj1, const void *obj2)
{
  const struct mem_ref *const mem1 = (const struct mem_ref *) obj1;

  return operand_equal_p (mem1->mem, (const_tree) obj2, 0);
}
/* Releases list of memory reference locations ACCS.  */

static void
free_mem_ref_locs (mem_ref_locs_p accs)
{
  unsigned i;
  mem_ref_loc_p loc;

  if (!accs)
    return;

  for (i = 0; VEC_iterate (mem_ref_loc_p, accs->locs, i, loc); i++)
    free (loc);
  VEC_free (mem_ref_loc_p, heap, accs->locs);
  free (accs);
}
/* A function to free the mem_ref object OBJ.  */

static void
memref_free (void *obj)
{
  struct mem_ref *const mem = (struct mem_ref *) obj;
  unsigned i;
  mem_ref_locs_p accs;

  BITMAP_FREE (mem->stored);
  BITMAP_FREE (mem->indep_loop);
  BITMAP_FREE (mem->dep_loop);
  BITMAP_FREE (mem->indep_ref);
  BITMAP_FREE (mem->dep_ref);

  for (i = 0; VEC_iterate (mem_ref_locs_p, mem->accesses_in_loop, i, accs); i++)
    free_mem_ref_locs (accs);
  VEC_free (mem_ref_locs_p, heap, mem->accesses_in_loop);

  BITMAP_FREE (mem->vops);
  free (mem);
}
/* Allocates and returns a memory reference description for MEM whose hash
   value is HASH and id is ID.  */

static mem_ref_p
mem_ref_alloc (tree mem, unsigned hash, unsigned id)
{
  mem_ref_p ref = XNEW (struct mem_ref);

  ref->mem = mem;
  ref->id = id;
  ref->hash = hash;
  ref->stored = BITMAP_ALLOC (NULL);
  ref->indep_loop = BITMAP_ALLOC (NULL);
  ref->dep_loop = BITMAP_ALLOC (NULL);
  ref->indep_ref = BITMAP_ALLOC (NULL);
  ref->dep_ref = BITMAP_ALLOC (NULL);
  ref->accesses_in_loop = NULL;
  ref->vops = BITMAP_ALLOC (NULL);

  return ref;
}
/* Allocates and returns the new list of locations.  */

static mem_ref_locs_p
mem_ref_locs_alloc (void)
{
  mem_ref_locs_p accs = XNEW (struct mem_ref_locs);
  accs->locs = NULL;
  return accs;
}
/* Records memory reference location *LOC in LOOP to the memory reference
   description REF.  The reference occurs in statement STMT.  */

static void
record_mem_ref_loc (mem_ref_p ref, struct loop *loop, tree stmt, tree *loc)
{
  mem_ref_loc_p aref = XNEW (struct mem_ref_loc);
  mem_ref_locs_p accs;
  bitmap ril = VEC_index (bitmap, memory_accesses.refs_in_loop, loop->num);

  if (VEC_length (mem_ref_locs_p, ref->accesses_in_loop)
      <= (unsigned) loop->num)
    VEC_safe_grow_cleared (mem_ref_locs_p, heap, ref->accesses_in_loop,
			   loop->num + 1);
  accs = VEC_index (mem_ref_locs_p, ref->accesses_in_loop, loop->num);
  if (!accs)
    {
      accs = mem_ref_locs_alloc ();
      VEC_replace (mem_ref_locs_p, ref->accesses_in_loop, loop->num, accs);
    }

  aref->stmt = stmt;
  aref->ref = loc;

  VEC_safe_push (mem_ref_loc_p, heap, accs->locs, aref);
  bitmap_set_bit (ril, ref->id);
}
/* Marks reference REF as stored in LOOP.  */

static void
mark_ref_stored (mem_ref_p ref, struct loop *loop)
{
  for (;
       loop != current_loops->tree_root
       && !bitmap_bit_p (ref->stored, loop->num);
       loop = loop_outer (loop))
    bitmap_set_bit (ref->stored, loop->num);
}
/* Gathers memory references in statement STMT in LOOP, storing the
   information about them in the memory_accesses structure.  Marks
   the vops accessed through unrecognized statements there as
   clobbered.  */

static void
gather_mem_refs_stmt (struct loop *loop, tree stmt)
{
  tree *mem = NULL;
  hashval_t hash;
  PTR *slot;
  mem_ref_p ref;
  ssa_op_iter oi;
  tree vname;
  bool is_stored;
  bitmap clvops;
  unsigned id;

  if (ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
    return;

  mem = simple_mem_ref_in_stmt (stmt, &is_stored);
  if (!mem)
    goto fail;

  hash = iterative_hash_expr (*mem, 0);
  slot = htab_find_slot_with_hash (memory_accesses.refs, *mem, hash, INSERT);

  if (*slot)
    ref = (mem_ref_p) *slot;
  else
    {
      id = VEC_length (mem_ref_p, memory_accesses.refs_list);
      ref = mem_ref_alloc (*mem, hash, id);
      VEC_safe_push (mem_ref_p, heap, memory_accesses.refs_list, ref);
      *slot = ref;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Memory reference %u: ", id);
	  print_generic_expr (dump_file, ref->mem, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}
    }
  if (is_stored)
    mark_ref_stored (ref, loop);

  FOR_EACH_SSA_TREE_OPERAND (vname, stmt, oi, SSA_OP_VIRTUAL_USES)
    bitmap_set_bit (ref->vops, DECL_UID (SSA_NAME_VAR (vname)));
  record_mem_ref_loc (ref, loop, stmt, mem);
  return;

fail:
  clvops = VEC_index (bitmap, memory_accesses.clobbered_vops, loop->num);
  FOR_EACH_SSA_TREE_OPERAND (vname, stmt, oi, SSA_OP_VIRTUAL_USES)
    bitmap_set_bit (clvops, DECL_UID (SSA_NAME_VAR (vname)));
}
/* Gathers memory references in loops.  */

static void
gather_mem_refs_in_loops (void)
{
  block_stmt_iterator bsi;
  basic_block bb;
  struct loop *loop;
  loop_iterator li;
  bitmap clvi, clvo;
  bitmap lrefs, alrefs, alrefso;

  FOR_EACH_BB (bb)
    {
      loop = bb->loop_father;
      if (loop == current_loops->tree_root)
	continue;

      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	gather_mem_refs_stmt (loop, bsi_stmt (bsi));
    }

  /* Propagate the information about clobbered vops and accessed memory
     references up the loop hierarchy.  */
  FOR_EACH_LOOP (li, loop, LI_FROM_INNERMOST)
    {
      lrefs = VEC_index (bitmap, memory_accesses.refs_in_loop, loop->num);
      alrefs = VEC_index (bitmap, memory_accesses.all_refs_in_loop, loop->num);
      bitmap_ior_into (alrefs, lrefs);

      if (loop_outer (loop) == current_loops->tree_root)
	continue;

      clvi = VEC_index (bitmap, memory_accesses.clobbered_vops, loop->num);
      clvo = VEC_index (bitmap, memory_accesses.clobbered_vops,
			loop_outer (loop)->num);
      bitmap_ior_into (clvo, clvi);

      alrefso = VEC_index (bitmap, memory_accesses.all_refs_in_loop,
			   loop_outer (loop)->num);
      bitmap_ior_into (alrefso, alrefs);
    }
}
/* Element of the hash table that maps vops to memory references.  */

struct vop_to_refs_elt
{
  /* DECL_UID of the vop.  */
  unsigned uid;

  /* List of the all references.  */
  bitmap refs_all;

  /* List of stored references.  */
  bitmap refs_stored;
};
/* A hash function for struct vop_to_refs_elt object OBJ.  */

static hashval_t
vtoe_hash (const void *obj)
{
  const struct vop_to_refs_elt *const vtoe =
    (const struct vop_to_refs_elt *) obj;

  return vtoe->uid;
}
/* An equality function for struct vop_to_refs_elt object OBJ1 with
   uid of a vop OBJ2.  */

static int
vtoe_eq (const void *obj1, const void *obj2)
{
  const struct vop_to_refs_elt *const vtoe =
    (const struct vop_to_refs_elt *) obj1;
  const unsigned *const uid = (const unsigned *) obj2;

  return vtoe->uid == *uid;
}
/* A function to free the struct vop_to_refs_elt object.  */

static void
vtoe_free (void *obj)
{
  struct vop_to_refs_elt *const vtoe =
    (struct vop_to_refs_elt *) obj;

  BITMAP_FREE (vtoe->refs_all);
  BITMAP_FREE (vtoe->refs_stored);
  free (vtoe);
}
/* Records REF to hashtable VOP_TO_REFS for the index VOP.  STORED is true
   if the reference REF is stored.  */

static void
record_vop_access (htab_t vop_to_refs, unsigned vop, unsigned ref, bool stored)
{
  void **slot = htab_find_slot_with_hash (vop_to_refs, &vop, vop, INSERT);
  struct vop_to_refs_elt *vtoe;

  if (!*slot)
    {
      vtoe = XNEW (struct vop_to_refs_elt);
      vtoe->uid = vop;
      vtoe->refs_all = BITMAP_ALLOC (NULL);
      vtoe->refs_stored = BITMAP_ALLOC (NULL);
      *slot = vtoe;
    }
  else
    vtoe = (struct vop_to_refs_elt *) *slot;

  bitmap_set_bit (vtoe->refs_all, ref);
  if (stored)
    bitmap_set_bit (vtoe->refs_stored, ref);
}
/* Returns the set of references that access VOP according to the table
   VOP_TO_REFS.  */

static bitmap
get_vop_accesses (htab_t vop_to_refs, unsigned vop)
{
  struct vop_to_refs_elt *const vtoe =
    (struct vop_to_refs_elt *) htab_find_with_hash (vop_to_refs, &vop, vop);
  return vtoe->refs_all;
}
/* Returns the set of stores that access VOP according to the table
   VOP_TO_REFS.  */

static bitmap
get_vop_stores (htab_t vop_to_refs, unsigned vop)
{
  struct vop_to_refs_elt *const vtoe =
    (struct vop_to_refs_elt *) htab_find_with_hash (vop_to_refs, &vop, vop);
  return vtoe->refs_stored;
}
/* Adds REF to mapping from virtual operands to references in LOOP.  */

static void
add_vop_ref_mapping (struct loop *loop, mem_ref_p ref)
{
  htab_t map = VEC_index (htab_t, memory_accesses.vop_ref_map, loop->num);
  bool stored = bitmap_bit_p (ref->stored, loop->num);
  bitmap clobbers = VEC_index (bitmap, memory_accesses.clobbered_vops,
			       loop->num);
  unsigned vop;
  bitmap_iterator bi;

  EXECUTE_IF_AND_COMPL_IN_BITMAP (ref->vops, clobbers, 0, vop, bi)
    {
      record_vop_access (map, vop, ref->id, stored);
    }
}
/* Create a mapping from virtual operands to references that touch them
   in LOOP.  */

static void
create_vop_ref_mapping_loop (struct loop *loop)
{
  bitmap refs = VEC_index (bitmap, memory_accesses.refs_in_loop, loop->num);
  struct loop *sloop;
  bitmap_iterator bi;
  unsigned i;
  mem_ref_p ref;

  EXECUTE_IF_SET_IN_BITMAP (refs, 0, i, bi)
    {
      ref = VEC_index (mem_ref_p, memory_accesses.refs_list, i);
      for (sloop = loop; sloop != current_loops->tree_root;
	   sloop = loop_outer (sloop))
	add_vop_ref_mapping (sloop, ref);
    }
}
/* For each non-clobbered virtual operand and each loop, record the memory
   references in this loop that touch the operand.  */

static void
create_vop_ref_mapping (void)
{
  loop_iterator li;
  struct loop *loop;

  FOR_EACH_LOOP (li, loop, 0)
    {
      create_vop_ref_mapping_loop (loop);
    }
}
/* Gathers information about memory accesses in the loops.  */

static void
analyze_memory_references (void)
{
  unsigned i;
  bitmap empty;
  htab_t hempty;

  memory_accesses.refs
    = htab_create (100, memref_hash, memref_eq, memref_free);
  memory_accesses.refs_list = NULL;
  memory_accesses.refs_in_loop = VEC_alloc (bitmap, heap,
					    number_of_loops ());
  memory_accesses.all_refs_in_loop = VEC_alloc (bitmap, heap,
						number_of_loops ());
  memory_accesses.clobbered_vops = VEC_alloc (bitmap, heap,
					      number_of_loops ());
  memory_accesses.vop_ref_map = VEC_alloc (htab_t, heap,
					   number_of_loops ());

  for (i = 0; i < number_of_loops (); i++)
    {
      empty = BITMAP_ALLOC (NULL);
      VEC_quick_push (bitmap, memory_accesses.refs_in_loop, empty);
      empty = BITMAP_ALLOC (NULL);
      VEC_quick_push (bitmap, memory_accesses.all_refs_in_loop, empty);
      empty = BITMAP_ALLOC (NULL);
      VEC_quick_push (bitmap, memory_accesses.clobbered_vops, empty);
      hempty = htab_create (10, vtoe_hash, vtoe_eq, vtoe_free);
      VEC_quick_push (htab_t, memory_accesses.vop_ref_map, hempty);
    }

  memory_accesses.ttae_cache = NULL;

  gather_mem_refs_in_loops ();
  create_vop_ref_mapping ();
}
/* Returns true if a region of size SIZE1 at position 0 and a region of
   size SIZE2 at position DIFF cannot overlap.  */

static bool
cannot_overlap_p (aff_tree *diff, double_int size1, double_int size2)
{
  double_int d, bound;

  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

  d = diff->offset;
  if (double_int_negative_p (d))
    {
      /* The second object is before the first one, we succeed if the last
	 element of the second object is before the start of the first one.  */
      bound = double_int_add (d, double_int_add (size2, double_int_minus_one));
      return double_int_negative_p (bound);
    }
  else
    {
      /* We succeed if the second object starts after the first one ends.  */
      return double_int_scmp (size1, d) <= 0;
    }
}
/* Returns true if MEM1 and MEM2 may alias.  TTAE_CACHE is used as a cache in
   tree_to_aff_combination_expand.  */

static bool
mem_refs_may_alias_p (tree mem1, tree mem2, struct pointer_map_t **ttae_cache)
{
  /* Perform BASE + OFFSET analysis -- if MEM1 and MEM2 are based on the same
     object and their offset differ in such a way that the locations cannot
     overlap, then they cannot alias.  */
  double_int size1, size2;
  aff_tree off1, off2;

  /* Perform basic offset and type-based disambiguation.  */
  if (!refs_may_alias_p (mem1, mem2))
    return false;

  /* The expansion of addresses may be a bit expensive, thus we only do
     the check at -O2 and higher optimization levels.  */
  if (optimize < 2)
    return true;

  get_inner_reference_aff (mem1, &off1, &size1);
  get_inner_reference_aff (mem2, &off2, &size2);
  aff_combination_expand (&off1, ttae_cache);
  aff_combination_expand (&off2, ttae_cache);
  aff_combination_scale (&off1, double_int_minus_one);
  aff_combination_add (&off2, &off1);

  if (cannot_overlap_p (&off2, size1, size2))
    return false;

  return true;
}
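/* Illustration only: for accesses a[i] and a[i + 1] with a 4-byte element
   type, the affine difference of the two addresses is the constant 4, which
   equals the access size, so cannot_overlap_p reports the locations as
   disjoint and the two references are treated as independent.  */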
/* Rewrites location LOC by TMP_VAR.  */

static void
rewrite_mem_ref_loc (mem_ref_loc_p loc, tree tmp_var)
{
  mark_virtual_ops_for_renaming (loc->stmt);
  *loc->ref = tmp_var;
  update_stmt (loc->stmt);
}
/* Adds all locations of REF in LOOP and its subloops to LOCS.  */

static void
get_all_locs_in_loop (struct loop *loop, mem_ref_p ref,
		      VEC (mem_ref_loc_p, heap) **locs)
{
  mem_ref_locs_p accs;
  unsigned i;
  mem_ref_loc_p loc;
  bitmap refs = VEC_index (bitmap, memory_accesses.all_refs_in_loop,
			   loop->num);
  struct loop *subloop;

  if (!bitmap_bit_p (refs, ref->id))
    return;

  if (VEC_length (mem_ref_locs_p, ref->accesses_in_loop)
      > (unsigned) loop->num)
    {
      accs = VEC_index (mem_ref_locs_p, ref->accesses_in_loop, loop->num);
      if (accs)
	{
	  for (i = 0; VEC_iterate (mem_ref_loc_p, accs->locs, i, loc); i++)
	    VEC_safe_push (mem_ref_loc_p, heap, *locs, loc);
	}
    }

  for (subloop = loop->inner; subloop != NULL; subloop = subloop->next)
    get_all_locs_in_loop (subloop, ref, locs);
}
/* Rewrites all references to REF in LOOP by variable TMP_VAR.  */

static void
rewrite_mem_refs (struct loop *loop, mem_ref_p ref, tree tmp_var)
{
  unsigned i;
  mem_ref_loc_p loc;
  VEC (mem_ref_loc_p, heap) *locs = NULL;

  get_all_locs_in_loop (loop, ref, &locs);
  for (i = 0; VEC_iterate (mem_ref_loc_p, locs, i, loc); i++)
    rewrite_mem_ref_loc (loc, tmp_var);
  VEC_free (mem_ref_loc_p, heap, locs);
}
/* The name and the length of the currently generated variable
   for load/store motion.  */
#define MAX_LSM_NAME_LENGTH 40
static char lsm_tmp_name[MAX_LSM_NAME_LENGTH + 1];
static int lsm_tmp_name_length;
/* Adds S to lsm_tmp_name.  */

static void
lsm_tmp_name_add (const char *s)
{
  int l = strlen (s) + lsm_tmp_name_length;
  if (l > MAX_LSM_NAME_LENGTH)
    return;

  strcpy (lsm_tmp_name + lsm_tmp_name_length, s);
  lsm_tmp_name_length = l;
}
/* Stores the name for temporary variable that replaces REF to
   lsm_tmp_name.  */

static void
gen_lsm_tmp_name (tree ref)
{
  const char *name;

  switch (TREE_CODE (ref))
    {
    case MISALIGNED_INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case INDIRECT_REF:
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
      lsm_tmp_name_add ("_");
      break;

    case BIT_FIELD_REF:
    case VIEW_CONVERT_EXPR:
    case ARRAY_RANGE_REF:
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
      break;

    case REALPART_EXPR:
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
      lsm_tmp_name_add ("_RE");
      break;

    case IMAGPART_EXPR:
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
      lsm_tmp_name_add ("_IM");
      break;

    case COMPONENT_REF:
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
      lsm_tmp_name_add ("_");
      name = get_name (TREE_OPERAND (ref, 1));
      if (!name)
	name = "F";
      lsm_tmp_name_add ("_");
      lsm_tmp_name_add (name);
      break;

    case ARRAY_REF:
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
      lsm_tmp_name_add ("_I");
      break;

    case SSA_NAME:
      ref = SSA_NAME_VAR (ref);
      /* Fallthru.  */

    case VAR_DECL:
    case PARM_DECL:
      name = get_name (ref);
      if (!name)
	name = "D";
      lsm_tmp_name_add (name);
      break;

    case STRING_CST:
      lsm_tmp_name_add ("S");
      break;

    case RESULT_DECL:
      lsm_tmp_name_add ("R");
      break;

    default:
      gcc_unreachable ();
    }
}
/* Determines name for temporary variable that replaces REF.
   The name is accumulated into the lsm_tmp_name variable.
   N is added to the name of the temporary.  */

char *
get_lsm_tmp_name (tree ref, unsigned n)
{
  char ns[2];

  lsm_tmp_name_length = 0;
  gen_lsm_tmp_name (ref);
  lsm_tmp_name_add ("_lsm");
  if (n < 10)
    {
      ns[0] = '0' + n;
      ns[1] = 0;
      lsm_tmp_name_add (ns);
    }
  return lsm_tmp_name;
}
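/* Illustration only: for a reference like p->count accessed through the
   pointer P, the generated temporary name is roughly "p__count_lsm", built
   from the base name, the component name and the "_lsm" suffix; the exact
   spelling depends on the names available for the operands.  */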
/* Executes store motion of memory reference REF from LOOP.
   Exits from the LOOP are stored in EXITS.  The initialization of the
   temporary variable is put to the preheader of the loop, and assignments
   to the reference from the temporary variable are emitted to exits.  */

static void
execute_sm (struct loop *loop, VEC (edge, heap) *exits, mem_ref_p ref)
{
  tree tmp_var, load, store;
  unsigned i;
  edge ex;
  struct fmt_data fmt_data;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Executing store motion of ");
      print_generic_expr (dump_file, ref->mem, 0);
      fprintf (dump_file, " from loop %d\n", loop->num);
    }

  tmp_var = make_rename_temp (TREE_TYPE (ref->mem),
			      get_lsm_tmp_name (ref->mem, ~0));

  fmt_data.loop = loop;
  fmt_data.orig_loop = loop;
  for_each_index (&ref->mem, force_move_till, &fmt_data);

  rewrite_mem_refs (loop, ref, tmp_var);

  /* Emit the load & stores.  */
  load = build_gimple_modify_stmt (tmp_var, unshare_expr (ref->mem));
  get_stmt_ann (load)->common.aux = xcalloc (1, sizeof (struct lim_aux_data));
  LIM_DATA (load)->max_loop = loop;
  LIM_DATA (load)->tgt_loop = loop;

  /* Put this into the latch, so that we are sure it will be processed after
     all dependencies.  */
  bsi_insert_on_edge (loop_latch_edge (loop), load);

  for (i = 0; VEC_iterate (edge, exits, i, ex); i++)
    {
      store = build_gimple_modify_stmt (unshare_expr (ref->mem), tmp_var);
      bsi_insert_on_edge (ex, store);
    }
}
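/* Illustration (hypothetical source, not part of the pass): store motion
   turns

     while (...)
       g->count += f (i);

   into roughly

     count_lsm = g->count;	<- load of the reference hoisted out
     while (...)
       count_lsm += f (i);	<- the loop works on the temporary
     g->count = count_lsm;	<- store emitted on every loop exit

   provided the reference is movable, cannot trap where the original program
   would not access it, and is independent of all other memory accesses in
   the loop.  */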
/* Hoists memory references MEM_REFS out of LOOP.  EXITS is the list of exit
   edges of the LOOP.  */

static void
hoist_memory_references (struct loop *loop, bitmap mem_refs,
			 VEC (edge, heap) *exits)
{
  mem_ref_p ref;
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (mem_refs, 0, i, bi)
    {
      ref = VEC_index (mem_ref_p, memory_accesses.refs_list, i);
      execute_sm (loop, exits, ref);
    }
}
/* Returns true if REF is always accessed in LOOP.  */

static bool
ref_always_accessed_p (struct loop *loop, mem_ref_p ref)
{
  VEC (mem_ref_loc_p, heap) *locs = NULL;
  unsigned i;
  mem_ref_loc_p loc;
  bool ret = false;
  struct loop *must_exec;

  get_all_locs_in_loop (loop, ref, &locs);
  for (i = 0; VEC_iterate (mem_ref_loc_p, locs, i, loc); i++)
    {
      if (!LIM_DATA (loc->stmt))
	continue;

      must_exec = LIM_DATA (loc->stmt)->always_executed_in;
      if (!must_exec)
	continue;

      if (must_exec == loop
	  || flow_loop_nested_p (must_exec, loop))
	{
	  ret = true;
	  break;
	}
    }
  VEC_free (mem_ref_loc_p, heap, locs);

  return ret;
}
/* Returns true if REF1 and REF2 are independent.  */

static bool
refs_independent_p (mem_ref_p ref1, mem_ref_p ref2)
{
  if (ref1 == ref2
      || bitmap_bit_p (ref1->indep_ref, ref2->id))
    return true;
  if (bitmap_bit_p (ref1->dep_ref, ref2->id))
    return false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Querying dependency of refs %u and %u: ",
	     ref1->id, ref2->id);

  if (mem_refs_may_alias_p (ref1->mem, ref2->mem,
			    &memory_accesses.ttae_cache))
    {
      bitmap_set_bit (ref1->dep_ref, ref2->id);
      bitmap_set_bit (ref2->dep_ref, ref1->id);
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "dependent.\n");
      return false;
    }
  else
    {
      bitmap_set_bit (ref1->indep_ref, ref2->id);
      bitmap_set_bit (ref2->indep_ref, ref1->id);
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "independent.\n");
      return true;
    }
}
/* Records the information whether REF is independent in LOOP (according
   to INDEP).  */

static void
record_indep_loop (struct loop *loop, mem_ref_p ref, bool indep)
{
  if (indep)
    bitmap_set_bit (ref->indep_loop, loop->num);
  else
    bitmap_set_bit (ref->dep_loop, loop->num);
}
/* Returns true if REF is independent on all other memory references in
   LOOP.  */

static bool
ref_indep_loop_p_1 (struct loop *loop, mem_ref_p ref)
{
  bitmap clobbers, refs_to_check, refs;
  unsigned i;
  bitmap_iterator bi;
  bool ret = true, stored = bitmap_bit_p (ref->stored, loop->num);
  htab_t map;
  mem_ref_p aref;

  /* If the reference is clobbered, it is not independent.  */
  clobbers = VEC_index (bitmap, memory_accesses.clobbered_vops, loop->num);
  if (bitmap_intersect_p (ref->vops, clobbers))
    return false;

  refs_to_check = BITMAP_ALLOC (NULL);

  map = VEC_index (htab_t, memory_accesses.vop_ref_map, loop->num);
  EXECUTE_IF_AND_COMPL_IN_BITMAP (ref->vops, clobbers, 0, i, bi)
    {
      if (stored)
	refs = get_vop_accesses (map, i);
      else
	refs = get_vop_stores (map, i);
      bitmap_ior_into (refs_to_check, refs);
    }

  EXECUTE_IF_SET_IN_BITMAP (refs_to_check, 0, i, bi)
    {
      aref = VEC_index (mem_ref_p, memory_accesses.refs_list, i);
      if (!refs_independent_p (ref, aref))
	{
	  ret = false;
	  record_indep_loop (loop, aref, false);
	  break;
	}
    }

  BITMAP_FREE (refs_to_check);
  return ret;
}
/* Returns true if REF is independent on all other memory references in
   LOOP.  Wrapper over ref_indep_loop_p_1, caching its results.  */

static bool
ref_indep_loop_p (struct loop *loop, mem_ref_p ref)
{
  bool ret;

  if (bitmap_bit_p (ref->indep_loop, loop->num))
    return true;
  if (bitmap_bit_p (ref->dep_loop, loop->num))
    return false;

  ret = ref_indep_loop_p_1 (loop, ref);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Querying dependencies of ref %u in loop %d: %s\n",
	     ref->id, loop->num, ret ? "independent" : "dependent");

  record_indep_loop (loop, ref, ret);

  return ret;
}
/* Returns true if we can perform store motion of REF from LOOP.  */

static bool
can_sm_ref_p (struct loop *loop, mem_ref_p ref)
{
  /* Unless the reference is stored in the loop, there is nothing to do.  */
  if (!bitmap_bit_p (ref->stored, loop->num))
    return false;

  /* It should be movable.  */
  if (!is_gimple_reg_type (TREE_TYPE (ref->mem))
      || TREE_THIS_VOLATILE (ref->mem)
      || !for_each_index (&ref->mem, may_move_till, loop))
    return false;

  /* If it can trap, it must be always executed in LOOP.  */
  if (tree_could_trap_p (ref->mem)
      && !ref_always_accessed_p (loop, ref))
    return false;

  /* And it must be independent on all other memory references
     in LOOP.  */
  if (!ref_indep_loop_p (loop, ref))
    return false;

  return true;
}
/* Marks the references in LOOP for that store motion should be performed
   in REFS_TO_SM.  SM_EXECUTED is the set of references for that store
   motion was performed in one of the outer loops.  */

static void
find_refs_for_sm (struct loop *loop, bitmap sm_executed, bitmap refs_to_sm)
{
  bitmap refs = VEC_index (bitmap, memory_accesses.all_refs_in_loop,
			   loop->num);
  unsigned i;
  bitmap_iterator bi;
  mem_ref_p ref;

  EXECUTE_IF_AND_COMPL_IN_BITMAP (refs, sm_executed, 0, i, bi)
    {
      ref = VEC_index (mem_ref_p, memory_accesses.refs_list, i);
      if (can_sm_ref_p (loop, ref))
	bitmap_set_bit (refs_to_sm, i);
    }
}
/* Checks whether LOOP (with exits stored in EXITS array) is suitable
   for a store motion optimization (i.e. whether we can insert statement
   on its exits).  */

static bool
loop_suitable_for_sm (struct loop *loop ATTRIBUTE_UNUSED,
		      VEC (edge, heap) *exits)
{
  unsigned i;
  edge ex;

  for (i = 0; VEC_iterate (edge, exits, i, ex); i++)
    if (ex->flags & EDGE_ABNORMAL)
      return false;

  return true;
}
/* Try to perform store motion for all memory references modified inside
   LOOP.  SM_EXECUTED is the bitmap of the memory references for that
   store motion was executed in one of the outer loops.  */

static void
store_motion_loop (struct loop *loop, bitmap sm_executed)
{
  VEC (edge, heap) *exits = get_loop_exit_edges (loop);
  struct loop *subloop;
  bitmap sm_in_loop = BITMAP_ALLOC (NULL);

  if (loop_suitable_for_sm (loop, exits))
    {
      find_refs_for_sm (loop, sm_executed, sm_in_loop);
      hoist_memory_references (loop, sm_in_loop, exits);
    }
  VEC_free (edge, heap, exits);

  bitmap_ior_into (sm_executed, sm_in_loop);
  for (subloop = loop->inner; subloop != NULL; subloop = subloop->next)
    store_motion_loop (subloop, sm_executed);
  bitmap_and_compl_into (sm_executed, sm_in_loop);
  BITMAP_FREE (sm_in_loop);
}
/* Try to perform store motion for all memory references modified inside
   loops.  */

static void
store_motion (void)
{
  struct loop *loop;
  bitmap sm_executed = BITMAP_ALLOC (NULL);

  for (loop = current_loops->tree_root->inner; loop != NULL; loop = loop->next)
    store_motion_loop (loop, sm_executed);

  BITMAP_FREE (sm_executed);
  bsi_commit_edge_inserts ();
}
/* Fills ALWAYS_EXECUTED_IN information for basic blocks of LOOP, i.e.
   for each such basic block bb records the outermost loop for that execution
   of its header implies execution of bb.  CONTAINS_CALL is the bitmap of
   blocks that contain a nonpure call.  */

static void
fill_always_executed_in (struct loop *loop, sbitmap contains_call)
{
  basic_block bb = NULL, *bbs, last = NULL;
  unsigned i;
  edge e;
  struct loop *inn_loop = loop;

  if (!loop->header->aux)
    {
      bbs = get_loop_body_in_dom_order (loop);

      for (i = 0; i < loop->num_nodes; i++)
	{
	  edge_iterator ei;
	  bb = bbs[i];

	  if (dominated_by_p (CDI_DOMINATORS, loop->latch, bb))
	    last = bb;

	  if (TEST_BIT (contains_call, bb->index))
	    break;

	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (!flow_bb_inside_loop_p (loop, e->dest))
	      break;
	  if (e)
	    break;

	  /* A loop might be infinite (TODO use simple loop analysis
	     to disprove this if possible).  */
	  if (bb->flags & BB_IRREDUCIBLE_LOOP)
	    break;

	  if (!flow_bb_inside_loop_p (inn_loop, bb))
	    break;

	  if (bb->loop_father->header == bb)
	    {
	      if (!dominated_by_p (CDI_DOMINATORS, loop->latch, bb))
		break;

	      /* In a loop that is always entered we may proceed anyway.
		 But record that we entered it and stop once we leave it.  */
	      inn_loop = bb->loop_father;
	    }
	}

      while (1)
	{
	  last->aux = loop;
	  if (last == loop->header)
	    break;
	  last = get_immediate_dominator (CDI_DOMINATORS, last);
	}

      free (bbs);
    }

  for (loop = loop->inner; loop; loop = loop->next)
    fill_always_executed_in (loop, contains_call);
}
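/* Illustration (hypothetical source): in

     do
       {
	 a = x / y;	<- executed on every iteration, so it may be hoisted
			   with MOVE_PRESERVE_EXECUTION even though the
			   division could trap
	 if (cond)
	   b = x / y;	<- only conditionally executed; hoisting it could
			   introduce a trap the original program never hits
       }
     while (...);

   ALWAYS_EXECUTED_IN distinguishes the two cases.  */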
/* Compute the global information needed by the loop invariant motion pass.  */

static void
tree_ssa_lim_initialize (void)
{
  sbitmap contains_call = sbitmap_alloc (last_basic_block);
  block_stmt_iterator bsi;
  struct loop *loop;
  basic_block bb;

  sbitmap_zero (contains_call);
  FOR_EACH_BB (bb)
    {
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	{
	  if (nonpure_call_p (bsi_stmt (bsi)))
	    break;
	}

      if (!bsi_end_p (bsi))
	SET_BIT (contains_call, bb->index);
    }

  for (loop = current_loops->tree_root->inner; loop; loop = loop->next)
    fill_always_executed_in (loop, contains_call);

  sbitmap_free (contains_call);
}
/* Cleans up after the invariant motion pass.  */

static void
tree_ssa_lim_finalize (void)
{
  basic_block bb;
  unsigned i;
  bitmap b;
  htab_t h;

  FOR_EACH_BB (bb)
    {
      bb->aux = NULL;
    }

  VEC_free (mem_ref_p, heap, memory_accesses.refs_list);
  htab_delete (memory_accesses.refs);

  for (i = 0; VEC_iterate (bitmap, memory_accesses.refs_in_loop, i, b); i++)
    BITMAP_FREE (b);
  VEC_free (bitmap, heap, memory_accesses.refs_in_loop);

  for (i = 0; VEC_iterate (bitmap, memory_accesses.all_refs_in_loop, i, b); i++)
    BITMAP_FREE (b);
  VEC_free (bitmap, heap, memory_accesses.all_refs_in_loop);

  for (i = 0; VEC_iterate (bitmap, memory_accesses.clobbered_vops, i, b); i++)
    BITMAP_FREE (b);
  VEC_free (bitmap, heap, memory_accesses.clobbered_vops);

  for (i = 0; VEC_iterate (htab_t, memory_accesses.vop_ref_map, i, h); i++)
    htab_delete (h);
  VEC_free (htab_t, heap, memory_accesses.vop_ref_map);

  if (memory_accesses.ttae_cache)
    pointer_map_destroy (memory_accesses.ttae_cache);
}
/* Moves invariants from loops.  Only "expensive" invariants are moved out --
   i.e. those that are likely to be a win regardless of the register
   pressure.  */

void
tree_ssa_lim (void)
{
  tree_ssa_lim_initialize ();

  /* Gathers information about memory accesses in the loops.  */
  analyze_memory_references ();

  /* For each statement determine the outermost loop in that it is
     invariant and cost for computing the invariant.  */
  determine_invariantness ();

  /* Execute store motion.  Force the necessary invariants to be moved
     out of the loops as well.  */
  store_motion ();

  /* Move the expressions that are expensive enough.  */
  move_computations ();

  tree_ssa_lim_finalize ();
}