PR c++/27177
[official-gcc.git] / gcc / tree-ssa-loop-im.c
blob262ad972da8bcf26fe33694a303c48058496c4fb
1 /* Loop invariant motion.
2 Copyright (C) 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by the
8 Free Software Foundation; either version 3, or (at your option) any
9 later version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "tree.h"
25 #include "rtl.h"
26 #include "tm_p.h"
27 #include "hard-reg-set.h"
28 #include "basic-block.h"
29 #include "output.h"
30 #include "diagnostic.h"
31 #include "tree-flow.h"
32 #include "tree-dump.h"
33 #include "timevar.h"
34 #include "cfgloop.h"
35 #include "domwalk.h"
36 #include "params.h"
37 #include "tree-pass.h"
38 #include "flags.h"
39 #include "real.h"
40 #include "hashtab.h"
42 /* TODO: Support for predicated code motion. I.e.
44 while (1)
46 if (cond)
48 a = inv;
49 something;
53 Where COND and INV are is invariants, but evaluating INV may trap or be
54 invalid from some other reason if !COND. This may be transformed to
56 if (cond)
57 a = inv;
58 while (1)
60 if (cond)
61 something;
62 } */
64 /* A type for the list of statements that have to be moved in order to be able
65 to hoist an invariant computation. */
67 struct depend
69 tree stmt;
70 struct depend *next;
73 /* The auxiliary data kept for each statement. */
75 struct lim_aux_data
77 struct loop *max_loop; /* The outermost loop in that the statement
78 is invariant. */
80 struct loop *tgt_loop; /* The loop out of that we want to move the
81 invariant. */
83 struct loop *always_executed_in;
84 /* The outermost loop for that we are sure
85 the statement is executed if the loop
86 is entered. */
88 bool sm_done; /* True iff the store motion for a memory
89 reference in the statement has already
90 been executed. */
92 unsigned cost; /* Cost of the computation performed by the
93 statement. */
95 struct depend *depends; /* List of statements that must be also hoisted
96 out of the loop when this statement is
97 hoisted; i.e. those that define the operands
98 of the statement and are inside of the
99 MAX_LOOP loop. */
102 #define LIM_DATA(STMT) (TREE_CODE (STMT) == PHI_NODE \
103 ? NULL \
104 : (struct lim_aux_data *) (stmt_ann (STMT)->common.aux))
106 /* Description of a memory reference location for store motion. */
108 struct mem_ref_loc
110 tree *ref; /* The reference itself. */
111 tree stmt; /* The statement in that it occurs. */
112 struct mem_ref_loc *next; /* Next use in the chain. */
115 /* Description of a memory reference for store motion. */
117 struct mem_ref
119 tree mem; /* The memory itself. */
120 hashval_t hash; /* Its hash value. */
121 bool is_stored; /* True if there is a store to the location
122 in the loop. */
123 struct mem_ref_loc *locs; /* The locations where it is found. */
124 bitmap vops; /* Vops corresponding to this memory
125 location. */
126 struct mem_ref *next; /* Next memory reference in the list.
127 Memory references are stored in a hash
128 table, but the hash function depends
129 on values of pointers. Thus we cannot use
130 htab_traverse, since then we would get
131 miscompares during bootstrap (although the
132 produced code would be correct). */
135 /* Minimum cost of an expensive expression. */
136 #define LIM_EXPENSIVE ((unsigned) PARAM_VALUE (PARAM_LIM_EXPENSIVE))
138 /* The outermost loop for that execution of the header guarantees that the
139 block will be executed. */
140 #define ALWAYS_EXECUTED_IN(BB) ((struct loop *) (BB)->aux)
142 /* Calls CBCK for each index in memory reference ADDR_P. There are two
143 kinds situations handled; in each of these cases, the memory reference
144 and DATA are passed to the callback:
146 Access to an array: ARRAY_{RANGE_}REF (base, index). In this case we also
147 pass the pointer to the index to the callback.
149 Pointer dereference: INDIRECT_REF (addr). In this case we also pass the
150 pointer to addr to the callback.
152 If the callback returns false, the whole search stops and false is returned.
153 Otherwise the function returns true after traversing through the whole
154 reference *ADDR_P. */
156 bool
157 for_each_index (tree *addr_p, bool (*cbck) (tree, tree *, void *), void *data)
159 tree *nxt, *idx;
161 for (; ; addr_p = nxt)
163 switch (TREE_CODE (*addr_p))
165 case SSA_NAME:
166 return cbck (*addr_p, addr_p, data);
168 case MISALIGNED_INDIRECT_REF:
169 case ALIGN_INDIRECT_REF:
170 case INDIRECT_REF:
171 nxt = &TREE_OPERAND (*addr_p, 0);
172 return cbck (*addr_p, nxt, data);
174 case BIT_FIELD_REF:
175 case VIEW_CONVERT_EXPR:
176 case REALPART_EXPR:
177 case IMAGPART_EXPR:
178 nxt = &TREE_OPERAND (*addr_p, 0);
179 break;
181 case COMPONENT_REF:
182 /* If the component has varying offset, it behaves like index
183 as well. */
184 idx = &TREE_OPERAND (*addr_p, 2);
185 if (*idx
186 && !cbck (*addr_p, idx, data))
187 return false;
189 nxt = &TREE_OPERAND (*addr_p, 0);
190 break;
192 case ARRAY_REF:
193 case ARRAY_RANGE_REF:
194 nxt = &TREE_OPERAND (*addr_p, 0);
195 if (!cbck (*addr_p, &TREE_OPERAND (*addr_p, 1), data))
196 return false;
197 break;
199 case VAR_DECL:
200 case PARM_DECL:
201 case STRING_CST:
202 case RESULT_DECL:
203 case VECTOR_CST:
204 case COMPLEX_CST:
205 case INTEGER_CST:
206 case REAL_CST:
207 case FIXED_CST:
208 case CONSTRUCTOR:
209 return true;
211 case TARGET_MEM_REF:
212 idx = &TMR_BASE (*addr_p);
213 if (*idx
214 && !cbck (*addr_p, idx, data))
215 return false;
216 idx = &TMR_INDEX (*addr_p);
217 if (*idx
218 && !cbck (*addr_p, idx, data))
219 return false;
220 return true;
222 default:
223 gcc_unreachable ();
228 /* If it is possible to hoist the statement STMT unconditionally,
229 returns MOVE_POSSIBLE.
230 If it is possible to hoist the statement STMT, but we must avoid making
231 it executed if it would not be executed in the original program (e.g.
232 because it may trap), return MOVE_PRESERVE_EXECUTION.
233 Otherwise return MOVE_IMPOSSIBLE. */
235 enum move_pos
236 movement_possibility (tree stmt)
238 tree lhs, rhs;
240 if (flag_unswitch_loops
241 && TREE_CODE (stmt) == COND_EXPR)
243 /* If we perform unswitching, force the operands of the invariant
244 condition to be moved out of the loop. */
245 return MOVE_POSSIBLE;
248 if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
249 return MOVE_IMPOSSIBLE;
251 if (stmt_ends_bb_p (stmt))
252 return MOVE_IMPOSSIBLE;
254 if (stmt_ann (stmt)->has_volatile_ops)
255 return MOVE_IMPOSSIBLE;
257 lhs = GIMPLE_STMT_OPERAND (stmt, 0);
258 if (TREE_CODE (lhs) == SSA_NAME
259 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
260 return MOVE_IMPOSSIBLE;
262 rhs = GIMPLE_STMT_OPERAND (stmt, 1);
264 if (TREE_SIDE_EFFECTS (rhs)
265 || tree_could_throw_p (rhs))
266 return MOVE_IMPOSSIBLE;
268 if (TREE_CODE (lhs) != SSA_NAME
269 || tree_could_trap_p (rhs))
270 return MOVE_PRESERVE_EXECUTION;
272 if (get_call_expr_in (stmt))
274 /* While pure or const call is guaranteed to have no side effects, we
275 cannot move it arbitrarily. Consider code like
277 char *s = something ();
279 while (1)
281 if (s)
282 t = strlen (s);
283 else
284 t = 0;
287 Here the strlen call cannot be moved out of the loop, even though
288 s is invariant. In addition to possibly creating a call with
289 invalid arguments, moving out a function call that is not executed
290 may cause performance regressions in case the call is costly and
291 not executed at all. */
292 return MOVE_PRESERVE_EXECUTION;
294 return MOVE_POSSIBLE;
297 /* Suppose that operand DEF is used inside the LOOP. Returns the outermost
298 loop to that we could move the expression using DEF if it did not have
299 other operands, i.e. the outermost loop enclosing LOOP in that the value
300 of DEF is invariant. */
302 static struct loop *
303 outermost_invariant_loop (tree def, struct loop *loop)
305 tree def_stmt;
306 basic_block def_bb;
307 struct loop *max_loop;
309 if (TREE_CODE (def) != SSA_NAME)
310 return superloop_at_depth (loop, 1);
312 def_stmt = SSA_NAME_DEF_STMT (def);
313 def_bb = bb_for_stmt (def_stmt);
314 if (!def_bb)
315 return superloop_at_depth (loop, 1);
317 max_loop = find_common_loop (loop, def_bb->loop_father);
319 if (LIM_DATA (def_stmt) && LIM_DATA (def_stmt)->max_loop)
320 max_loop = find_common_loop (max_loop,
321 loop_outer (LIM_DATA (def_stmt)->max_loop));
322 if (max_loop == loop)
323 return NULL;
324 max_loop = superloop_at_depth (loop, loop_depth (max_loop) + 1);
326 return max_loop;
329 /* Returns the outermost superloop of LOOP in that the expression EXPR is
330 invariant. */
332 static struct loop *
333 outermost_invariant_loop_expr (tree expr, struct loop *loop)
335 enum tree_code_class codeclass = TREE_CODE_CLASS (TREE_CODE (expr));
336 unsigned i, nops;
337 struct loop *max_loop = superloop_at_depth (loop, 1), *aloop;
339 if (TREE_CODE (expr) == SSA_NAME
340 || TREE_CODE (expr) == INTEGER_CST
341 || is_gimple_min_invariant (expr))
342 return outermost_invariant_loop (expr, loop);
344 if (codeclass != tcc_unary
345 && codeclass != tcc_binary
346 && codeclass != tcc_expression
347 && codeclass != tcc_vl_exp
348 && codeclass != tcc_comparison)
349 return NULL;
351 nops = TREE_OPERAND_LENGTH (expr);
352 for (i = 0; i < nops; i++)
354 aloop = outermost_invariant_loop_expr (TREE_OPERAND (expr, i), loop);
355 if (!aloop)
356 return NULL;
358 if (flow_loop_nested_p (max_loop, aloop))
359 max_loop = aloop;
362 return max_loop;
365 /* DATA is a structure containing information associated with a statement
366 inside LOOP. DEF is one of the operands of this statement.
368 Find the outermost loop enclosing LOOP in that value of DEF is invariant
369 and record this in DATA->max_loop field. If DEF itself is defined inside
370 this loop as well (i.e. we need to hoist it out of the loop if we want
371 to hoist the statement represented by DATA), record the statement in that
372 DEF is defined to the DATA->depends list. Additionally if ADD_COST is true,
373 add the cost of the computation of DEF to the DATA->cost.
375 If DEF is not invariant in LOOP, return false. Otherwise return TRUE. */
377 static bool
378 add_dependency (tree def, struct lim_aux_data *data, struct loop *loop,
379 bool add_cost)
381 tree def_stmt = SSA_NAME_DEF_STMT (def);
382 basic_block def_bb = bb_for_stmt (def_stmt);
383 struct loop *max_loop;
384 struct depend *dep;
386 if (!def_bb)
387 return true;
389 max_loop = outermost_invariant_loop (def, loop);
390 if (!max_loop)
391 return false;
393 if (flow_loop_nested_p (data->max_loop, max_loop))
394 data->max_loop = max_loop;
396 if (!LIM_DATA (def_stmt))
397 return true;
399 if (add_cost
400 /* Only add the cost if the statement defining DEF is inside LOOP,
401 i.e. if it is likely that by moving the invariants dependent
402 on it, we will be able to avoid creating a new register for
403 it (since it will be only used in these dependent invariants). */
404 && def_bb->loop_father == loop)
405 data->cost += LIM_DATA (def_stmt)->cost;
407 dep = XNEW (struct depend);
408 dep->stmt = def_stmt;
409 dep->next = data->depends;
410 data->depends = dep;
412 return true;
415 /* Returns an estimate for a cost of statement STMT. TODO -- the values here
416 are just ad-hoc constants. The estimates should be based on target-specific
417 values. */
419 static unsigned
420 stmt_cost (tree stmt)
422 tree rhs;
423 unsigned cost = 1;
425 /* Always try to create possibilities for unswitching. */
426 if (TREE_CODE (stmt) == COND_EXPR)
427 return LIM_EXPENSIVE;
429 rhs = GENERIC_TREE_OPERAND (stmt, 1);
431 /* Hoisting memory references out should almost surely be a win. */
432 if (stmt_references_memory_p (stmt))
433 cost += 20;
435 switch (TREE_CODE (rhs))
437 case CALL_EXPR:
438 /* We should be hoisting calls if possible. */
440 /* Unless the call is a builtin_constant_p; this always folds to a
441 constant, so moving it is useless. */
442 rhs = get_callee_fndecl (rhs);
443 if (DECL_BUILT_IN_CLASS (rhs) == BUILT_IN_NORMAL
444 && DECL_FUNCTION_CODE (rhs) == BUILT_IN_CONSTANT_P)
445 return 0;
447 cost += 20;
448 break;
450 case MULT_EXPR:
451 case TRUNC_DIV_EXPR:
452 case CEIL_DIV_EXPR:
453 case FLOOR_DIV_EXPR:
454 case ROUND_DIV_EXPR:
455 case EXACT_DIV_EXPR:
456 case CEIL_MOD_EXPR:
457 case FLOOR_MOD_EXPR:
458 case ROUND_MOD_EXPR:
459 case TRUNC_MOD_EXPR:
460 case RDIV_EXPR:
461 /* Division and multiplication are usually expensive. */
462 cost += 20;
463 break;
465 case LSHIFT_EXPR:
466 case RSHIFT_EXPR:
467 cost += 20;
468 break;
470 default:
471 break;
474 return cost;
477 /* Determine the outermost loop to that it is possible to hoist a statement
478 STMT and store it to LIM_DATA (STMT)->max_loop. To do this we determine
479 the outermost loop in that the value computed by STMT is invariant.
480 If MUST_PRESERVE_EXEC is true, additionally choose such a loop that
481 we preserve the fact whether STMT is executed. It also fills other related
482 information to LIM_DATA (STMT).
484 The function returns false if STMT cannot be hoisted outside of the loop it
485 is defined in, and true otherwise. */
487 static bool
488 determine_max_movement (tree stmt, bool must_preserve_exec)
490 basic_block bb = bb_for_stmt (stmt);
491 struct loop *loop = bb->loop_father;
492 struct loop *level;
493 struct lim_aux_data *lim_data = LIM_DATA (stmt);
494 tree val;
495 ssa_op_iter iter;
497 if (must_preserve_exec)
498 level = ALWAYS_EXECUTED_IN (bb);
499 else
500 level = superloop_at_depth (loop, 1);
501 lim_data->max_loop = level;
503 FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_USE)
504 if (!add_dependency (val, lim_data, loop, true))
505 return false;
507 FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_VIRTUAL_USES)
508 if (!add_dependency (val, lim_data, loop, false))
509 return false;
511 lim_data->cost += stmt_cost (stmt);
513 return true;
516 /* Suppose that some statement in ORIG_LOOP is hoisted to the loop LEVEL,
517 and that one of the operands of this statement is computed by STMT.
518 Ensure that STMT (together with all the statements that define its
519 operands) is hoisted at least out of the loop LEVEL. */
521 static void
522 set_level (tree stmt, struct loop *orig_loop, struct loop *level)
524 struct loop *stmt_loop = bb_for_stmt (stmt)->loop_father;
525 struct depend *dep;
527 stmt_loop = find_common_loop (orig_loop, stmt_loop);
528 if (LIM_DATA (stmt) && LIM_DATA (stmt)->tgt_loop)
529 stmt_loop = find_common_loop (stmt_loop,
530 loop_outer (LIM_DATA (stmt)->tgt_loop));
531 if (flow_loop_nested_p (stmt_loop, level))
532 return;
534 gcc_assert (LIM_DATA (stmt));
535 gcc_assert (level == LIM_DATA (stmt)->max_loop
536 || flow_loop_nested_p (LIM_DATA (stmt)->max_loop, level));
538 LIM_DATA (stmt)->tgt_loop = level;
539 for (dep = LIM_DATA (stmt)->depends; dep; dep = dep->next)
540 set_level (dep->stmt, orig_loop, level);
543 /* Determines an outermost loop from that we want to hoist the statement STMT.
544 For now we chose the outermost possible loop. TODO -- use profiling
545 information to set it more sanely. */
547 static void
548 set_profitable_level (tree stmt)
550 set_level (stmt, bb_for_stmt (stmt)->loop_father, LIM_DATA (stmt)->max_loop);
553 /* Returns true if STMT is not a pure call. */
555 static bool
556 nonpure_call_p (tree stmt)
558 tree call = get_call_expr_in (stmt);
560 if (!call)
561 return false;
563 return TREE_SIDE_EFFECTS (call) != 0;
566 /* Releases the memory occupied by DATA. */
568 static void
569 free_lim_aux_data (struct lim_aux_data *data)
571 struct depend *dep, *next;
573 for (dep = data->depends; dep; dep = next)
575 next = dep->next;
576 free (dep);
578 free (data);
581 /* Rewrite a/b to a*(1/b). Return the invariant stmt to process. */
583 static tree
584 rewrite_reciprocal (block_stmt_iterator *bsi)
586 tree stmt, lhs, rhs, stmt1, stmt2, var, name, tmp;
588 stmt = bsi_stmt (*bsi);
589 lhs = GENERIC_TREE_OPERAND (stmt, 0);
590 rhs = GENERIC_TREE_OPERAND (stmt, 1);
592 /* stmt must be GIMPLE_MODIFY_STMT. */
593 var = create_tmp_var (TREE_TYPE (rhs), "reciptmp");
594 add_referenced_var (var);
596 tmp = build2 (RDIV_EXPR, TREE_TYPE (rhs),
597 build_real (TREE_TYPE (rhs), dconst1),
598 TREE_OPERAND (rhs, 1));
599 stmt1 = build_gimple_modify_stmt (var, tmp);
600 name = make_ssa_name (var, stmt1);
601 GIMPLE_STMT_OPERAND (stmt1, 0) = name;
602 tmp = build2 (MULT_EXPR, TREE_TYPE (rhs),
603 name, TREE_OPERAND (rhs, 0));
604 stmt2 = build_gimple_modify_stmt (lhs, tmp);
606 /* Replace division stmt with reciprocal and multiply stmts.
607 The multiply stmt is not invariant, so update iterator
608 and avoid rescanning. */
609 bsi_replace (bsi, stmt1, true);
610 bsi_insert_after (bsi, stmt2, BSI_NEW_STMT);
611 SSA_NAME_DEF_STMT (lhs) = stmt2;
613 /* Continue processing with invariant reciprocal statement. */
614 return stmt1;
617 /* Check if the pattern at *BSI is a bittest of the form
618 (A >> B) & 1 != 0 and in this case rewrite it to A & (1 << B) != 0. */
620 static tree
621 rewrite_bittest (block_stmt_iterator *bsi)
623 tree stmt, lhs, rhs, var, name, use_stmt, stmt1, stmt2, t;
624 use_operand_p use;
626 stmt = bsi_stmt (*bsi);
627 lhs = GENERIC_TREE_OPERAND (stmt, 0);
628 rhs = GENERIC_TREE_OPERAND (stmt, 1);
630 /* Verify that the single use of lhs is a comparison against zero. */
631 if (TREE_CODE (lhs) != SSA_NAME
632 || !single_imm_use (lhs, &use, &use_stmt)
633 || TREE_CODE (use_stmt) != COND_EXPR)
634 return stmt;
635 t = COND_EXPR_COND (use_stmt);
636 if (TREE_OPERAND (t, 0) != lhs
637 || (TREE_CODE (t) != NE_EXPR
638 && TREE_CODE (t) != EQ_EXPR)
639 || !integer_zerop (TREE_OPERAND (t, 1)))
640 return stmt;
642 /* Get at the operands of the shift. The rhs is TMP1 & 1. */
643 stmt1 = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));
644 if (TREE_CODE (stmt1) != GIMPLE_MODIFY_STMT)
645 return stmt;
647 /* There is a conversion in between possibly inserted by fold. */
648 t = GIMPLE_STMT_OPERAND (stmt1, 1);
649 if (TREE_CODE (t) == NOP_EXPR
650 || TREE_CODE (t) == CONVERT_EXPR)
652 t = TREE_OPERAND (t, 0);
653 if (TREE_CODE (t) != SSA_NAME
654 || !has_single_use (t))
655 return stmt;
656 stmt1 = SSA_NAME_DEF_STMT (t);
657 if (TREE_CODE (stmt1) != GIMPLE_MODIFY_STMT)
658 return stmt;
659 t = GIMPLE_STMT_OPERAND (stmt1, 1);
662 /* Verify that B is loop invariant but A is not. Verify that with
663 all the stmt walking we are still in the same loop. */
664 if (TREE_CODE (t) == RSHIFT_EXPR
665 && loop_containing_stmt (stmt1) == loop_containing_stmt (stmt)
666 && outermost_invariant_loop_expr (TREE_OPERAND (t, 1),
667 loop_containing_stmt (stmt1)) != NULL
668 && outermost_invariant_loop_expr (TREE_OPERAND (t, 0),
669 loop_containing_stmt (stmt1)) == NULL)
671 tree a = TREE_OPERAND (t, 0);
672 tree b = TREE_OPERAND (t, 1);
674 /* 1 << B */
675 var = create_tmp_var (TREE_TYPE (a), "shifttmp");
676 add_referenced_var (var);
677 t = fold_build2 (LSHIFT_EXPR, TREE_TYPE (a),
678 build_int_cst (TREE_TYPE (a), 1), b);
679 stmt1 = build_gimple_modify_stmt (var, t);
680 name = make_ssa_name (var, stmt1);
681 GIMPLE_STMT_OPERAND (stmt1, 0) = name;
683 /* A & (1 << B) */
684 t = fold_build2 (BIT_AND_EXPR, TREE_TYPE (a), a, name);
685 stmt2 = build_gimple_modify_stmt (var, t);
686 name = make_ssa_name (var, stmt2);
687 GIMPLE_STMT_OPERAND (stmt2, 0) = name;
688 SET_USE (use, name);
690 bsi_insert_before (bsi, stmt1, BSI_SAME_STMT);
691 bsi_replace (bsi, stmt2, true);
693 return stmt1;
696 return stmt;
700 /* Determine the outermost loops in that statements in basic block BB are
701 invariant, and record them to the LIM_DATA associated with the statements.
702 Callback for walk_dominator_tree. */
704 static void
705 determine_invariantness_stmt (struct dom_walk_data *dw_data ATTRIBUTE_UNUSED,
706 basic_block bb)
708 enum move_pos pos;
709 block_stmt_iterator bsi;
710 tree stmt, rhs;
711 bool maybe_never = ALWAYS_EXECUTED_IN (bb) == NULL;
712 struct loop *outermost = ALWAYS_EXECUTED_IN (bb);
714 if (!loop_outer (bb->loop_father))
715 return;
717 if (dump_file && (dump_flags & TDF_DETAILS))
718 fprintf (dump_file, "Basic block %d (loop %d -- depth %d):\n\n",
719 bb->index, bb->loop_father->num, loop_depth (bb->loop_father));
721 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
723 stmt = bsi_stmt (bsi);
725 pos = movement_possibility (stmt);
726 if (pos == MOVE_IMPOSSIBLE)
728 if (nonpure_call_p (stmt))
730 maybe_never = true;
731 outermost = NULL;
733 continue;
736 if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
738 rhs = GIMPLE_STMT_OPERAND (stmt, 1);
740 /* If divisor is invariant, convert a/b to a*(1/b), allowing reciprocal
741 to be hoisted out of loop, saving expensive divide. */
742 if (pos == MOVE_POSSIBLE
743 && TREE_CODE (rhs) == RDIV_EXPR
744 && flag_unsafe_math_optimizations
745 && !flag_trapping_math
746 && outermost_invariant_loop_expr (TREE_OPERAND (rhs, 1),
747 loop_containing_stmt (stmt)) != NULL
748 && outermost_invariant_loop_expr (rhs,
749 loop_containing_stmt (stmt)) == NULL)
750 stmt = rewrite_reciprocal (&bsi);
752 /* If the shift count is invariant, convert (A >> B) & 1 to
753 A & (1 << B) allowing the bit mask to be hoisted out of the loop
754 saving an expensive shift. */
755 if (pos == MOVE_POSSIBLE
756 && TREE_CODE (rhs) == BIT_AND_EXPR
757 && integer_onep (TREE_OPERAND (rhs, 1))
758 && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
759 && has_single_use (TREE_OPERAND (rhs, 0)))
760 stmt = rewrite_bittest (&bsi);
763 stmt_ann (stmt)->common.aux = xcalloc (1, sizeof (struct lim_aux_data));
764 LIM_DATA (stmt)->always_executed_in = outermost;
766 if (maybe_never && pos == MOVE_PRESERVE_EXECUTION)
767 continue;
769 if (!determine_max_movement (stmt, pos == MOVE_PRESERVE_EXECUTION))
771 LIM_DATA (stmt)->max_loop = NULL;
772 continue;
775 if (dump_file && (dump_flags & TDF_DETAILS))
777 print_generic_stmt_indented (dump_file, stmt, 0, 2);
778 fprintf (dump_file, " invariant up to level %d, cost %d.\n\n",
779 loop_depth (LIM_DATA (stmt)->max_loop),
780 LIM_DATA (stmt)->cost);
783 if (LIM_DATA (stmt)->cost >= LIM_EXPENSIVE)
784 set_profitable_level (stmt);
788 /* For each statement determines the outermost loop in that it is invariant,
789 statements on whose motion it depends and the cost of the computation.
790 This information is stored to the LIM_DATA structure associated with
791 each statement. */
793 static void
794 determine_invariantness (void)
796 struct dom_walk_data walk_data;
798 memset (&walk_data, 0, sizeof (struct dom_walk_data));
799 walk_data.dom_direction = CDI_DOMINATORS;
800 walk_data.before_dom_children_before_stmts = determine_invariantness_stmt;
802 init_walk_dominator_tree (&walk_data);
803 walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
804 fini_walk_dominator_tree (&walk_data);
807 /* Hoist the statements in basic block BB out of the loops prescribed by
808 data stored in LIM_DATA structures associated with each statement. Callback
809 for walk_dominator_tree. */
811 static void
812 move_computations_stmt (struct dom_walk_data *dw_data ATTRIBUTE_UNUSED,
813 basic_block bb)
815 struct loop *level;
816 block_stmt_iterator bsi;
817 tree stmt;
818 unsigned cost = 0;
820 if (!loop_outer (bb->loop_father))
821 return;
823 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
825 stmt = bsi_stmt (bsi);
827 if (!LIM_DATA (stmt))
829 bsi_next (&bsi);
830 continue;
833 cost = LIM_DATA (stmt)->cost;
834 level = LIM_DATA (stmt)->tgt_loop;
835 free_lim_aux_data (LIM_DATA (stmt));
836 stmt_ann (stmt)->common.aux = NULL;
838 if (!level)
840 bsi_next (&bsi);
841 continue;
844 /* We do not really want to move conditionals out of the loop; we just
845 placed it here to force its operands to be moved if necessary. */
846 if (TREE_CODE (stmt) == COND_EXPR)
847 continue;
849 if (dump_file && (dump_flags & TDF_DETAILS))
851 fprintf (dump_file, "Moving statement\n");
852 print_generic_stmt (dump_file, stmt, 0);
853 fprintf (dump_file, "(cost %u) out of loop %d.\n\n",
854 cost, level->num);
856 bsi_insert_on_edge (loop_preheader_edge (level), stmt);
857 bsi_remove (&bsi, false);
861 /* Hoist the statements out of the loops prescribed by data stored in
862 LIM_DATA structures associated with each statement.*/
864 static void
865 move_computations (void)
867 struct dom_walk_data walk_data;
869 memset (&walk_data, 0, sizeof (struct dom_walk_data));
870 walk_data.dom_direction = CDI_DOMINATORS;
871 walk_data.before_dom_children_before_stmts = move_computations_stmt;
873 init_walk_dominator_tree (&walk_data);
874 walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
875 fini_walk_dominator_tree (&walk_data);
877 bsi_commit_edge_inserts ();
878 if (need_ssa_update_p ())
879 rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
882 /* Checks whether the statement defining variable *INDEX can be hoisted
883 out of the loop passed in DATA. Callback for for_each_index. */
885 static bool
886 may_move_till (tree ref, tree *index, void *data)
888 struct loop *loop = (struct loop*) data, *max_loop;
890 /* If REF is an array reference, check also that the step and the lower
891 bound is invariant in LOOP. */
892 if (TREE_CODE (ref) == ARRAY_REF)
894 tree step = array_ref_element_size (ref);
895 tree lbound = array_ref_low_bound (ref);
897 max_loop = outermost_invariant_loop_expr (step, loop);
898 if (!max_loop)
899 return false;
901 max_loop = outermost_invariant_loop_expr (lbound, loop);
902 if (!max_loop)
903 return false;
906 max_loop = outermost_invariant_loop (*index, loop);
907 if (!max_loop)
908 return false;
910 return true;
913 /* Forces statements defining (invariant) SSA names in expression EXPR to be
914 moved out of the LOOP. ORIG_LOOP is the loop in that EXPR is used. */
916 static void
917 force_move_till_expr (tree expr, struct loop *orig_loop, struct loop *loop)
919 enum tree_code_class codeclass = TREE_CODE_CLASS (TREE_CODE (expr));
920 unsigned i, nops;
922 if (TREE_CODE (expr) == SSA_NAME)
924 tree stmt = SSA_NAME_DEF_STMT (expr);
925 if (IS_EMPTY_STMT (stmt))
926 return;
928 set_level (stmt, orig_loop, loop);
929 return;
932 if (codeclass != tcc_unary
933 && codeclass != tcc_binary
934 && codeclass != tcc_expression
935 && codeclass != tcc_vl_exp
936 && codeclass != tcc_comparison)
937 return;
939 nops = TREE_OPERAND_LENGTH (expr);
940 for (i = 0; i < nops; i++)
941 force_move_till_expr (TREE_OPERAND (expr, i), orig_loop, loop);
944 /* Forces statement defining invariants in REF (and *INDEX) to be moved out of
945 the LOOP. The reference REF is used in the loop ORIG_LOOP. Callback for
946 for_each_index. */
948 struct fmt_data
950 struct loop *loop;
951 struct loop *orig_loop;
954 static bool
955 force_move_till (tree ref, tree *index, void *data)
957 tree stmt;
958 struct fmt_data *fmt_data = (struct fmt_data *) data;
960 if (TREE_CODE (ref) == ARRAY_REF)
962 tree step = array_ref_element_size (ref);
963 tree lbound = array_ref_low_bound (ref);
965 force_move_till_expr (step, fmt_data->orig_loop, fmt_data->loop);
966 force_move_till_expr (lbound, fmt_data->orig_loop, fmt_data->loop);
969 if (TREE_CODE (*index) != SSA_NAME)
970 return true;
972 stmt = SSA_NAME_DEF_STMT (*index);
973 if (IS_EMPTY_STMT (stmt))
974 return true;
976 set_level (stmt, fmt_data->orig_loop, fmt_data->loop);
978 return true;
981 /* Records memory reference location *REF to the list MEM_REFS. The reference
982 occurs in statement STMT. */
984 static void
985 record_mem_ref_loc (struct mem_ref_loc **mem_refs, tree stmt, tree *ref)
987 struct mem_ref_loc *aref = XNEW (struct mem_ref_loc);
989 aref->stmt = stmt;
990 aref->ref = ref;
992 aref->next = *mem_refs;
993 *mem_refs = aref;
996 /* Releases list of memory reference locations MEM_REFS. */
998 static void
999 free_mem_ref_locs (struct mem_ref_loc *mem_refs)
1001 struct mem_ref_loc *act;
1003 while (mem_refs)
1005 act = mem_refs;
1006 mem_refs = mem_refs->next;
1007 free (act);
1011 /* Rewrites memory references in list MEM_REFS by variable TMP_VAR. */
1013 static void
1014 rewrite_mem_refs (tree tmp_var, struct mem_ref_loc *mem_refs)
1016 tree var;
1017 ssa_op_iter iter;
1019 for (; mem_refs; mem_refs = mem_refs->next)
1021 FOR_EACH_SSA_TREE_OPERAND (var, mem_refs->stmt, iter, SSA_OP_ALL_VIRTUALS)
1022 mark_sym_for_renaming (SSA_NAME_VAR (var));
1024 *mem_refs->ref = tmp_var;
1025 update_stmt (mem_refs->stmt);
/* The name and the length of the currently generated variable
   for lsm.  */
#define MAX_LSM_NAME_LENGTH 40
static char lsm_tmp_name[MAX_LSM_NAME_LENGTH + 1];
static int lsm_tmp_name_length;

/* Adds S to lsm_tmp_name.  */

static void
lsm_tmp_name_add (const char *s)
{
  int l = strlen (s) + lsm_tmp_name_length;

  /* Silently drop the suffix when it would overflow the buffer; the name
     is only a debugging aid.  */
  if (l > MAX_LSM_NAME_LENGTH)
    return;

  strcpy (lsm_tmp_name + lsm_tmp_name_length, s);
  lsm_tmp_name_length = l;
}
1048 /* Stores the name for temporary variable that replaces REF to
1049 lsm_tmp_name. */
1051 static void
1052 gen_lsm_tmp_name (tree ref)
1054 const char *name;
1056 switch (TREE_CODE (ref))
1058 case MISALIGNED_INDIRECT_REF:
1059 case ALIGN_INDIRECT_REF:
1060 case INDIRECT_REF:
1061 gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
1062 lsm_tmp_name_add ("_");
1063 break;
1065 case BIT_FIELD_REF:
1066 case VIEW_CONVERT_EXPR:
1067 case ARRAY_RANGE_REF:
1068 gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
1069 break;
1071 case REALPART_EXPR:
1072 gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
1073 lsm_tmp_name_add ("_RE");
1074 break;
1076 case IMAGPART_EXPR:
1077 gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
1078 lsm_tmp_name_add ("_IM");
1079 break;
1081 case COMPONENT_REF:
1082 gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
1083 lsm_tmp_name_add ("_");
1084 name = get_name (TREE_OPERAND (ref, 1));
1085 if (!name)
1086 name = "F";
1087 lsm_tmp_name_add ("_");
1088 lsm_tmp_name_add (name);
1090 case ARRAY_REF:
1091 gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
1092 lsm_tmp_name_add ("_I");
1093 break;
1095 case SSA_NAME:
1096 ref = SSA_NAME_VAR (ref);
1097 /* Fallthru. */
1099 case VAR_DECL:
1100 case PARM_DECL:
1101 name = get_name (ref);
1102 if (!name)
1103 name = "D";
1104 lsm_tmp_name_add (name);
1105 break;
1107 case STRING_CST:
1108 lsm_tmp_name_add ("S");
1109 break;
1111 case RESULT_DECL:
1112 lsm_tmp_name_add ("R");
1113 break;
1115 default:
1116 gcc_unreachable ();
1120 /* Determines name for temporary variable that replaces REF.
1121 The name is accumulated into the lsm_tmp_name variable.
1122 N is added to the name of the temporary. */
1124 char *
1125 get_lsm_tmp_name (tree ref, unsigned n)
1127 char ns[2];
1129 lsm_tmp_name_length = 0;
1130 gen_lsm_tmp_name (ref);
1131 lsm_tmp_name_add ("_lsm");
1132 if (n < 10)
1134 ns[0] = '0' + n;
1135 ns[1] = 0;
1136 lsm_tmp_name_add (ns);
1138 return lsm_tmp_name;
/* Records request for store motion of memory reference REF from LOOP.
   MEM_REFS is the list of occurrences of the reference REF inside LOOP;
   these references are rewritten by a new temporary variable.
   Exits from the LOOP are stored in EXITS.  The initialization of the
   temporary variable is put to the preheader of the loop, and assignments
   to the reference from the temporary variable are emitted to exits.  */

static void
schedule_sm (struct loop *loop, VEC (edge, heap) *exits, tree ref,
	     struct mem_ref_loc *mem_refs)
{
  struct mem_ref_loc *aref;
  tree tmp_var;
  unsigned i;
  tree load, store;
  struct fmt_data fmt_data;
  edge ex;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Executing store motion of ");
      print_generic_expr (dump_file, ref, 0);
      fprintf (dump_file, " from loop %d\n", loop->num);
    }

  /* ~0 requests a name with no numeric suffix (get_lsm_tmp_name only
     appends single digits, for n < 10).  */
  tmp_var = make_rename_temp (TREE_TYPE (ref),
			      get_lsm_tmp_name (ref, ~0));

  /* Force the parts of the address computation that are invariant in
     LOOP to be moved out of it as well, so that REF is computable in
     the preheader and on the exit edges.  */
  fmt_data.loop = loop;
  fmt_data.orig_loop = loop;
  for_each_index (&ref, force_move_till, &fmt_data);

  /* Replace all the occurrences inside the loop by TMP_VAR and mark the
     statements as handled, so that they are not processed again.  */
  rewrite_mem_refs (tmp_var, mem_refs);
  for (aref = mem_refs; aref; aref = aref->next)
    if (LIM_DATA (aref->stmt))
      LIM_DATA (aref->stmt)->sm_done = true;

  /* Emit the load & stores.  */
  load = build_gimple_modify_stmt (tmp_var, ref);
  /* Attach zero-initialized lim_aux_data so that the invariant-motion
     machinery moves the load to the preheader of LOOP.  */
  get_stmt_ann (load)->common.aux = xcalloc (1, sizeof (struct lim_aux_data));
  LIM_DATA (load)->max_loop = loop;
  LIM_DATA (load)->tgt_loop = loop;

  /* Put this into the latch, so that we are sure it will be processed after
     all dependencies.  */
  bsi_insert_on_edge (loop_latch_edge (loop), load);

  /* Store the temporary back to REF on every exit from LOOP; REF must be
     unshared since the same tree may not appear in several statements.  */
  for (i = 0; VEC_iterate (edge, exits, i, ex); i++)
    {
      store = build_gimple_modify_stmt (unshare_expr (ref), tmp_var);
      bsi_insert_on_edge (ex, store);
    }
}
/* Check whether memory reference REF can be hoisted out of the LOOP.  If this
   is true, prepare the statements that load the value of the memory reference
   to a temporary variable in the loop preheader, store it back on the loop
   exits, and replace all the references inside LOOP by this temporary variable.
   EXITS is the list of exits of LOOP.  CLOBBERED_VOPS is the bitmap of virtual
   operands that are clobbered by a call or accessed through multiple references
   in loop.  */

static void
determine_lsm_ref (struct loop *loop, VEC (edge, heap) *exits,
		   bitmap clobbered_vops, struct mem_ref *ref)
{
  struct mem_ref_loc *aref;
  struct loop *must_exec;

  /* In case the memory is not stored to, there is nothing for SM to do.  */
  if (!ref->is_stored)
    return;

  /* If the reference is aliased with any different ref, or killed by call
     in function, then fail.  */
  if (bitmap_intersect_p (ref->vops, clobbered_vops))
    return;

  if (tree_could_trap_p (ref->mem))
    {
      /* If the memory access is unsafe (i.e. it might trap), ensure that some
	 of the statements in that it occurs is always executed when the loop
	 is entered.  This way we know that by moving the load from the
	 reference out of the loop we will not cause the error that would not
	 occur otherwise.

	 TODO -- in fact we would like to check for anticipability of the
	 reference, i.e. that on each path from loop entry to loop exit at
	 least one of the statements containing the memory reference is
	 executed.  */

      for (aref = ref->locs; aref; aref = aref->next)
	{
	  if (!LIM_DATA (aref->stmt))
	    continue;

	  /* MUST_EXEC is the outermost loop in that execution of the
	     header of LOOP guarantees execution of this statement.  */
	  must_exec = LIM_DATA (aref->stmt)->always_executed_in;
	  if (!must_exec)
	    continue;

	  /* The statement is always executed if MUST_EXEC is LOOP itself
	     or an enclosing loop of it -- stop searching.  */
	  if (must_exec == loop
	      || flow_loop_nested_p (must_exec, loop))
	    break;
	}

      /* AREF is NULL iff no occurrence is guaranteed to execute; hoisting
	 the load could then introduce a trap that did not happen before.  */
      if (!aref)
	return;
    }

  schedule_sm (loop, exits, ref->mem, ref->locs);
}
1253 /* Hoists memory references MEM_REFS out of LOOP. CLOBBERED_VOPS is the list
1254 of vops clobbered by call in loop or accessed by multiple memory references.
1255 EXITS is the list of exit edges of the LOOP. */
1257 static void
1258 hoist_memory_references (struct loop *loop, struct mem_ref *mem_refs,
1259 bitmap clobbered_vops, VEC (edge, heap) *exits)
1261 struct mem_ref *ref;
1263 for (ref = mem_refs; ref; ref = ref->next)
1264 determine_lsm_ref (loop, exits, clobbered_vops, ref);
1267 /* Checks whether LOOP (with exits stored in EXITS array) is suitable
1268 for a store motion optimization (i.e. whether we can insert statement
1269 on its exits). */
1271 static bool
1272 loop_suitable_for_sm (struct loop *loop ATTRIBUTE_UNUSED,
1273 VEC (edge, heap) *exits)
1275 unsigned i;
1276 edge ex;
1278 for (i = 0; VEC_iterate (edge, exits, i, ex); i++)
1279 if (ex->flags & EDGE_ABNORMAL)
1280 return false;
1282 return true;
1285 /* A hash function for struct mem_ref object OBJ. */
1287 static hashval_t
1288 memref_hash (const void *obj)
1290 return ((const struct mem_ref *) obj)->hash;
1293 /* An equality function for struct mem_ref object OBJ1 with
1294 memory reference OBJ2. */
1296 static int
1297 memref_eq (const void *obj1, const void *obj2)
1299 const struct mem_ref *const mem1 = (const struct mem_ref *) obj1;
1301 return operand_equal_p (mem1->mem, (const_tree) obj2, 0);
/* Gathers memory references in statement STMT in LOOP, storing the
   information about them in MEM_REFS hash table.  Note vops accessed through
   unrecognized statements in CLOBBERED_VOPS.  The newly created references
   are also stored to MEM_REF_LIST.  */

static void
gather_mem_refs_stmt (struct loop *loop, htab_t mem_refs,
		      bitmap clobbered_vops, tree stmt,
		      struct mem_ref **mem_ref_list)
{
  tree *lhs, *rhs, *mem = NULL;
  hashval_t hash;
  PTR *slot;
  struct mem_ref *ref = NULL;
  ssa_op_iter oi;
  tree vname;
  bool is_stored;

  /* Statements without virtual operands do not touch memory at all.  */
  if (ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
    return;

  /* Recognize MEM = (SSA_NAME | invariant) and SSA_NAME = MEM patterns.  */
  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
    goto fail;

  lhs = &GIMPLE_STMT_OPERAND (stmt, 0);
  rhs = &GIMPLE_STMT_OPERAND (stmt, 1);

  if (TREE_CODE (*lhs) == SSA_NAME)
    {
      /* SSA_NAME = MEM -- a load.  */
      if (!is_gimple_addressable (*rhs))
	goto fail;

      mem = rhs;
      is_stored = false;
    }
  else if (TREE_CODE (*rhs) == SSA_NAME
	   || is_gimple_min_invariant (*rhs))
    {
      /* MEM = (SSA_NAME | invariant) -- a store.  */
      mem = lhs;
      is_stored = true;
    }
  else
    goto fail;

  /* If we cannot create an SSA name for the result, give up.  */
  if (!is_gimple_reg_type (TREE_TYPE (*mem))
      || TREE_THIS_VOLATILE (*mem))
    goto fail;

  /* If we cannot move the reference out of the loop, fail.  */
  if (!for_each_index (mem, may_move_till, loop))
    goto fail;

  /* Look up the reference in MEM_REFS, creating a new entry (linked into
     *MEM_REF_LIST) on the first occurrence.  */
  hash = iterative_hash_expr (*mem, 0);
  slot = htab_find_slot_with_hash (mem_refs, *mem, hash, INSERT);

  if (*slot)
    ref = (struct mem_ref *) *slot;
  else
    {
      ref = XNEW (struct mem_ref);
      ref->mem = *mem;
      ref->hash = hash;
      ref->locs = NULL;
      ref->is_stored = false;
      ref->vops = BITMAP_ALLOC (NULL);
      ref->next = *mem_ref_list;
      *mem_ref_list = ref;
      *slot = ref;
    }
  /* A reference counts as stored if any of its occurrences stores.  */
  ref->is_stored |= is_stored;

  /* Accumulate the virtual operands used by this occurrence and record
     its location for later rewriting.  */
  FOR_EACH_SSA_TREE_OPERAND (vname, stmt, oi, SSA_OP_VIRTUAL_USES)
    bitmap_set_bit (ref->vops, DECL_UID (SSA_NAME_VAR (vname)));
  record_mem_ref_loc (&ref->locs, stmt, mem);
  return;

fail:
  /* Unrecognized or immovable memory access -- mark all its virtual
     operands as clobbered so no reference using them is hoisted.  */
  FOR_EACH_SSA_TREE_OPERAND (vname, stmt, oi, SSA_OP_VIRTUAL_USES)
    bitmap_set_bit (clobbered_vops, DECL_UID (SSA_NAME_VAR (vname)));
}
1387 /* Gathers memory references in LOOP. Notes vops accessed through unrecognized
1388 statements in CLOBBERED_VOPS. The list of the references found by
1389 the function is returned. */
1391 static struct mem_ref *
1392 gather_mem_refs (struct loop *loop, bitmap clobbered_vops)
1394 basic_block *body = get_loop_body (loop);
1395 block_stmt_iterator bsi;
1396 unsigned i;
1397 struct mem_ref *mem_ref_list = NULL;
1398 htab_t mem_refs = htab_create (100, memref_hash, memref_eq, NULL);
1400 for (i = 0; i < loop->num_nodes; i++)
1402 for (bsi = bsi_start (body[i]); !bsi_end_p (bsi); bsi_next (&bsi))
1403 gather_mem_refs_stmt (loop, mem_refs, clobbered_vops, bsi_stmt (bsi),
1404 &mem_ref_list);
1407 free (body);
1409 htab_delete (mem_refs);
1410 return mem_ref_list;
1413 /* Finds the vops accessed by more than one of the memory references described
1414 in MEM_REFS and marks them in CLOBBERED_VOPS. */
1416 static void
1417 find_more_ref_vops (struct mem_ref *mem_refs, bitmap clobbered_vops)
1419 bitmap_head tmp, all_vops;
1420 struct mem_ref *ref;
1422 bitmap_initialize (&tmp, &bitmap_default_obstack);
1423 bitmap_initialize (&all_vops, &bitmap_default_obstack);
1425 for (ref = mem_refs; ref; ref = ref->next)
1427 /* The vops that are already in all_vops are accessed by more than
1428 one memory reference. */
1429 bitmap_and (&tmp, &all_vops, ref->vops);
1430 bitmap_ior_into (clobbered_vops, &tmp);
1431 bitmap_clear (&tmp);
1433 bitmap_ior_into (&all_vops, ref->vops);
1436 bitmap_clear (&all_vops);
1439 /* Releases the memory occupied by REF. */
1441 static void
1442 free_mem_ref (struct mem_ref *ref)
1444 free_mem_ref_locs (ref->locs);
1445 BITMAP_FREE (ref->vops);
1446 free (ref);
1449 /* Releases the memory occupied by REFS. */
1451 static void
1452 free_mem_refs (struct mem_ref *refs)
1454 struct mem_ref *ref, *next;
1456 for (ref = refs; ref; ref = next)
1458 next = ref->next;
1459 free_mem_ref (ref);
1463 /* Try to perform store motion for all memory references modified inside
1464 LOOP. */
1466 static void
1467 determine_lsm_loop (struct loop *loop)
1469 VEC (edge, heap) *exits = get_loop_exit_edges (loop);
1470 bitmap clobbered_vops;
1471 struct mem_ref *mem_refs;
1473 if (!loop_suitable_for_sm (loop, exits))
1475 VEC_free (edge, heap, exits);
1476 return;
1479 /* Find the memory references in LOOP. */
1480 clobbered_vops = BITMAP_ALLOC (NULL);
1481 mem_refs = gather_mem_refs (loop, clobbered_vops);
1483 /* Find the vops that are used for more than one reference. */
1484 find_more_ref_vops (mem_refs, clobbered_vops);
1486 /* Hoist all suitable memory references. */
1487 hoist_memory_references (loop, mem_refs, clobbered_vops, exits);
1489 free_mem_refs (mem_refs);
1490 VEC_free (edge, heap, exits);
1491 BITMAP_FREE (clobbered_vops);
1494 /* Try to perform store motion for all memory references modified inside
1495 loops. */
1497 static void
1498 determine_lsm (void)
1500 struct loop *loop;
1501 loop_iterator li;
1503 /* Pass the loops from the outermost and perform the store motion as
1504 suitable. */
1506 FOR_EACH_LOOP (li, loop, 0)
1508 determine_lsm_loop (loop);
1511 bsi_commit_edge_inserts ();
/* Fills ALWAYS_EXECUTED_IN information for basic blocks of LOOP, i.e.
   for each such basic block bb records the outermost loop for that execution
   of its header implies execution of bb.  CONTAINS_CALL is the bitmap of
   blocks that contain a nonpure call.  */

static void
fill_always_executed_in (struct loop *loop, sbitmap contains_call)
{
  basic_block bb = NULL, *bbs, last = NULL;
  unsigned i;
  edge e;
  struct loop *inn_loop = loop;

  /* loop->header->aux != NULL means the header was already marked by a
     previous call for an enclosing loop, which then covers this one.  */
  if (!loop->header->aux)
    {
      bbs = get_loop_body_in_dom_order (loop);

      /* Walk the body in dominance order; LAST tracks the deepest block
	 seen so far that dominates the latch (and hence is executed on
	 every iteration).  Any of the breaks below ends the walk as soon
	 as execution past BB is no longer guaranteed.  */
      for (i = 0; i < loop->num_nodes; i++)
	{
	  edge_iterator ei;
	  bb = bbs[i];

	  if (dominated_by_p (CDI_DOMINATORS, loop->latch, bb))
	    last = bb;

	  /* A nonpure call may not return (exit, longjmp, ...).  */
	  if (TEST_BIT (contains_call, bb->index))
	    break;

	  /* Stop at a block with an exit out of the loop; E is non-NULL
	     after the inner loop iff such an edge was found.  */
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (!flow_bb_inside_loop_p (loop, e->dest))
	      break;
	  if (e)
	    break;

	  /* A loop might be infinite (TODO use simple loop analysis
	     to disprove this if possible).  */
	  if (bb->flags & BB_IRREDUCIBLE_LOOP)
	    break;

	  /* Once we left the inner loop recorded below, stop.  */
	  if (!flow_bb_inside_loop_p (inn_loop, bb))
	    break;

	  if (bb->loop_father->header == bb)
	    {
	      if (!dominated_by_p (CDI_DOMINATORS, loop->latch, bb))
		break;

	      /* In a loop that is always entered we may proceed anyway.
		 But record that we entered it and stop once we leave it.  */
	      inn_loop = bb->loop_father;
	    }
	}

      /* Mark LAST and all its dominators up to the header as always
	 executed in LOOP.  */
      while (1)
	{
	  last->aux = loop;
	  if (last == loop->header)
	    break;
	  last = get_immediate_dominator (CDI_DOMINATORS, last);
	}

      free (bbs);
    }

  /* Recurse into the subloops.  */
  for (loop = loop->inner; loop; loop = loop->next)
    fill_always_executed_in (loop, contains_call);
}
1582 /* Compute the global information needed by the loop invariant motion pass. */
1584 static void
1585 tree_ssa_lim_initialize (void)
1587 sbitmap contains_call = sbitmap_alloc (last_basic_block);
1588 block_stmt_iterator bsi;
1589 struct loop *loop;
1590 basic_block bb;
1592 sbitmap_zero (contains_call);
1593 FOR_EACH_BB (bb)
1595 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1597 if (nonpure_call_p (bsi_stmt (bsi)))
1598 break;
1601 if (!bsi_end_p (bsi))
1602 SET_BIT (contains_call, bb->index);
1605 for (loop = current_loops->tree_root->inner; loop; loop = loop->next)
1606 fill_always_executed_in (loop, contains_call);
1608 sbitmap_free (contains_call);
1611 /* Cleans up after the invariant motion pass. */
1613 static void
1614 tree_ssa_lim_finalize (void)
1616 basic_block bb;
1618 FOR_EACH_BB (bb)
1620 bb->aux = NULL;
/* Moves invariants from loops.  Only "expensive" invariants are moved out --
   i.e. those that are likely to be win regardless of the register pressure.
   Entry point of the pass: runs initialization, the two analysis phases
   (invariantness and store motion), the actual code motion, and cleanup.  */

void
tree_ssa_lim (void)
{
  tree_ssa_lim_initialize ();

  /* For each statement determine the outermost loop in that it is
     invariant and cost for computing the invariant.  */
  determine_invariantness ();

  /* For each memory reference determine whether it is possible to hoist it
     out of the loop.  Force the necessary invariants to be moved out of the
     loops as well.  */
  determine_lsm ();

  /* Move the expressions that are expensive enough.  */
  move_computations ();

  tree_ssa_lim_finalize ();
}