/* Loop invariant motion.
   Copyright (C) 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "tree.h"
25 #include "rtl.h"
26 #include "tm_p.h"
27 #include "hard-reg-set.h"
28 #include "basic-block.h"
29 #include "output.h"
30 #include "diagnostic.h"
31 #include "tree-flow.h"
32 #include "tree-dump.h"
33 #include "timevar.h"
34 #include "cfgloop.h"
35 #include "domwalk.h"
36 #include "params.h"
37 #include "tree-pass.h"
38 #include "flags.h"
39 #include "real.h"
40 #include "hashtab.h"
/* TODO:  Support for predicated code motion.  I.e.

     while (1)
       {
         if (cond)
           {
             a = inv;
             something;
           }
       }

   where COND and INV are invariants, but evaluating INV may trap or be
   invalid for some other reason if !COND.  This may be transformed to

     if (cond)
       a = inv;
     while (1)
       {
         if (cond)
           something;
       }  */
/* A type for the list of statements that have to be moved in order to be able
   to hoist an invariant computation.  */

struct depend
{
  tree stmt;
  struct depend *next;
};
/* The auxiliary data kept for each statement.  */

struct lim_aux_data
{
  struct loop *max_loop;	/* The outermost loop in which the statement
				   is invariant.  */

  struct loop *tgt_loop;	/* The loop out of which we want to move the
				   invariant.  */

  struct loop *always_executed_in;
				/* The outermost loop for which we are sure
				   the statement is executed if the loop
				   is entered.  */

  bool sm_done;			/* True iff the store motion for a memory
				   reference in the statement has already
				   been executed.  */

  unsigned cost;		/* Cost of the computation performed by the
				   statement.  */

  struct depend *depends;	/* List of statements that must also be hoisted
				   out of the loop when this statement is
				   hoisted; i.e. those that define the operands
				   of the statement and are inside of the
				   MAX_LOOP loop.  */
};

#define LIM_DATA(STMT) (TREE_CODE (STMT) == PHI_NODE \
			? NULL \
			: (struct lim_aux_data *) (stmt_ann (STMT)->common.aux))
/* Description of a memory reference location for store motion.  */

struct mem_ref_loc
{
  tree *ref;			/* The reference itself.  */
  tree stmt;			/* The statement in which it occurs.  */
  struct mem_ref_loc *next;	/* Next use in the chain.  */
};

/* Description of a memory reference for store motion.  */

struct mem_ref
{
  tree mem;			/* The memory itself.  */
  hashval_t hash;		/* Its hash value.  */
  bool is_stored;		/* True if there is a store to the location
				   in the loop.  */
  struct mem_ref_loc *locs;	/* The locations where it is found.  */
  bitmap vops;			/* Vops corresponding to this memory
				   location.  */
  struct mem_ref *next;		/* Next memory reference in the list.
				   Memory references are stored in a hash
				   table, but the hash function depends
				   on values of pointers.  Thus we cannot use
				   htab_traverse, since then we would get
				   miscompares during bootstrap (although the
				   produced code would be correct).  */
};
/* Minimum cost of an expensive expression.  */
#define LIM_EXPENSIVE ((unsigned) PARAM_VALUE (PARAM_LIM_EXPENSIVE))

/* The outermost loop for which execution of the header guarantees that the
   block will be executed.  */
#define ALWAYS_EXECUTED_IN(BB) ((struct loop *) (BB)->aux)
/* Calls CBCK for each index in memory reference ADDR_P.  There are two
   kinds of situations handled; in each of these cases, the memory reference
   and DATA are passed to the callback:

   Access to an array: ARRAY_{RANGE_}REF (base, index).  In this case we also
   pass the pointer to the index to the callback.

   Pointer dereference: INDIRECT_REF (addr).  In this case we also pass the
   pointer to addr to the callback.

   If the callback returns false, the whole search stops and false is returned.
   Otherwise the function returns true after traversing through the whole
   reference *ADDR_P.  */

bool
for_each_index (tree *addr_p, bool (*cbck) (tree, tree *, void *), void *data)
{
  tree *nxt, *idx;

  for (; ; addr_p = nxt)
    {
      switch (TREE_CODE (*addr_p))
	{
	case SSA_NAME:
	  return cbck (*addr_p, addr_p, data);

	case MISALIGNED_INDIRECT_REF:
	case ALIGN_INDIRECT_REF:
	case INDIRECT_REF:
	  nxt = &TREE_OPERAND (*addr_p, 0);
	  return cbck (*addr_p, nxt, data);

	case BIT_FIELD_REF:
	case VIEW_CONVERT_EXPR:
	case REALPART_EXPR:
	case IMAGPART_EXPR:
	  nxt = &TREE_OPERAND (*addr_p, 0);
	  break;

	case COMPONENT_REF:
	  /* If the component has a varying offset, it behaves like an index
	     as well.  */
	  idx = &TREE_OPERAND (*addr_p, 2);
	  if (*idx
	      && !cbck (*addr_p, idx, data))
	    return false;

	  nxt = &TREE_OPERAND (*addr_p, 0);
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  nxt = &TREE_OPERAND (*addr_p, 0);
	  if (!cbck (*addr_p, &TREE_OPERAND (*addr_p, 1), data))
	    return false;
	  break;

	case VAR_DECL:
	case PARM_DECL:
	case STRING_CST:
	case RESULT_DECL:
	case VECTOR_CST:
	case COMPLEX_CST:
	case INTEGER_CST:
	case REAL_CST:
	case FIXED_CST:
	case CONSTRUCTOR:
	  return true;

	case TARGET_MEM_REF:
	  idx = &TMR_BASE (*addr_p);
	  if (*idx
	      && !cbck (*addr_p, idx, data))
	    return false;
	  idx = &TMR_INDEX (*addr_p);
	  if (*idx
	      && !cbck (*addr_p, idx, data))
	    return false;
	  return true;

	default:
	  gcc_unreachable ();
	}
    }
}
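
/* For illustration only (not part of the pass): a minimal for_each_index
   callback that just counts the indices of a reference.  The names
   count_index_cb and count below are hypothetical.

     static bool
     count_index_cb (tree base ATTRIBUTE_UNUSED, tree *idx ATTRIBUTE_UNUSED,
		     void *data)
     {
       (*(unsigned *) data)++;
       return true;
     }

     unsigned count = 0;
     for_each_index (&ref, count_index_cb, &count);  */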
/* If it is possible to hoist the statement STMT unconditionally,
   returns MOVE_POSSIBLE.
   If it is possible to hoist the statement STMT, but we must avoid making
   it executed if it would not be executed in the original program (e.g.
   because it may trap), return MOVE_PRESERVE_EXECUTION.
   Otherwise return MOVE_IMPOSSIBLE.  */

enum move_pos
movement_possibility (tree stmt)
{
  tree lhs, rhs;

  if (flag_unswitch_loops
      && TREE_CODE (stmt) == COND_EXPR)
    {
      /* If we perform unswitching, force the operands of the invariant
	 condition to be moved out of the loop.  */
      return MOVE_POSSIBLE;
    }

  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
    return MOVE_IMPOSSIBLE;

  if (stmt_ends_bb_p (stmt))
    return MOVE_IMPOSSIBLE;

  if (stmt_ann (stmt)->has_volatile_ops)
    return MOVE_IMPOSSIBLE;

  lhs = GIMPLE_STMT_OPERAND (stmt, 0);
  if (TREE_CODE (lhs) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
    return MOVE_IMPOSSIBLE;

  rhs = GIMPLE_STMT_OPERAND (stmt, 1);

  if (TREE_SIDE_EFFECTS (rhs)
      || tree_could_throw_p (rhs))
    return MOVE_IMPOSSIBLE;

  if (TREE_CODE (lhs) != SSA_NAME
      || tree_could_trap_p (rhs))
    return MOVE_PRESERVE_EXECUTION;

  if (get_call_expr_in (stmt))
    {
      /* While a pure or const call is guaranteed to have no side effects, we
	 cannot move it arbitrarily.  Consider code like

	 char *s = something ();

	 while (1)
	   {
	     if (s)
	       t = strlen (s);
	     else
	       t = 0;
	   }

	 Here the strlen call cannot be moved out of the loop, even though
	 s is invariant.  In addition to possibly creating a call with
	 invalid arguments, moving out a function call that is not executed
	 may cause performance regressions in case the call is costly and
	 not executed at all.  */
      return MOVE_PRESERVE_EXECUTION;
    }

  return MOVE_POSSIBLE;
}
/* Suppose that operand DEF is used inside the LOOP.  Returns the outermost
   loop to which we could move the expression using DEF if it did not have
   other operands, i.e. the outermost loop enclosing LOOP in which the value
   of DEF is invariant.  */

static struct loop *
outermost_invariant_loop (tree def, struct loop *loop)
{
  tree def_stmt;
  basic_block def_bb;
  struct loop *max_loop;

  if (TREE_CODE (def) != SSA_NAME)
    return superloop_at_depth (loop, 1);

  def_stmt = SSA_NAME_DEF_STMT (def);
  def_bb = bb_for_stmt (def_stmt);
  if (!def_bb)
    return superloop_at_depth (loop, 1);

  max_loop = find_common_loop (loop, def_bb->loop_father);

  if (LIM_DATA (def_stmt) && LIM_DATA (def_stmt)->max_loop)
    max_loop = find_common_loop (max_loop,
				 loop_outer (LIM_DATA (def_stmt)->max_loop));
  if (max_loop == loop)
    return NULL;
  max_loop = superloop_at_depth (loop, loop_depth (max_loop) + 1);

  return max_loop;
}
/* Returns the outermost superloop of LOOP in which the expression EXPR is
   invariant.  */

static struct loop *
outermost_invariant_loop_expr (tree expr, struct loop *loop)
{
  enum tree_code_class codeclass = TREE_CODE_CLASS (TREE_CODE (expr));
  unsigned i, nops;
  struct loop *max_loop = superloop_at_depth (loop, 1), *aloop;

  if (TREE_CODE (expr) == SSA_NAME
      || TREE_CODE (expr) == INTEGER_CST
      || is_gimple_min_invariant (expr))
    return outermost_invariant_loop (expr, loop);

  if (codeclass != tcc_unary
      && codeclass != tcc_binary
      && codeclass != tcc_expression
      && codeclass != tcc_vl_exp
      && codeclass != tcc_comparison)
    return NULL;

  nops = TREE_OPERAND_LENGTH (expr);
  for (i = 0; i < nops; i++)
    {
      aloop = outermost_invariant_loop_expr (TREE_OPERAND (expr, i), loop);
      if (!aloop)
	return NULL;

      if (flow_loop_nested_p (max_loop, aloop))
	max_loop = aloop;
    }

  return max_loop;
}
/* DATA is a structure containing information associated with a statement
   inside LOOP.  DEF is one of the operands of this statement.

   Find the outermost loop enclosing LOOP in which the value of DEF is
   invariant and record this in the DATA->max_loop field.  If DEF itself is
   defined inside this loop as well (i.e. we need to hoist it out of the loop
   if we want to hoist the statement represented by DATA), record the
   statement in which DEF is defined to the DATA->depends list.  Additionally
   if ADD_COST is true, add the cost of the computation of DEF to DATA->cost.

   If DEF is not invariant in LOOP, return false.  Otherwise return true.  */

static bool
add_dependency (tree def, struct lim_aux_data *data, struct loop *loop,
		bool add_cost)
{
  tree def_stmt = SSA_NAME_DEF_STMT (def);
  basic_block def_bb = bb_for_stmt (def_stmt);
  struct loop *max_loop;
  struct depend *dep;

  if (!def_bb)
    return true;

  max_loop = outermost_invariant_loop (def, loop);
  if (!max_loop)
    return false;

  if (flow_loop_nested_p (data->max_loop, max_loop))
    data->max_loop = max_loop;

  if (!LIM_DATA (def_stmt))
    return true;

  if (add_cost
      /* Only add the cost if the statement defining DEF is inside LOOP,
	 i.e. if it is likely that by moving the invariants dependent
	 on it, we will be able to avoid creating a new register for
	 it (since it will be only used in these dependent invariants).  */
      && def_bb->loop_father == loop)
    data->cost += LIM_DATA (def_stmt)->cost;

  dep = XNEW (struct depend);
  dep->stmt = def_stmt;
  dep->next = data->depends;
  data->depends = dep;

  return true;
}
/* Returns an estimate of the cost of statement STMT.  TODO -- the values here
   are just ad-hoc constants.  The estimates should be based on target-specific
   values.  */

static unsigned
stmt_cost (tree stmt)
{
  tree rhs;
  unsigned cost = 1;

  /* Always try to create possibilities for unswitching.  */
  if (TREE_CODE (stmt) == COND_EXPR)
    return LIM_EXPENSIVE;

  rhs = GENERIC_TREE_OPERAND (stmt, 1);

  /* Hoisting memory references out should almost surely be a win.  */
  if (stmt_references_memory_p (stmt))
    cost += 20;

  switch (TREE_CODE (rhs))
    {
    case CALL_EXPR:
      /* We should be hoisting calls if possible.  */

      /* Unless the call is a builtin_constant_p; this always folds to a
	 constant, so moving it is useless.  Note that get_callee_fndecl may
	 return NULL for an indirect call, so guard against that.  */
      rhs = get_callee_fndecl (rhs);
      if (rhs
	  && DECL_BUILT_IN_CLASS (rhs) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (rhs) == BUILT_IN_CONSTANT_P)
	return 0;

      cost += 20;
      break;

    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
      /* Division and multiplication are usually expensive.  */
      cost += 20;
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      cost += 20;
      break;

    default:
      break;
    }

  return cost;
}
/* Determine the outermost loop to which it is possible to hoist a statement
   STMT and store it to LIM_DATA (STMT)->max_loop.  To do this we determine
   the outermost loop in which the value computed by STMT is invariant.
   If MUST_PRESERVE_EXEC is true, additionally choose such a loop that
   we preserve the fact whether STMT is executed.  It also fills other related
   information to LIM_DATA (STMT).

   The function returns false if STMT cannot be hoisted outside of the loop it
   is defined in, and true otherwise.  */

static bool
determine_max_movement (tree stmt, bool must_preserve_exec)
{
  basic_block bb = bb_for_stmt (stmt);
  struct loop *loop = bb->loop_father;
  struct loop *level;
  struct lim_aux_data *lim_data = LIM_DATA (stmt);
  tree val;
  ssa_op_iter iter;

  if (must_preserve_exec)
    level = ALWAYS_EXECUTED_IN (bb);
  else
    level = superloop_at_depth (loop, 1);
  lim_data->max_loop = level;

  FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_USE)
    if (!add_dependency (val, lim_data, loop, true))
      return false;

  FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_VIRTUAL_USES)
    if (!add_dependency (val, lim_data, loop, false))
      return false;

  lim_data->cost += stmt_cost (stmt);

  return true;
}
/* Suppose that some statement in ORIG_LOOP is hoisted to the loop LEVEL,
   and that one of the operands of this statement is computed by STMT.
   Ensure that STMT (together with all the statements that define its
   operands) is hoisted at least out of the loop LEVEL.  */

static void
set_level (tree stmt, struct loop *orig_loop, struct loop *level)
{
  struct loop *stmt_loop = bb_for_stmt (stmt)->loop_father;
  struct depend *dep;

  stmt_loop = find_common_loop (orig_loop, stmt_loop);
  if (LIM_DATA (stmt) && LIM_DATA (stmt)->tgt_loop)
    stmt_loop = find_common_loop (stmt_loop,
				  loop_outer (LIM_DATA (stmt)->tgt_loop));
  if (flow_loop_nested_p (stmt_loop, level))
    return;

  gcc_assert (LIM_DATA (stmt));
  gcc_assert (level == LIM_DATA (stmt)->max_loop
	      || flow_loop_nested_p (LIM_DATA (stmt)->max_loop, level));

  LIM_DATA (stmt)->tgt_loop = level;
  for (dep = LIM_DATA (stmt)->depends; dep; dep = dep->next)
    set_level (dep->stmt, orig_loop, level);
}
/* Determines the outermost loop out of which we want to hoist the statement
   STMT.  For now we choose the outermost possible loop.  TODO -- use profiling
   information to set it more sanely.  */

static void
set_profitable_level (tree stmt)
{
  set_level (stmt, bb_for_stmt (stmt)->loop_father, LIM_DATA (stmt)->max_loop);
}
/* Returns true if STMT contains a call that may have side effects; returns
   false for non-call statements and for pure or const calls.  */

static bool
nonpure_call_p (tree stmt)
{
  tree call = get_call_expr_in (stmt);

  if (!call)
    return false;

  return TREE_SIDE_EFFECTS (call) != 0;
}
/* Releases the memory occupied by DATA.  */

static void
free_lim_aux_data (struct lim_aux_data *data)
{
  struct depend *dep, *next;

  for (dep = data->depends; dep; dep = next)
    {
      next = dep->next;
      free (dep);
    }
  free (data);
}
/* Rewrite a/b to a*(1/b).  Return the invariant stmt to process.  */

static tree
rewrite_reciprocal (block_stmt_iterator *bsi)
{
  tree stmt, lhs, rhs, stmt1, stmt2, var, name, tmp;

  stmt = bsi_stmt (*bsi);
  lhs = GENERIC_TREE_OPERAND (stmt, 0);
  rhs = GENERIC_TREE_OPERAND (stmt, 1);

  /* stmt must be a GIMPLE_MODIFY_STMT.  */
  var = create_tmp_var (TREE_TYPE (rhs), "reciptmp");
  add_referenced_var (var);

  tmp = build2 (RDIV_EXPR, TREE_TYPE (rhs),
		build_real (TREE_TYPE (rhs), dconst1),
		TREE_OPERAND (rhs, 1));
  stmt1 = build_gimple_modify_stmt (var, tmp);
  name = make_ssa_name (var, stmt1);
  GIMPLE_STMT_OPERAND (stmt1, 0) = name;
  tmp = build2 (MULT_EXPR, TREE_TYPE (rhs),
		name, TREE_OPERAND (rhs, 0));
  stmt2 = build_gimple_modify_stmt (lhs, tmp);

  /* Replace division stmt with reciprocal and multiply stmts.
     The multiply stmt is not invariant, so update iterator
     and avoid rescanning.  */
  bsi_replace (bsi, stmt1, true);
  bsi_insert_after (bsi, stmt2, BSI_NEW_STMT);
  SSA_NAME_DEF_STMT (lhs) = stmt2;

  /* Continue processing with invariant reciprocal statement.  */
  return stmt1;
}
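
/* For illustration only: inside a loop whose divisor B is invariant,

     x = a / b;

   becomes (the temporary name is whatever create_tmp_var picks, based
   on "reciptmp")

     reciptmp = 1.0 / b;	<- invariant, can be hoisted
     x = a * reciptmp;		<- stays in the loop  */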
/* Check if the pattern at *BSI is a bittest of the form
   (A >> B) & 1 != 0 and in this case rewrite it to A & (1 << B) != 0.  */

static tree
rewrite_bittest (block_stmt_iterator *bsi)
{
  tree stmt, lhs, rhs, var, name, use_stmt, stmt1, stmt2, t;
  use_operand_p use;

  stmt = bsi_stmt (*bsi);
  lhs = GENERIC_TREE_OPERAND (stmt, 0);
  rhs = GENERIC_TREE_OPERAND (stmt, 1);

  /* Verify that the single use of lhs is a comparison against zero.  */
  if (TREE_CODE (lhs) != SSA_NAME
      || !single_imm_use (lhs, &use, &use_stmt)
      || TREE_CODE (use_stmt) != COND_EXPR)
    return stmt;
  t = COND_EXPR_COND (use_stmt);
  if (TREE_OPERAND (t, 0) != lhs
      || (TREE_CODE (t) != NE_EXPR
	  && TREE_CODE (t) != EQ_EXPR)
      || !integer_zerop (TREE_OPERAND (t, 1)))
    return stmt;

  /* Get at the operands of the shift.  The rhs is TMP1 & 1.  */
  stmt1 = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));
  if (TREE_CODE (stmt1) != GIMPLE_MODIFY_STMT)
    return stmt;

  /* There is a conversion in between possibly inserted by fold.  */
  t = GIMPLE_STMT_OPERAND (stmt1, 1);
  if (TREE_CODE (t) == NOP_EXPR
      || TREE_CODE (t) == CONVERT_EXPR)
    {
      t = TREE_OPERAND (t, 0);
      if (TREE_CODE (t) != SSA_NAME
	  || !has_single_use (t))
	return stmt;
      stmt1 = SSA_NAME_DEF_STMT (t);
      if (TREE_CODE (stmt1) != GIMPLE_MODIFY_STMT)
	return stmt;
      t = GIMPLE_STMT_OPERAND (stmt1, 1);
    }

  /* Verify that B is loop invariant but A is not.  Verify that with
     all the stmt walking we are still in the same loop.  */
  if (TREE_CODE (t) == RSHIFT_EXPR
      && loop_containing_stmt (stmt1) == loop_containing_stmt (stmt)
      && outermost_invariant_loop_expr (TREE_OPERAND (t, 1),
					loop_containing_stmt (stmt1)) != NULL
      && outermost_invariant_loop_expr (TREE_OPERAND (t, 0),
					loop_containing_stmt (stmt1)) == NULL)
    {
      tree a = TREE_OPERAND (t, 0);
      tree b = TREE_OPERAND (t, 1);

      /* 1 << B */
      var = create_tmp_var (TREE_TYPE (a), "shifttmp");
      add_referenced_var (var);
      t = fold_build2 (LSHIFT_EXPR, TREE_TYPE (a),
		       build_int_cst (TREE_TYPE (a), 1), b);
      stmt1 = build_gimple_modify_stmt (var, t);
      name = make_ssa_name (var, stmt1);
      GIMPLE_STMT_OPERAND (stmt1, 0) = name;

      /* A & (1 << B) */
      t = fold_build2 (BIT_AND_EXPR, TREE_TYPE (a), a, name);
      stmt2 = build_gimple_modify_stmt (var, t);
      name = make_ssa_name (var, stmt2);
      GIMPLE_STMT_OPERAND (stmt2, 0) = name;

      /* Replace the SSA_NAME we compare against zero.  Adjust
	 the type of zero accordingly.  */
      SET_USE (use, name);
      TREE_OPERAND (COND_EXPR_COND (use_stmt), 1)
	= build_int_cst_type (TREE_TYPE (name), 0);

      bsi_insert_before (bsi, stmt1, BSI_SAME_STMT);
      bsi_replace (bsi, stmt2, true);

      return stmt1;
    }

  return stmt;
}
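
/* For illustration only: with B loop invariant and A varying, the test

     tmp = (a >> b) & 1;
     if (tmp != 0) ...

   is rewritten to

     shifttmp = 1 << b;		<- invariant, can be hoisted
     tmp = a & shifttmp;	<- stays in the loop
     if (tmp != 0) ...  */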
/* Determine the outermost loops in which statements in basic block BB are
   invariant, and record them to the LIM_DATA associated with the statements.
   Callback for walk_dominator_tree.  */

static void
determine_invariantness_stmt (struct dom_walk_data *dw_data ATTRIBUTE_UNUSED,
			      basic_block bb)
{
  enum move_pos pos;
  block_stmt_iterator bsi;
  tree stmt, rhs;
  bool maybe_never = ALWAYS_EXECUTED_IN (bb) == NULL;
  struct loop *outermost = ALWAYS_EXECUTED_IN (bb);

  if (!loop_outer (bb->loop_father))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Basic block %d (loop %d -- depth %d):\n\n",
	     bb->index, bb->loop_father->num, loop_depth (bb->loop_father));

  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
    {
      stmt = bsi_stmt (bsi);

      pos = movement_possibility (stmt);
      if (pos == MOVE_IMPOSSIBLE)
	{
	  if (nonpure_call_p (stmt))
	    {
	      maybe_never = true;
	      outermost = NULL;
	    }
	  continue;
	}

      if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
	{
	  rhs = GIMPLE_STMT_OPERAND (stmt, 1);

	  /* If the divisor is invariant, convert a/b to a*(1/b), allowing the
	     reciprocal to be hoisted out of the loop, saving an expensive
	     divide.  */
	  if (pos == MOVE_POSSIBLE
	      && TREE_CODE (rhs) == RDIV_EXPR
	      && flag_unsafe_math_optimizations
	      && !flag_trapping_math
	      && outermost_invariant_loop_expr (TREE_OPERAND (rhs, 1),
						loop_containing_stmt (stmt)) != NULL
	      && outermost_invariant_loop_expr (rhs,
						loop_containing_stmt (stmt)) == NULL)
	    stmt = rewrite_reciprocal (&bsi);

	  /* If the shift count is invariant, convert (A >> B) & 1 to
	     A & (1 << B), allowing the bit mask to be hoisted out of the loop
	     and saving an expensive shift.  */
	  if (pos == MOVE_POSSIBLE
	      && TREE_CODE (rhs) == BIT_AND_EXPR
	      && integer_onep (TREE_OPERAND (rhs, 1))
	      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
	      && has_single_use (TREE_OPERAND (rhs, 0)))
	    stmt = rewrite_bittest (&bsi);
	}

      stmt_ann (stmt)->common.aux = xcalloc (1, sizeof (struct lim_aux_data));
      LIM_DATA (stmt)->always_executed_in = outermost;

      if (maybe_never && pos == MOVE_PRESERVE_EXECUTION)
	continue;

      if (!determine_max_movement (stmt, pos == MOVE_PRESERVE_EXECUTION))
	{
	  LIM_DATA (stmt)->max_loop = NULL;
	  continue;
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  print_generic_stmt_indented (dump_file, stmt, 0, 2);
	  fprintf (dump_file, "  invariant up to level %d, cost %d.\n\n",
		   loop_depth (LIM_DATA (stmt)->max_loop),
		   LIM_DATA (stmt)->cost);
	}

      if (LIM_DATA (stmt)->cost >= LIM_EXPENSIVE)
	set_profitable_level (stmt);
    }
}
/* For each statement determines the outermost loop in which it is invariant,
   the statements on whose motion it depends and the cost of the computation.
   This information is stored to the LIM_DATA structure associated with
   each statement.  */

static void
determine_invariantness (void)
{
  struct dom_walk_data walk_data;

  memset (&walk_data, 0, sizeof (struct dom_walk_data));
  walk_data.dom_direction = CDI_DOMINATORS;
  walk_data.before_dom_children_before_stmts = determine_invariantness_stmt;

  init_walk_dominator_tree (&walk_data);
  walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
  fini_walk_dominator_tree (&walk_data);
}
/* Hoist the statements in basic block BB out of the loops prescribed by
   data stored in LIM_DATA structures associated with each statement.  Callback
   for walk_dominator_tree.  */

static void
move_computations_stmt (struct dom_walk_data *dw_data ATTRIBUTE_UNUSED,
			basic_block bb)
{
  struct loop *level;
  block_stmt_iterator bsi;
  tree stmt;
  unsigned cost = 0;

  if (!loop_outer (bb->loop_father))
    return;

  for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
    {
      stmt = bsi_stmt (bsi);

      if (!LIM_DATA (stmt))
	{
	  bsi_next (&bsi);
	  continue;
	}

      cost = LIM_DATA (stmt)->cost;
      level = LIM_DATA (stmt)->tgt_loop;
      free_lim_aux_data (LIM_DATA (stmt));
      stmt_ann (stmt)->common.aux = NULL;

      if (!level)
	{
	  bsi_next (&bsi);
	  continue;
	}

      /* We do not really want to move conditionals out of the loop; we just
	 placed them here to force their operands to be moved if necessary.
	 Note that the aux data was cleared above, so the next iteration of
	 this loop advances the iterator through the !LIM_DATA branch.  */
      if (TREE_CODE (stmt) == COND_EXPR)
	continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Moving statement\n");
	  print_generic_stmt (dump_file, stmt, 0);
	  fprintf (dump_file, "(cost %u) out of loop %d.\n\n",
		   cost, level->num);
	}
      bsi_insert_on_edge (loop_preheader_edge (level), stmt);
      bsi_remove (&bsi, false);
    }
}
/* Hoist the statements out of the loops prescribed by data stored in
   LIM_DATA structures associated with each statement.  */

static void
move_computations (void)
{
  struct dom_walk_data walk_data;

  memset (&walk_data, 0, sizeof (struct dom_walk_data));
  walk_data.dom_direction = CDI_DOMINATORS;
  walk_data.before_dom_children_before_stmts = move_computations_stmt;

  init_walk_dominator_tree (&walk_data);
  walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
  fini_walk_dominator_tree (&walk_data);

  bsi_commit_edge_inserts ();
  if (need_ssa_update_p ())
    rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
}
/* Checks whether the statement defining variable *INDEX can be hoisted
   out of the loop passed in DATA.  Callback for for_each_index.  */

static bool
may_move_till (tree ref, tree *index, void *data)
{
  struct loop *loop = (struct loop *) data, *max_loop;

  /* If REF is an array reference, check also that the step and the lower
     bound are invariant in LOOP.  */
  if (TREE_CODE (ref) == ARRAY_REF)
    {
      tree step = array_ref_element_size (ref);
      tree lbound = array_ref_low_bound (ref);

      max_loop = outermost_invariant_loop_expr (step, loop);
      if (!max_loop)
	return false;

      max_loop = outermost_invariant_loop_expr (lbound, loop);
      if (!max_loop)
	return false;
    }

  max_loop = outermost_invariant_loop (*index, loop);
  if (!max_loop)
    return false;

  return true;
}
/* Forces statements defining (invariant) SSA names in expression EXPR to be
   moved out of the LOOP.  ORIG_LOOP is the loop in which EXPR is used.  */

static void
force_move_till_expr (tree expr, struct loop *orig_loop, struct loop *loop)
{
  enum tree_code_class codeclass = TREE_CODE_CLASS (TREE_CODE (expr));
  unsigned i, nops;

  if (TREE_CODE (expr) == SSA_NAME)
    {
      tree stmt = SSA_NAME_DEF_STMT (expr);
      if (IS_EMPTY_STMT (stmt))
	return;

      set_level (stmt, orig_loop, loop);
      return;
    }

  if (codeclass != tcc_unary
      && codeclass != tcc_binary
      && codeclass != tcc_expression
      && codeclass != tcc_vl_exp
      && codeclass != tcc_comparison)
    return;

  nops = TREE_OPERAND_LENGTH (expr);
  for (i = 0; i < nops; i++)
    force_move_till_expr (TREE_OPERAND (expr, i), orig_loop, loop);
}
/* Forces statements defining invariants in REF (and *INDEX) to be moved out of
   the LOOP.  The reference REF is used in the loop ORIG_LOOP.  Callback for
   for_each_index.  */

struct fmt_data
{
  struct loop *loop;
  struct loop *orig_loop;
};

static bool
force_move_till (tree ref, tree *index, void *data)
{
  tree stmt;
  struct fmt_data *fmt_data = (struct fmt_data *) data;

  if (TREE_CODE (ref) == ARRAY_REF)
    {
      tree step = array_ref_element_size (ref);
      tree lbound = array_ref_low_bound (ref);

      force_move_till_expr (step, fmt_data->orig_loop, fmt_data->loop);
      force_move_till_expr (lbound, fmt_data->orig_loop, fmt_data->loop);
    }

  if (TREE_CODE (*index) != SSA_NAME)
    return true;

  stmt = SSA_NAME_DEF_STMT (*index);
  if (IS_EMPTY_STMT (stmt))
    return true;

  set_level (stmt, fmt_data->orig_loop, fmt_data->loop);

  return true;
}
/* Records memory reference location *REF to the list MEM_REFS.  The reference
   occurs in statement STMT.  */

static void
record_mem_ref_loc (struct mem_ref_loc **mem_refs, tree stmt, tree *ref)
{
  struct mem_ref_loc *aref = XNEW (struct mem_ref_loc);

  aref->stmt = stmt;
  aref->ref = ref;

  aref->next = *mem_refs;
  *mem_refs = aref;
}
/* Releases the list of memory reference locations MEM_REFS.  */

static void
free_mem_ref_locs (struct mem_ref_loc *mem_refs)
{
  struct mem_ref_loc *act;

  while (mem_refs)
    {
      act = mem_refs;
      mem_refs = mem_refs->next;
      free (act);
    }
}
/* Rewrites the memory references in the list MEM_REFS to use variable
   TMP_VAR.  */

static void
rewrite_mem_refs (tree tmp_var, struct mem_ref_loc *mem_refs)
{
  tree var;
  ssa_op_iter iter;

  for (; mem_refs; mem_refs = mem_refs->next)
    {
      FOR_EACH_SSA_TREE_OPERAND (var, mem_refs->stmt, iter, SSA_OP_ALL_VIRTUALS)
	mark_sym_for_renaming (SSA_NAME_VAR (var));

      *mem_refs->ref = tmp_var;
      update_stmt (mem_refs->stmt);
    }
}
/* The name and the length of the currently generated variable
   for lsm.  */
#define MAX_LSM_NAME_LENGTH 40
static char lsm_tmp_name[MAX_LSM_NAME_LENGTH + 1];
static int lsm_tmp_name_length;

/* Adds S to lsm_tmp_name.  */

static void
lsm_tmp_name_add (const char *s)
{
  int l = strlen (s) + lsm_tmp_name_length;
  if (l > MAX_LSM_NAME_LENGTH)
    return;

  strcpy (lsm_tmp_name + lsm_tmp_name_length, s);
  lsm_tmp_name_length = l;
}
/* Stores the name for the temporary variable that replaces REF to
   lsm_tmp_name.  */

static void
gen_lsm_tmp_name (tree ref)
{
  const char *name;

  switch (TREE_CODE (ref))
    {
    case MISALIGNED_INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case INDIRECT_REF:
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
      lsm_tmp_name_add ("_");
      break;

    case BIT_FIELD_REF:
    case VIEW_CONVERT_EXPR:
    case ARRAY_RANGE_REF:
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
      break;

    case REALPART_EXPR:
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
      lsm_tmp_name_add ("_RE");
      break;

    case IMAGPART_EXPR:
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
      lsm_tmp_name_add ("_IM");
      break;

    case COMPONENT_REF:
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
      lsm_tmp_name_add ("_");
      name = get_name (TREE_OPERAND (ref, 1));
      if (!name)
	name = "F";
      lsm_tmp_name_add ("_");
      lsm_tmp_name_add (name);
      break;

    case ARRAY_REF:
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
      lsm_tmp_name_add ("_I");
      break;

    case SSA_NAME:
      ref = SSA_NAME_VAR (ref);
      /* Fallthru.  */

    case VAR_DECL:
    case PARM_DECL:
      name = get_name (ref);
      if (!name)
	name = "D";
      lsm_tmp_name_add (name);
      break;

    case STRING_CST:
      lsm_tmp_name_add ("S");
      break;

    case RESULT_DECL:
      lsm_tmp_name_add ("R");
      break;

    default:
      gcc_unreachable ();
    }
}
/* Determines the name for the temporary variable that replaces REF.
   The name is accumulated into the lsm_tmp_name variable.
   N is added to the name of the temporary.  */

char *
get_lsm_tmp_name (tree ref, unsigned n)
{
  char ns[2];

  lsm_tmp_name_length = 0;
  gen_lsm_tmp_name (ref);
  lsm_tmp_name_add ("_lsm");
  if (n < 10)
    {
      ns[0] = '0' + n;
      ns[1] = 0;
      lsm_tmp_name_add (ns);
    }
  return lsm_tmp_name;
}
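
/* For illustration only: for a simple variable G, get_lsm_tmp_name (g, 0)
   yields "g_lsm0", and for an array access A[I] it yields "a_I_lsm0".
   Values of N of 10 or more (such as the ~0 passed from schedule_sm below)
   simply leave the numeric suffix off.  */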
/* Records a request for store motion of memory reference REF from LOOP.
   MEM_REFS is the list of occurrences of the reference REF inside LOOP;
   these references are rewritten by a new temporary variable.
   Exits from the LOOP are stored in EXITS.  The initialization of the
   temporary variable is put into the preheader of the loop, and assignments
   to the reference from the temporary variable are emitted on the exits.  */

static void
schedule_sm (struct loop *loop, VEC (edge, heap) *exits, tree ref,
	     struct mem_ref_loc *mem_refs)
{
  struct mem_ref_loc *aref;
  tree tmp_var;
  unsigned i;
  tree load, store;
  struct fmt_data fmt_data;
  edge ex;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Executing store motion of ");
      print_generic_expr (dump_file, ref, 0);
      fprintf (dump_file, " from loop %d\n", loop->num);
    }

  tmp_var = make_rename_temp (TREE_TYPE (ref),
			      get_lsm_tmp_name (ref, ~0));

  fmt_data.loop = loop;
  fmt_data.orig_loop = loop;
  for_each_index (&ref, force_move_till, &fmt_data);

  rewrite_mem_refs (tmp_var, mem_refs);
  for (aref = mem_refs; aref; aref = aref->next)
    if (LIM_DATA (aref->stmt))
      LIM_DATA (aref->stmt)->sm_done = true;

  /* Emit the load & stores.  */
  load = build_gimple_modify_stmt (tmp_var, ref);
  get_stmt_ann (load)->common.aux = xcalloc (1, sizeof (struct lim_aux_data));
  LIM_DATA (load)->max_loop = loop;
  LIM_DATA (load)->tgt_loop = loop;

  /* Put this into the latch, so that we are sure it will be processed after
     all dependencies.  */
  bsi_insert_on_edge (loop_latch_edge (loop), load);

  for (i = 0; VEC_iterate (edge, exits, i, ex); i++)
    {
      store = build_gimple_modify_stmt (unshare_expr (ref), tmp_var);
      bsi_insert_on_edge (ex, store);
    }
}
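
/* For illustration only: the net effect of store motion on a loop that
   stores to a loop-invariant location *P is to rewrite

     while (...)
       {
	 ...
	 *p = x;
       }

   into (TMP being the temporary created above; the load ends up in the
   preheader once move_computations runs)

     tmp = *p;
     while (...)
       {
	 ...
	 tmp = x;
       }
     *p = tmp;  */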
/* Check whether memory reference REF can be hoisted out of the LOOP.  If this
   is true, prepare the statements that load the value of the memory reference
   to a temporary variable in the loop preheader, store it back on the loop
   exits, and replace all the references inside LOOP by this temporary variable.
   EXITS is the list of exits of LOOP.  CLOBBERED_VOPS is the bitmap of virtual
   operands that are clobbered by a call or accessed through multiple references
   in the loop.  */

static void
determine_lsm_ref (struct loop *loop, VEC (edge, heap) *exits,
		   bitmap clobbered_vops, struct mem_ref *ref)
{
  struct mem_ref_loc *aref;
  struct loop *must_exec;

  /* In case the memory is not stored to, there is nothing for SM to do.  */
  if (!ref->is_stored)
    return;

  /* If the reference is aliased with any different ref, or killed by a call
     in the function, then fail.  */
  if (bitmap_intersect_p (ref->vops, clobbered_vops))
    return;

  if (tree_could_trap_p (ref->mem))
    {
      /* If the memory access is unsafe (i.e. it might trap), ensure that one
	 of the statements in which it occurs is always executed when the loop
	 is entered.  This way we know that by moving the load from the
	 reference out of the loop we will not cause an error that would not
	 occur otherwise.

	 TODO -- in fact we would like to check for anticipability of the
	 reference, i.e. that on each path from loop entry to loop exit at
	 least one of the statements containing the memory reference is
	 executed.  */

      for (aref = ref->locs; aref; aref = aref->next)
	{
	  if (!LIM_DATA (aref->stmt))
	    continue;

	  must_exec = LIM_DATA (aref->stmt)->always_executed_in;
	  if (!must_exec)
	    continue;

	  if (must_exec == loop
	      || flow_loop_nested_p (must_exec, loop))
	    break;
	}

      if (!aref)
	return;
    }

  schedule_sm (loop, exits, ref->mem, ref->locs);
}
/* Hoists memory references MEM_REFS out of LOOP.  CLOBBERED_VOPS is the list
   of vops clobbered by a call in the loop or accessed by multiple memory
   references.  EXITS is the list of exit edges of the LOOP.  */

static void
hoist_memory_references (struct loop *loop, struct mem_ref *mem_refs,
			 bitmap clobbered_vops, VEC (edge, heap) *exits)
{
  struct mem_ref *ref;

  for (ref = mem_refs; ref; ref = ref->next)
    determine_lsm_ref (loop, exits, clobbered_vops, ref);
}
/* Checks whether LOOP (with exits stored in the EXITS array) is suitable
   for a store motion optimization (i.e. whether we can insert statements
   on its exits).  */

static bool
loop_suitable_for_sm (struct loop *loop ATTRIBUTE_UNUSED,
		      VEC (edge, heap) *exits)
{
  unsigned i;
  edge ex;

  for (i = 0; VEC_iterate (edge, exits, i, ex); i++)
    if (ex->flags & EDGE_ABNORMAL)
      return false;

  return true;
}
/* A hash function for struct mem_ref object OBJ.  */

static hashval_t
memref_hash (const void *obj)
{
  return ((const struct mem_ref *) obj)->hash;
}

/* An equality function for struct mem_ref object OBJ1 with
   memory reference OBJ2.  */

static int
memref_eq (const void *obj1, const void *obj2)
{
  const struct mem_ref *const mem1 = (const struct mem_ref *) obj1;

  return operand_equal_p (mem1->mem, (const_tree) obj2, 0);
}
/* Gathers memory references in statement STMT in LOOP, storing the
   information about them in the MEM_REFS hash table.  Note vops accessed
   through unrecognized statements in CLOBBERED_VOPS.  The newly created
   references are also stored to MEM_REF_LIST.  */

static void
gather_mem_refs_stmt (struct loop *loop, htab_t mem_refs,
		      bitmap clobbered_vops, tree stmt,
		      struct mem_ref **mem_ref_list)
{
  tree *lhs, *rhs, *mem = NULL;
  hashval_t hash;
  PTR *slot;
  struct mem_ref *ref = NULL;
  ssa_op_iter oi;
  tree vname;
  bool is_stored;

  if (ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
    return;

  /* Recognize MEM = (SSA_NAME | invariant) and SSA_NAME = MEM patterns.  */
  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
    goto fail;

  lhs = &GIMPLE_STMT_OPERAND (stmt, 0);
  rhs = &GIMPLE_STMT_OPERAND (stmt, 1);

  if (TREE_CODE (*lhs) == SSA_NAME)
    {
      if (!is_gimple_addressable (*rhs))
	goto fail;

      mem = rhs;
      is_stored = false;
    }
  else if (TREE_CODE (*rhs) == SSA_NAME
	   || is_gimple_min_invariant (*rhs))
    {
      mem = lhs;
      is_stored = true;
    }
  else
    goto fail;

  /* If we cannot create an SSA name for the result, give up.  */
  if (!is_gimple_reg_type (TREE_TYPE (*mem))
      || TREE_THIS_VOLATILE (*mem))
    goto fail;

  /* If we cannot move the reference out of the loop, fail.  */
  if (!for_each_index (mem, may_move_till, loop))
    goto fail;

  hash = iterative_hash_expr (*mem, 0);
  slot = htab_find_slot_with_hash (mem_refs, *mem, hash, INSERT);

  if (*slot)
    ref = (struct mem_ref *) *slot;
  else
    {
      ref = XNEW (struct mem_ref);
      ref->mem = *mem;
      ref->hash = hash;
      ref->locs = NULL;
      ref->is_stored = false;
      ref->vops = BITMAP_ALLOC (NULL);
      ref->next = *mem_ref_list;
      *mem_ref_list = ref;
      *slot = ref;
    }
  ref->is_stored |= is_stored;

  FOR_EACH_SSA_TREE_OPERAND (vname, stmt, oi, SSA_OP_VIRTUAL_USES)
    bitmap_set_bit (ref->vops, DECL_UID (SSA_NAME_VAR (vname)));
  record_mem_ref_loc (&ref->locs, stmt, mem);
  return;

fail:
  FOR_EACH_SSA_TREE_OPERAND (vname, stmt, oi, SSA_OP_VIRTUAL_USES)
    bitmap_set_bit (clobbered_vops, DECL_UID (SSA_NAME_VAR (vname)));
}
/* Gathers memory references in LOOP.  Notes vops accessed through unrecognized
   statements in CLOBBERED_VOPS.  The list of the references found by
   the function is returned.  */

static struct mem_ref *
gather_mem_refs (struct loop *loop, bitmap clobbered_vops)
{
  basic_block *body = get_loop_body (loop);
  block_stmt_iterator bsi;
  unsigned i;
  struct mem_ref *mem_ref_list = NULL;
  htab_t mem_refs = htab_create (100, memref_hash, memref_eq, NULL);

  for (i = 0; i < loop->num_nodes; i++)
    {
      for (bsi = bsi_start (body[i]); !bsi_end_p (bsi); bsi_next (&bsi))
	gather_mem_refs_stmt (loop, mem_refs, clobbered_vops, bsi_stmt (bsi),
			      &mem_ref_list);
    }

  free (body);

  htab_delete (mem_refs);
  return mem_ref_list;
}
/* Finds the vops accessed by more than one of the memory references described
   in MEM_REFS and marks them in CLOBBERED_VOPS.  */

static void
find_more_ref_vops (struct mem_ref *mem_refs, bitmap clobbered_vops)
{
  bitmap_head tmp, all_vops;
  struct mem_ref *ref;

  bitmap_initialize (&tmp, &bitmap_default_obstack);
  bitmap_initialize (&all_vops, &bitmap_default_obstack);

  for (ref = mem_refs; ref; ref = ref->next)
    {
      /* The vops that are already in all_vops are accessed by more than
	 one memory reference.  */
      bitmap_and (&tmp, &all_vops, ref->vops);
      bitmap_ior_into (clobbered_vops, &tmp);
      bitmap_clear (&tmp);

      bitmap_ior_into (&all_vops, ref->vops);
    }

  bitmap_clear (&all_vops);
}
/* Releases the memory occupied by REF.  */

static void
free_mem_ref (struct mem_ref *ref)
{
  free_mem_ref_locs (ref->locs);
  BITMAP_FREE (ref->vops);
  free (ref);
}

/* Releases the memory occupied by REFS.  */

static void
free_mem_refs (struct mem_ref *refs)
{
  struct mem_ref *ref, *next;

  for (ref = refs; ref; ref = next)
    {
      next = ref->next;
      free_mem_ref (ref);
    }
}
/* Try to perform store motion for all memory references modified inside
   LOOP.  */

static void
determine_lsm_loop (struct loop *loop)
{
  VEC (edge, heap) *exits = get_loop_exit_edges (loop);
  bitmap clobbered_vops;
  struct mem_ref *mem_refs;

  if (!loop_suitable_for_sm (loop, exits))
    {
      VEC_free (edge, heap, exits);
      return;
    }

  /* Find the memory references in LOOP.  */
  clobbered_vops = BITMAP_ALLOC (NULL);
  mem_refs = gather_mem_refs (loop, clobbered_vops);

  /* Find the vops that are used for more than one reference.  */
  find_more_ref_vops (mem_refs, clobbered_vops);

  /* Hoist all suitable memory references.  */
  hoist_memory_references (loop, mem_refs, clobbered_vops, exits);

  free_mem_refs (mem_refs);
  VEC_free (edge, heap, exits);
  BITMAP_FREE (clobbered_vops);
}
/* Try to perform store motion for all memory references modified inside
   loops.  */

static void
determine_lsm (void)
{
  struct loop *loop;
  loop_iterator li;

  /* Process the loops starting from the outermost one, performing the store
     motion where suitable.  */

  FOR_EACH_LOOP (li, loop, 0)
    {
      determine_lsm_loop (loop);
    }

  bsi_commit_edge_inserts ();
}
/* Fills the ALWAYS_EXECUTED_IN information for basic blocks of LOOP, i.e.
   for each such basic block BB records the outermost loop for which execution
   of its header implies execution of BB.  CONTAINS_CALL is the bitmap of
   blocks that contain a nonpure call.  */

static void
fill_always_executed_in (struct loop *loop, sbitmap contains_call)
{
  basic_block bb = NULL, *bbs, last = NULL;
  unsigned i;
  edge e;
  struct loop *inn_loop = loop;

  if (!loop->header->aux)
    {
      bbs = get_loop_body_in_dom_order (loop);

      for (i = 0; i < loop->num_nodes; i++)
	{
	  edge_iterator ei;
	  bb = bbs[i];

	  if (dominated_by_p (CDI_DOMINATORS, loop->latch, bb))
	    last = bb;

	  if (TEST_BIT (contains_call, bb->index))
	    break;

	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (!flow_bb_inside_loop_p (loop, e->dest))
	      break;
	  if (e)
	    break;

	  /* A loop might be infinite (TODO use simple loop analysis
	     to disprove this if possible).  */
	  if (bb->flags & BB_IRREDUCIBLE_LOOP)
	    break;

	  if (!flow_bb_inside_loop_p (inn_loop, bb))
	    break;

	  if (bb->loop_father->header == bb)
	    {
	      if (!dominated_by_p (CDI_DOMINATORS, loop->latch, bb))
		break;

	      /* In a loop that is always entered we may proceed anyway.
		 But record that we entered it and stop once we leave it.  */
	      inn_loop = bb->loop_father;
	    }
	}

      while (1)
	{
	  last->aux = loop;
	  if (last == loop->header)
	    break;
	  last = get_immediate_dominator (CDI_DOMINATORS, last);
	}

      free (bbs);
    }

  for (loop = loop->inner; loop; loop = loop->next)
    fill_always_executed_in (loop, contains_call);
}
/* Compute the global information needed by the loop invariant motion pass.  */

static void
tree_ssa_lim_initialize (void)
{
  sbitmap contains_call = sbitmap_alloc (last_basic_block);
  block_stmt_iterator bsi;
  struct loop *loop;
  basic_block bb;

  sbitmap_zero (contains_call);
  FOR_EACH_BB (bb)
    {
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	{
	  if (nonpure_call_p (bsi_stmt (bsi)))
	    break;
	}

      if (!bsi_end_p (bsi))
	SET_BIT (contains_call, bb->index);
    }

  for (loop = current_loops->tree_root->inner; loop; loop = loop->next)
    fill_always_executed_in (loop, contains_call);

  sbitmap_free (contains_call);
}
/* Cleans up after the invariant motion pass.  */

static void
tree_ssa_lim_finalize (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      bb->aux = NULL;
    }
}
/* Moves invariants from loops.  Only "expensive" invariants are moved out --
   i.e. those that are likely to be a win regardless of the register
   pressure.  */

void
tree_ssa_lim (void)
{
  tree_ssa_lim_initialize ();

  /* For each statement determine the outermost loop in which it is
     invariant and the cost of computing the invariant.  */
  determine_invariantness ();

  /* For each memory reference determine whether it is possible to hoist it
     out of the loop.  Force the necessary invariants to be moved out of the
     loops as well.  */
  determine_lsm ();

  /* Move the expressions that are expensive enough.  */
  move_computations ();

  tree_ssa_lim_finalize ();
}