/* Loop invariant motion.
   Copyright (C) 2003, 2004, 2005, 2006, 2007 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "timevar.h"
#include "cfgloop.h"
#include "domwalk.h"
#include "params.h"
#include "tree-pass.h"
#include "flags.h"
#include "real.h"
#include "hashtab.h"
/* TODO:  Support for predicated code motion.  I.e.

   while (1)
     {
       if (cond)
	 {
	   a = inv;
	   something;
	 }
     }

   Where COND and INV are invariants, but evaluating INV may trap or be
   invalid for some other reason if !COND.  This may be transformed to

   if (cond)
     a = inv;
   while (1)
     {
       if (cond)
	 something;
     }  */
/* A type for the list of statements that have to be moved in order to be able
   to hoist an invariant computation.  */

struct depend
{
  tree stmt;
  struct depend *next;
};
/* The auxiliary data kept for each statement.  */

struct lim_aux_data
{
  struct loop *max_loop;	/* The outermost loop in which the statement
				   is invariant.  */

  struct loop *tgt_loop;	/* The loop out of which we want to move the
				   invariant.  */

  struct loop *always_executed_in;
				/* The outermost loop for which we are sure
				   the statement is executed if the loop
				   is entered.  */

  bool sm_done;			/* True iff the store motion for a memory
				   reference in the statement has already
				   been executed.  */

  unsigned cost;		/* Cost of the computation performed by the
				   statement.  */

  struct depend *depends;	/* List of statements that must also be hoisted
				   out of the loop when this statement is
				   hoisted; i.e. those that define the operands
				   of the statement and are inside of the
				   MAX_LOOP loop.  */
};
#define LIM_DATA(STMT) (TREE_CODE (STMT) == PHI_NODE \
			? NULL \
			: (struct lim_aux_data *) (stmt_ann (STMT)->common.aux))
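
/* Note that for PHI nodes the macro above yields NULL; only real
   statements carry a struct lim_aux_data in the aux field of their
   annotation.  */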
/* Description of a memory reference location for store motion.  */

struct mem_ref_loc
{
  tree *ref;			/* The reference itself.  */
  tree stmt;			/* The statement in which it occurs.  */
  struct mem_ref_loc *next;	/* Next use in the chain.  */
};
/* Description of a memory reference for store motion.  */

struct mem_ref
{
  tree mem;			/* The memory itself.  */
  hashval_t hash;		/* Its hash value.  */
  bool is_stored;		/* True if there is a store to the location
				   in the loop.  */
  struct mem_ref_loc *locs;	/* The locations where it is found.  */
  bitmap vops;			/* Vops corresponding to this memory
				   location.  */
  struct mem_ref *next;		/* Next memory reference in the list.
				   Memory references are stored in a hash
				   table, but the hash function depends
				   on values of pointers.  Thus we cannot use
				   htab_traverse, since then we would get
				   miscompares during bootstrap (although the
				   produced code would be correct).  */
};
/* Minimum cost of an expensive expression.  */
#define LIM_EXPENSIVE ((unsigned) PARAM_VALUE (PARAM_LIM_EXPENSIVE))

/* The outermost loop for which execution of the header guarantees that the
   block will be executed.  */
#define ALWAYS_EXECUTED_IN(BB) ((struct loop *) (BB)->aux)
/* Calls CBCK for each index in memory reference ADDR_P.  There are two
   kinds of situations handled; in each of these cases, the memory reference
   and DATA are passed to the callback:

   Access to an array: ARRAY_{RANGE_}REF (base, index).  In this case we also
   pass the pointer to the index to the callback.

   Pointer dereference: INDIRECT_REF (addr).  In this case we also pass the
   pointer to addr to the callback.

   If the callback returns false, the whole search stops and false is returned.
   Otherwise the function returns true after traversing through the whole
   reference *ADDR_P.  */
bool
for_each_index (tree *addr_p, bool (*cbck) (tree, tree *, void *), void *data)
{
  tree *nxt, *idx;

  for (; ; addr_p = nxt)
    {
      switch (TREE_CODE (*addr_p))
	{
	case SSA_NAME:
	  return cbck (*addr_p, addr_p, data);

	case MISALIGNED_INDIRECT_REF:
	case ALIGN_INDIRECT_REF:
	case INDIRECT_REF:
	  nxt = &TREE_OPERAND (*addr_p, 0);
	  return cbck (*addr_p, nxt, data);

	case BIT_FIELD_REF:
	case VIEW_CONVERT_EXPR:
	case REALPART_EXPR:
	case IMAGPART_EXPR:
	  nxt = &TREE_OPERAND (*addr_p, 0);
	  break;

	case COMPONENT_REF:
	  /* If the component has a varying offset, it behaves like an index
	     as well.  */
	  idx = &TREE_OPERAND (*addr_p, 2);
	  if (*idx
	      && !cbck (*addr_p, idx, data))
	    return false;

	  nxt = &TREE_OPERAND (*addr_p, 0);
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  nxt = &TREE_OPERAND (*addr_p, 0);
	  if (!cbck (*addr_p, &TREE_OPERAND (*addr_p, 1), data))
	    return false;
	  break;

	case VAR_DECL:
	case PARM_DECL:
	case STRING_CST:
	case RESULT_DECL:
	case VECTOR_CST:
	case COMPLEX_CST:
	case INTEGER_CST:
	case REAL_CST:
	case FIXED_CST:
	  return true;

	case TARGET_MEM_REF:
	  idx = &TMR_BASE (*addr_p);
	  if (*idx
	      && !cbck (*addr_p, idx, data))
	    return false;
	  idx = &TMR_INDEX (*addr_p);
	  if (*idx
	      && !cbck (*addr_p, idx, data))
	    return false;
	  return true;

	default:
	  gcc_unreachable ();
	}
    }
}
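
/* For example, for a reference a[i].f the walk above visits the
   COMPONENT_REF (invoking CBCK on its variable offset operand, if any),
   then the ARRAY_REF (invoking CBCK on &i), and finally stops at the
   VAR_DECL a, returning true.  */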
/* If it is possible to hoist the statement STMT unconditionally,
   returns MOVE_POSSIBLE.
   If it is possible to hoist the statement STMT, but we must avoid making
   it executed if it would not be executed in the original program (e.g.
   because it may trap), return MOVE_PRESERVE_EXECUTION.
   Otherwise return MOVE_IMPOSSIBLE.  */

enum move_pos
movement_possibility (tree stmt)
{
  tree lhs, rhs;

  if (flag_unswitch_loops
      && TREE_CODE (stmt) == COND_EXPR)
    {
      /* If we perform unswitching, force the operands of the invariant
	 condition to be moved out of the loop.  */
      return MOVE_POSSIBLE;
    }

  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
    return MOVE_IMPOSSIBLE;

  if (stmt_ends_bb_p (stmt))
    return MOVE_IMPOSSIBLE;

  if (stmt_ann (stmt)->has_volatile_ops)
    return MOVE_IMPOSSIBLE;

  lhs = GIMPLE_STMT_OPERAND (stmt, 0);
  if (TREE_CODE (lhs) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
    return MOVE_IMPOSSIBLE;

  rhs = GIMPLE_STMT_OPERAND (stmt, 1);

  if (TREE_SIDE_EFFECTS (rhs))
    return MOVE_IMPOSSIBLE;

  if (TREE_CODE (lhs) != SSA_NAME
      || tree_could_trap_p (rhs))
    return MOVE_PRESERVE_EXECUTION;

  if (get_call_expr_in (stmt))
    {
      /* While a pure or const call is guaranteed to have no side effects, we
	 cannot move it arbitrarily.  Consider code like

	 char *s = something ();

	 while (1)
	   {
	     if (s)
	       t = strlen (s);
	     else
	       t = 0;
	   }

	 Here the strlen call cannot be moved out of the loop, even though
	 s is invariant.  In addition to possibly creating a call with
	 invalid arguments, moving out a function call that is not executed
	 may cause performance regressions in case the call is costly and
	 not executed at all.  */
      return MOVE_PRESERVE_EXECUTION;
    }

  return MOVE_POSSIBLE;
}
/* Suppose that operand DEF is used inside the LOOP.  Returns the outermost
   loop to which we could move the expression using DEF if it did not have
   other operands, i.e. the outermost loop enclosing LOOP in which the value
   of DEF is invariant.  */

static struct loop *
outermost_invariant_loop (tree def, struct loop *loop)
{
  tree def_stmt;
  basic_block def_bb;
  struct loop *max_loop;

  if (TREE_CODE (def) != SSA_NAME)
    return superloop_at_depth (loop, 1);

  def_stmt = SSA_NAME_DEF_STMT (def);
  def_bb = bb_for_stmt (def_stmt);
  if (!def_bb)
    return superloop_at_depth (loop, 1);

  max_loop = find_common_loop (loop, def_bb->loop_father);

  if (LIM_DATA (def_stmt) && LIM_DATA (def_stmt)->max_loop)
    max_loop = find_common_loop (max_loop,
				 loop_outer (LIM_DATA (def_stmt)->max_loop));
  if (max_loop == loop)
    return NULL;
  max_loop = superloop_at_depth (loop, loop_depth (max_loop) + 1);

  return max_loop;
}
/* Returns the outermost superloop of LOOP in which the expression EXPR is
   invariant.  */

static struct loop *
outermost_invariant_loop_expr (tree expr, struct loop *loop)
{
  enum tree_code_class codeclass = TREE_CODE_CLASS (TREE_CODE (expr));
  unsigned i, nops;
  struct loop *max_loop = superloop_at_depth (loop, 1), *aloop;

  if (TREE_CODE (expr) == SSA_NAME
      || TREE_CODE (expr) == INTEGER_CST
      || is_gimple_min_invariant (expr))
    return outermost_invariant_loop (expr, loop);

  if (codeclass != tcc_unary
      && codeclass != tcc_binary
      && codeclass != tcc_expression
      && codeclass != tcc_vl_exp
      && codeclass != tcc_comparison)
    return NULL;

  nops = TREE_OPERAND_LENGTH (expr);
  for (i = 0; i < nops; i++)
    {
      aloop = outermost_invariant_loop_expr (TREE_OPERAND (expr, i), loop);
      if (!aloop)
	return NULL;

      if (flow_loop_nested_p (max_loop, aloop))
	max_loop = aloop;
    }

  return max_loop;
}
/* DATA is a structure containing information associated with a statement
   inside LOOP.  DEF is one of the operands of this statement.

   Find the outermost loop enclosing LOOP in which the value of DEF is
   invariant and record this in the DATA->max_loop field.  If DEF itself is
   defined inside this loop as well (i.e. we need to hoist it out of the loop
   if we want to hoist the statement represented by DATA), record the
   statement in which DEF is defined in the DATA->depends list.  Additionally
   if ADD_COST is true, add the cost of the computation of DEF to DATA->cost.

   If DEF is not invariant in LOOP, return false.  Otherwise return true.  */

static bool
add_dependency (tree def, struct lim_aux_data *data, struct loop *loop,
		bool add_cost)
{
  tree def_stmt = SSA_NAME_DEF_STMT (def);
  basic_block def_bb = bb_for_stmt (def_stmt);
  struct loop *max_loop;
  struct depend *dep;

  if (!def_bb)
    return true;

  max_loop = outermost_invariant_loop (def, loop);
  if (!max_loop)
    return false;

  if (flow_loop_nested_p (data->max_loop, max_loop))
    data->max_loop = max_loop;

  if (!LIM_DATA (def_stmt))
    return true;

  if (add_cost
      /* Only add the cost if the statement defining DEF is inside LOOP,
	 i.e. if it is likely that by moving the invariants dependent
	 on it, we will be able to avoid creating a new register for
	 it (since it will be only used in these dependent invariants).  */
      && def_bb->loop_father == loop)
    data->cost += LIM_DATA (def_stmt)->cost;

  dep = XNEW (struct depend);
  dep->stmt = def_stmt;
  dep->next = data->depends;
  data->depends = dep;

  return true;
}
/* Returns an estimate for a cost of statement STMT.  TODO -- the values here
   are just ad-hoc constants.  The estimates should be based on target-specific
   values.  */

static unsigned
stmt_cost (tree stmt)
{
  tree rhs;
  unsigned cost = 1;

  /* Always try to create possibilities for unswitching.  */
  if (TREE_CODE (stmt) == COND_EXPR)
    return LIM_EXPENSIVE;

  rhs = GENERIC_TREE_OPERAND (stmt, 1);

  /* Hoisting memory references out should almost surely be a win.  */
  if (stmt_references_memory_p (stmt))
    cost += 20;

  switch (TREE_CODE (rhs))
    {
    case CALL_EXPR:
      /* We should be hoisting calls if possible.  */

      /* Unless the call is a builtin_constant_p; this always folds to a
	 constant, so moving it is useless.  */
      rhs = get_callee_fndecl (rhs);
      if (DECL_BUILT_IN_CLASS (rhs) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (rhs) == BUILT_IN_CONSTANT_P)
	return 0;

      cost += 20;
      break;

    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
      /* Division and multiplication are usually expensive.  */
      cost += 20;
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      cost += 20;
      break;

    default:
      break;
    }

  return cost;
}
/* Determine the outermost loop to which it is possible to hoist a statement
   STMT and store it to LIM_DATA (STMT)->max_loop.  To do this we determine
   the outermost loop in which the value computed by STMT is invariant.
   If MUST_PRESERVE_EXEC is true, additionally choose a loop such that
   we preserve the fact of whether STMT is executed.  The function also fills
   in other related information in LIM_DATA (STMT).

   The function returns false if STMT cannot be hoisted outside of the loop it
   is defined in, and true otherwise.  */

static bool
determine_max_movement (tree stmt, bool must_preserve_exec)
{
  basic_block bb = bb_for_stmt (stmt);
  struct loop *loop = bb->loop_father;
  struct loop *level;
  struct lim_aux_data *lim_data = LIM_DATA (stmt);
  tree val;
  ssa_op_iter iter;

  if (must_preserve_exec)
    level = ALWAYS_EXECUTED_IN (bb);
  else
    level = superloop_at_depth (loop, 1);
  lim_data->max_loop = level;

  FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_USE)
    if (!add_dependency (val, lim_data, loop, true))
      return false;

  FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_VIRTUAL_USES)
    if (!add_dependency (val, lim_data, loop, false))
      return false;

  lim_data->cost += stmt_cost (stmt);

  return true;
}
/* Suppose that some statement in ORIG_LOOP is hoisted to the loop LEVEL,
   and that one of the operands of this statement is computed by STMT.
   Ensure that STMT (together with all the statements that define its
   operands) is hoisted at least out of the loop LEVEL.  */

static void
set_level (tree stmt, struct loop *orig_loop, struct loop *level)
{
  struct loop *stmt_loop = bb_for_stmt (stmt)->loop_father;
  struct depend *dep;

  stmt_loop = find_common_loop (orig_loop, stmt_loop);
  if (LIM_DATA (stmt) && LIM_DATA (stmt)->tgt_loop)
    stmt_loop = find_common_loop (stmt_loop,
				  loop_outer (LIM_DATA (stmt)->tgt_loop));
  if (flow_loop_nested_p (stmt_loop, level))
    return;

  gcc_assert (LIM_DATA (stmt));
  gcc_assert (level == LIM_DATA (stmt)->max_loop
	      || flow_loop_nested_p (LIM_DATA (stmt)->max_loop, level));

  LIM_DATA (stmt)->tgt_loop = level;
  for (dep = LIM_DATA (stmt)->depends; dep; dep = dep->next)
    set_level (dep->stmt, orig_loop, level);
}
/* Determines the outermost loop out of which we want to hoist the statement
   STMT.  For now we choose the outermost possible loop.  TODO -- use
   profiling information to set it more sanely.  */

static void
set_profitable_level (tree stmt)
{
  set_level (stmt, bb_for_stmt (stmt)->loop_father, LIM_DATA (stmt)->max_loop);
}
/* Returns true if STMT is not a pure call.  */

static bool
nonpure_call_p (tree stmt)
{
  tree call = get_call_expr_in (stmt);

  if (!call)
    return false;

  return TREE_SIDE_EFFECTS (call) != 0;
}
/* Releases the memory occupied by DATA.  */

static void
free_lim_aux_data (struct lim_aux_data *data)
{
  struct depend *dep, *next;

  for (dep = data->depends; dep; dep = next)
    {
      next = dep->next;
      free (dep);
    }
  free (data);
}
/* Rewrite a/b to a*(1/b).  Return the invariant stmt to process.  */

static tree
rewrite_reciprocal (block_stmt_iterator *bsi)
{
  tree stmt, lhs, rhs, stmt1, stmt2, var, name, tmp;

  stmt = bsi_stmt (*bsi);
  lhs = GENERIC_TREE_OPERAND (stmt, 0);
  rhs = GENERIC_TREE_OPERAND (stmt, 1);

  /* stmt must be GIMPLE_MODIFY_STMT.  */
  var = create_tmp_var (TREE_TYPE (rhs), "reciptmp");
  add_referenced_var (var);

  tmp = build2 (RDIV_EXPR, TREE_TYPE (rhs),
		build_real (TREE_TYPE (rhs), dconst1),
		TREE_OPERAND (rhs, 1));
  stmt1 = build_gimple_modify_stmt (var, tmp);
  name = make_ssa_name (var, stmt1);
  GIMPLE_STMT_OPERAND (stmt1, 0) = name;
  tmp = build2 (MULT_EXPR, TREE_TYPE (rhs),
		name, TREE_OPERAND (rhs, 0));
  stmt2 = build_gimple_modify_stmt (lhs, tmp);

  /* Replace division stmt with reciprocal and multiply stmts.
     The multiply stmt is not invariant, so update iterator
     and avoid rescanning.  */
  bsi_replace (bsi, stmt1, true);
  bsi_insert_after (bsi, stmt2, BSI_NEW_STMT);
  SSA_NAME_DEF_STMT (lhs) = stmt2;

  /* Continue processing with invariant reciprocal statement.  */
  return stmt1;
}
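
/* For instance, "x = a / b" with invariant b becomes
   "reciptmp = 1 / b; x = a * reciptmp", so that the division can be
   hoisted while only the (cheaper) multiplication remains in the loop.  */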
/* Check if the pattern at *BSI is a bittest of the form
   (A >> B) & 1 != 0 and in this case rewrite it to A & (1 << B) != 0.  */

static tree
rewrite_bittest (block_stmt_iterator *bsi)
{
  tree stmt, lhs, rhs, var, name, use_stmt, stmt1, stmt2, t;
  use_operand_p use;

  stmt = bsi_stmt (*bsi);
  lhs = GENERIC_TREE_OPERAND (stmt, 0);
  rhs = GENERIC_TREE_OPERAND (stmt, 1);

  /* Verify that the single use of lhs is a comparison against zero.  */
  if (TREE_CODE (lhs) != SSA_NAME
      || !single_imm_use (lhs, &use, &use_stmt)
      || TREE_CODE (use_stmt) != COND_EXPR)
    return stmt;
  t = COND_EXPR_COND (use_stmt);
  if (TREE_OPERAND (t, 0) != lhs
      || (TREE_CODE (t) != NE_EXPR
	  && TREE_CODE (t) != EQ_EXPR)
      || !integer_zerop (TREE_OPERAND (t, 1)))
    return stmt;

  /* Get at the operands of the shift.  The rhs is TMP1 & 1.  */
  stmt1 = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));
  if (TREE_CODE (stmt1) != GIMPLE_MODIFY_STMT)
    return stmt;

  /* There is a conversion in between possibly inserted by fold.  */
  t = GIMPLE_STMT_OPERAND (stmt1, 1);
  if (TREE_CODE (t) == NOP_EXPR
      || TREE_CODE (t) == CONVERT_EXPR)
    {
      t = TREE_OPERAND (t, 0);
      if (TREE_CODE (t) != SSA_NAME
	  || !has_single_use (t))
	return stmt;
      stmt1 = SSA_NAME_DEF_STMT (t);
      if (TREE_CODE (stmt1) != GIMPLE_MODIFY_STMT)
	return stmt;
      t = GIMPLE_STMT_OPERAND (stmt1, 1);
    }

  /* Verify that B is loop invariant but A is not.  Verify that with
     all the stmt walking we are still in the same loop.  */
  if (TREE_CODE (t) == RSHIFT_EXPR
      && loop_containing_stmt (stmt1) == loop_containing_stmt (stmt)
      && outermost_invariant_loop_expr (TREE_OPERAND (t, 1),
					loop_containing_stmt (stmt1)) != NULL
      && outermost_invariant_loop_expr (TREE_OPERAND (t, 0),
					loop_containing_stmt (stmt1)) == NULL)
    {
      tree a = TREE_OPERAND (t, 0);
      tree b = TREE_OPERAND (t, 1);

      /* 1 << B */
      var = create_tmp_var (TREE_TYPE (a), "shifttmp");
      add_referenced_var (var);
      t = fold_build2 (LSHIFT_EXPR, TREE_TYPE (a),
		       build_int_cst (TREE_TYPE (a), 1), b);
      stmt1 = build_gimple_modify_stmt (var, t);
      name = make_ssa_name (var, stmt1);
      GIMPLE_STMT_OPERAND (stmt1, 0) = name;

      /* A & (1 << B) */
      t = fold_build2 (BIT_AND_EXPR, TREE_TYPE (a), a, name);
      stmt2 = build_gimple_modify_stmt (var, t);
      name = make_ssa_name (var, stmt2);
      GIMPLE_STMT_OPERAND (stmt2, 0) = name;
      SET_USE (use, name);

      bsi_insert_before (bsi, stmt1, BSI_SAME_STMT);
      bsi_replace (bsi, stmt2, true);

      return stmt1;
    }

  return stmt;
}
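
/* For instance, with B invariant and A not, "t = a >> b; x = t & 1;
   if (x != 0)" becomes "shifttmp = 1 << b; x2 = a & shifttmp;
   if (x2 != 0)", so that the computation of the mask can be hoisted;
   the comparison against zero is preserved since
   (a >> b) & 1 != 0 is equivalent to a & (1 << b) != 0.  */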
/* Determine the outermost loops in which the statements in basic block BB are
   invariant, and record them to the LIM_DATA associated with the statements.
   Callback for walk_dominator_tree.  */

static void
determine_invariantness_stmt (struct dom_walk_data *dw_data ATTRIBUTE_UNUSED,
			      basic_block bb)
{
  enum move_pos pos;
  block_stmt_iterator bsi;
  tree stmt, rhs;
  bool maybe_never = ALWAYS_EXECUTED_IN (bb) == NULL;
  struct loop *outermost = ALWAYS_EXECUTED_IN (bb);

  if (!loop_outer (bb->loop_father))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Basic block %d (loop %d -- depth %d):\n\n",
	     bb->index, bb->loop_father->num, loop_depth (bb->loop_father));

  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
    {
      stmt = bsi_stmt (bsi);

      pos = movement_possibility (stmt);
      if (pos == MOVE_IMPOSSIBLE)
	{
	  if (nonpure_call_p (stmt))
	    {
	      maybe_never = true;
	      outermost = NULL;
	    }
	  continue;
	}

      if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
	{
	  rhs = GIMPLE_STMT_OPERAND (stmt, 1);

	  /* If the divisor is invariant, convert a/b to a*(1/b), allowing
	     the reciprocal to be hoisted out of the loop and saving an
	     expensive divide.  */
	  if (pos == MOVE_POSSIBLE
	      && TREE_CODE (rhs) == RDIV_EXPR
	      && flag_unsafe_math_optimizations
	      && !flag_trapping_math
	      && outermost_invariant_loop_expr (TREE_OPERAND (rhs, 1),
						loop_containing_stmt (stmt)) != NULL
	      && outermost_invariant_loop_expr (rhs,
						loop_containing_stmt (stmt)) == NULL)
	    stmt = rewrite_reciprocal (&bsi);

	  /* If the shift count is invariant, convert (A >> B) & 1 to
	     A & (1 << B), allowing the bit mask to be hoisted out of the loop
	     and saving an expensive shift.  */
	  if (pos == MOVE_POSSIBLE
	      && TREE_CODE (rhs) == BIT_AND_EXPR
	      && integer_onep (TREE_OPERAND (rhs, 1))
	      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
	      && has_single_use (TREE_OPERAND (rhs, 0)))
	    stmt = rewrite_bittest (&bsi);
	}

      stmt_ann (stmt)->common.aux = xcalloc (1, sizeof (struct lim_aux_data));
      LIM_DATA (stmt)->always_executed_in = outermost;

      if (maybe_never && pos == MOVE_PRESERVE_EXECUTION)
	continue;

      if (!determine_max_movement (stmt, pos == MOVE_PRESERVE_EXECUTION))
	{
	  LIM_DATA (stmt)->max_loop = NULL;
	  continue;
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  print_generic_stmt_indented (dump_file, stmt, 0, 2);
	  fprintf (dump_file, "  invariant up to level %d, cost %d.\n\n",
		   loop_depth (LIM_DATA (stmt)->max_loop),
		   LIM_DATA (stmt)->cost);
	}

      if (LIM_DATA (stmt)->cost >= LIM_EXPENSIVE)
	set_profitable_level (stmt);
    }
}
/* For each statement determines the outermost loop in which it is invariant,
   the statements on whose motion it depends and the cost of the computation.
   This information is stored to the LIM_DATA structure associated with
   each statement.  */

static void
determine_invariantness (void)
{
  struct dom_walk_data walk_data;

  memset (&walk_data, 0, sizeof (struct dom_walk_data));
  walk_data.dom_direction = CDI_DOMINATORS;
  walk_data.before_dom_children_before_stmts = determine_invariantness_stmt;

  init_walk_dominator_tree (&walk_data);
  walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
  fini_walk_dominator_tree (&walk_data);
}
/* Hoist the statements in basic block BB out of the loops prescribed by
   data stored in LIM_DATA structures associated with each statement.  Callback
   for walk_dominator_tree.  */

static void
move_computations_stmt (struct dom_walk_data *dw_data ATTRIBUTE_UNUSED,
			basic_block bb)
{
  struct loop *level;
  block_stmt_iterator bsi;
  tree stmt;
  unsigned cost = 0;

  if (!loop_outer (bb->loop_father))
    return;

  for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
    {
      stmt = bsi_stmt (bsi);

      if (!LIM_DATA (stmt))
	{
	  bsi_next (&bsi);
	  continue;
	}

      cost = LIM_DATA (stmt)->cost;
      level = LIM_DATA (stmt)->tgt_loop;
      free_lim_aux_data (LIM_DATA (stmt));
      stmt_ann (stmt)->common.aux = NULL;

      if (!level)
	{
	  bsi_next (&bsi);
	  continue;
	}

      /* We do not really want to move the conditional out of the loop; it
	 was only placed here to force its operands to be moved if
	 necessary.  */
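      /* Note that BSI is not advanced on this path; since the statement's
	 LIM_DATA was already freed and its aux field cleared above, the
	 next iteration takes the !LIM_DATA branch and steps past the
	 statement.  */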
      if (TREE_CODE (stmt) == COND_EXPR)
	continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Moving statement\n");
	  print_generic_stmt (dump_file, stmt, 0);
	  fprintf (dump_file, "(cost %u) out of loop %d.\n\n",
		   cost, level->num);
	}
      bsi_insert_on_edge (loop_preheader_edge (level), stmt);
      bsi_remove (&bsi, false);
    }
}
/* Hoist the statements out of the loops prescribed by data stored in
   LIM_DATA structures associated with each statement.  */

static void
move_computations (void)
{
  struct dom_walk_data walk_data;

  memset (&walk_data, 0, sizeof (struct dom_walk_data));
  walk_data.dom_direction = CDI_DOMINATORS;
  walk_data.before_dom_children_before_stmts = move_computations_stmt;

  init_walk_dominator_tree (&walk_data);
  walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
  fini_walk_dominator_tree (&walk_data);

  bsi_commit_edge_inserts ();
  if (need_ssa_update_p ())
    rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
}
/* Checks whether the statement defining variable *INDEX can be hoisted
   out of the loop passed in DATA.  Callback for for_each_index.  */

static bool
may_move_till (tree ref, tree *index, void *data)
{
  struct loop *loop = (struct loop *) data, *max_loop;

  /* If REF is an array reference, check also that the step and the lower
     bound are invariant in LOOP.  */
  if (TREE_CODE (ref) == ARRAY_REF)
    {
      tree step = array_ref_element_size (ref);
      tree lbound = array_ref_low_bound (ref);

      max_loop = outermost_invariant_loop_expr (step, loop);
      if (!max_loop)
	return false;

      max_loop = outermost_invariant_loop_expr (lbound, loop);
      if (!max_loop)
	return false;
    }

  max_loop = outermost_invariant_loop (*index, loop);
  if (!max_loop)
    return false;

  return true;
}
/* Forces statements defining (invariant) SSA names in expression EXPR to be
   moved out of the LOOP.  ORIG_LOOP is the loop in which EXPR is used.  */

static void
force_move_till_expr (tree expr, struct loop *orig_loop, struct loop *loop)
{
  enum tree_code_class codeclass = TREE_CODE_CLASS (TREE_CODE (expr));
  unsigned i, nops;

  if (TREE_CODE (expr) == SSA_NAME)
    {
      tree stmt = SSA_NAME_DEF_STMT (expr);
      if (IS_EMPTY_STMT (stmt))
	return;

      set_level (stmt, orig_loop, loop);
      return;
    }

  if (codeclass != tcc_unary
      && codeclass != tcc_binary
      && codeclass != tcc_expression
      && codeclass != tcc_vl_exp
      && codeclass != tcc_comparison)
    return;

  nops = TREE_OPERAND_LENGTH (expr);
  for (i = 0; i < nops; i++)
    force_move_till_expr (TREE_OPERAND (expr, i), orig_loop, loop);
}
/* Forces statements defining invariants in REF (and *INDEX) to be moved out of
   the LOOP.  The reference REF is used in the loop ORIG_LOOP.  Callback for
   for_each_index.  */

struct fmt_data
{
  struct loop *loop;
  struct loop *orig_loop;
};

static bool
force_move_till (tree ref, tree *index, void *data)
{
  tree stmt;
  struct fmt_data *fmt_data = (struct fmt_data *) data;

  if (TREE_CODE (ref) == ARRAY_REF)
    {
      tree step = array_ref_element_size (ref);
      tree lbound = array_ref_low_bound (ref);

      force_move_till_expr (step, fmt_data->orig_loop, fmt_data->loop);
      force_move_till_expr (lbound, fmt_data->orig_loop, fmt_data->loop);
    }

  if (TREE_CODE (*index) != SSA_NAME)
    return true;

  stmt = SSA_NAME_DEF_STMT (*index);
  if (IS_EMPTY_STMT (stmt))
    return true;

  set_level (stmt, fmt_data->orig_loop, fmt_data->loop);

  return true;
}
/* Records memory reference location *REF to the list MEM_REFS.  The reference
   occurs in statement STMT.  */

static void
record_mem_ref_loc (struct mem_ref_loc **mem_refs, tree stmt, tree *ref)
{
  struct mem_ref_loc *aref = XNEW (struct mem_ref_loc);

  aref->stmt = stmt;
  aref->ref = ref;

  aref->next = *mem_refs;
  *mem_refs = aref;
}
/* Releases the list of memory reference locations MEM_REFS.  */

static void
free_mem_ref_locs (struct mem_ref_loc *mem_refs)
{
  struct mem_ref_loc *act;

  while (mem_refs)
    {
      act = mem_refs;
      mem_refs = mem_refs->next;
      free (act);
    }
}
/* Rewrites the memory references in the list MEM_REFS to use the variable
   TMP_VAR.  */

static void
rewrite_mem_refs (tree tmp_var, struct mem_ref_loc *mem_refs)
{
  tree var;
  ssa_op_iter iter;

  for (; mem_refs; mem_refs = mem_refs->next)
    {
      FOR_EACH_SSA_TREE_OPERAND (var, mem_refs->stmt, iter, SSA_OP_ALL_VIRTUALS)
	mark_sym_for_renaming (SSA_NAME_VAR (var));

      *mem_refs->ref = tmp_var;
      update_stmt (mem_refs->stmt);
    }
}
/* The name and the length of the currently generated variable
   for lsm.  */
#define MAX_LSM_NAME_LENGTH 40
static char lsm_tmp_name[MAX_LSM_NAME_LENGTH + 1];
static int lsm_tmp_name_length;

/* Adds S to lsm_tmp_name.  */

static void
lsm_tmp_name_add (const char *s)
{
  int l = strlen (s) + lsm_tmp_name_length;
  if (l > MAX_LSM_NAME_LENGTH)
    return;

  strcpy (lsm_tmp_name + lsm_tmp_name_length, s);
  lsm_tmp_name_length = l;
}
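
/* Note that if appending S would exceed MAX_LSM_NAME_LENGTH, the suffix
   is silently dropped and the accumulated name is left unchanged.  */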
/* Stores the name for the temporary variable that replaces REF to
   lsm_tmp_name.  */

static void
gen_lsm_tmp_name (tree ref)
{
  const char *name;

  switch (TREE_CODE (ref))
    {
    case MISALIGNED_INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case INDIRECT_REF:
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
      lsm_tmp_name_add ("_");
      break;

    case BIT_FIELD_REF:
    case VIEW_CONVERT_EXPR:
    case ARRAY_RANGE_REF:
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
      break;

    case REALPART_EXPR:
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
      lsm_tmp_name_add ("_RE");
      break;

    case IMAGPART_EXPR:
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
      lsm_tmp_name_add ("_IM");
      break;

    case COMPONENT_REF:
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
      lsm_tmp_name_add ("_");
      name = get_name (TREE_OPERAND (ref, 1));
      if (!name)
	name = "F";
      lsm_tmp_name_add ("_");
      lsm_tmp_name_add (name);
      break;

    case ARRAY_REF:
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
      lsm_tmp_name_add ("_I");
      break;

    case SSA_NAME:
      ref = SSA_NAME_VAR (ref);
      /* Fallthru.  */

    case VAR_DECL:
    case PARM_DECL:
      name = get_name (ref);
      if (!name)
	name = "D";
      lsm_tmp_name_add (name);
      break;

    case STRING_CST:
      lsm_tmp_name_add ("S");
      break;

    case RESULT_DECL:
      lsm_tmp_name_add ("R");
      break;

    default:
      gcc_unreachable ();
    }
}
/* Determines the name for the temporary variable that replaces REF.
   The name is accumulated into the lsm_tmp_name variable.
   N is added to the name of the temporary.  */

char *
get_lsm_tmp_name (tree ref, unsigned n)
{
  char ns[2];

  lsm_tmp_name_length = 0;
  gen_lsm_tmp_name (ref);
  lsm_tmp_name_add ("_lsm");
  if (n < 10)
    {
      ns[0] = '0' + n;
      ns[1] = 0;
      lsm_tmp_name_add (ns);
    }
  return lsm_tmp_name;
}
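
/* Note that only single-digit values of N produce a numbered suffix;
   callers that do not want one pass a value of 10 or more (e.g.
   schedule_sm below passes ~0).  */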
/* Records the request for store motion of memory reference REF from LOOP.
   MEM_REFS is the list of occurrences of the reference REF inside LOOP;
   these references are rewritten by a new temporary variable.
   Exits from the LOOP are stored in EXITS.  The initialization of the
   temporary variable is put in the preheader of the loop, and assignments
   to the reference from the temporary variable are emitted on the exits.  */

static void
schedule_sm (struct loop *loop, VEC (edge, heap) *exits, tree ref,
	     struct mem_ref_loc *mem_refs)
{
  struct mem_ref_loc *aref;
  tree tmp_var;
  unsigned i;
  tree load, store;
  struct fmt_data fmt_data;
  edge ex;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Executing store motion of ");
      print_generic_expr (dump_file, ref, 0);
      fprintf (dump_file, " from loop %d\n", loop->num);
    }

  tmp_var = make_rename_temp (TREE_TYPE (ref),
			      get_lsm_tmp_name (ref, ~0));

  fmt_data.loop = loop;
  fmt_data.orig_loop = loop;
  for_each_index (&ref, force_move_till, &fmt_data);

  rewrite_mem_refs (tmp_var, mem_refs);
  for (aref = mem_refs; aref; aref = aref->next)
    if (LIM_DATA (aref->stmt))
      LIM_DATA (aref->stmt)->sm_done = true;

  /* Emit the load & stores.  */
  load = build_gimple_modify_stmt (tmp_var, ref);
  get_stmt_ann (load)->common.aux = xcalloc (1, sizeof (struct lim_aux_data));
  LIM_DATA (load)->max_loop = loop;
  LIM_DATA (load)->tgt_loop = loop;

  /* Put this into the latch, so that we are sure it will be processed after
     all dependencies.  */
  bsi_insert_on_edge (loop_latch_edge (loop), load);

  for (i = 0; VEC_iterate (edge, exits, i, ex); i++)
    {
      store = build_gimple_modify_stmt (unshare_expr (ref), tmp_var);
      bsi_insert_on_edge (ex, store);
    }
}
/* Check whether memory reference REF can be hoisted out of the LOOP.  If this
   is true, prepare the statements that load the value of the memory reference
   to a temporary variable in the loop preheader, store it back on the loop
   exits, and replace all the references inside LOOP by this temporary variable.
   EXITS is the list of exits of LOOP.  CLOBBERED_VOPS is the bitmap of virtual
   operands that are clobbered by a call or accessed through multiple references
   in the loop.  */

static void
determine_lsm_ref (struct loop *loop, VEC (edge, heap) *exits,
		   bitmap clobbered_vops, struct mem_ref *ref)
{
  struct mem_ref_loc *aref;
  struct loop *must_exec;

  /* In case the memory is not stored to, there is nothing for SM to do.  */
  if (!ref->is_stored)
    return;

  /* If the reference is aliased with any different ref, or killed by a call
     in the function, then fail.  */
  if (bitmap_intersect_p (ref->vops, clobbered_vops))
    return;

  if (tree_could_trap_p (ref->mem))
    {
      /* If the memory access is unsafe (i.e. it might trap), ensure that one
	 of the statements in which it occurs is always executed when the loop
	 is entered.  This way we know that by moving the load from the
	 reference out of the loop we will not cause an error that would not
	 have occurred otherwise.

	 TODO -- in fact we would like to check for anticipability of the
	 reference, i.e. that on each path from loop entry to loop exit at
	 least one of the statements containing the memory reference is
	 executed.  */

      for (aref = ref->locs; aref; aref = aref->next)
	{
	  if (!LIM_DATA (aref->stmt))
	    continue;

	  must_exec = LIM_DATA (aref->stmt)->always_executed_in;
	  if (!must_exec)
	    continue;

	  if (must_exec == loop
	      || flow_loop_nested_p (must_exec, loop))
	    break;
	}

      if (!aref)
	return;
    }

  schedule_sm (loop, exits, ref->mem, ref->locs);
}
/* Hoists memory references MEM_REFS out of LOOP.  CLOBBERED_VOPS is the list
   of vops clobbered by a call in the loop or accessed by multiple memory
   references.  EXITS is the list of exit edges of the LOOP.  */

static void
hoist_memory_references (struct loop *loop, struct mem_ref *mem_refs,
			 bitmap clobbered_vops, VEC (edge, heap) *exits)
{
  struct mem_ref *ref;

  for (ref = mem_refs; ref; ref = ref->next)
    determine_lsm_ref (loop, exits, clobbered_vops, ref);
}
/* Checks whether LOOP (with exits stored in the EXITS array) is suitable
   for a store motion optimization (i.e. whether we can insert statements
   on its exits).  */

static bool
loop_suitable_for_sm (struct loop *loop ATTRIBUTE_UNUSED,
		      VEC (edge, heap) *exits)
{
  unsigned i;
  edge ex;

  for (i = 0; VEC_iterate (edge, exits, i, ex); i++)
    if (ex->flags & EDGE_ABNORMAL)
      return false;

  return true;
}
/* A hash function for struct mem_ref object OBJ.  */

static hashval_t
memref_hash (const void *obj)
{
  return ((const struct mem_ref *) obj)->hash;
}
/* An equality function for struct mem_ref object OBJ1 with
   memory reference OBJ2.  */

static int
memref_eq (const void *obj1, const void *obj2)
{
  const struct mem_ref *const mem1 = (const struct mem_ref *) obj1;

  return operand_equal_p (mem1->mem, (const_tree) obj2, 0);
}
/* Gathers memory references in statement STMT in LOOP, storing the
   information about them in the MEM_REFS hash table.  Note vops accessed
   through unrecognized statements in CLOBBERED_VOPS.  The newly created
   references are also stored to MEM_REF_LIST.  */

static void
gather_mem_refs_stmt (struct loop *loop, htab_t mem_refs,
		      bitmap clobbered_vops, tree stmt,
		      struct mem_ref **mem_ref_list)
{
  tree *lhs, *rhs, *mem = NULL;
  hashval_t hash;
  PTR *slot;
  struct mem_ref *ref = NULL;
  ssa_op_iter oi;
  tree vname;
  bool is_stored;

  if (ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
    return;

  /* Recognize MEM = (SSA_NAME | invariant) and SSA_NAME = MEM patterns.  */
  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
    goto fail;

  lhs = &GIMPLE_STMT_OPERAND (stmt, 0);
  rhs = &GIMPLE_STMT_OPERAND (stmt, 1);

  if (TREE_CODE (*lhs) == SSA_NAME)
    {
      if (!is_gimple_addressable (*rhs))
	goto fail;

      mem = rhs;
      is_stored = false;
    }
  else if (TREE_CODE (*rhs) == SSA_NAME
	   || is_gimple_min_invariant (*rhs))
    {
      mem = lhs;
      is_stored = true;
    }
  else
    goto fail;

  /* If we cannot create an SSA name for the result, give up.  */
  if (!is_gimple_reg_type (TREE_TYPE (*mem))
      || TREE_THIS_VOLATILE (*mem))
    goto fail;

  /* If we cannot move the reference out of the loop, fail.  */
  if (!for_each_index (mem, may_move_till, loop))
    goto fail;

  hash = iterative_hash_expr (*mem, 0);
  slot = htab_find_slot_with_hash (mem_refs, *mem, hash, INSERT);

  if (*slot)
    ref = (struct mem_ref *) *slot;
  else
    {
      ref = XNEW (struct mem_ref);
      ref->mem = *mem;
      ref->hash = hash;
      ref->locs = NULL;
      ref->is_stored = false;
      ref->vops = BITMAP_ALLOC (NULL);
      ref->next = *mem_ref_list;
      *mem_ref_list = ref;
      *slot = ref;
    }
  ref->is_stored |= is_stored;

  FOR_EACH_SSA_TREE_OPERAND (vname, stmt, oi, SSA_OP_VIRTUAL_USES)
    bitmap_set_bit (ref->vops, DECL_UID (SSA_NAME_VAR (vname)));
  record_mem_ref_loc (&ref->locs, stmt, mem);
  return;

fail:
  FOR_EACH_SSA_TREE_OPERAND (vname, stmt, oi, SSA_OP_VIRTUAL_USES)
    bitmap_set_bit (clobbered_vops, DECL_UID (SSA_NAME_VAR (vname)));
}
/* Gathers memory references in LOOP.  Notes vops accessed through unrecognized
   statements in CLOBBERED_VOPS.  The list of the references found by
   the function is returned.  */

static struct mem_ref *
gather_mem_refs (struct loop *loop, bitmap clobbered_vops)
{
  basic_block *body = get_loop_body (loop);
  block_stmt_iterator bsi;
  unsigned i;
  struct mem_ref *mem_ref_list = NULL;
  htab_t mem_refs = htab_create (100, memref_hash, memref_eq, NULL);

  for (i = 0; i < loop->num_nodes; i++)
    {
      for (bsi = bsi_start (body[i]); !bsi_end_p (bsi); bsi_next (&bsi))
	gather_mem_refs_stmt (loop, mem_refs, clobbered_vops, bsi_stmt (bsi),
			      &mem_ref_list);
    }

  free (body);

  htab_delete (mem_refs);
  return mem_ref_list;
}
/* Finds the vops accessed by more than one of the memory references described
   in MEM_REFS and marks them in CLOBBERED_VOPS.  */

static void
find_more_ref_vops (struct mem_ref *mem_refs, bitmap clobbered_vops)
{
  bitmap_head tmp, all_vops;
  struct mem_ref *ref;

  bitmap_initialize (&tmp, &bitmap_default_obstack);
  bitmap_initialize (&all_vops, &bitmap_default_obstack);

  for (ref = mem_refs; ref; ref = ref->next)
    {
      /* The vops that are already in all_vops are accessed by more than
	 one memory reference.  */
      bitmap_and (&tmp, &all_vops, ref->vops);
      bitmap_ior_into (clobbered_vops, &tmp);
      bitmap_clear (&tmp);

      bitmap_ior_into (&all_vops, ref->vops);
    }

  bitmap_clear (&all_vops);
}
/* Releases the memory occupied by REF.  */

static void
free_mem_ref (struct mem_ref *ref)
{
  free_mem_ref_locs (ref->locs);
  BITMAP_FREE (ref->vops);
  free (ref);
}
/* Releases the memory occupied by REFS.  */

static void
free_mem_refs (struct mem_ref *refs)
{
  struct mem_ref *ref, *next;

  for (ref = refs; ref; ref = next)
    {
      next = ref->next;
      free_mem_ref (ref);
    }
}
/* Try to perform store motion for all memory references modified inside
   LOOP.  */

static void
determine_lsm_loop (struct loop *loop)
{
  VEC (edge, heap) *exits = get_loop_exit_edges (loop);
  bitmap clobbered_vops;
  struct mem_ref *mem_refs;

  if (!loop_suitable_for_sm (loop, exits))
    {
      VEC_free (edge, heap, exits);
      return;
    }

  /* Find the memory references in LOOP.  */
  clobbered_vops = BITMAP_ALLOC (NULL);
  mem_refs = gather_mem_refs (loop, clobbered_vops);

  /* Find the vops that are used for more than one reference.  */
  find_more_ref_vops (mem_refs, clobbered_vops);

  /* Hoist all suitable memory references.  */
  hoist_memory_references (loop, mem_refs, clobbered_vops, exits);

  free_mem_refs (mem_refs);
  VEC_free (edge, heap, exits);
  BITMAP_FREE (clobbered_vops);
}
/* Try to perform store motion for all memory references modified inside
   loops.  */

static void
determine_lsm (void)
{
  struct loop *loop;
  loop_iterator li;

  /* Process the loops from the outermost inward, and perform store motion
     where suitable.  */

  FOR_EACH_LOOP (li, loop, 0)
    {
      determine_lsm_loop (loop);
    }

  bsi_commit_edge_inserts ();
}
/* Fills the ALWAYS_EXECUTED_IN information for the basic blocks of LOOP, i.e.
   for each such basic block bb records the outermost loop for which execution
   of its header implies execution of bb.  CONTAINS_CALL is the bitmap of
   blocks that contain a nonpure call.  */

static void
fill_always_executed_in (struct loop *loop, sbitmap contains_call)
{
  basic_block bb = NULL, *bbs, last = NULL;
  unsigned i;
  edge e;
  struct loop *inn_loop = loop;
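
  /* INN_LOOP tracks the innermost subloop entered during the walk below
     that is known to be entered whenever LOOP's header executes; the scan
     stops as soon as it reaches a block outside of it.  */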
  if (!loop->header->aux)
    {
      bbs = get_loop_body_in_dom_order (loop);

      for (i = 0; i < loop->num_nodes; i++)
	{
	  edge_iterator ei;
	  bb = bbs[i];

	  if (dominated_by_p (CDI_DOMINATORS, loop->latch, bb))
	    last = bb;

	  if (TEST_BIT (contains_call, bb->index))
	    break;

	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (!flow_bb_inside_loop_p (loop, e->dest))
	      break;
	  if (e)
	    break;

	  /* A loop might be infinite (TODO use simple loop analysis
	     to disprove this if possible).  */
	  if (bb->flags & BB_IRREDUCIBLE_LOOP)
	    break;

	  if (!flow_bb_inside_loop_p (inn_loop, bb))
	    break;

	  if (bb->loop_father->header == bb)
	    {
	      if (!dominated_by_p (CDI_DOMINATORS, loop->latch, bb))
		break;

	      /* In a loop that is always entered we may proceed anyway.
		 But record that we entered it and stop once we leave it.  */
	      inn_loop = bb->loop_father;
	    }
	}

      while (1)
	{
	  last->aux = loop;
	  if (last == loop->header)
	    break;
	  last = get_immediate_dominator (CDI_DOMINATORS, last);
	}

      free (bbs);
    }

  for (loop = loop->inner; loop; loop = loop->next)
    fill_always_executed_in (loop, contains_call);
}
/* Compute the global information needed by the loop invariant motion pass.  */

static void
tree_ssa_lim_initialize (void)
{
  sbitmap contains_call = sbitmap_alloc (last_basic_block);
  block_stmt_iterator bsi;
  struct loop *loop;
  basic_block bb;

  sbitmap_zero (contains_call);
  FOR_EACH_BB (bb)
    {
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	{
	  if (nonpure_call_p (bsi_stmt (bsi)))
	    break;
	}

      if (!bsi_end_p (bsi))
	SET_BIT (contains_call, bb->index);
    }

  for (loop = current_loops->tree_root->inner; loop; loop = loop->next)
    fill_always_executed_in (loop, contains_call);

  sbitmap_free (contains_call);
}
/* Cleans up after the invariant motion pass.  */

static void
tree_ssa_lim_finalize (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      bb->aux = NULL;
    }
}
/* Moves invariants from loops.  Only "expensive" invariants are moved out --
   i.e. those that are likely to be a win regardless of the register
   pressure.  */

void
tree_ssa_lim (void)
{
  tree_ssa_lim_initialize ();

  /* For each statement determine the outermost loop in which it is
     invariant and the cost of computing the invariant.  */
  determine_invariantness ();

  /* For each memory reference determine whether it is possible to hoist it
     out of the loop.  Force the necessary invariants to be moved out of the
     loops as well.  */
  determine_lsm ();

  /* Move the expressions that are expensive enough.  */
  move_computations ();

  tree_ssa_lim_finalize ();
}