/* Loop invariant motion.
   Copyright (C) 2003, 2004, 2005, 2006, 2007, 2008 Free Software
   Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "timevar.h"
#include "cfgloop.h"
#include "domwalk.h"
#include "params.h"
#include "tree-pass.h"
#include "flags.h"
#include "real.h"
#include "hashtab.h"
#include "tree-affine.h"
#include "pointer-set.h"
#include "tree-ssa-propagate.h"
/* TODO:  Support for predicated code motion.  I.e.

   while (1)
     {
       if (cond)
         {
           a = inv;
           something;
         }
     }

   where COND and INV are invariants, but evaluating INV may trap or be
   invalid for some other reason if !COND.  This may be transformed to

   if (cond)
     a = inv;
   while (1)
     {
       if (cond)
         something;
     }  */
/* A type for the list of statements that have to be moved in order to be able
   to hoist an invariant computation.  */

struct depend
{
  tree stmt;
  struct depend *next;
};
/* The auxiliary data kept for each statement.  */

struct lim_aux_data
{
  struct loop *max_loop;	/* The outermost loop in which the statement
				   is invariant.  */

  struct loop *tgt_loop;	/* The loop out of which we want to move the
				   invariant.  */

  struct loop *always_executed_in;
				/* The outermost loop for which we are sure
				   the statement is executed if the loop
				   is entered.  */

  unsigned cost;		/* Cost of the computation performed by the
				   statement.  */

  struct depend *depends;	/* List of statements that must also be hoisted
				   out of the loop when this statement is
				   hoisted; i.e. those that define the operands
				   of the statement and are inside of the
				   MAX_LOOP loop.  */
};

#define LIM_DATA(STMT) (TREE_CODE (STMT) == PHI_NODE \
			? NULL \
			: (struct lim_aux_data *) (stmt_ann (STMT)->common.aux))
/* Description of a memory reference location.  */

typedef struct mem_ref_loc
{
  tree *ref;			/* The reference itself.  */
  tree stmt;			/* The statement in which it occurs.  */
} *mem_ref_loc_p;

DEF_VEC_P(mem_ref_loc_p);
DEF_VEC_ALLOC_P(mem_ref_loc_p, heap);

/* The list of memory reference locations in a loop.  */

typedef struct mem_ref_locs
{
  VEC (mem_ref_loc_p, heap) *locs;
} *mem_ref_locs_p;

DEF_VEC_P(mem_ref_locs_p);
DEF_VEC_ALLOC_P(mem_ref_locs_p, heap);
/* Description of a memory reference.  */

typedef struct mem_ref
{
  tree mem;			/* The memory itself.  */
  unsigned id;			/* ID assigned to the memory reference
				   (its index in memory_accesses.refs_list).  */
  hashval_t hash;		/* Its hash value.  */
  bitmap stored;		/* The set of loops in which this memory
				   location is stored to.  */
  VEC (mem_ref_locs_p, heap) *accesses_in_loop;
				/* The locations of the accesses.  Vector
				   indexed by the loop number.  */
  bitmap vops;			/* Vops corresponding to this memory
				   location.  */

  /* The following sets are computed on demand.  We keep both the set and
     its complement, so that we know whether the information was
     already computed or not.  */
  bitmap indep_loop;		/* The set of loops in which the memory
				   reference is independent, meaning:
				   If it is stored in the loop, this store
				   is independent of all other loads and
				   stores.
				   If it is only loaded, then it is independent
				   of all stores in the loop.  */
  bitmap dep_loop;		/* The complement of INDEP_LOOP.  */

  bitmap indep_ref;		/* The set of memory references of which
				   this reference is independent.  */
  bitmap dep_ref;		/* The complement of INDEP_REF.  */
} *mem_ref_p;

DEF_VEC_P(mem_ref_p);
DEF_VEC_ALLOC_P(mem_ref_p, heap);

DEF_VEC_P(bitmap);
DEF_VEC_ALLOC_P(bitmap, heap);

DEF_VEC_P(htab_t);
DEF_VEC_ALLOC_P(htab_t, heap);
/* Description of memory accesses in loops.  */

static struct
{
  /* The hash table of memory references accessed in loops.  */
  htab_t refs;

  /* The list of memory references.  */
  VEC (mem_ref_p, heap) *refs_list;

  /* The set of memory references accessed in each loop.  */
  VEC (bitmap, heap) *refs_in_loop;

  /* The set of memory references accessed in each loop, including
     subloops.  */
  VEC (bitmap, heap) *all_refs_in_loop;

  /* The set of virtual operands clobbered in a given loop.  */
  VEC (bitmap, heap) *clobbered_vops;

  /* Map from the pair (loop, virtual operand) to the set of refs that
     touch the virtual operand in the loop.  */
  VEC (htab_t, heap) *vop_ref_map;

  /* Cache for expanding memory addresses.  */
  struct pointer_map_t *ttae_cache;
} memory_accesses;

static bool ref_indep_loop_p (struct loop *, mem_ref_p);
/* Minimum cost of an expensive expression.  */
#define LIM_EXPENSIVE ((unsigned) PARAM_VALUE (PARAM_LIM_EXPENSIVE))

/* The outermost loop for which execution of the header guarantees that the
   block will be executed.  */
#define ALWAYS_EXECUTED_IN(BB) ((struct loop *) (BB)->aux)
/* Calls CBCK for each index in memory reference ADDR_P.  Two kinds of
   situations are handled; in each of these cases, the memory reference
   and DATA are passed to the callback:

   Access to an array: ARRAY_{RANGE_}REF (base, index).  In this case we also
   pass the pointer to the index to the callback.

   Pointer dereference: INDIRECT_REF (addr).  In this case we also pass the
   pointer to addr to the callback.

   If the callback returns false, the whole search stops and false is returned.
   Otherwise the function returns true after traversing through the whole
   reference *ADDR_P.  */

bool
for_each_index (tree *addr_p, bool (*cbck) (tree, tree *, void *), void *data)
{
  tree *nxt, *idx;

  for (; ; addr_p = nxt)
    {
      switch (TREE_CODE (*addr_p))
	{
	case SSA_NAME:
	  return cbck (*addr_p, addr_p, data);

	case MISALIGNED_INDIRECT_REF:
	case ALIGN_INDIRECT_REF:
	case INDIRECT_REF:
	  nxt = &TREE_OPERAND (*addr_p, 0);
	  return cbck (*addr_p, nxt, data);

	case BIT_FIELD_REF:
	case VIEW_CONVERT_EXPR:
	case REALPART_EXPR:
	case IMAGPART_EXPR:
	  nxt = &TREE_OPERAND (*addr_p, 0);
	  break;

	case COMPONENT_REF:
	  /* If the component has varying offset, it behaves like an index
	     as well.  */
	  idx = &TREE_OPERAND (*addr_p, 2);
	  if (*idx
	      && !cbck (*addr_p, idx, data))
	    return false;

	  nxt = &TREE_OPERAND (*addr_p, 0);
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	  nxt = &TREE_OPERAND (*addr_p, 0);
	  if (!cbck (*addr_p, &TREE_OPERAND (*addr_p, 1), data))
	    return false;
	  break;

	case VAR_DECL:
	case PARM_DECL:
	case STRING_CST:
	case RESULT_DECL:
	case VECTOR_CST:
	case COMPLEX_CST:
	case INTEGER_CST:
	case REAL_CST:
	case FIXED_CST:
	case CONSTRUCTOR:
	  return true;

	case ADDR_EXPR:
	  gcc_assert (is_gimple_min_invariant (*addr_p));
	  return true;

	case TARGET_MEM_REF:
	  idx = &TMR_BASE (*addr_p);
	  if (*idx
	      && !cbck (*addr_p, idx, data))
	    return false;
	  idx = &TMR_INDEX (*addr_p);
	  if (*idx
	      && !cbck (*addr_p, idx, data))
	    return false;
	  return true;

	default:
	  gcc_unreachable ();
	}
    }
}
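/* For illustration only: a callback for the walk above receives the
   innermost enclosing reference and a pointer to one of its indices,
   roughly (IDX_CB is a made-up name)

     static bool
     idx_cb (tree base, tree *idx, void *data)
     {
       -- inspect or replace *IDX; BASE is the reference containing it --
       return true;
     }

     for_each_index (&ref, idx_cb, data);

   may_move_till and force_move_till below are the callbacks this pass
   actually uses.  */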
/* If it is possible to hoist the statement STMT unconditionally,
   returns MOVE_POSSIBLE.
   If it is possible to hoist the statement STMT, but we must avoid making
   it executed if it would not be executed in the original program (e.g.
   because it may trap), return MOVE_PRESERVE_EXECUTION.
   Otherwise return MOVE_IMPOSSIBLE.  */

enum move_pos
movement_possibility (tree stmt)
{
  tree lhs, rhs;

  if (flag_unswitch_loops
      && TREE_CODE (stmt) == COND_EXPR)
    {
      /* If we perform unswitching, force the operands of the invariant
	 condition to be moved out of the loop.  */
      return MOVE_POSSIBLE;
    }

  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
    return MOVE_IMPOSSIBLE;

  if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_VIRTUAL_DEFS))
    return MOVE_IMPOSSIBLE;

  if (stmt_ends_bb_p (stmt))
    return MOVE_IMPOSSIBLE;

  if (stmt_ann (stmt)->has_volatile_ops)
    return MOVE_IMPOSSIBLE;

  lhs = GIMPLE_STMT_OPERAND (stmt, 0);
  if (TREE_CODE (lhs) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
    return MOVE_IMPOSSIBLE;

  rhs = GIMPLE_STMT_OPERAND (stmt, 1);

  if (TREE_SIDE_EFFECTS (rhs)
      || tree_could_throw_p (rhs))
    return MOVE_IMPOSSIBLE;

  if (TREE_CODE (lhs) != SSA_NAME
      || tree_could_trap_p (rhs))
    return MOVE_PRESERVE_EXECUTION;

  if (get_call_expr_in (stmt))
    {
      /* While a pure or const call is guaranteed to have no side effects, we
	 cannot move it arbitrarily.  Consider code like

	 char *s = something ();

	 while (1)
	   {
	     if (s)
	       t = strlen (s);
	     else
	       t = 0;
	   }

	 Here the strlen call cannot be moved out of the loop, even though
	 s is invariant.  In addition to possibly creating a call with
	 invalid arguments, moving out a function call that is not executed
	 may cause performance regressions in case the call is costly and
	 not executed at all.  */
      return MOVE_PRESERVE_EXECUTION;
    }

  return MOVE_POSSIBLE;
}
/* Suppose that operand DEF is used inside the LOOP.  Returns the outermost
   loop to which we could move the expression using DEF if it did not have
   other operands, i.e. the outermost loop enclosing LOOP in which the value
   of DEF is invariant.  */

static struct loop *
outermost_invariant_loop (tree def, struct loop *loop)
{
  tree def_stmt;
  basic_block def_bb;
  struct loop *max_loop;

  if (TREE_CODE (def) != SSA_NAME)
    return superloop_at_depth (loop, 1);

  def_stmt = SSA_NAME_DEF_STMT (def);
  def_bb = bb_for_stmt (def_stmt);
  if (!def_bb)
    return superloop_at_depth (loop, 1);

  max_loop = find_common_loop (loop, def_bb->loop_father);

  if (LIM_DATA (def_stmt) && LIM_DATA (def_stmt)->max_loop)
    max_loop = find_common_loop (max_loop,
				 loop_outer (LIM_DATA (def_stmt)->max_loop));
  if (max_loop == loop)
    return NULL;
  max_loop = superloop_at_depth (loop, loop_depth (max_loop) + 1);

  return max_loop;
}
/* Returns the outermost superloop of LOOP in which the expression EXPR is
   invariant.  */

static struct loop *
outermost_invariant_loop_expr (tree expr, struct loop *loop)
{
  enum tree_code_class codeclass = TREE_CODE_CLASS (TREE_CODE (expr));
  unsigned i, nops;
  struct loop *max_loop = superloop_at_depth (loop, 1), *aloop;

  if (TREE_CODE (expr) == SSA_NAME
      || TREE_CODE (expr) == INTEGER_CST
      || is_gimple_min_invariant (expr))
    return outermost_invariant_loop (expr, loop);

  if (codeclass != tcc_unary
      && codeclass != tcc_binary
      && codeclass != tcc_expression
      && codeclass != tcc_vl_exp
      && codeclass != tcc_comparison)
    return NULL;

  nops = TREE_OPERAND_LENGTH (expr);
  for (i = 0; i < nops; i++)
    {
      aloop = outermost_invariant_loop_expr (TREE_OPERAND (expr, i), loop);
      if (!aloop)
	return NULL;

      if (flow_loop_nested_p (max_loop, aloop))
	max_loop = aloop;
    }

  return max_loop;
}
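/* E.g. for EXPR a_1 + b_2 (hypothetical SSA names), the result is the
   innermost of the loops returned by outermost_invariant_loop for a_1 and
   for b_2: the sum only becomes invariant once both of its operands
   are.  */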
/* DATA is a structure containing information associated with a statement
   inside LOOP.  DEF is one of the operands of this statement.

   Find the outermost loop enclosing LOOP in which the value of DEF is
   invariant and record this in the DATA->max_loop field.  If DEF itself is
   defined inside this loop as well (i.e. we need to hoist it out of the
   loop if we want to hoist the statement represented by DATA), record the
   statement that defines DEF in the DATA->depends list.  Additionally, if
   ADD_COST is true, add the cost of the computation of DEF to DATA->cost.

   If DEF is not invariant in LOOP, return false.  Otherwise return true.  */

static bool
add_dependency (tree def, struct lim_aux_data *data, struct loop *loop,
		bool add_cost)
{
  tree def_stmt = SSA_NAME_DEF_STMT (def);
  basic_block def_bb = bb_for_stmt (def_stmt);
  struct loop *max_loop;
  struct depend *dep;

  if (!def_bb)
    return true;

  max_loop = outermost_invariant_loop (def, loop);
  if (!max_loop)
    return false;

  if (flow_loop_nested_p (data->max_loop, max_loop))
    data->max_loop = max_loop;

  if (!LIM_DATA (def_stmt))
    return true;

  if (add_cost
      /* Only add the cost if the statement defining DEF is inside LOOP,
	 i.e. if it is likely that by moving the invariants dependent
	 on it, we will be able to avoid creating a new register for
	 it (since it will be only used in these dependent invariants).  */
      && def_bb->loop_father == loop)
    data->cost += LIM_DATA (def_stmt)->cost;

  dep = XNEW (struct depend);
  dep->stmt = def_stmt;
  dep->next = data->depends;
  data->depends = dep;

  return true;
}
/* Returns an estimate of the cost of statement STMT.  TODO -- the values here
   are just ad-hoc constants.  The estimates should be based on target-specific
   values.  */

static unsigned
stmt_cost (tree stmt)
{
  tree rhs;
  unsigned cost = 1;

  /* Always try to create possibilities for unswitching.  */
  if (TREE_CODE (stmt) == COND_EXPR)
    return LIM_EXPENSIVE;

  rhs = GENERIC_TREE_OPERAND (stmt, 1);

  /* Hoisting memory references out should almost surely be a win.  */
  if (stmt_references_memory_p (stmt))
    cost += 20;

  switch (TREE_CODE (rhs))
    {
    case CALL_EXPR:
      /* We should be hoisting calls if possible.  */

      /* Unless the call is a builtin_constant_p; this always folds to a
	 constant, so moving it is useless.  */
      rhs = get_callee_fndecl (rhs);
      if (DECL_BUILT_IN_CLASS (rhs) == BUILT_IN_NORMAL
	  && DECL_FUNCTION_CODE (rhs) == BUILT_IN_CONSTANT_P)
	return 0;

      cost += 20;
      break;

    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
    case RDIV_EXPR:
      /* Division and multiplication are usually expensive.  */
      cost += 20;
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
      cost += 20;
      break;

    default:
      break;
    }

  return cost;
}
/* Finds the outermost loop between OUTER and LOOP in which the memory
   reference REF is independent.  If REF is not independent in LOOP, NULL
   is returned instead.  */

static struct loop *
outermost_indep_loop (struct loop *outer, struct loop *loop, mem_ref_p ref)
{
  struct loop *aloop;

  if (bitmap_bit_p (ref->stored, loop->num))
    return NULL;

  for (aloop = outer;
       aloop != loop;
       aloop = superloop_at_depth (loop, loop_depth (aloop) + 1))
    if (!bitmap_bit_p (ref->stored, aloop->num)
	&& ref_indep_loop_p (aloop, ref))
      return aloop;

  if (ref_indep_loop_p (loop, ref))
    return loop;
  else
    return NULL;
}
/* If there is a simple load or store to a memory reference in STMT, returns
   the location of the memory reference, and sets IS_STORE according to whether
   it is a store or load.  Otherwise, returns NULL.  */

static tree *
simple_mem_ref_in_stmt (tree stmt, bool *is_store)
{
  tree *lhs, *rhs;

  /* Recognize MEM = (SSA_NAME | invariant) and SSA_NAME = MEM patterns.  */
  if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
    return NULL;

  lhs = &GIMPLE_STMT_OPERAND (stmt, 0);
  rhs = &GIMPLE_STMT_OPERAND (stmt, 1);

  if (TREE_CODE (*lhs) == SSA_NAME)
    {
      if (!is_gimple_addressable (*rhs))
	return NULL;

      *is_store = false;
      return rhs;
    }
  else if (TREE_CODE (*rhs) == SSA_NAME
	   || is_gimple_min_invariant (*rhs))
    {
      *is_store = true;
      return lhs;
    }
  else
    return NULL;
}
/* Returns the memory reference contained in STMT.  */

static mem_ref_p
mem_ref_in_stmt (tree stmt)
{
  bool store;
  tree *mem = simple_mem_ref_in_stmt (stmt, &store);
  hashval_t hash;
  mem_ref_p ref;

  if (!mem)
    return NULL;
  gcc_assert (!store);

  hash = iterative_hash_expr (*mem, 0);
  ref = (mem_ref_p) htab_find_with_hash (memory_accesses.refs, *mem, hash);

  gcc_assert (ref != NULL);
  return ref;
}
/* Determine the outermost loop to which it is possible to hoist a statement
   STMT and store it to LIM_DATA (STMT)->max_loop.  To do this we determine
   the outermost loop in which the value computed by STMT is invariant.
   If MUST_PRESERVE_EXEC is true, additionally choose the loop so that we
   preserve whether STMT is executed.  Other related information is also
   stored to LIM_DATA (STMT).

   The function returns false if STMT cannot be hoisted outside of the loop it
   is defined in, and true otherwise.  */

static bool
determine_max_movement (tree stmt, bool must_preserve_exec)
{
  basic_block bb = bb_for_stmt (stmt);
  struct loop *loop = bb->loop_father;
  struct loop *level;
  struct lim_aux_data *lim_data = LIM_DATA (stmt);
  tree val;
  ssa_op_iter iter;

  if (must_preserve_exec)
    level = ALWAYS_EXECUTED_IN (bb);
  else
    level = superloop_at_depth (loop, 1);
  lim_data->max_loop = level;

  FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_USE)
    if (!add_dependency (val, lim_data, loop, true))
      return false;

  if (!ZERO_SSA_OPERANDS (stmt, SSA_OP_VIRTUAL_USES))
    {
      mem_ref_p ref = mem_ref_in_stmt (stmt);

      if (ref)
	{
	  lim_data->max_loop
	    = outermost_indep_loop (lim_data->max_loop, loop, ref);
	  if (!lim_data->max_loop)
	    return false;
	}
      else
	{
	  FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_VIRTUAL_USES)
	    {
	      if (!add_dependency (val, lim_data, loop, false))
		return false;
	    }
	}
    }

  lim_data->cost += stmt_cost (stmt);

  return true;
}
/* Suppose that some statement in ORIG_LOOP is hoisted to the loop LEVEL,
   and that one of the operands of this statement is computed by STMT.
   Ensure that STMT (together with all the statements that define its
   operands) is hoisted at least out of the loop LEVEL.  */

static void
set_level (tree stmt, struct loop *orig_loop, struct loop *level)
{
  struct loop *stmt_loop = bb_for_stmt (stmt)->loop_father;
  struct depend *dep;

  stmt_loop = find_common_loop (orig_loop, stmt_loop);
  if (LIM_DATA (stmt) && LIM_DATA (stmt)->tgt_loop)
    stmt_loop = find_common_loop (stmt_loop,
				  loop_outer (LIM_DATA (stmt)->tgt_loop));
  if (flow_loop_nested_p (stmt_loop, level))
    return;

  gcc_assert (LIM_DATA (stmt));
  gcc_assert (level == LIM_DATA (stmt)->max_loop
	      || flow_loop_nested_p (LIM_DATA (stmt)->max_loop, level));

  LIM_DATA (stmt)->tgt_loop = level;
  for (dep = LIM_DATA (stmt)->depends; dep; dep = dep->next)
    set_level (dep->stmt, orig_loop, level);
}
/* Determines the outermost loop out of which we want to hoist the statement
   STMT.  For now we choose the outermost possible loop.  TODO -- use
   profiling information to set it more sanely.  */

static void
set_profitable_level (tree stmt)
{
  set_level (stmt, bb_for_stmt (stmt)->loop_father, LIM_DATA (stmt)->max_loop);
}
/* Returns true if STMT is not a pure call.  */

static bool
nonpure_call_p (tree stmt)
{
  tree call = get_call_expr_in (stmt);

  if (!call)
    return false;

  return TREE_SIDE_EFFECTS (call) != 0;
}
/* Releases the memory occupied by DATA.  */

static void
free_lim_aux_data (struct lim_aux_data *data)
{
  struct depend *dep, *next;

  for (dep = data->depends; dep; dep = next)
    {
      next = dep->next;
      free (dep);
    }
  free (data);
}
/* Rewrite a/b to a*(1/b).  Return the invariant stmt to process.  */

static tree
rewrite_reciprocal (block_stmt_iterator *bsi)
{
  tree stmt, lhs, rhs, stmt1, stmt2, var, name, tmp;

  stmt = bsi_stmt (*bsi);
  lhs = GENERIC_TREE_OPERAND (stmt, 0);
  rhs = GENERIC_TREE_OPERAND (stmt, 1);

  /* stmt must be GIMPLE_MODIFY_STMT.  */
  var = create_tmp_var (TREE_TYPE (rhs), "reciptmp");
  add_referenced_var (var);

  tmp = build2 (RDIV_EXPR, TREE_TYPE (rhs),
		build_real (TREE_TYPE (rhs), dconst1),
		TREE_OPERAND (rhs, 1));
  stmt1 = build_gimple_modify_stmt (var, tmp);
  name = make_ssa_name (var, stmt1);
  GIMPLE_STMT_OPERAND (stmt1, 0) = name;
  tmp = build2 (MULT_EXPR, TREE_TYPE (rhs),
		name, TREE_OPERAND (rhs, 0));
  stmt2 = build_gimple_modify_stmt (lhs, tmp);

  /* Replace division stmt with reciprocal and multiply stmts.
     The multiply stmt is not invariant, so update iterator
     and avoid rescanning.  */
  bsi_replace (bsi, stmt1, true);
  bsi_insert_after (bsi, stmt2, BSI_NEW_STMT);
  SSA_NAME_DEF_STMT (lhs) = stmt2;

  /* Continue processing with invariant reciprocal statement.  */
  return stmt1;
}
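/* For instance (a sketch with hypothetical SSA names), the loop body
   statement

     x_1 = a_2 / b_3;

   is replaced by

     reciptmp_4 = 1.0 / b_3;
     x_1 = reciptmp_4 * a_2;

   so that, when b_3 is invariant, the division can be hoisted and only
   the cheap multiplication remains in the loop.  */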
/* Check if the pattern at *BSI is a bittest of the form
   (A >> B) & 1 != 0 and in this case rewrite it to A & (1 << B) != 0.  */

static tree
rewrite_bittest (block_stmt_iterator *bsi)
{
  tree stmt, lhs, rhs, var, name, use_stmt, stmt1, stmt2, t;
  use_operand_p use;

  stmt = bsi_stmt (*bsi);
  lhs = GENERIC_TREE_OPERAND (stmt, 0);
  rhs = GENERIC_TREE_OPERAND (stmt, 1);

  /* Verify that the single use of lhs is a comparison against zero.  */
  if (TREE_CODE (lhs) != SSA_NAME
      || !single_imm_use (lhs, &use, &use_stmt)
      || TREE_CODE (use_stmt) != COND_EXPR)
    return stmt;
  t = COND_EXPR_COND (use_stmt);
  if (TREE_OPERAND (t, 0) != lhs
      || (TREE_CODE (t) != NE_EXPR
	  && TREE_CODE (t) != EQ_EXPR)
      || !integer_zerop (TREE_OPERAND (t, 1)))
    return stmt;

  /* Get at the operands of the shift.  The rhs is TMP1 & 1.  */
  stmt1 = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));
  if (TREE_CODE (stmt1) != GIMPLE_MODIFY_STMT)
    return stmt;

  /* There is a conversion in between possibly inserted by fold.  */
  t = GIMPLE_STMT_OPERAND (stmt1, 1);
  if (CONVERT_EXPR_P (t))
    {
      t = TREE_OPERAND (t, 0);
      if (TREE_CODE (t) != SSA_NAME
	  || !has_single_use (t))
	return stmt;
      stmt1 = SSA_NAME_DEF_STMT (t);
      if (TREE_CODE (stmt1) != GIMPLE_MODIFY_STMT)
	return stmt;
      t = GIMPLE_STMT_OPERAND (stmt1, 1);
    }

  /* Verify that B is loop invariant but A is not.  Verify that with
     all the stmt walking we are still in the same loop.  */
  if (TREE_CODE (t) == RSHIFT_EXPR
      && loop_containing_stmt (stmt1) == loop_containing_stmt (stmt)
      && outermost_invariant_loop_expr (TREE_OPERAND (t, 1),
					loop_containing_stmt (stmt1)) != NULL
      && outermost_invariant_loop_expr (TREE_OPERAND (t, 0),
					loop_containing_stmt (stmt1)) == NULL)
    {
      tree a = TREE_OPERAND (t, 0);
      tree b = TREE_OPERAND (t, 1);

      /* 1 << B */
      var = create_tmp_var (TREE_TYPE (a), "shifttmp");
      add_referenced_var (var);
      t = fold_build2 (LSHIFT_EXPR, TREE_TYPE (a),
		       build_int_cst (TREE_TYPE (a), 1), b);
      stmt1 = build_gimple_modify_stmt (var, t);
      name = make_ssa_name (var, stmt1);
      GIMPLE_STMT_OPERAND (stmt1, 0) = name;

      /* A & (1 << B) */
      t = fold_build2 (BIT_AND_EXPR, TREE_TYPE (a), a, name);
      stmt2 = build_gimple_modify_stmt (var, t);
      name = make_ssa_name (var, stmt2);
      GIMPLE_STMT_OPERAND (stmt2, 0) = name;

      /* Replace the SSA_NAME we compare against zero.  Adjust
	 the type of zero accordingly.  */
      SET_USE (use, name);
      TREE_OPERAND (COND_EXPR_COND (use_stmt), 1)
	= build_int_cst_type (TREE_TYPE (name), 0);

      bsi_insert_before (bsi, stmt1, BSI_SAME_STMT);
      bsi_replace (bsi, stmt2, true);

      return stmt1;
    }

  return stmt;
}
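/* For instance (a sketch with hypothetical SSA names), with B_3 invariant
   the sequence

     tmp_1 = A_2 >> B_3;
     res_4 = tmp_1 & 1;
     if (res_4 != 0) ...

   is rewritten to

     shifttmp_5 = 1 << B_3;
     shifttmp_6 = A_2 & shifttmp_5;
     if (shifttmp_6 != 0) ...

   so that the mask 1 << B_3 can be hoisted out of the loop.  */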
/* Determine the outermost loops in which statements in basic block BB are
   invariant, and record them to the LIM_DATA associated with the statements.
   Callback for walk_dominator_tree.  */

static void
determine_invariantness_stmt (struct dom_walk_data *dw_data ATTRIBUTE_UNUSED,
			      basic_block bb)
{
  enum move_pos pos;
  block_stmt_iterator bsi;
  tree stmt, rhs;
  bool maybe_never = ALWAYS_EXECUTED_IN (bb) == NULL;
  struct loop *outermost = ALWAYS_EXECUTED_IN (bb);

  if (!loop_outer (bb->loop_father))
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Basic block %d (loop %d -- depth %d):\n\n",
	     bb->index, bb->loop_father->num, loop_depth (bb->loop_father));

  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
    {
      stmt = bsi_stmt (bsi);

      pos = movement_possibility (stmt);
      if (pos == MOVE_IMPOSSIBLE)
	{
	  if (nonpure_call_p (stmt))
	    {
	      maybe_never = true;
	      outermost = NULL;
	    }
	  /* Make sure to note always_executed_in for stores to make
	     store-motion work.  */
	  else if (stmt_makes_single_store (stmt))
	    {
	      stmt_ann (stmt)->common.aux
		= xcalloc (1, sizeof (struct lim_aux_data));
	      LIM_DATA (stmt)->always_executed_in = outermost;
	    }
	  continue;
	}

      if (TREE_CODE (stmt) == GIMPLE_MODIFY_STMT)
	{
	  rhs = GIMPLE_STMT_OPERAND (stmt, 1);

	  /* If the divisor is invariant, convert a/b to a*(1/b), allowing the
	     reciprocal to be hoisted out of the loop, saving an expensive
	     divide.  */
	  if (pos == MOVE_POSSIBLE
	      && TREE_CODE (rhs) == RDIV_EXPR
	      && flag_unsafe_math_optimizations
	      && !flag_trapping_math
	      && outermost_invariant_loop_expr (TREE_OPERAND (rhs, 1),
						loop_containing_stmt (stmt)) != NULL
	      && outermost_invariant_loop_expr (rhs,
						loop_containing_stmt (stmt)) == NULL)
	    stmt = rewrite_reciprocal (&bsi);

	  /* If the shift count is invariant, convert (A >> B) & 1 to
	     A & (1 << B), allowing the bit mask to be hoisted out of the loop
	     and saving an expensive shift.  */
	  if (pos == MOVE_POSSIBLE
	      && TREE_CODE (rhs) == BIT_AND_EXPR
	      && integer_onep (TREE_OPERAND (rhs, 1))
	      && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
	      && has_single_use (TREE_OPERAND (rhs, 0)))
	    stmt = rewrite_bittest (&bsi);
	}

      stmt_ann (stmt)->common.aux = xcalloc (1, sizeof (struct lim_aux_data));
      LIM_DATA (stmt)->always_executed_in = outermost;

      if (maybe_never && pos == MOVE_PRESERVE_EXECUTION)
	continue;

      if (!determine_max_movement (stmt, pos == MOVE_PRESERVE_EXECUTION))
	{
	  LIM_DATA (stmt)->max_loop = NULL;
	  continue;
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  print_generic_stmt_indented (dump_file, stmt, 0, 2);
	  fprintf (dump_file, "  invariant up to level %d, cost %d.\n\n",
		   loop_depth (LIM_DATA (stmt)->max_loop),
		   LIM_DATA (stmt)->cost);
	}

      if (LIM_DATA (stmt)->cost >= LIM_EXPENSIVE)
	set_profitable_level (stmt);
    }
}
/* For each statement determines the outermost loop in which it is invariant,
   the statements on whose motion it depends, and the cost of the computation.
   This information is stored in the LIM_DATA structure associated with
   each statement.  */

static void
determine_invariantness (void)
{
  struct dom_walk_data walk_data;

  memset (&walk_data, 0, sizeof (struct dom_walk_data));
  walk_data.dom_direction = CDI_DOMINATORS;
  walk_data.before_dom_children_before_stmts = determine_invariantness_stmt;

  init_walk_dominator_tree (&walk_data);
  walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
  fini_walk_dominator_tree (&walk_data);
}
/* Hoist the statements in basic block BB out of the loops prescribed by
   data stored in LIM_DATA structures associated with each statement.  Callback
   for walk_dominator_tree.  */

static void
move_computations_stmt (struct dom_walk_data *dw_data ATTRIBUTE_UNUSED,
			basic_block bb)
{
  struct loop *level;
  block_stmt_iterator bsi;
  tree stmt;
  unsigned cost = 0;

  if (!loop_outer (bb->loop_father))
    return;

  for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
    {
      stmt = bsi_stmt (bsi);

      if (!LIM_DATA (stmt))
	{
	  bsi_next (&bsi);
	  continue;
	}

      cost = LIM_DATA (stmt)->cost;
      level = LIM_DATA (stmt)->tgt_loop;
      free_lim_aux_data (LIM_DATA (stmt));
      stmt_ann (stmt)->common.aux = NULL;

      if (!level)
	{
	  bsi_next (&bsi);
	  continue;
	}

      /* We do not really want to move conditionals out of the loop; we just
	 placed them here to force their operands to be moved if necessary.  */
      if (TREE_CODE (stmt) == COND_EXPR)
	continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Moving statement\n");
	  print_generic_stmt (dump_file, stmt, 0);
	  fprintf (dump_file, "(cost %u) out of loop %d.\n\n",
		   cost, level->num);
	}

      mark_virtual_ops_for_renaming (stmt);
      bsi_insert_on_edge (loop_preheader_edge (level), stmt);
      bsi_remove (&bsi, false);
    }
}
/* Hoist the statements out of the loops prescribed by data stored in
   LIM_DATA structures associated with each statement.  */

static void
move_computations (void)
{
  struct dom_walk_data walk_data;

  memset (&walk_data, 0, sizeof (struct dom_walk_data));
  walk_data.dom_direction = CDI_DOMINATORS;
  walk_data.before_dom_children_before_stmts = move_computations_stmt;

  init_walk_dominator_tree (&walk_data);
  walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
  fini_walk_dominator_tree (&walk_data);

  bsi_commit_edge_inserts ();
  if (need_ssa_update_p ())
    rewrite_into_loop_closed_ssa (NULL, TODO_update_ssa);
}
/* Checks whether the statement defining variable *INDEX can be hoisted
   out of the loop passed in DATA.  Callback for for_each_index.  */

static bool
may_move_till (tree ref, tree *index, void *data)
{
  struct loop *loop = (struct loop *) data, *max_loop;

  /* If REF is an array reference, check also that the step and the lower
     bound are invariant in LOOP.  */
  if (TREE_CODE (ref) == ARRAY_REF)
    {
      tree step = array_ref_element_size (ref);
      tree lbound = array_ref_low_bound (ref);

      max_loop = outermost_invariant_loop_expr (step, loop);
      if (!max_loop)
	return false;

      max_loop = outermost_invariant_loop_expr (lbound, loop);
      if (!max_loop)
	return false;
    }

  max_loop = outermost_invariant_loop (*index, loop);
  if (!max_loop)
    return false;

  return true;
}
/* Forces statements defining (invariant) SSA names in expression EXPR to be
   moved out of the LOOP.  ORIG_LOOP is the loop in which EXPR is used.  */

static void
force_move_till_expr (tree expr, struct loop *orig_loop, struct loop *loop)
{
  enum tree_code_class codeclass = TREE_CODE_CLASS (TREE_CODE (expr));
  unsigned i, nops;

  if (TREE_CODE (expr) == SSA_NAME)
    {
      tree stmt = SSA_NAME_DEF_STMT (expr);
      if (IS_EMPTY_STMT (stmt))
	return;

      set_level (stmt, orig_loop, loop);
      return;
    }

  if (codeclass != tcc_unary
      && codeclass != tcc_binary
      && codeclass != tcc_expression
      && codeclass != tcc_vl_exp
      && codeclass != tcc_comparison)
    return;

  nops = TREE_OPERAND_LENGTH (expr);
  for (i = 0; i < nops; i++)
    force_move_till_expr (TREE_OPERAND (expr, i), orig_loop, loop);
}
/* Forces statements defining invariants in REF (and *INDEX) to be moved out of
   the LOOP.  The reference REF is used in the loop ORIG_LOOP.  Callback for
   for_each_index.  */

struct fmt_data
{
  struct loop *loop;
  struct loop *orig_loop;
};

static bool
force_move_till (tree ref, tree *index, void *data)
{
  tree stmt;
  struct fmt_data *fmt_data = (struct fmt_data *) data;

  if (TREE_CODE (ref) == ARRAY_REF)
    {
      tree step = array_ref_element_size (ref);
      tree lbound = array_ref_low_bound (ref);

      force_move_till_expr (step, fmt_data->orig_loop, fmt_data->loop);
      force_move_till_expr (lbound, fmt_data->orig_loop, fmt_data->loop);
    }

  if (TREE_CODE (*index) != SSA_NAME)
    return true;

  stmt = SSA_NAME_DEF_STMT (*index);
  if (IS_EMPTY_STMT (stmt))
    return true;

  set_level (stmt, fmt_data->orig_loop, fmt_data->loop);

  return true;
}
/* A hash function for struct mem_ref object OBJ.  */

static hashval_t
memref_hash (const void *obj)
{
  const struct mem_ref *const mem = (const struct mem_ref *) obj;

  return mem->hash;
}

/* An equality function for struct mem_ref object OBJ1 with
   memory reference OBJ2.  */

static int
memref_eq (const void *obj1, const void *obj2)
{
  const struct mem_ref *const mem1 = (const struct mem_ref *) obj1;

  return operand_equal_p (mem1->mem, (const_tree) obj2, 0);
}
/* Releases list of memory reference locations ACCS.  */

static void
free_mem_ref_locs (mem_ref_locs_p accs)
{
  unsigned i;
  mem_ref_loc_p loc;

  if (!accs)
    return;

  for (i = 0; VEC_iterate (mem_ref_loc_p, accs->locs, i, loc); i++)
    free (loc);
  VEC_free (mem_ref_loc_p, heap, accs->locs);
  free (accs);
}

/* A function to free the mem_ref object OBJ.  */

static void
memref_free (void *obj)
{
  struct mem_ref *const mem = (struct mem_ref *) obj;
  unsigned i;
  mem_ref_locs_p accs;

  BITMAP_FREE (mem->stored);
  BITMAP_FREE (mem->indep_loop);
  BITMAP_FREE (mem->dep_loop);
  BITMAP_FREE (mem->indep_ref);
  BITMAP_FREE (mem->dep_ref);

  for (i = 0; VEC_iterate (mem_ref_locs_p, mem->accesses_in_loop, i, accs); i++)
    free_mem_ref_locs (accs);
  VEC_free (mem_ref_locs_p, heap, mem->accesses_in_loop);

  BITMAP_FREE (mem->vops);
  free (mem);
}
/* Allocates and returns a memory reference description for MEM whose hash
   value is HASH and whose id is ID.  */

static mem_ref_p
mem_ref_alloc (tree mem, unsigned hash, unsigned id)
{
  mem_ref_p ref = XNEW (struct mem_ref);
  ref->mem = mem;
  ref->id = id;
  ref->hash = hash;
  ref->stored = BITMAP_ALLOC (NULL);
  ref->indep_loop = BITMAP_ALLOC (NULL);
  ref->dep_loop = BITMAP_ALLOC (NULL);
  ref->indep_ref = BITMAP_ALLOC (NULL);
  ref->dep_ref = BITMAP_ALLOC (NULL);
  ref->accesses_in_loop = NULL;
  ref->vops = BITMAP_ALLOC (NULL);

  return ref;
}

/* Allocates and returns the new list of locations.  */

static mem_ref_locs_p
mem_ref_locs_alloc (void)
{
  mem_ref_locs_p accs = XNEW (struct mem_ref_locs);
  accs->locs = NULL;
  return accs;
}
/* Records memory reference location *LOC in LOOP to the memory reference
   description REF.  The reference occurs in statement STMT.  */

static void
record_mem_ref_loc (mem_ref_p ref, struct loop *loop, tree stmt, tree *loc)
{
  mem_ref_loc_p aref = XNEW (struct mem_ref_loc);
  mem_ref_locs_p accs;
  bitmap ril = VEC_index (bitmap, memory_accesses.refs_in_loop, loop->num);

  if (VEC_length (mem_ref_locs_p, ref->accesses_in_loop)
      <= (unsigned) loop->num)
    VEC_safe_grow_cleared (mem_ref_locs_p, heap, ref->accesses_in_loop,
			   loop->num + 1);
  accs = VEC_index (mem_ref_locs_p, ref->accesses_in_loop, loop->num);
  if (!accs)
    {
      accs = mem_ref_locs_alloc ();
      VEC_replace (mem_ref_locs_p, ref->accesses_in_loop, loop->num, accs);
    }

  aref->stmt = stmt;
  aref->ref = loc;

  VEC_safe_push (mem_ref_loc_p, heap, accs->locs, aref);
  bitmap_set_bit (ril, ref->id);
}
/* Marks reference REF as stored in LOOP and in all its superloops (a store
   in an inner loop is also a store in every enclosing loop).  */

static void
mark_ref_stored (mem_ref_p ref, struct loop *loop)
{
  for (;
       loop != current_loops->tree_root
       && !bitmap_bit_p (ref->stored, loop->num);
       loop = loop_outer (loop))
    bitmap_set_bit (ref->stored, loop->num);
}
/* Gathers memory references in statement STMT in LOOP, storing the
   information about them in the memory_accesses structure.  Marks
   the vops accessed through unrecognized statements there as
   well.  */

static void
gather_mem_refs_stmt (struct loop *loop, tree stmt)
{
  tree *mem = NULL;
  hashval_t hash;
  PTR *slot;
  mem_ref_p ref;
  ssa_op_iter oi;
  tree vname;
  bool is_stored;
  bitmap clvops;
  unsigned id;

  if (ZERO_SSA_OPERANDS (stmt, SSA_OP_ALL_VIRTUALS))
    return;

  mem = simple_mem_ref_in_stmt (stmt, &is_stored);
  if (!mem)
    goto fail;

  hash = iterative_hash_expr (*mem, 0);
  slot = htab_find_slot_with_hash (memory_accesses.refs, *mem, hash, INSERT);

  if (*slot)
    {
      ref = (mem_ref_p) *slot;
      id = ref->id;
    }
  else
    {
      id = VEC_length (mem_ref_p, memory_accesses.refs_list);
      ref = mem_ref_alloc (*mem, hash, id);
      VEC_safe_push (mem_ref_p, heap, memory_accesses.refs_list, ref);
      *slot = ref;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "Memory reference %u: ", id);
	  print_generic_expr (dump_file, ref->mem, TDF_SLIM);
	  fprintf (dump_file, "\n");
	}
    }
  if (is_stored)
    mark_ref_stored (ref, loop);

  FOR_EACH_SSA_TREE_OPERAND (vname, stmt, oi, SSA_OP_VIRTUAL_USES)
    bitmap_set_bit (ref->vops, DECL_UID (SSA_NAME_VAR (vname)));
  record_mem_ref_loc (ref, loop, stmt, mem);
  return;

fail:
  clvops = VEC_index (bitmap, memory_accesses.clobbered_vops, loop->num);
  FOR_EACH_SSA_TREE_OPERAND (vname, stmt, oi, SSA_OP_VIRTUAL_USES)
    bitmap_set_bit (clvops, DECL_UID (SSA_NAME_VAR (vname)));
}
/* Gathers memory references in loops.  */

static void
gather_mem_refs_in_loops (void)
{
  block_stmt_iterator bsi;
  basic_block bb;
  struct loop *loop;
  loop_iterator li;
  bitmap clvo, clvi;
  bitmap lrefs, alrefs, alrefso;

  FOR_EACH_BB (bb)
    {
      loop = bb->loop_father;
      if (loop == current_loops->tree_root)
	continue;

      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	gather_mem_refs_stmt (loop, bsi_stmt (bsi));
    }

  /* Propagate the information about clobbered vops and accessed memory
     references up the loop hierarchy.  */
  FOR_EACH_LOOP (li, loop, LI_FROM_INNERMOST)
    {
      lrefs = VEC_index (bitmap, memory_accesses.refs_in_loop, loop->num);
      alrefs = VEC_index (bitmap, memory_accesses.all_refs_in_loop, loop->num);
      bitmap_ior_into (alrefs, lrefs);

      if (loop_outer (loop) == current_loops->tree_root)
	continue;

      clvi = VEC_index (bitmap, memory_accesses.clobbered_vops, loop->num);
      clvo = VEC_index (bitmap, memory_accesses.clobbered_vops,
			loop_outer (loop)->num);
      bitmap_ior_into (clvo, clvi);

      alrefso = VEC_index (bitmap, memory_accesses.all_refs_in_loop,
			   loop_outer (loop)->num);
      bitmap_ior_into (alrefso, alrefs);
    }
}
/* Element of the hash table that maps vops to memory references.  */

struct vop_to_refs_elt
{
  /* DECL_UID of the vop.  */
  unsigned uid;

  /* List of all the references.  */
  bitmap refs_all;

  /* List of stored references.  */
  bitmap refs_stored;
};

/* A hash function for struct vop_to_refs_elt object OBJ.  */

static hashval_t
vtoe_hash (const void *obj)
{
  const struct vop_to_refs_elt *const vtoe =
    (const struct vop_to_refs_elt *) obj;

  return vtoe->uid;
}

/* An equality function for struct vop_to_refs_elt object OBJ1 with
   uid of a vop OBJ2.  */

static int
vtoe_eq (const void *obj1, const void *obj2)
{
  const struct vop_to_refs_elt *const vtoe =
    (const struct vop_to_refs_elt *) obj1;
  const unsigned *const uid = (const unsigned *) obj2;

  return vtoe->uid == *uid;
}

/* A function to free the struct vop_to_refs_elt object.  */

static void
vtoe_free (void *obj)
{
  struct vop_to_refs_elt *const vtoe =
    (struct vop_to_refs_elt *) obj;

  BITMAP_FREE (vtoe->refs_all);
  BITMAP_FREE (vtoe->refs_stored);
  free (vtoe);
}
/* Records REF to hashtable VOP_TO_REFS for the index VOP.  STORED is true
   if the reference REF is stored.  */

static void
record_vop_access (htab_t vop_to_refs, unsigned vop, unsigned ref, bool stored)
{
  void **slot = htab_find_slot_with_hash (vop_to_refs, &vop, vop, INSERT);
  struct vop_to_refs_elt *vtoe;

  if (!*slot)
    {
      vtoe = XNEW (struct vop_to_refs_elt);
      vtoe->uid = vop;
      vtoe->refs_all = BITMAP_ALLOC (NULL);
      vtoe->refs_stored = BITMAP_ALLOC (NULL);
      *slot = vtoe;
    }
  else
    vtoe = (struct vop_to_refs_elt *) *slot;

  bitmap_set_bit (vtoe->refs_all, ref);
  if (stored)
    bitmap_set_bit (vtoe->refs_stored, ref);
}

/* Returns the set of references that access VOP according to the table
   VOP_TO_REFS.  */

static bitmap
get_vop_accesses (htab_t vop_to_refs, unsigned vop)
{
  struct vop_to_refs_elt *const vtoe =
    (struct vop_to_refs_elt *) htab_find_with_hash (vop_to_refs, &vop, vop);
  return vtoe->refs_all;
}

/* Returns the set of stores that access VOP according to the table
   VOP_TO_REFS.  */

static bitmap
get_vop_stores (htab_t vop_to_refs, unsigned vop)
{
  struct vop_to_refs_elt *const vtoe =
    (struct vop_to_refs_elt *) htab_find_with_hash (vop_to_refs, &vop, vop);
  return vtoe->refs_stored;
}
/* Adds REF to mapping from virtual operands to references in LOOP.  */

static void
add_vop_ref_mapping (struct loop *loop, mem_ref_p ref)
{
  htab_t map = VEC_index (htab_t, memory_accesses.vop_ref_map, loop->num);
  bool stored = bitmap_bit_p (ref->stored, loop->num);
  bitmap clobbers = VEC_index (bitmap, memory_accesses.clobbered_vops,
			       loop->num);
  bitmap_iterator bi;
  unsigned vop;

  EXECUTE_IF_AND_COMPL_IN_BITMAP (ref->vops, clobbers, 0, vop, bi)
    {
      record_vop_access (map, vop, ref->id, stored);
    }
}

/* Create a mapping from virtual operands to references that touch them
   in LOOP.  */

static void
create_vop_ref_mapping_loop (struct loop *loop)
{
  bitmap refs = VEC_index (bitmap, memory_accesses.refs_in_loop, loop->num);
  struct loop *sloop;
  bitmap_iterator bi;
  unsigned i;
  mem_ref_p ref;

  EXECUTE_IF_SET_IN_BITMAP (refs, 0, i, bi)
    {
      ref = VEC_index (mem_ref_p, memory_accesses.refs_list, i);
      for (sloop = loop; sloop != current_loops->tree_root;
	   sloop = loop_outer (sloop))
	add_vop_ref_mapping (sloop, ref);
    }
}

/* For each non-clobbered virtual operand and each loop, record the memory
   references in this loop that touch the operand.  */

static void
create_vop_ref_mapping (void)
{
  loop_iterator li;
  struct loop *loop;

  FOR_EACH_LOOP (li, loop, 0)
    {
      create_vop_ref_mapping_loop (loop);
    }
}
/* Gathers information about memory accesses in the loops.  */

static void
analyze_memory_references (void)
{
  unsigned i;
  bitmap empty;
  htab_t hempty;

  memory_accesses.refs
    = htab_create (100, memref_hash, memref_eq, memref_free);
  memory_accesses.refs_list = NULL;
  memory_accesses.refs_in_loop = VEC_alloc (bitmap, heap,
					    number_of_loops ());
  memory_accesses.all_refs_in_loop = VEC_alloc (bitmap, heap,
						number_of_loops ());
  memory_accesses.clobbered_vops = VEC_alloc (bitmap, heap,
					      number_of_loops ());
  memory_accesses.vop_ref_map = VEC_alloc (htab_t, heap,
					   number_of_loops ());

  for (i = 0; i < number_of_loops (); i++)
    {
      empty = BITMAP_ALLOC (NULL);
      VEC_quick_push (bitmap, memory_accesses.refs_in_loop, empty);
      empty = BITMAP_ALLOC (NULL);
      VEC_quick_push (bitmap, memory_accesses.all_refs_in_loop, empty);
      empty = BITMAP_ALLOC (NULL);
      VEC_quick_push (bitmap, memory_accesses.clobbered_vops, empty);
      hempty = htab_create (10, vtoe_hash, vtoe_eq, vtoe_free);
      VEC_quick_push (htab_t, memory_accesses.vop_ref_map, hempty);
    }

  memory_accesses.ttae_cache = NULL;

  gather_mem_refs_in_loops ();
  create_vop_ref_mapping ();
}
/* Returns true if a region of size SIZE1 at position 0 and a region of
   size SIZE2 at position DIFF cannot overlap.  */

static bool
cannot_overlap_p (aff_tree *diff, double_int size1, double_int size2)
{
  double_int d, bound;

  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

  d = diff->offset;
  if (double_int_negative_p (d))
    {
      /* The second object is before the first one, we succeed if the last
	 element of the second object is before the start of the first one.  */
      bound = double_int_add (d, double_int_add (size2, double_int_minus_one));
      return double_int_negative_p (bound);
    }
  else
    {
      /* We succeed if the second object starts after the first one ends.  */
      return double_int_scmp (size1, d) <= 0;
    }
}
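/* A worked example for cannot_overlap_p: with SIZE1 = SIZE2 = 4,
   DIFF = -4 gives bound = -4 + (4 - 1) = -1 < 0, so the regions [-4, -1]
   and [0, 3] cannot overlap and true is returned.  DIFF = 2 fails the
   size1 <= d test (4 > 2), so the regions [0, 3] and [2, 5] may overlap
   and false is returned, while DIFF = 4 passes it and the disjoint
   regions [0, 3] and [4, 7] yield true.  */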
/* Returns true if MEM1 and MEM2 may alias.  TTAE_CACHE is used as a cache in
   tree_to_aff_combination_expand.  */

static bool
mem_refs_may_alias_p (tree mem1, tree mem2, struct pointer_map_t **ttae_cache)
{
  /* Perform BASE + OFFSET analysis -- if MEM1 and MEM2 are based on the same
     object and their offsets differ in such a way that the locations cannot
     overlap, then they cannot alias.  */
  double_int size1, size2;
  aff_tree off1, off2;

  /* Perform basic offset and type-based disambiguation.  */
  if (!refs_may_alias_p (mem1, mem2))
    return false;

  /* The expansion of addresses may be a bit expensive, thus we only do
     the check at -O2 and higher optimization levels.  */
  if (optimize < 2)
    return true;

  get_inner_reference_aff (mem1, &off1, &size1);
  get_inner_reference_aff (mem2, &off2, &size2);
  aff_combination_expand (&off1, ttae_cache);
  aff_combination_expand (&off2, ttae_cache);
  aff_combination_scale (&off1, double_int_minus_one);
  aff_combination_add (&off2, &off1);

  if (cannot_overlap_p (&off2, size1, size2))
    return false;

  return true;
}
/* Rewrites the location LOC with TMP_VAR.  */

static void
rewrite_mem_ref_loc (mem_ref_loc_p loc, tree tmp_var)
{
  mark_virtual_ops_for_renaming (loc->stmt);
  *loc->ref = tmp_var;
  update_stmt (loc->stmt);
}
/* Adds all locations of REF in LOOP and its subloops to LOCS.  */

static void
get_all_locs_in_loop (struct loop *loop, mem_ref_p ref,
		      VEC (mem_ref_loc_p, heap) **locs)
{
  mem_ref_locs_p accs;
  unsigned i;
  mem_ref_loc_p loc;
  bitmap refs = VEC_index (bitmap, memory_accesses.all_refs_in_loop,
			   loop->num);
  struct loop *subloop;

  if (!bitmap_bit_p (refs, ref->id))
    return;

  if (VEC_length (mem_ref_locs_p, ref->accesses_in_loop)
      > (unsigned) loop->num)
    {
      accs = VEC_index (mem_ref_locs_p, ref->accesses_in_loop, loop->num);
      if (accs)
	{
	  for (i = 0; VEC_iterate (mem_ref_loc_p, accs->locs, i, loc); i++)
	    VEC_safe_push (mem_ref_loc_p, heap, *locs, loc);
	}
    }

  for (subloop = loop->inner; subloop != NULL; subloop = subloop->next)
    get_all_locs_in_loop (subloop, ref, locs);
}
/* Rewrites all references to REF in LOOP by variable TMP_VAR.  */

static void
rewrite_mem_refs (struct loop *loop, mem_ref_p ref, tree tmp_var)
{
  unsigned i;
  mem_ref_loc_p loc;
  VEC (mem_ref_loc_p, heap) *locs = NULL;

  get_all_locs_in_loop (loop, ref, &locs);
  for (i = 0; VEC_iterate (mem_ref_loc_p, locs, i, loc); i++)
    rewrite_mem_ref_loc (loc, tmp_var);
  VEC_free (mem_ref_loc_p, heap, locs);
}
/* The name and the length of the currently generated variable
   for lsm.  */
#define MAX_LSM_NAME_LENGTH 40
static char lsm_tmp_name[MAX_LSM_NAME_LENGTH + 1];
static int lsm_tmp_name_length;

/* Adds S to lsm_tmp_name.  */

static void
lsm_tmp_name_add (const char *s)
{
  int l = strlen (s) + lsm_tmp_name_length;
  if (l > MAX_LSM_NAME_LENGTH)
    return;

  strcpy (lsm_tmp_name + lsm_tmp_name_length, s);
  lsm_tmp_name_length = l;
}
/* Stores the name for temporary variable that replaces REF to
   lsm_tmp_name.  */

static void
gen_lsm_tmp_name (tree ref)
{
  const char *name;

  switch (TREE_CODE (ref))
    {
    case MISALIGNED_INDIRECT_REF:
    case ALIGN_INDIRECT_REF:
    case INDIRECT_REF:
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
      lsm_tmp_name_add ("_");
      break;

    case BIT_FIELD_REF:
    case VIEW_CONVERT_EXPR:
    case ARRAY_RANGE_REF:
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
      break;

    case REALPART_EXPR:
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
      lsm_tmp_name_add ("_RE");
      break;

    case IMAGPART_EXPR:
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
      lsm_tmp_name_add ("_IM");
      break;

    case COMPONENT_REF:
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
      lsm_tmp_name_add ("_");
      name = get_name (TREE_OPERAND (ref, 1));
      if (!name)
	name = "F";
      lsm_tmp_name_add ("_");
      lsm_tmp_name_add (name);
      break;

    case ARRAY_REF:
      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
      lsm_tmp_name_add ("_I");
      break;

    case SSA_NAME:
      ref = SSA_NAME_VAR (ref);
      /* Fallthru.  */

    case VAR_DECL:
    case PARM_DECL:
      name = get_name (ref);
      if (!name)
	name = "D";
      lsm_tmp_name_add (name);
      break;

    case STRING_CST:
      lsm_tmp_name_add ("S");
      break;

    case RESULT_DECL:
      lsm_tmp_name_add ("R");
      break;

    default:
      gcc_unreachable ();
    }
}
/* Determines name for temporary variable that replaces REF.
   The name is accumulated into the lsm_tmp_name variable.
   N is added to the name of the temporary.  */

char *
get_lsm_tmp_name (tree ref, unsigned n)
{
  char ns[2];

  lsm_tmp_name_length = 0;
  gen_lsm_tmp_name (ref);
  lsm_tmp_name_add ("_lsm");
  if (n < 10)
    {
      ns[0] = '0' + n;
      ns[1] = 0;
      lsm_tmp_name_add (ns);
    }
  return lsm_tmp_name;
}
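/* For example (assuming get_name returns the source-level names), for a
   reference A.B the accumulated name is "a__b", so get_lsm_tmp_name with
   N = 0 yields "a__b_lsm0".  execute_sm below passes ~0, which is >= 10,
   so no digit is appended and the temporary is named "a__b_lsm".  */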
/* Executes store motion of memory reference REF from LOOP.
   Exits from the LOOP are stored in EXITS.  The initialization of the
   temporary variable is put to the preheader of the loop, and assignments
   to the reference from the temporary variable are emitted to exits.  */

static void
execute_sm (struct loop *loop, VEC (edge, heap) *exits, mem_ref_p ref)
{
  tree tmp_var;
  unsigned i;
  tree load, store;
  struct fmt_data fmt_data;
  edge ex;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Executing store motion of ");
      print_generic_expr (dump_file, ref->mem, 0);
      fprintf (dump_file, " from loop %d\n", loop->num);
    }

  tmp_var = make_rename_temp (TREE_TYPE (ref->mem),
			      get_lsm_tmp_name (ref->mem, ~0));

  fmt_data.loop = loop;
  fmt_data.orig_loop = loop;
  for_each_index (&ref->mem, force_move_till, &fmt_data);

  rewrite_mem_refs (loop, ref, tmp_var);

  /* Emit the load & stores.  */
  load = build_gimple_modify_stmt (tmp_var, unshare_expr (ref->mem));
  get_stmt_ann (load)->common.aux = xcalloc (1, sizeof (struct lim_aux_data));
  LIM_DATA (load)->max_loop = loop;
  LIM_DATA (load)->tgt_loop = loop;

  /* Put this into the latch, so that we are sure it will be processed after
     all dependencies.  */
  bsi_insert_on_edge (loop_latch_edge (loop), load);

  for (i = 0; VEC_iterate (edge, exits, i, ex); i++)
    {
      store = build_gimple_modify_stmt (unshare_expr (ref->mem), tmp_var);
      bsi_insert_on_edge (ex, store);
    }
}
/* Hoists memory references MEM_REFS out of LOOP.  EXITS is the list of exit
   edges of the LOOP.  */

static void
hoist_memory_references (struct loop *loop, bitmap mem_refs,
			 VEC (edge, heap) *exits)
{
  mem_ref_p ref;
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (mem_refs, 0, i, bi)
    {
      ref = VEC_index (mem_ref_p, memory_accesses.refs_list, i);
      execute_sm (loop, exits, ref);
    }
}
/* Returns true if REF is always accessed in LOOP.  */

static bool
ref_always_accessed_p (struct loop *loop, mem_ref_p ref)
{
  VEC (mem_ref_loc_p, heap) *locs = NULL;
  unsigned i;
  mem_ref_loc_p loc;
  bool ret = false;
  struct loop *must_exec;

  get_all_locs_in_loop (loop, ref, &locs);
  for (i = 0; VEC_iterate (mem_ref_loc_p, locs, i, loc); i++)
    {
      if (!LIM_DATA (loc->stmt))
	continue;

      must_exec = LIM_DATA (loc->stmt)->always_executed_in;
      if (!must_exec)
	continue;

      if (must_exec == loop
	  || flow_loop_nested_p (must_exec, loop))
	{
	  ret = true;
	  break;
	}
    }
  VEC_free (mem_ref_loc_p, heap, locs);

  return ret;
}
/* Returns true if REF1 and REF2 are independent.  */

static bool
refs_independent_p (mem_ref_p ref1, mem_ref_p ref2)
{
  if (ref1 == ref2
      || bitmap_bit_p (ref1->indep_ref, ref2->id))
    return true;
  if (bitmap_bit_p (ref1->dep_ref, ref2->id))
    return false;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Querying dependency of refs %u and %u: ",
	     ref1->id, ref2->id);

  if (mem_refs_may_alias_p (ref1->mem, ref2->mem,
			    &memory_accesses.ttae_cache))
    {
      bitmap_set_bit (ref1->dep_ref, ref2->id);
      bitmap_set_bit (ref2->dep_ref, ref1->id);
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "dependent.\n");
      return false;
    }
  else
    {
      bitmap_set_bit (ref1->indep_ref, ref2->id);
      bitmap_set_bit (ref2->indep_ref, ref1->id);
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "independent.\n");
      return true;
    }
}
/* Records the information whether REF is independent in LOOP (according
   to INDEP).  */

static void
record_indep_loop (struct loop *loop, mem_ref_p ref, bool indep)
{
  if (indep)
    bitmap_set_bit (ref->indep_loop, loop->num);
  else
    bitmap_set_bit (ref->dep_loop, loop->num);
}
/* Returns true if REF is independent of all other memory references in
   LOOP.  */

static bool
ref_indep_loop_p_1 (struct loop *loop, mem_ref_p ref)
{
  bitmap clobbers, refs_to_check, refs;
  unsigned i;
  bitmap_iterator bi;
  bool ret = true, stored = bitmap_bit_p (ref->stored, loop->num);
  htab_t map;
  mem_ref_p aref;

  /* If the reference is clobbered, it is not independent.  */
  clobbers = VEC_index (bitmap, memory_accesses.clobbered_vops, loop->num);
  if (bitmap_intersect_p (ref->vops, clobbers))
    return false;

  refs_to_check = BITMAP_ALLOC (NULL);

  map = VEC_index (htab_t, memory_accesses.vop_ref_map, loop->num);
  EXECUTE_IF_AND_COMPL_IN_BITMAP (ref->vops, clobbers, 0, i, bi)
    {
      if (stored)
	refs = get_vop_accesses (map, i);
      else
	refs = get_vop_stores (map, i);

      bitmap_ior_into (refs_to_check, refs);
    }

  EXECUTE_IF_SET_IN_BITMAP (refs_to_check, 0, i, bi)
    {
      aref = VEC_index (mem_ref_p, memory_accesses.refs_list, i);
      if (!refs_independent_p (ref, aref))
	{
	  ret = false;
	  record_indep_loop (loop, aref, false);
	  break;
	}
    }

  BITMAP_FREE (refs_to_check);
  return ret;
}
/* Returns true if REF is independent of all other memory references in
   LOOP.  Wrapper over ref_indep_loop_p_1, caching its results.  */

static bool
ref_indep_loop_p (struct loop *loop, mem_ref_p ref)
{
  bool ret;

  if (bitmap_bit_p (ref->indep_loop, loop->num))
    return true;
  if (bitmap_bit_p (ref->dep_loop, loop->num))
    return false;

  ret = ref_indep_loop_p_1 (loop, ref);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Querying dependencies of ref %u in loop %d: %s\n",
	     ref->id, loop->num, ret ? "independent" : "dependent");

  record_indep_loop (loop, ref, ret);

  return ret;
}
/* Returns true if we can perform store motion of REF from LOOP.  */

static bool
can_sm_ref_p (struct loop *loop, mem_ref_p ref)
{
  /* Unless the reference is stored in the loop, there is nothing to do.  */
  if (!bitmap_bit_p (ref->stored, loop->num))
    return false;

  /* It should be movable.  */
  if (!is_gimple_reg_type (TREE_TYPE (ref->mem))
      || TREE_THIS_VOLATILE (ref->mem)
      || !for_each_index (&ref->mem, may_move_till, loop))
    return false;

  /* If it can trap, it must always be executed in LOOP.  */
  if (tree_could_trap_p (ref->mem)
      && !ref_always_accessed_p (loop, ref))
    return false;

  /* And it must be independent of all other memory references
     in LOOP.  */
  if (!ref_indep_loop_p (loop, ref))
    return false;

  return true;
}
/* Marks in REFS_TO_SM the references in LOOP for which store motion should
   be performed.  SM_EXECUTED is the set of references for which store
   motion was already performed in one of the outer loops.  */

static void
find_refs_for_sm (struct loop *loop, bitmap sm_executed, bitmap refs_to_sm)
{
  bitmap refs = VEC_index (bitmap, memory_accesses.all_refs_in_loop,
			   loop->num);
  unsigned i;
  bitmap_iterator bi;
  mem_ref_p ref;

  EXECUTE_IF_AND_COMPL_IN_BITMAP (refs, sm_executed, 0, i, bi)
    {
      ref = VEC_index (mem_ref_p, memory_accesses.refs_list, i);
      if (can_sm_ref_p (loop, ref))
	bitmap_set_bit (refs_to_sm, i);
    }
}
/* Checks whether LOOP (with exits stored in the EXITS array) is suitable
   for a store motion optimization (i.e. whether we can insert statements
   on its exits).  */

static bool
loop_suitable_for_sm (struct loop *loop ATTRIBUTE_UNUSED,
		      VEC (edge, heap) *exits)
{
  unsigned i;
  edge ex;

  for (i = 0; VEC_iterate (edge, exits, i, ex); i++)
    if (ex->flags & EDGE_ABNORMAL)
      return false;

  return true;
}
/* Try to perform store motion for all memory references modified inside
   LOOP.  SM_EXECUTED is the bitmap of the memory references for which
   store motion was executed in one of the outer loops.  */

static void
store_motion_loop (struct loop *loop, bitmap sm_executed)
{
  VEC (edge, heap) *exits = get_loop_exit_edges (loop);
  struct loop *subloop;
  bitmap sm_in_loop = BITMAP_ALLOC (NULL);

  if (loop_suitable_for_sm (loop, exits))
    {
      find_refs_for_sm (loop, sm_executed, sm_in_loop);
      hoist_memory_references (loop, sm_in_loop, exits);
    }
  VEC_free (edge, heap, exits);

  bitmap_ior_into (sm_executed, sm_in_loop);
  for (subloop = loop->inner; subloop != NULL; subloop = subloop->next)
    store_motion_loop (subloop, sm_executed);
  bitmap_and_compl_into (sm_executed, sm_in_loop);
  BITMAP_FREE (sm_in_loop);
}
/* Try to perform store motion for all memory references modified inside
   loops.  */

static void
store_motion (void)
{
  struct loop *loop;
  bitmap sm_executed = BITMAP_ALLOC (NULL);

  for (loop = current_loops->tree_root->inner; loop != NULL; loop = loop->next)
    store_motion_loop (loop, sm_executed);

  BITMAP_FREE (sm_executed);
  bsi_commit_edge_inserts ();
}
/* Fills ALWAYS_EXECUTED_IN information for basic blocks of LOOP, i.e.
   for each such basic block BB records the outermost loop for which
   execution of its header implies execution of BB.  CONTAINS_CALL is the
   bitmap of blocks that contain a nonpure call.  */

static void
fill_always_executed_in (struct loop *loop, sbitmap contains_call)
{
  basic_block bb = NULL, *bbs, last = NULL;
  unsigned i;
  edge e;
  struct loop *inn_loop = loop;

  if (!loop->header->aux)
    {
      bbs = get_loop_body_in_dom_order (loop);

      for (i = 0; i < loop->num_nodes; i++)
	{
	  edge_iterator ei;
	  bb = bbs[i];

	  if (dominated_by_p (CDI_DOMINATORS, loop->latch, bb))
	    last = bb;

	  if (TEST_BIT (contains_call, bb->index))
	    break;

	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (!flow_bb_inside_loop_p (loop, e->dest))
	      break;
	  if (e)
	    break;

	  /* A loop might be infinite (TODO use simple loop analysis
	     to disprove this if possible).  */
	  if (bb->flags & BB_IRREDUCIBLE_LOOP)
	    break;

	  if (!flow_bb_inside_loop_p (inn_loop, bb))
	    break;

	  if (bb->loop_father->header == bb)
	    {
	      if (!dominated_by_p (CDI_DOMINATORS, loop->latch, bb))
		break;

	      /* In a loop that is always entered we may proceed anyway.
		 But record that we entered it and stop once we leave it.  */
	      inn_loop = bb->loop_father;
	    }
	}

      while (1)
	{
	  last->aux = loop;
	  if (last == loop->header)
	    break;
	  last = get_immediate_dominator (CDI_DOMINATORS, last);
	}

      free (bbs);
    }

  for (loop = loop->inner; loop; loop = loop->next)
    fill_always_executed_in (loop, contains_call);
}
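/* For instance, in

     while (1)
       {
	 if (cond)
	   break;
	 x = ...;
       }

   only the header block (the one testing COND) is marked as always
   executed in the loop: execution of the header does not imply execution
   of "x = ...", since the break may leave the loop first.  */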
/* Compute the global information needed by the loop invariant motion pass.  */

static void
tree_ssa_lim_initialize (void)
{
  sbitmap contains_call = sbitmap_alloc (last_basic_block);
  block_stmt_iterator bsi;
  struct loop *loop;
  basic_block bb;

  sbitmap_zero (contains_call);
  FOR_EACH_BB (bb)
    {
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	{
	  if (nonpure_call_p (bsi_stmt (bsi)))
	    break;
	}

      if (!bsi_end_p (bsi))
	SET_BIT (contains_call, bb->index);
    }

  for (loop = current_loops->tree_root->inner; loop; loop = loop->next)
    fill_always_executed_in (loop, contains_call);

  sbitmap_free (contains_call);
}
/* Cleans up after the invariant motion pass.  */

static void
tree_ssa_lim_finalize (void)
{
  basic_block bb;
  unsigned i;
  bitmap b;
  htab_t h;

  FOR_EACH_BB (bb)
    {
      bb->aux = NULL;
    }

  VEC_free (mem_ref_p, heap, memory_accesses.refs_list);
  htab_delete (memory_accesses.refs);

  for (i = 0; VEC_iterate (bitmap, memory_accesses.refs_in_loop, i, b); i++)
    BITMAP_FREE (b);
  VEC_free (bitmap, heap, memory_accesses.refs_in_loop);

  for (i = 0; VEC_iterate (bitmap, memory_accesses.all_refs_in_loop, i, b); i++)
    BITMAP_FREE (b);
  VEC_free (bitmap, heap, memory_accesses.all_refs_in_loop);

  for (i = 0; VEC_iterate (bitmap, memory_accesses.clobbered_vops, i, b); i++)
    BITMAP_FREE (b);
  VEC_free (bitmap, heap, memory_accesses.clobbered_vops);

  for (i = 0; VEC_iterate (htab_t, memory_accesses.vop_ref_map, i, h); i++)
    htab_delete (h);
  VEC_free (htab_t, heap, memory_accesses.vop_ref_map);

  if (memory_accesses.ttae_cache)
    pointer_map_destroy (memory_accesses.ttae_cache);
}
/* Moves invariants from loops.  Only "expensive" invariants are moved out --
   i.e. those that are likely to be a win regardless of the register
   pressure.  */

void
tree_ssa_lim (void)
{
  tree_ssa_lim_initialize ();

  /* Gathers information about memory accesses in the loops.  */
  analyze_memory_references ();

  /* For each statement determine the outermost loop in which it is
     invariant and the cost of computing the invariant.  */
  determine_invariantness ();

  /* Execute store motion.  Force the necessary invariants to be moved
     out of the loops as well.  */
  store_motion ();

  /* Move the expressions that are expensive enough.  */
  move_computations ();

  tree_ssa_lim_finalize ();
}