/* Loop invariant motion.
   Copyright (C) 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
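
/* A rough sketch of what this pass does (the example is illustrative only
   and does not come from the sources):

     for (i = 0; i < n; i++)
       a[i] = x * y;

   Loop invariant motion hoists the computation of x * y in front of the
   loop:

     t = x * y;
     for (i = 0; i < n; i++)
       a[i] = t;

   Store motion additionally detects loops whose memory references reached
   from a virtual operand all denote the same location, loads that location
   into a temporary before the loop, uses the temporary inside the loop, and
   stores it back to memory on the loop exits.  */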
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "timevar.h"
#include "cfgloop.h"
#include "domwalk.h"
#include "params.h"
#include "tree-pass.h"
#include "flags.h"
/* A type for the list of statements that have to be moved in order to be able
   to hoist an invariant computation.  */

struct depend
{
  tree stmt;
  struct depend *next;
};
/* The possibilities of statement movement.  */

enum move_pos
{
  MOVE_IMPOSSIBLE,              /* No movement -- side-effecting expression.  */
  MOVE_PRESERVE_EXECUTION,      /* Must not cause a statement that would not be
                                   executed in the original program to become
                                   executed -- memory accesses, ...  */
  MOVE_POSSIBLE                 /* Unlimited movement.  */
};
/* The auxiliary data kept for each statement.  */

struct lim_aux_data
{
  struct loop *max_loop;        /* The outermost loop in which the statement
                                   is invariant.  */

  struct loop *tgt_loop;        /* The loop out of which we want to move the
                                   invariant.  */

  struct loop *always_executed_in;
                                /* The outermost loop for which we are sure
                                   the statement is executed if the loop
                                   is entered.  */

  bool sm_done;                 /* True iff the store motion for a memory
                                   reference in the statement has already
                                   been performed.  */

  unsigned cost;                /* Cost of the computation performed by the
                                   statement.  */

  struct depend *depends;       /* List of statements that must also be hoisted
                                   out of the loop when this statement is
                                   hoisted; i.e. those that define the operands
                                   of the statement and are inside of the
                                   MAX_LOOP loop.  */
};

#define LIM_DATA(STMT) ((struct lim_aux_data *) (stmt_ann (STMT)->common.aux))
/* Description of a memory reference for store motion.  */

struct mem_ref
{
  tree *ref;                    /* The reference itself.  */
  tree stmt;                    /* The statement in which it occurs.  */
  struct mem_ref *next;         /* Next use in the chain.  */
};

/* Minimum cost of an expensive expression.  */
#define LIM_EXPENSIVE ((unsigned) PARAM_VALUE (PARAM_LIM_EXPENSIVE))

/* The outermost loop for which execution of the header guarantees that the
   block will be executed.  */
#define ALWAYS_EXECUTED_IN(BB) ((struct loop *) (BB)->aux)

/* The maximum uid of any statement in the function.  */

static unsigned max_uid;
/* Calls CBCK for each index in memory reference ADDR_P.  There are two
   kinds of situations handled; in each of these cases, the memory reference
   and DATA are passed to the callback:

   Access to an array: ARRAY_{RANGE_}REF (base, index).  In this case we also
   pass the pointer to the index to the callback.

   Pointer dereference: INDIRECT_REF (addr).  In this case we also pass the
   pointer to addr to the callback.

   If the callback returns false, the whole search stops and false is returned.
   Otherwise the function returns true after traversing the whole
   reference *ADDR_P.  */
bool
for_each_index (tree *addr_p, bool (*cbck) (tree, tree *, void *), void *data)
{
  tree *nxt;

  for (; ; addr_p = nxt)
    {
      switch (TREE_CODE (*addr_p))
        {
        case SSA_NAME:
          return cbck (*addr_p, addr_p, data);

        case INDIRECT_REF:
          nxt = &TREE_OPERAND (*addr_p, 0);
          return cbck (*addr_p, nxt, data);

        case BIT_FIELD_REF:
        case COMPONENT_REF:
        case VIEW_CONVERT_EXPR:
        case ARRAY_RANGE_REF:
          nxt = &TREE_OPERAND (*addr_p, 0);
          break;

        case ARRAY_REF:
          nxt = &TREE_OPERAND (*addr_p, 0);
          if (!cbck (*addr_p, &TREE_OPERAND (*addr_p, 1), data))
            return false;
          break;

        case VAR_DECL:
        case PARM_DECL:
        case STRING_CST:
        case RESULT_DECL:
          return true;

        default:
          abort ();
        }
    }
}
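
/* For illustration (the example is not taken from the sources): for a
   reference of the form (*p)[i].f, for_each_index invokes CBCK first for
   the array index i (the ARRAY_REF case) and then for the pointer p (the
   INDIRECT_REF case), after which the traversal stops.  */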
/* If it is possible to hoist the statement STMT unconditionally,
   returns MOVE_POSSIBLE.
   If it is possible to hoist the statement STMT, but we must avoid making
   it executed if it would not be executed in the original program (e.g.
   because it may trap), return MOVE_PRESERVE_EXECUTION.
   Otherwise return MOVE_IMPOSSIBLE.  */

static enum move_pos
movement_possibility (tree stmt)
{
  tree lhs, rhs;

  if (flag_unswitch_loops
      && TREE_CODE (stmt) == COND_EXPR)
    {
      /* If we perform unswitching, force the operands of the invariant
         condition to be moved out of the loop.  */
      get_stmt_operands (stmt);

      return MOVE_POSSIBLE;
    }

  if (TREE_CODE (stmt) != MODIFY_EXPR)
    return MOVE_IMPOSSIBLE;

  if (stmt_ends_bb_p (stmt))
    return MOVE_IMPOSSIBLE;

  get_stmt_operands (stmt);

  if (stmt_ann (stmt)->has_volatile_ops)
    return MOVE_IMPOSSIBLE;

  lhs = TREE_OPERAND (stmt, 0);
  if (TREE_CODE (lhs) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
    return MOVE_IMPOSSIBLE;

  rhs = TREE_OPERAND (stmt, 1);

  if (TREE_SIDE_EFFECTS (rhs))
    return MOVE_IMPOSSIBLE;

  if (TREE_CODE (lhs) != SSA_NAME
      || tree_could_trap_p (rhs))
    return MOVE_PRESERVE_EXECUTION;

  return MOVE_POSSIBLE;
}
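
/* As a rough illustration (not from the sources, assuming the usual GIMPLE
   forms): "a_1 = b_2 + c_3" may be hoisted freely (MOVE_POSSIBLE),
   "a_1 = b_2 / c_3" may trap and therefore gets MOVE_PRESERVE_EXECUTION,
   and an assignment with volatile operands gets MOVE_IMPOSSIBLE.  */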
/* Suppose that operand DEF is used inside the LOOP.  Returns the outermost
   loop to which we could move the expression using DEF if it did not have
   other operands, i.e. the outermost loop enclosing LOOP in which the value
   of DEF is invariant.  */

static struct loop *
outermost_invariant_loop (tree def, struct loop *loop)
{
  tree def_stmt;
  basic_block def_bb;
  struct loop *max_loop;

  if (TREE_CODE (def) != SSA_NAME)
    return superloop_at_depth (loop, 1);

  def_stmt = SSA_NAME_DEF_STMT (def);
  def_bb = bb_for_stmt (def_stmt);
  if (!def_bb)
    return superloop_at_depth (loop, 1);

  max_loop = find_common_loop (loop, def_bb->loop_father);

  if (LIM_DATA (def_stmt) && LIM_DATA (def_stmt)->max_loop)
    max_loop = find_common_loop (max_loop,
                                 LIM_DATA (def_stmt)->max_loop->outer);
  if (max_loop == loop)
    return NULL;
  max_loop = superloop_at_depth (loop, max_loop->depth + 1);

  return max_loop;
}
/* Returns the outermost superloop of LOOP in which the expression EXPR is
   invariant.  */

static struct loop *
outermost_invariant_loop_expr (tree expr, struct loop *loop)
{
  char class = TREE_CODE_CLASS (TREE_CODE (expr));
  unsigned i, nops;
  struct loop *max_loop = superloop_at_depth (loop, 1), *aloop;

  if (TREE_CODE (expr) == SSA_NAME
      || TREE_CODE (expr) == INTEGER_CST
      || is_gimple_min_invariant (expr))
    return outermost_invariant_loop (expr, loop);

  if (class != '1'
      && class != '2'
      && class != 'e'
      && class != '<')
    return NULL;

  nops = first_rtl_op (TREE_CODE (expr));
  for (i = 0; i < nops; i++)
    {
      aloop = outermost_invariant_loop_expr (TREE_OPERAND (expr, i), loop);
      if (!aloop)
        return NULL;

      if (flow_loop_nested_p (max_loop, aloop))
        max_loop = aloop;
    }

  return max_loop;
}
/* DATA is a structure containing information associated with a statement
   inside LOOP.  DEF is one of the operands of this statement.

   Find the outermost loop enclosing LOOP in which the value of DEF is
   invariant and record it in the DATA->max_loop field.  If DEF itself is
   defined inside this loop as well (i.e. we need to hoist it out of the loop
   if we want to hoist the statement represented by DATA), record the
   statement in which DEF is defined in the DATA->depends list.  Additionally,
   if ADD_COST is true, add the cost of the computation of DEF to DATA->cost.

   If DEF is not invariant in LOOP, return false.  Otherwise return true.  */

static bool
add_dependency (tree def, struct lim_aux_data *data, struct loop *loop,
                bool add_cost)
{
  tree def_stmt = SSA_NAME_DEF_STMT (def);
  basic_block def_bb = bb_for_stmt (def_stmt);
  struct loop *max_loop;
  struct depend *dep;

  if (!def_bb)
    return true;

  max_loop = outermost_invariant_loop (def, loop);
  if (!max_loop)
    return false;

  if (flow_loop_nested_p (data->max_loop, max_loop))
    data->max_loop = max_loop;

  if (!LIM_DATA (def_stmt))
    return true;

  if (add_cost
      /* Only add the cost if the statement defining DEF is inside LOOP,
         i.e. if it is likely that by moving the invariants dependent
         on it, we will be able to avoid creating a new register for
         it (since it will be only used in these dependent invariants).  */
      && def_bb->loop_father == loop)
    data->cost += LIM_DATA (def_stmt)->cost;

  dep = xmalloc (sizeof (struct depend));
  dep->stmt = def_stmt;
  dep->next = data->depends;
  data->depends = dep;

  return true;
}
/* Returns an estimate of the cost of statement STMT.  TODO -- the values here
   are just ad-hoc constants.  The estimates should be based on target-specific
   values.  */

static unsigned
stmt_cost (tree stmt)
{
  tree lhs, rhs;
  unsigned cost = 1;

  /* Always try to create possibilities for unswitching.  */
  if (TREE_CODE (stmt) == COND_EXPR)
    return LIM_EXPENSIVE;

  lhs = TREE_OPERAND (stmt, 0);
  rhs = TREE_OPERAND (stmt, 1);

  /* Hoisting memory references out should almost surely be a win.  */
  if (!is_gimple_variable (lhs))
    cost += 20;
  if (is_gimple_addressable (rhs) && !is_gimple_variable (rhs))
    cost += 20;

  switch (TREE_CODE (rhs))
    {
    case CALL_EXPR:
      /* We should be hoisting calls if possible.  */

      /* Unless the call is a builtin_constant_p; this always folds to a
         constant, so moving it is useless.  */
      rhs = get_callee_fndecl (rhs);
      if (DECL_BUILT_IN (rhs)
          && DECL_FUNCTION_CODE (rhs) == BUILT_IN_CONSTANT_P)
        return 0;

      cost += 20;
      break;

    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* Division and multiplication are usually expensive.  */
      cost += 20;
      break;

    default:
      break;
    }

  return cost;
}
/* Determine the outermost loop to which it is possible to hoist the statement
   STMT and store it in LIM_DATA (STMT)->max_loop.  To do this we determine
   the outermost loop in which the value computed by STMT is invariant.
   If MUST_PRESERVE_EXEC is true, additionally choose a loop such that whether
   STMT is executed is preserved.  Other related information is also filled in
   to LIM_DATA (STMT).

   The function returns false if STMT cannot be hoisted outside of the loop it
   is defined in, and true otherwise.  */

static bool
determine_max_movement (tree stmt, bool must_preserve_exec)
{
  basic_block bb = bb_for_stmt (stmt);
  struct loop *loop = bb->loop_father;
  struct loop *level;
  struct lim_aux_data *lim_data = LIM_DATA (stmt);
  use_optype uses;
  vuse_optype vuses;
  v_may_def_optype v_may_defs;
  stmt_ann_t ann = stmt_ann (stmt);
  unsigned i;

  if (must_preserve_exec)
    level = ALWAYS_EXECUTED_IN (bb);
  else
    level = superloop_at_depth (loop, 1);
  lim_data->max_loop = level;

  uses = USE_OPS (ann);
  for (i = 0; i < NUM_USES (uses); i++)
    if (!add_dependency (USE_OP (uses, i), lim_data, loop, true))
      return false;

  vuses = VUSE_OPS (ann);
  for (i = 0; i < NUM_VUSES (vuses); i++)
    if (!add_dependency (VUSE_OP (vuses, i), lim_data, loop, false))
      return false;

  v_may_defs = V_MAY_DEF_OPS (ann);
  for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
    if (!add_dependency (V_MAY_DEF_OP (v_may_defs, i), lim_data, loop, false))
      return false;

  lim_data->cost += stmt_cost (stmt);

  return true;
}
/* Suppose that some statement in ORIG_LOOP is hoisted to the loop LEVEL,
   and that one of the operands of this statement is computed by STMT.
   Ensure that STMT (together with all the statements that define its
   operands) is hoisted at least out of the loop LEVEL.  */

static void
set_level (tree stmt, struct loop *orig_loop, struct loop *level)
{
  struct loop *stmt_loop = bb_for_stmt (stmt)->loop_father;
  struct depend *dep;

  stmt_loop = find_common_loop (orig_loop, stmt_loop);
  if (LIM_DATA (stmt) && LIM_DATA (stmt)->tgt_loop)
    stmt_loop = find_common_loop (stmt_loop,
                                  LIM_DATA (stmt)->tgt_loop->outer);
  if (flow_loop_nested_p (stmt_loop, level))
    return;

  if (!LIM_DATA (stmt))
    abort ();

  if (level != LIM_DATA (stmt)->max_loop
      && !flow_loop_nested_p (LIM_DATA (stmt)->max_loop, level))
    abort ();

  LIM_DATA (stmt)->tgt_loop = level;
  for (dep = LIM_DATA (stmt)->depends; dep; dep = dep->next)
    set_level (dep->stmt, orig_loop, level);
}
/* Determines the outermost loop from which we want to hoist the statement
   STMT.  For now we choose the outermost possible loop.  TODO -- use profiling
   information to set it more sanely.  */

static void
set_profitable_level (tree stmt)
{
  set_level (stmt, bb_for_stmt (stmt)->loop_father, LIM_DATA (stmt)->max_loop);
}
/* Returns true if STMT is not a pure call.  */

static bool
nonpure_call_p (tree stmt)
{
  tree call = get_call_expr_in (stmt);

  if (!call)
    return false;

  return TREE_SIDE_EFFECTS (call) != 0;
}

/* Releases the memory occupied by DATA.  */

static void
free_lim_aux_data (struct lim_aux_data *data)
{
  struct depend *dep, *next;

  for (dep = data->depends; dep; dep = next)
    {
      next = dep->next;
      free (dep);
    }
  free (data);
}
/* Determine the outermost loops in which statements in basic block BB are
   invariant, and record them in the LIM_DATA associated with the statements.
   Callback for walk_dominator_tree.  */

static void
determine_invariantness_stmt (struct dom_walk_data *dw_data ATTRIBUTE_UNUSED,
                              basic_block bb)
{
  enum move_pos pos;
  block_stmt_iterator bsi;
  tree stmt;
  bool maybe_never = ALWAYS_EXECUTED_IN (bb) == NULL;
  struct loop *outermost = ALWAYS_EXECUTED_IN (bb);

  if (!bb->loop_father->outer)
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Basic block %d (loop %d -- depth %d):\n\n",
             bb->index, bb->loop_father->num, bb->loop_father->depth);

  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
    {
      stmt = bsi_stmt (bsi);

      pos = movement_possibility (stmt);
      if (pos == MOVE_IMPOSSIBLE)
        {
          if (nonpure_call_p (stmt))
            {
              maybe_never = true;
              outermost = NULL;
            }
          continue;
        }

      stmt_ann (stmt)->common.aux = xcalloc (1, sizeof (struct lim_aux_data));
      LIM_DATA (stmt)->always_executed_in = outermost;

      if (maybe_never && pos == MOVE_PRESERVE_EXECUTION)
        continue;

      if (!determine_max_movement (stmt, pos == MOVE_PRESERVE_EXECUTION))
        {
          LIM_DATA (stmt)->max_loop = NULL;
          continue;
        }

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          print_generic_stmt_indented (dump_file, stmt, 0, 2);
          fprintf (dump_file, "  invariant up to level %d, cost %d.\n\n",
                   LIM_DATA (stmt)->max_loop->depth,
                   LIM_DATA (stmt)->cost);
        }

      if (LIM_DATA (stmt)->cost >= LIM_EXPENSIVE)
        set_profitable_level (stmt);
    }
}
/* For each statement determines the outermost loop in which it is invariant,
   the statements on whose motion it depends, and the cost of the computation.
   This information is stored in the LIM_DATA structure associated with
   each statement.  */

static void
determine_invariantness (void)
{
  struct dom_walk_data walk_data;

  memset (&walk_data, 0, sizeof (struct dom_walk_data));
  walk_data.before_dom_children_before_stmts = determine_invariantness_stmt;

  init_walk_dominator_tree (&walk_data);
  walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
  fini_walk_dominator_tree (&walk_data);
}
/* Commits edge insertions and updates loop structures.  */

void
loop_commit_inserts (void)
{
  unsigned old_last_basic_block, i;
  basic_block bb;

  old_last_basic_block = last_basic_block;
  bsi_commit_edge_inserts (NULL);
  for (i = old_last_basic_block; i < (unsigned) last_basic_block; i++)
    {
      bb = BASIC_BLOCK (i);
      add_bb_to_loop (bb,
                      find_common_loop (bb->succ->dest->loop_father,
                                        bb->pred->src->loop_father));
    }
}
/* Hoist the statements in basic block BB out of the loops prescribed by
   data stored in LIM_DATA structures associated with each statement.  Callback
   for walk_dominator_tree.  */

static void
move_computations_stmt (struct dom_walk_data *dw_data ATTRIBUTE_UNUSED,
                        basic_block bb)
{
  struct loop *level;
  block_stmt_iterator bsi;
  tree stmt;
  unsigned cost = 0;

  if (!bb->loop_father->outer)
    return;

  for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
    {
      stmt = bsi_stmt (bsi);

      if (!LIM_DATA (stmt))
        {
          bsi_next (&bsi);
          continue;
        }

      cost = LIM_DATA (stmt)->cost;
      level = LIM_DATA (stmt)->tgt_loop;
      free_lim_aux_data (LIM_DATA (stmt));
      stmt_ann (stmt)->common.aux = NULL;

      if (!level)
        {
          bsi_next (&bsi);
          continue;
        }

      /* We do not really want to move conditionals out of the loop; we just
         placed them here to force their operands to be moved if necessary.
         The iterator is not advanced here; the aux data was cleared above,
         so the next iteration takes the !LIM_DATA branch and advances.  */
      if (TREE_CODE (stmt) == COND_EXPR)
        continue;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Moving statement\n");
          print_generic_stmt (dump_file, stmt, 0);
          fprintf (dump_file, "(cost %u) out of loop %d.\n\n",
                   cost, level->num);
        }
      bsi_insert_on_edge (loop_preheader_edge (level), stmt);
      bsi_remove (&bsi);
    }
}
/* Hoist the statements out of the loops prescribed by data stored in
   LIM_DATA structures associated with each statement.  */

static void
move_computations (void)
{
  struct dom_walk_data walk_data;

  memset (&walk_data, 0, sizeof (struct dom_walk_data));
  walk_data.before_dom_children_before_stmts = move_computations_stmt;

  init_walk_dominator_tree (&walk_data);
  walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
  fini_walk_dominator_tree (&walk_data);

  loop_commit_inserts ();
  rewrite_into_ssa (false);
  if (bitmap_first_set_bit (vars_to_rename) >= 0)
    {
      /* The rewrite of ssa names may cause violation of loop closed ssa
         form invariants.  TODO -- avoid these rewrites completely.
         Information in virtual phi nodes is sufficient for it.  */
      rewrite_into_loop_closed_ssa ();
    }
  bitmap_clear (vars_to_rename);
}
/* Checks whether the statement defining variable *INDEX can be hoisted
   out of the loop passed in DATA.  Callback for for_each_index.  */

static bool
may_move_till (tree ref, tree *index, void *data)
{
  struct loop *loop = data, *max_loop;

  /* If REF is an array reference, check also that the step and the lower
     bound are invariant in LOOP.  */
  if (TREE_CODE (ref) == ARRAY_REF)
    {
      tree step = array_ref_element_size (ref);
      tree lbound = array_ref_low_bound (ref);

      max_loop = outermost_invariant_loop_expr (step, loop);
      if (!max_loop)
        return false;

      max_loop = outermost_invariant_loop_expr (lbound, loop);
      if (!max_loop)
        return false;
    }

  max_loop = outermost_invariant_loop (*index, loop);
  if (!max_loop)
    return false;

  return true;
}
/* Forces statements defining (invariant) SSA names in expression EXPR to be
   moved out of LOOP.  ORIG_LOOP is the loop in which EXPR is used.  */

static void
force_move_till_expr (tree expr, struct loop *orig_loop, struct loop *loop)
{
  char class = TREE_CODE_CLASS (TREE_CODE (expr));
  unsigned i, nops;

  if (TREE_CODE (expr) == SSA_NAME)
    {
      tree stmt = SSA_NAME_DEF_STMT (expr);
      if (IS_EMPTY_STMT (stmt))
        return;

      set_level (stmt, orig_loop, loop);
      return;
    }

  if (class != '1'
      && class != '2'
      && class != 'e'
      && class != '<')
    return;

  nops = first_rtl_op (TREE_CODE (expr));
  for (i = 0; i < nops; i++)
    force_move_till_expr (TREE_OPERAND (expr, i), orig_loop, loop);
}
/* Forces statements defining invariants in REF (and *INDEX) to be moved out of
   LOOP.  The reference REF is used in the loop ORIG_LOOP.  Callback for
   for_each_index.  */

struct fmt_data
{
  struct loop *loop;
  struct loop *orig_loop;
};

static bool
force_move_till (tree ref, tree *index, void *data)
{
  tree stmt;
  struct fmt_data *fmt_data = data;

  if (TREE_CODE (ref) == ARRAY_REF)
    {
      tree step = array_ref_element_size (ref);
      tree lbound = array_ref_low_bound (ref);

      force_move_till_expr (step, fmt_data->orig_loop, fmt_data->loop);
      force_move_till_expr (lbound, fmt_data->orig_loop, fmt_data->loop);
    }

  if (TREE_CODE (*index) != SSA_NAME)
    return true;

  stmt = SSA_NAME_DEF_STMT (*index);
  if (IS_EMPTY_STMT (stmt))
    return true;

  set_level (stmt, fmt_data->orig_loop, fmt_data->loop);

  return true;
}
/* Records memory reference *REF (that occurs in statement STMT)
   to the list MEM_REFS.  */

static void
record_mem_ref (struct mem_ref **mem_refs, tree stmt, tree *ref)
{
  struct mem_ref *aref = xmalloc (sizeof (struct mem_ref));

  aref->stmt = stmt;
  aref->ref = ref;

  aref->next = *mem_refs;
  *mem_refs = aref;
}

/* Releases list of memory references MEM_REFS.  */

static void
free_mem_refs (struct mem_ref *mem_refs)
{
  struct mem_ref *act;

  while (mem_refs)
    {
      act = mem_refs;
      mem_refs = mem_refs->next;
      free (act);
    }
}
/* If VAR is defined in LOOP and the statement it is defined in does not belong
   to the set SEEN, add the statement to QUEUE of length IN_QUEUE and
   to the set SEEN.  */

static void
maybe_queue_var (tree var, struct loop *loop,
                 sbitmap seen, tree *queue, unsigned *in_queue)
{
  tree stmt = SSA_NAME_DEF_STMT (var);
  basic_block def_bb = bb_for_stmt (stmt);

  if (!def_bb
      || !flow_bb_inside_loop_p (loop, def_bb)
      || TEST_BIT (seen, stmt_ann (stmt)->uid))
    return;

  SET_BIT (seen, stmt_ann (stmt)->uid);
  queue[(*in_queue)++] = stmt;
}
/* Determine whether all memory references inside the LOOP that correspond
   to virtual ssa names defined in statement STMT are equal.
   If so, store the list of the references in MEM_REFS, and return one
   of them.  Otherwise store NULL in MEM_REFS and return NULL_TREE.  */

static tree
single_reachable_address (struct loop *loop, tree stmt,
                          struct mem_ref **mem_refs)
{
  tree *queue = xmalloc (sizeof (tree) * max_uid);
  sbitmap seen = sbitmap_alloc (max_uid);
  tree common_ref = NULL, *aref;
  unsigned in_queue = 1;
  dataflow_t df;
  unsigned i, n;
  v_may_def_optype v_may_defs;
  vuse_optype vuses;

  sbitmap_zero (seen);

  *mem_refs = NULL;

  queue[0] = stmt;
  SET_BIT (seen, stmt_ann (stmt)->uid);

  while (in_queue)
    {
      stmt = queue[--in_queue];

      if (LIM_DATA (stmt)
          && LIM_DATA (stmt)->sm_done)
        goto fail;

      switch (TREE_CODE (stmt))
        {
        case MODIFY_EXPR:
          aref = &TREE_OPERAND (stmt, 0);
          if (is_gimple_reg (*aref)
              || !is_gimple_lvalue (*aref))
            aref = &TREE_OPERAND (stmt, 1);
          if (is_gimple_reg (*aref)
              || !is_gimple_lvalue (*aref)
              || (common_ref && !operand_equal_p (*aref, common_ref, 0)))
            goto fail;
          common_ref = *aref;

          record_mem_ref (mem_refs, stmt, aref);

          /* Traverse also definitions of the VUSES (there may be others,
             distinct from the one we used to get to this statement).  */
          v_may_defs = STMT_V_MAY_DEF_OPS (stmt);
          for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
            maybe_queue_var (V_MAY_DEF_OP (v_may_defs, i), loop,
                             seen, queue, &in_queue);

          vuses = STMT_VUSE_OPS (stmt);
          for (i = 0; i < NUM_VUSES (vuses); i++)
            maybe_queue_var (VUSE_OP (vuses, i), loop,
                             seen, queue, &in_queue);
          break;

        case PHI_NODE:
          for (i = 0; i < (unsigned) PHI_NUM_ARGS (stmt); i++)
            maybe_queue_var (PHI_ARG_DEF (stmt, i), loop,
                             seen, queue, &in_queue);
          break;

        default:
          goto fail;
        }

      /* Find uses of virtual names.  */
      df = get_immediate_uses (stmt);
      n = num_immediate_uses (df);

      for (i = 0; i < n; i++)
        {
          stmt = immediate_use (df, i);

          if (!flow_bb_inside_loop_p (loop, bb_for_stmt (stmt)))
            continue;

          if (TEST_BIT (seen, stmt_ann (stmt)->uid))
            continue;
          SET_BIT (seen, stmt_ann (stmt)->uid);

          queue[in_queue++] = stmt;
        }
    }

  free (queue);
  sbitmap_free (seen);

  return common_ref;

fail:
  free_mem_refs (*mem_refs);
  *mem_refs = NULL;
  free (queue);
  sbitmap_free (seen);

  return NULL;
}
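
/* For illustration (the example is not from the sources): in a loop whose
   only memory accesses are loads and stores of *p, every MODIFY_EXPR reached
   through the virtual operands refers to the same address, so *p is returned
   and the occurrences are collected in MEM_REFS.  If some statement reached
   through the virtual operands accesses a different location, the
   operand_equal_p check fails and NULL is returned.  */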
/* Rewrites memory references in list MEM_REFS by variable TMP_VAR.  */

static void
rewrite_mem_refs (tree tmp_var, struct mem_ref *mem_refs)
{
  v_may_def_optype v_may_defs;
  v_must_def_optype v_must_defs;
  vuse_optype vuses;
  unsigned i;
  tree var;

  for (; mem_refs; mem_refs = mem_refs->next)
    {
      v_may_defs = STMT_V_MAY_DEF_OPS (mem_refs->stmt);
      for (i = 0; i < NUM_V_MAY_DEFS (v_may_defs); i++)
        {
          var = SSA_NAME_VAR (V_MAY_DEF_RESULT (v_may_defs, i));
          bitmap_set_bit (vars_to_rename, var_ann (var)->uid);
        }

      v_must_defs = STMT_V_MUST_DEF_OPS (mem_refs->stmt);
      for (i = 0; i < NUM_V_MUST_DEFS (v_must_defs); i++)
        {
          var = SSA_NAME_VAR (V_MUST_DEF_OP (v_must_defs, i));
          bitmap_set_bit (vars_to_rename, var_ann (var)->uid);
        }

      vuses = STMT_VUSE_OPS (mem_refs->stmt);
      for (i = 0; i < NUM_VUSES (vuses); i++)
        {
          var = SSA_NAME_VAR (VUSE_OP (vuses, i));
          bitmap_set_bit (vars_to_rename, var_ann (var)->uid);
        }

      *mem_refs->ref = tmp_var;
      modify_stmt (mem_refs->stmt);
    }
}
/* Records the request for store motion of memory reference REF from LOOP.
   MEM_REFS is the list of occurrences of the reference REF inside LOOP;
   these references are rewritten by a new temporary variable.
   Exits from the LOOP are stored in EXITS, there are N_EXITS of them.
   The initialization of the temporary variable is put in the preheader
   of the loop, and assignments to the reference from the temporary variable
   are emitted on the exits.  */

static void
schedule_sm (struct loop *loop, edge *exits, unsigned n_exits, tree ref,
             struct mem_ref *mem_refs)
{
  struct mem_ref *aref;
  tree tmp_var;
  unsigned i;
  tree load, store;
  struct fmt_data fmt_data;

  tmp_var = make_rename_temp (TREE_TYPE (ref), "lsm_tmp");

  fmt_data.loop = loop;
  fmt_data.orig_loop = loop;
  for_each_index (&ref, force_move_till, &fmt_data);

  rewrite_mem_refs (tmp_var, mem_refs);
  for (aref = mem_refs; aref; aref = aref->next)
    if (LIM_DATA (aref->stmt))
      LIM_DATA (aref->stmt)->sm_done = true;

  /* Emit the load & stores.  */
  load = build (MODIFY_EXPR, void_type_node, tmp_var, ref);
  get_stmt_ann (load)->common.aux = xcalloc (1, sizeof (struct lim_aux_data));
  LIM_DATA (load)->max_loop = loop;
  LIM_DATA (load)->tgt_loop = loop;

  /* Put this into the latch, so that we are sure it will be processed after
     all dependencies.  */
  bsi_insert_on_edge (loop_latch_edge (loop), load);

  for (i = 0; i < n_exits; i++)
    {
      store = build (MODIFY_EXPR, void_type_node,
                     unshare_expr (ref), tmp_var);
      bsi_insert_on_edge (exits[i], store);
    }
}
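
/* A rough sketch of the effect of store motion (illustrative only; lsm_tmp
   stands for the temporary created above):

     while (cond)
       *p = *p + something;

   becomes

     lsm_tmp = *p;
     while (cond)
       lsm_tmp = lsm_tmp + something;
     *p = lsm_tmp;     (emitted on each exit edge)

   The load is inserted on the latch edge here and later hoisted to the
   preheader by move_computations, since its LIM_DATA marks it as invariant
   in LOOP.  */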
/* Determine whether all memory references inside LOOP corresponding to the
   virtual ssa name REG are equal to each other, and whether the address of
   this common reference can be hoisted outside of the loop.  If this is true,
   prepare the statements that load the value of the memory reference to a
   temporary variable in the loop preheader, store it back on the loop exits,
   and replace all the references inside LOOP by this temporary variable.
   LOOP has N_EXITS stored in EXITS.  */

static void
determine_lsm_reg (struct loop *loop, edge *exits, unsigned n_exits, tree reg)
{
  tree ref;
  struct mem_ref *mem_refs, *aref;
  struct loop *must_exec;

  if (is_gimple_reg (reg))
    return;

  ref = single_reachable_address (loop, SSA_NAME_DEF_STMT (reg), &mem_refs);
  if (!ref)
    return;

  if (!for_each_index (&ref, may_move_till, loop))
    {
      free_mem_refs (mem_refs);
      return;
    }

  if (tree_could_trap_p (ref))
    {
      /* If the memory access is unsafe (i.e. it might trap), ensure that at
         least one of the statements in which it occurs is always executed
         when the loop is entered.  This way we know that by moving the load
         from the reference out of the loop we will not cause an error that
         would not occur otherwise.

         TODO -- in fact we would like to check for anticipability of the
         reference, i.e. that on each path from loop entry to loop exit at
         least one of the statements containing the memory reference is
         executed.  */

      for (aref = mem_refs; aref; aref = aref->next)
        {
          if (!LIM_DATA (aref->stmt))
            continue;

          must_exec = LIM_DATA (aref->stmt)->always_executed_in;
          if (!must_exec)
            continue;

          if (must_exec == loop
              || flow_loop_nested_p (must_exec, loop))
            break;
        }

      if (!aref)
        {
          free_mem_refs (mem_refs);
          return;
        }
    }

  schedule_sm (loop, exits, n_exits, ref, mem_refs);
  free_mem_refs (mem_refs);
}
/* Checks whether LOOP (with N_EXITS exits stored in the EXITS array) is
   suitable for a store motion optimization (i.e. whether we can insert
   statements on its exits).  */

static bool
loop_suitable_for_sm (struct loop *loop ATTRIBUTE_UNUSED, edge *exits,
                      unsigned n_exits)
{
  unsigned i;

  for (i = 0; i < n_exits; i++)
    if (exits[i]->flags & EDGE_ABNORMAL)
      return false;

  return true;
}
/* Try to perform store motion for all memory references modified inside
   LOOP.  */

static void
determine_lsm_loop (struct loop *loop)
{
  tree phi;
  unsigned n_exits;
  edge *exits = get_loop_exit_edges (loop, &n_exits);

  if (!loop_suitable_for_sm (loop, exits, n_exits))
    {
      free (exits);
      return;
    }

  for (phi = phi_nodes (loop->header); phi; phi = TREE_CHAIN (phi))
    determine_lsm_reg (loop, exits, n_exits, PHI_RESULT (phi));

  free (exits);
}
/* Try to perform store motion for all memory references modified inside
   any of LOOPS.  */

static void
determine_lsm (struct loops *loops)
{
  struct loop *loop;
  basic_block bb;

  /* Create a UID for each statement in the function.  Ordering of the
     UIDs is not important for this pass.  */
  max_uid = 0;
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bsi;
      tree phi;

      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
        stmt_ann (bsi_stmt (bsi))->uid = max_uid++;

      for (phi = phi_nodes (bb); phi; phi = TREE_CHAIN (phi))
        stmt_ann (phi)->uid = max_uid++;
    }

  compute_immediate_uses (TDFA_USE_VOPS, NULL);

  /* Process the loops starting from the outermost ones.  For each virtual
     operand loop phi node check whether all the references inside the loop
     correspond to a single address, and if so, move them.  */

  loop = loops->tree_root->inner;
  while (1)
    {
      determine_lsm_loop (loop);

      if (loop->inner)
        {
          loop = loop->inner;
          continue;
        }
      while (!loop->next)
        {
          loop = loop->outer;
          if (loop == loops->tree_root)
            {
              free_df ();
              loop_commit_inserts ();
              return;
            }
        }
      loop = loop->next;
    }
}
/* Fills the ALWAYS_EXECUTED_IN information for basic blocks of LOOP, i.e.
   for each such basic block bb records the outermost loop for which execution
   of its header implies execution of bb.  CONTAINS_CALL is the bitmap of
   blocks that contain a nonpure call.  */

static void
fill_always_executed_in (struct loop *loop, sbitmap contains_call)
{
  basic_block bb = NULL, *bbs, last = NULL;
  unsigned i;
  edge e;
  struct loop *inn_loop = loop;

  if (!loop->header->aux)
    {
      bbs = get_loop_body_in_dom_order (loop);

      for (i = 0; i < loop->num_nodes; i++)
        {
          bb = bbs[i];

          if (dominated_by_p (CDI_DOMINATORS, loop->latch, bb))
            last = bb;

          if (TEST_BIT (contains_call, bb->index))
            break;

          for (e = bb->succ; e; e = e->succ_next)
            if (!flow_bb_inside_loop_p (loop, e->dest))
              break;
          if (e)
            break;

          /* A loop might be infinite (TODO use simple loop analysis
             to disprove this if possible).  */
          if (bb->flags & BB_IRREDUCIBLE_LOOP)
            break;

          if (!flow_bb_inside_loop_p (inn_loop, bb))
            break;

          if (bb->loop_father->header == bb)
            {
              if (!dominated_by_p (CDI_DOMINATORS, loop->latch, bb))
                break;

              /* In a loop that is always entered we may proceed anyway.
                 But record that we entered it and stop once we leave it.  */
              inn_loop = bb->loop_father;
            }
        }

      while (1)
        {
          last->aux = loop;
          if (last == loop->header)
            break;
          last = get_immediate_dominator (CDI_DOMINATORS, last);
        }

      free (bbs);
    }

  for (loop = loop->inner; loop; loop = loop->next)
    fill_always_executed_in (loop, contains_call);
}
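
/* For example (illustration only): in

     while (cond)
       {
         if (may_exit)
           break;
         x = ...;
       }

   the block testing may_exit dominates the latch and is reached whenever the
   loop is entered, so it gets ALWAYS_EXECUTED_IN set to the loop, while the
   block containing the assignment follows a possible exit and is left
   unmarked for this loop.  */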
/* Compute the global information needed by the loop invariant motion pass.
   LOOPS is the loop tree.  */

static void
tree_ssa_lim_initialize (struct loops *loops)
{
  sbitmap contains_call = sbitmap_alloc (last_basic_block);
  block_stmt_iterator bsi;
  struct loop *loop;
  basic_block bb;

  sbitmap_zero (contains_call);
  FOR_EACH_BB (bb)
    {
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
        {
          if (nonpure_call_p (bsi_stmt (bsi)))
            break;
        }

      if (!bsi_end_p (bsi))
        SET_BIT (contains_call, bb->index);
    }

  for (loop = loops->tree_root->inner; loop; loop = loop->next)
    fill_always_executed_in (loop, contains_call);

  sbitmap_free (contains_call);
}
/* Cleans up after the invariant motion pass.  */

static void
tree_ssa_lim_finalize (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      bb->aux = NULL;
    }
}
/* Moves invariants from LOOPS.  Only "expensive" invariants are moved out --
   i.e. those that are likely to be a win regardless of the register
   pressure.  */

void
tree_ssa_lim (struct loops *loops)
{
  tree_ssa_lim_initialize (loops);

  /* For each statement determine the outermost loop in which it is
     invariant and the cost of computing the invariant.  */
  determine_invariantness ();

  /* For each memory reference determine whether it is possible to hoist it
     out of the loop.  Force the necessary invariants to be moved out of the
     loops as well.  */
  determine_lsm (loops);

  /* Move the expressions that are expensive enough.  */
  move_computations ();

  tree_ssa_lim_finalize ();
}