/* Loop invariant motion.
   Copyright (C) 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 2, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "output.h"
#include "diagnostic.h"
#include "tree-flow.h"
#include "tree-dump.h"
#include "timevar.h"
#include "cfgloop.h"
#include "domwalk.h"
#include "params.h"
#include "tree-pass.h"
#include "flags.h"

/* A type for the list of statements that have to be moved in order to be able
   to hoist an invariant computation.  */

struct depend
{
  tree stmt;
  struct depend *next;
};

/* The auxiliary data kept for each statement.  */

struct lim_aux_data
{
  struct loop *max_loop;        /* The outermost loop in which the statement
                                   is invariant.  */

  struct loop *tgt_loop;        /* The loop out of which we want to move the
                                   invariant.  */

  struct loop *always_executed_in;
                                /* The outermost loop for which we are sure
                                   the statement is executed if the loop
                                   is entered.  */

  bool sm_done;                 /* True iff the store motion for a memory
                                   reference in the statement has already
                                   been executed.  */

  unsigned cost;                /* Cost of the computation performed by the
                                   statement.  */

  struct depend *depends;       /* List of statements that must also be hoisted
                                   out of the loop when this statement is
                                   hoisted; i.e. those that define the operands
                                   of the statement and are inside of the
                                   MAX_LOOP loop.  */
};

#define LIM_DATA(STMT) ((struct lim_aux_data *) (stmt_ann (STMT)->common.aux))

/* Description of a memory reference for store motion.  */

struct mem_ref
{
  tree *ref;                    /* The reference itself.  */
  tree stmt;                    /* The statement in which it occurs.  */
  struct mem_ref *next;         /* Next use in the chain.  */
};

/* Minimum cost of an expensive expression.  */
#define LIM_EXPENSIVE ((unsigned) PARAM_VALUE (PARAM_LIM_EXPENSIVE))

/* The outermost loop for which execution of the header guarantees that the
   block will be executed.  */
#define ALWAYS_EXECUTED_IN(BB) ((struct loop *) (BB)->aux)

/* Maximum uid of a statement in the function.  */

static unsigned max_uid;

/* Calls CBCK for each index in memory reference ADDR_P.  There are two
   kinds of situations handled; in each of these cases, the memory reference
   and DATA are passed to the callback:

   Access to an array: ARRAY_{RANGE_}REF (base, index).  In this case we also
   pass the pointer to the index to the callback.

   Pointer dereference: INDIRECT_REF (addr).  In this case we also pass the
   pointer to addr to the callback.

   If the callback returns false, the whole search stops and false is returned.
   Otherwise the function returns true after traversing through the whole
   reference *ADDR_P.  */

bool
for_each_index (tree *addr_p, bool (*cbck) (tree, tree *, void *), void *data)
{
  tree *nxt;
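
  /* Walk from the outermost component of the reference towards its base,
     invoking CBCK on every array index encountered, and on the pointer
     when the base is a dereference.  */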
  for (; ; addr_p = nxt)
    {
      switch (TREE_CODE (*addr_p))
        {
        case SSA_NAME:
          return cbck (*addr_p, addr_p, data);

        case INDIRECT_REF:
          nxt = &TREE_OPERAND (*addr_p, 0);
          return cbck (*addr_p, nxt, data);

        case BIT_FIELD_REF:
        case COMPONENT_REF:
        case VIEW_CONVERT_EXPR:
        case ARRAY_RANGE_REF:
        case REALPART_EXPR:
        case IMAGPART_EXPR:
          nxt = &TREE_OPERAND (*addr_p, 0);
          break;

        case ARRAY_REF:
          nxt = &TREE_OPERAND (*addr_p, 0);
          if (!cbck (*addr_p, &TREE_OPERAND (*addr_p, 1), data))
            return false;
          break;

        case VAR_DECL:
        case PARM_DECL:
        case STRING_CST:
        case RESULT_DECL:
          return true;

        default:
          gcc_unreachable ();
        }
    }
}

/* If it is possible to hoist the statement STMT unconditionally,
   returns MOVE_POSSIBLE.
   If it is possible to hoist the statement STMT, but we must avoid making
   it executed if it would not be executed in the original program (e.g.
   because it may trap), return MOVE_PRESERVE_EXECUTION.
   Otherwise return MOVE_IMPOSSIBLE.  */

enum move_pos
movement_possibility (tree stmt)
{
  tree lhs, rhs;

  if (flag_unswitch_loops
      && TREE_CODE (stmt) == COND_EXPR)
    {
      /* If we perform unswitching, force the operands of the invariant
         condition to be moved out of the loop.  */
      get_stmt_operands (stmt);

      return MOVE_POSSIBLE;
    }

  if (TREE_CODE (stmt) != MODIFY_EXPR)
    return MOVE_IMPOSSIBLE;

  if (stmt_ends_bb_p (stmt))
    return MOVE_IMPOSSIBLE;

  get_stmt_operands (stmt);

  if (stmt_ann (stmt)->has_volatile_ops)
    return MOVE_IMPOSSIBLE;

  lhs = TREE_OPERAND (stmt, 0);
  if (TREE_CODE (lhs) == SSA_NAME
      && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
    return MOVE_IMPOSSIBLE;

  rhs = TREE_OPERAND (stmt, 1);

  if (TREE_SIDE_EFFECTS (rhs))
    return MOVE_IMPOSSIBLE;

  if (TREE_CODE (lhs) != SSA_NAME
      || tree_could_trap_p (rhs))
    return MOVE_PRESERVE_EXECUTION;

  return MOVE_POSSIBLE;
}

/* Suppose that operand DEF is used inside the LOOP.  Returns the outermost
   loop to which we could move the expression using DEF if it did not have
   other operands, i.e. the outermost loop enclosing LOOP in which the value
   of DEF is invariant.  */

static struct loop *
outermost_invariant_loop (tree def, struct loop *loop)
{
  tree def_stmt;
  basic_block def_bb;
  struct loop *max_loop;

  if (TREE_CODE (def) != SSA_NAME)
    return superloop_at_depth (loop, 1);

  def_stmt = SSA_NAME_DEF_STMT (def);
  def_bb = bb_for_stmt (def_stmt);
  if (!def_bb)
    return superloop_at_depth (loop, 1);

  max_loop = find_common_loop (loop, def_bb->loop_father);

  if (LIM_DATA (def_stmt) && LIM_DATA (def_stmt)->max_loop)
    max_loop = find_common_loop (max_loop,
                                 LIM_DATA (def_stmt)->max_loop->outer);
  if (max_loop == loop)
    return NULL;
  max_loop = superloop_at_depth (loop, max_loop->depth + 1);

  return max_loop;
}

/* Returns the outermost superloop of LOOP in which the expression EXPR is
   invariant.  */

static struct loop *
outermost_invariant_loop_expr (tree expr, struct loop *loop)
{
  char class = TREE_CODE_CLASS (TREE_CODE (expr));
  unsigned i, nops;
  struct loop *max_loop = superloop_at_depth (loop, 1), *aloop;

  if (TREE_CODE (expr) == SSA_NAME
      || TREE_CODE (expr) == INTEGER_CST
      || is_gimple_min_invariant (expr))
    return outermost_invariant_loop (expr, loop);

  if (class != '1'
      && class != '2'
      && class != 'e'
      && class != '<')
    return NULL;

  nops = first_rtl_op (TREE_CODE (expr));
  for (i = 0; i < nops; i++)
    {
      aloop = outermost_invariant_loop_expr (TREE_OPERAND (expr, i), loop);
      if (!aloop)
        return NULL;

      if (flow_loop_nested_p (max_loop, aloop))
        max_loop = aloop;
    }

  return max_loop;
}

/* DATA is a structure containing information associated with a statement
   inside LOOP.  DEF is one of the operands of this statement.

   Find the outermost loop enclosing LOOP in which the value of DEF is
   invariant and record this in the DATA->max_loop field.  If DEF itself is
   defined inside this loop as well (i.e. we need to hoist it out of the loop
   if we want to hoist the statement represented by DATA), record the
   statement in which DEF is defined to the DATA->depends list.  Additionally
   if ADD_COST is true, add the cost of the computation of DEF to DATA->cost.

   If DEF is not invariant in LOOP, return false.  Otherwise return true.  */

static bool
add_dependency (tree def, struct lim_aux_data *data, struct loop *loop,
                bool add_cost)
{
  tree def_stmt = SSA_NAME_DEF_STMT (def);
  basic_block def_bb = bb_for_stmt (def_stmt);
  struct loop *max_loop;
  struct depend *dep;

  if (!def_bb)
    return true;

  max_loop = outermost_invariant_loop (def, loop);
  if (!max_loop)
    return false;

  if (flow_loop_nested_p (data->max_loop, max_loop))
    data->max_loop = max_loop;

  if (!LIM_DATA (def_stmt))
    return true;

  if (add_cost
      /* Only add the cost if the statement defining DEF is inside LOOP,
         i.e. if it is likely that by moving the invariants dependent
         on it, we will be able to avoid creating a new register for
         it (since it will be only used in these dependent invariants).  */
      && def_bb->loop_father == loop)
    data->cost += LIM_DATA (def_stmt)->cost;

  dep = xmalloc (sizeof (struct depend));
  dep->stmt = def_stmt;
  dep->next = data->depends;
  data->depends = dep;

  return true;
}

/* Returns an estimate for the cost of statement STMT.  TODO -- the values
   here are just ad-hoc constants.  The estimates should be based on
   target-specific values.  */

static unsigned
stmt_cost (tree stmt)
{
  tree lhs, rhs;
  unsigned cost = 1;

  /* Always try to create possibilities for unswitching.  */
  if (TREE_CODE (stmt) == COND_EXPR)
    return LIM_EXPENSIVE;

  lhs = TREE_OPERAND (stmt, 0);
  rhs = TREE_OPERAND (stmt, 1);

  /* Hoisting memory references out should almost surely be a win.  */
  if (!is_gimple_variable (lhs))
    cost += 20;
  if (is_gimple_addressable (rhs) && !is_gimple_variable (rhs))
    cost += 20;

  switch (TREE_CODE (rhs))
    {
    case CALL_EXPR:
      /* We should be hoisting calls if possible.  */

      /* Unless the call is a builtin_constant_p; this always folds to a
         constant, so moving it is useless.  */
      rhs = get_callee_fndecl (rhs);
      if (DECL_BUILT_IN (rhs)
          && DECL_FUNCTION_CODE (rhs) == BUILT_IN_CONSTANT_P)
        return 0;

      cost += 20;
      break;

    case MULT_EXPR:
    case TRUNC_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
    case CEIL_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case ROUND_MOD_EXPR:
    case TRUNC_MOD_EXPR:
      /* Division and multiplication are usually expensive.  */
      cost += 20;
      break;

    default:
      break;
    }

  return cost;
}

/* Determine the outermost loop to which it is possible to hoist a statement
   STMT and store it to LIM_DATA (STMT)->max_loop.  To do this we determine
   the outermost loop in which the value computed by STMT is invariant.
   If MUST_PRESERVE_EXEC is true, additionally choose such a loop that
   we preserve the fact whether STMT is executed.  It also fills other
   related information to LIM_DATA (STMT).

   The function returns false if STMT cannot be hoisted outside of the loop
   it is defined in, and true otherwise.  */

static bool
determine_max_movement (tree stmt, bool must_preserve_exec)
{
  basic_block bb = bb_for_stmt (stmt);
  struct loop *loop = bb->loop_father;
  struct loop *level;
  struct lim_aux_data *lim_data = LIM_DATA (stmt);
  tree val;
  ssa_op_iter iter;

  if (must_preserve_exec)
    level = ALWAYS_EXECUTED_IN (bb);
  else
    level = superloop_at_depth (loop, 1);
  lim_data->max_loop = level;

  FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_USE)
    if (!add_dependency (val, lim_data, loop, true))
      return false;

  FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_VIRTUAL_USES)
    if (!add_dependency (val, lim_data, loop, false))
      return false;

  lim_data->cost += stmt_cost (stmt);

  return true;
}

/* Suppose that some statement in ORIG_LOOP is hoisted to the loop LEVEL,
   and that one of the operands of this statement is computed by STMT.
   Ensure that STMT (together with all the statements that define its
   operands) is hoisted at least out of the loop LEVEL.  */

static void
set_level (tree stmt, struct loop *orig_loop, struct loop *level)
{
  struct loop *stmt_loop = bb_for_stmt (stmt)->loop_father;
  struct depend *dep;

  stmt_loop = find_common_loop (orig_loop, stmt_loop);
  if (LIM_DATA (stmt) && LIM_DATA (stmt)->tgt_loop)
    stmt_loop = find_common_loop (stmt_loop,
                                  LIM_DATA (stmt)->tgt_loop->outer);
  if (flow_loop_nested_p (stmt_loop, level))
    return;

  gcc_assert (LIM_DATA (stmt));
  gcc_assert (level == LIM_DATA (stmt)->max_loop
              || flow_loop_nested_p (LIM_DATA (stmt)->max_loop, level));

  LIM_DATA (stmt)->tgt_loop = level;
  for (dep = LIM_DATA (stmt)->depends; dep; dep = dep->next)
    set_level (dep->stmt, orig_loop, level);
}

/* Determines an outermost loop from which we want to hoist the statement STMT.
   For now we choose the outermost possible loop.  TODO -- use profiling
   information to set it more sanely.  */

static void
set_profitable_level (tree stmt)
{
  set_level (stmt, bb_for_stmt (stmt)->loop_father, LIM_DATA (stmt)->max_loop);
}

/* Returns true if STMT is not a pure call.  */

static bool
nonpure_call_p (tree stmt)
{
  tree call = get_call_expr_in (stmt);

  if (!call)
    return false;

  return TREE_SIDE_EFFECTS (call) != 0;
}

/* Releases the memory occupied by DATA.  */

static void
free_lim_aux_data (struct lim_aux_data *data)
{
  struct depend *dep, *next;

  for (dep = data->depends; dep; dep = next)
    {
      next = dep->next;
      free (dep);
    }
  free (data);
}

/* Determine the outermost loops in which statements in basic block BB are
   invariant, and record them to the LIM_DATA associated with the statements.
   Callback for walk_dominator_tree.  */

static void
determine_invariantness_stmt (struct dom_walk_data *dw_data ATTRIBUTE_UNUSED,
                              basic_block bb)
{
  enum move_pos pos;
  block_stmt_iterator bsi;
  tree stmt;
  bool maybe_never = ALWAYS_EXECUTED_IN (bb) == NULL;
  struct loop *outermost = ALWAYS_EXECUTED_IN (bb);

  if (!bb->loop_father->outer)
    return;

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "Basic block %d (loop %d -- depth %d):\n\n",
             bb->index, bb->loop_father->num, bb->loop_father->depth);

  for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
    {
      stmt = bsi_stmt (bsi);

      pos = movement_possibility (stmt);
      if (pos == MOVE_IMPOSSIBLE)
        {
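          /* A call with side effects need not return normally (it can exit
             the program or loop forever), so the statements following it in
             BB are not necessarily executed even when BB is entered; drop
             the "always executed" information from this point on.  */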
          if (nonpure_call_p (stmt))
            {
              maybe_never = true;
              outermost = NULL;
            }
          continue;
        }

      stmt_ann (stmt)->common.aux = xcalloc (1, sizeof (struct lim_aux_data));
      LIM_DATA (stmt)->always_executed_in = outermost;

      if (maybe_never && pos == MOVE_PRESERVE_EXECUTION)
        continue;

      if (!determine_max_movement (stmt, pos == MOVE_PRESERVE_EXECUTION))
        {
          LIM_DATA (stmt)->max_loop = NULL;
          continue;
        }

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          print_generic_stmt_indented (dump_file, stmt, 0, 2);
          fprintf (dump_file, "  invariant up to level %d, cost %d.\n\n",
                   LIM_DATA (stmt)->max_loop->depth,
                   LIM_DATA (stmt)->cost);
        }

      if (LIM_DATA (stmt)->cost >= LIM_EXPENSIVE)
        set_profitable_level (stmt);
    }
}

/* For each statement determines the outermost loop in which it is invariant,
   the statements on whose motion it depends and the cost of the computation.
   This information is stored to the LIM_DATA structure associated with
   each statement.  */

static void
determine_invariantness (void)
{
  struct dom_walk_data walk_data;

  memset (&walk_data, 0, sizeof (struct dom_walk_data));
  walk_data.before_dom_children_before_stmts = determine_invariantness_stmt;

  init_walk_dominator_tree (&walk_data);
  walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
  fini_walk_dominator_tree (&walk_data);
}

/* Commits edge insertions and updates loop structures.  */

void
loop_commit_inserts (void)
{
  unsigned old_last_basic_block, i;
  basic_block bb;

  old_last_basic_block = last_basic_block;
  bsi_commit_edge_inserts (NULL);
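
  /* Committing the insertions may have created new basic blocks on the
     split edges.  Each such block has a single predecessor and a single
     successor, so add it to the innermost loop containing them both.  */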
  for (i = old_last_basic_block; i < (unsigned) last_basic_block; i++)
    {
      bb = BASIC_BLOCK (i);
      add_bb_to_loop (bb,
                      find_common_loop (EDGE_SUCC (bb, 0)->dest->loop_father,
                                        EDGE_PRED (bb, 0)->src->loop_father));
    }
}

/* Hoist the statements in basic block BB out of the loops prescribed by
   data stored in LIM_DATA structures associated with each statement.  Callback
   for walk_dominator_tree.  */

static void
move_computations_stmt (struct dom_walk_data *dw_data ATTRIBUTE_UNUSED,
                        basic_block bb)
{
  struct loop *level;
  block_stmt_iterator bsi;
  tree stmt;
  unsigned cost = 0;

  if (!bb->loop_father->outer)
    return;

  for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
    {
      stmt = bsi_stmt (bsi);

      if (!LIM_DATA (stmt))
        {
          bsi_next (&bsi);
          continue;
        }

      cost = LIM_DATA (stmt)->cost;
      level = LIM_DATA (stmt)->tgt_loop;
      free_lim_aux_data (LIM_DATA (stmt));
      stmt_ann (stmt)->common.aux = NULL;

      if (!level)
        {
          bsi_next (&bsi);
          continue;
        }

      /* We do not really want to move conditionals out of the loop; they are
         only considered here to force their operands to be moved if
         necessary.  */
      if (TREE_CODE (stmt) == COND_EXPR)
        continue;
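
      /* Note that BSI is deliberately not advanced in the case above: the
         aux data of STMT was already cleared, so the next iteration takes
         the !LIM_DATA path and steps past this statement.  */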
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "Moving statement\n");
          print_generic_stmt (dump_file, stmt, 0);
          fprintf (dump_file, "(cost %u) out of loop %d.\n\n",
                   cost, level->num);
        }
      bsi_insert_on_edge (loop_preheader_edge (level), stmt);
      bsi_remove (&bsi);
    }
}

/* Hoist the statements out of the loops prescribed by data stored in
   LIM_DATA structures associated with each statement.  */

static void
move_computations (void)
{
  struct dom_walk_data walk_data;

  memset (&walk_data, 0, sizeof (struct dom_walk_data));
  walk_data.before_dom_children_before_stmts = move_computations_stmt;

  init_walk_dominator_tree (&walk_data);
  walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
  fini_walk_dominator_tree (&walk_data);

  loop_commit_inserts ();
  rewrite_into_ssa (false);
  if (bitmap_first_set_bit (vars_to_rename) >= 0)
    {
      /* The rewrite of ssa names may cause violation of loop closed ssa
         form invariants.  TODO -- avoid these rewrites completely.
         Information in virtual phi nodes is sufficient for it.  */
      rewrite_into_loop_closed_ssa ();
    }
  bitmap_clear (vars_to_rename);
}

/* Checks whether the statement defining variable *INDEX can be hoisted
   out of the loop passed in DATA.  Callback for for_each_index.  */

static bool
may_move_till (tree ref, tree *index, void *data)
{
  struct loop *loop = data, *max_loop;

  /* If REF is an array reference, check also that the step and the lower
     bound are invariant in LOOP.  */
  if (TREE_CODE (ref) == ARRAY_REF)
    {
      tree step = array_ref_element_size (ref);
      tree lbound = array_ref_low_bound (ref);

      max_loop = outermost_invariant_loop_expr (step, loop);
      if (!max_loop)
        return false;

      max_loop = outermost_invariant_loop_expr (lbound, loop);
      if (!max_loop)
        return false;
    }

  max_loop = outermost_invariant_loop (*index, loop);
  if (!max_loop)
    return false;

  return true;
}

/* Forces statements defining (invariant) SSA names in expression EXPR to be
   moved out of the LOOP.  ORIG_LOOP is the loop in which EXPR is used.  */

static void
force_move_till_expr (tree expr, struct loop *orig_loop, struct loop *loop)
{
  char class = TREE_CODE_CLASS (TREE_CODE (expr));
  unsigned i, nops;

  if (TREE_CODE (expr) == SSA_NAME)
    {
      tree stmt = SSA_NAME_DEF_STMT (expr);
      if (IS_EMPTY_STMT (stmt))
        return;

      set_level (stmt, orig_loop, loop);
      return;
    }

  if (class != '1'
      && class != '2'
      && class != 'e'
      && class != '<')
    return;

  nops = first_rtl_op (TREE_CODE (expr));
  for (i = 0; i < nops; i++)
    force_move_till_expr (TREE_OPERAND (expr, i), orig_loop, loop);
}

/* Forces statements defining invariants in REF (and *INDEX) to be moved out of
   the LOOP.  The reference REF is used in the loop ORIG_LOOP.  Callback for
   for_each_index.  */

struct fmt_data
{
  struct loop *loop;
  struct loop *orig_loop;
};

static bool
force_move_till (tree ref, tree *index, void *data)
{
  tree stmt;
  struct fmt_data *fmt_data = data;

  if (TREE_CODE (ref) == ARRAY_REF)
    {
      tree step = array_ref_element_size (ref);
      tree lbound = array_ref_low_bound (ref);

      force_move_till_expr (step, fmt_data->orig_loop, fmt_data->loop);
      force_move_till_expr (lbound, fmt_data->orig_loop, fmt_data->loop);
    }

  if (TREE_CODE (*index) != SSA_NAME)
    return true;

  stmt = SSA_NAME_DEF_STMT (*index);
  if (IS_EMPTY_STMT (stmt))
    return true;

  set_level (stmt, fmt_data->orig_loop, fmt_data->loop);

  return true;
}

/* Records memory reference *REF (that occurs in statement STMT)
   to the list MEM_REFS.  */

static void
record_mem_ref (struct mem_ref **mem_refs, tree stmt, tree *ref)
{
  struct mem_ref *aref = xmalloc (sizeof (struct mem_ref));

  aref->stmt = stmt;
  aref->ref = ref;

  aref->next = *mem_refs;
  *mem_refs = aref;
}

/* Releases list of memory references MEM_REFS.  */

static void
free_mem_refs (struct mem_ref *mem_refs)
{
  struct mem_ref *act;

  while (mem_refs)
    {
      act = mem_refs;
      mem_refs = mem_refs->next;
      free (act);
    }
}

/* If VAR is defined in LOOP and the statement it is defined in does not belong
   to the set SEEN, add the statement to QUEUE of length IN_QUEUE and
   to the set SEEN.  */

static void
maybe_queue_var (tree var, struct loop *loop,
                 sbitmap seen, tree *queue, unsigned *in_queue)
{
  tree stmt = SSA_NAME_DEF_STMT (var);
  basic_block def_bb = bb_for_stmt (stmt);

  if (!def_bb
      || !flow_bb_inside_loop_p (loop, def_bb)
      || TEST_BIT (seen, stmt_ann (stmt)->uid))
    return;

  SET_BIT (seen, stmt_ann (stmt)->uid);
  queue[(*in_queue)++] = stmt;
}

/* If COMMON_REF is NULL, set COMMON_REF to *OP and return true.
   Otherwise return true if the memory reference *OP is equal to COMMON_REF.
   Record the reference OP to the list MEM_REFS.  STMT is the statement in
   which the reference occurs.  */

struct sra_data
{
  struct mem_ref **mem_refs;
  tree common_ref;
  tree stmt;
};

static bool
fem_single_reachable_address (tree *op, void *data)
{
  struct sra_data *sra_data = data;

  if (sra_data->common_ref
      && !operand_equal_p (*op, sra_data->common_ref, 0))
    return false;
  sra_data->common_ref = *op;

  record_mem_ref (sra_data->mem_refs, sra_data->stmt, op);
  return true;
}

/* Runs CALLBACK for each operand of STMT that is a memory reference.  DATA
   is passed to the CALLBACK as well.  The traversal stops if CALLBACK
   returns false; for_each_memref then returns false as well.  Otherwise
   for_each_memref returns true.  */

static bool
for_each_memref (tree stmt, bool (*callback)(tree *, void *), void *data)
{
  tree *op;

  if (TREE_CODE (stmt) == RETURN_EXPR)
    stmt = TREE_OPERAND (stmt, 0);

  if (TREE_CODE (stmt) == MODIFY_EXPR)
    {
      op = &TREE_OPERAND (stmt, 0);
      if (TREE_CODE (*op) != SSA_NAME
          && !callback (op, data))
        return false;

      op = &TREE_OPERAND (stmt, 1);
      if (TREE_CODE (*op) != SSA_NAME
          && is_gimple_lvalue (*op)
          && !callback (op, data))
        return false;

      stmt = TREE_OPERAND (stmt, 1);
    }

  if (TREE_CODE (stmt) == WITH_SIZE_EXPR)
    stmt = TREE_OPERAND (stmt, 0);

  if (TREE_CODE (stmt) == CALL_EXPR)
    {
      tree args;

      for (args = TREE_OPERAND (stmt, 1); args; args = TREE_CHAIN (args))
        {
          op = &TREE_VALUE (args);

          if (TREE_CODE (*op) != SSA_NAME
              && is_gimple_lvalue (*op)
              && !callback (op, data))
            return false;
        }
    }

  return true;
}

/* Determine whether all memory references inside the LOOP that correspond
   to virtual ssa names defined in statement STMT are equal.
   If so, store the list of the references to MEM_REFS, and return one
   of them.  Otherwise store NULL to MEM_REFS and return NULL_TREE.
   *SEEN_CALL_STMT is set to true if the virtual operands suggest
   that the reference might be clobbered by a call inside the LOOP.  */

static tree
single_reachable_address (struct loop *loop, tree stmt,
                          struct mem_ref **mem_refs,
                          bool *seen_call_stmt)
{
  tree *queue = xmalloc (sizeof (tree) * max_uid);
  sbitmap seen = sbitmap_alloc (max_uid);
  unsigned in_queue = 1;
  dataflow_t df;
  unsigned i, n;
  struct sra_data sra_data;
  tree call;
  tree val;
  ssa_op_iter iter;

  sbitmap_zero (seen);

  *mem_refs = NULL;
  sra_data.mem_refs = mem_refs;
  sra_data.common_ref = NULL_TREE;

  queue[0] = stmt;
  SET_BIT (seen, stmt_ann (stmt)->uid);
  *seen_call_stmt = false;
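
  /* Worklist traversal: starting from STMT, follow the definitions of the
     virtual uses and the immediate uses of each visited statement, so that
     every statement in LOOP connected to STMT through virtual operands is
     processed exactly once.  */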
  while (in_queue)
    {
      stmt = queue[--in_queue];
      sra_data.stmt = stmt;

      if (LIM_DATA (stmt)
          && LIM_DATA (stmt)->sm_done)
        goto fail;

      switch (TREE_CODE (stmt))
        {
        case MODIFY_EXPR:
        case CALL_EXPR:
        case RETURN_EXPR:
          if (!for_each_memref (stmt, fem_single_reachable_address,
                                &sra_data))
            goto fail;

          /* If this is a function that may depend on the memory location,
             record the fact.  We cannot directly refuse call clobbered
             operands here, since sra_data.common_ref need not have been
             set yet.  */
          call = get_call_expr_in (stmt);
          if (call
              && !(call_expr_flags (call) & ECF_CONST))
            *seen_call_stmt = true;

          /* Traverse also definitions of the VUSES (there may be others
             distinct from the one we used to get to this statement).  */
          FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_VIRTUAL_USES)
            maybe_queue_var (val, loop, seen, queue, &in_queue);

          break;

        case PHI_NODE:
          for (i = 0; i < (unsigned) PHI_NUM_ARGS (stmt); i++)
            maybe_queue_var (PHI_ARG_DEF (stmt, i), loop,
                             seen, queue, &in_queue);
          break;

        default:
          goto fail;
        }

      /* Find uses of virtual names.  */
      df = get_immediate_uses (stmt);
      n = num_immediate_uses (df);

      for (i = 0; i < n; i++)
        {
          stmt = immediate_use (df, i);

          if (!flow_bb_inside_loop_p (loop, bb_for_stmt (stmt)))
            continue;

          if (TEST_BIT (seen, stmt_ann (stmt)->uid))
            continue;
          SET_BIT (seen, stmt_ann (stmt)->uid);

          queue[in_queue++] = stmt;
        }
    }

  free (queue);
  sbitmap_free (seen);

  return sra_data.common_ref;

fail:
  free_mem_refs (*mem_refs);
  *mem_refs = NULL;
  free (queue);
  sbitmap_free (seen);

  return NULL;
}

/* Rewrites memory references in list MEM_REFS by variable TMP_VAR.  */

static void
rewrite_mem_refs (tree tmp_var, struct mem_ref *mem_refs)
{
  tree var;
  ssa_op_iter iter;

  for (; mem_refs; mem_refs = mem_refs->next)
    {
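      /* The virtual operands of the statement refer to the memory location
         being replaced; mark them for renaming so that the SSA form is
         rebuilt once the reference has been rewritten to TMP_VAR.  */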
      FOR_EACH_SSA_TREE_OPERAND (var, mem_refs->stmt, iter,
                                 (SSA_OP_VIRTUAL_DEFS | SSA_OP_VUSE))
        {
          var = SSA_NAME_VAR (var);
          bitmap_set_bit (vars_to_rename, var_ann (var)->uid);
        }

      *mem_refs->ref = tmp_var;
      modify_stmt (mem_refs->stmt);
    }
}

/* Records request for store motion of memory reference REF from LOOP.
   MEM_REFS is the list of occurrences of the reference REF inside LOOP;
   these references are rewritten by a new temporary variable.
   Exits from the LOOP are stored in EXITS; there are N_EXITS of them.
   The initialization of the temporary variable is put to the preheader
   of the loop, and assignments to the reference from the temporary variable
   are emitted to exits.  */

static void
schedule_sm (struct loop *loop, edge *exits, unsigned n_exits, tree ref,
             struct mem_ref *mem_refs)
{
  struct mem_ref *aref;
  tree tmp_var;
  unsigned i;
  tree load, store;
  struct fmt_data fmt_data;

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Executing store motion of ");
      print_generic_expr (dump_file, ref, 0);
      fprintf (dump_file, " from loop %d\n", loop->num);
    }

  tmp_var = make_rename_temp (TREE_TYPE (ref), "lsm_tmp");

  fmt_data.loop = loop;
  fmt_data.orig_loop = loop;
  for_each_index (&ref, force_move_till, &fmt_data);

  rewrite_mem_refs (tmp_var, mem_refs);
  for (aref = mem_refs; aref; aref = aref->next)
    if (LIM_DATA (aref->stmt))
      LIM_DATA (aref->stmt)->sm_done = true;

  /* Emit the load & stores.  */
  load = build (MODIFY_EXPR, void_type_node, tmp_var, ref);
  get_stmt_ann (load)->common.aux = xcalloc (1, sizeof (struct lim_aux_data));
  LIM_DATA (load)->max_loop = loop;
  LIM_DATA (load)->tgt_loop = loop;

  /* Put this into the latch, so that we are sure it will be processed after
     all dependencies.  */
  bsi_insert_on_edge (loop_latch_edge (loop), load);
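
  /* On every exit from the loop, store the value of the temporary back to
     the original memory reference.  */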
  for (i = 0; i < n_exits; i++)
    {
      store = build (MODIFY_EXPR, void_type_node,
                     unshare_expr (ref), tmp_var);
      bsi_insert_on_edge (exits[i], store);
    }
}

/* Returns true if REF may be clobbered by calls.  */

static bool
is_call_clobbered_ref (tree ref)
{
  tree base;

  base = get_base_address (ref);
  if (!base)
    return true;

  if (DECL_P (base))
    return is_call_clobbered (base);

  if (TREE_CODE (base) == INDIRECT_REF)
    {
      /* Check whether the alias tags associated with the pointer
         are call clobbered.  */
      tree ptr = TREE_OPERAND (base, 0);
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
      tree nmt = (pi) ? pi->name_mem_tag : NULL_TREE;
      tree tmt = var_ann (SSA_NAME_VAR (ptr))->type_mem_tag;

      if ((nmt && is_call_clobbered (nmt))
          || (tmt && is_call_clobbered (tmt)))
        return true;

      return false;
    }

  gcc_unreachable ();
}

/* Determine whether all memory references inside LOOP corresponding to the
   virtual ssa name REG are equal to each other, and whether the address of
   this common reference can be hoisted outside of the loop.  If this is true,
   prepare the statements that load the value of the memory reference to a
   temporary variable in the loop preheader, store it back on the loop exits,
   and replace all the references inside LOOP by this temporary variable.
   The N_EXITS exit edges of LOOP are stored in EXITS.  */

static void
determine_lsm_reg (struct loop *loop, edge *exits, unsigned n_exits, tree reg)
{
  tree ref;
  struct mem_ref *mem_refs, *aref;
  struct loop *must_exec;
  bool sees_call;
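
  /* Only virtual operands are interesting here, since they stand for memory
     locations; ordinary registers are handled by the invariant motion
     itself.  */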
  if (is_gimple_reg (reg))
    return;

  ref = single_reachable_address (loop, SSA_NAME_DEF_STMT (reg), &mem_refs,
                                  &sees_call);
  if (!ref)
    return;

  /* If we cannot create an ssa name for the result, give up.  */
  if (!is_gimple_reg_type (TREE_TYPE (ref))
      || TREE_THIS_VOLATILE (ref))
    goto fail;

  /* If there is a call that may use the location, give up as well.  */
  if (sees_call
      && is_call_clobbered_ref (ref))
    goto fail;

  if (!for_each_index (&ref, may_move_till, loop))
    goto fail;

  if (tree_could_trap_p (ref))
    {
      /* If the memory access is unsafe (i.e. it might trap), ensure that at
         least one of the statements in which it occurs is always executed
         when the loop is entered.  This way we know that by moving the load
         from the reference out of the loop we will not cause an error that
         would not occur otherwise.

         TODO -- in fact we would like to check for anticipability of the
         reference, i.e. that on each path from loop entry to loop exit at
         least one of the statements containing the memory reference is
         executed.  */

      for (aref = mem_refs; aref; aref = aref->next)
        {
          if (!LIM_DATA (aref->stmt))
            continue;

          must_exec = LIM_DATA (aref->stmt)->always_executed_in;
          if (!must_exec)
            continue;

          if (must_exec == loop
              || flow_loop_nested_p (must_exec, loop))
            break;
        }

      if (!aref)
        goto fail;
    }

  schedule_sm (loop, exits, n_exits, ref, mem_refs);

fail: ;
  free_mem_refs (mem_refs);
}

/* Checks whether LOOP (with N_EXITS exits stored in the EXITS array) is
   suitable for a store motion optimization (i.e. whether we can insert
   statements on its exits).  */

static bool
loop_suitable_for_sm (struct loop *loop ATTRIBUTE_UNUSED, edge *exits,
                      unsigned n_exits)
{
  unsigned i;

  for (i = 0; i < n_exits; i++)
    if (exits[i]->flags & EDGE_ABNORMAL)
      return false;

  return true;
}

/* Try to perform store motion for all memory references modified inside
   LOOP.  */

static void
determine_lsm_loop (struct loop *loop)
{
  tree phi;
  unsigned n_exits;
  edge *exits = get_loop_exit_edges (loop, &n_exits);

  if (!loop_suitable_for_sm (loop, exits, n_exits))
    {
      free (exits);
      return;
    }
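
  /* Memory locations modified in LOOP have a virtual phi node in the loop
     header; examine each phi result as a candidate for store motion
     (determine_lsm_reg ignores the ordinary registers).  */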
  for (phi = phi_nodes (loop->header); phi; phi = TREE_CHAIN (phi))
    determine_lsm_reg (loop, exits, n_exits, PHI_RESULT (phi));

  free (exits);
}

/* Try to perform store motion for all memory references modified inside
   any of LOOPS.  */

static void
determine_lsm (struct loops *loops)
{
  struct loop *loop;
  basic_block bb;

  /* Create a UID for each statement in the function.  Ordering of the
     UIDs is not important for this pass.  */
  max_uid = 0;
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bsi;
      tree phi;

      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
        stmt_ann (bsi_stmt (bsi))->uid = max_uid++;

      for (phi = phi_nodes (bb); phi; phi = TREE_CHAIN (phi))
        stmt_ann (phi)->uid = max_uid++;
    }

  compute_immediate_uses (TDFA_USE_VOPS, NULL);

  /* Visit the loops outermost first.  For each virtual operand loop phi node
     check whether all the references inside the loop correspond to a single
     address, and if so, move them.  */

  loop = loops->tree_root->inner;
  while (1)
    {
      determine_lsm_loop (loop);

      if (loop->inner)
        {
          loop = loop->inner;
          continue;
        }
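
      /* No inner loop to descend into; climb back up the loop tree until a
         sibling is found, finishing once we are back at the root.  */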
      while (!loop->next)
        {
          loop = loop->outer;
          if (loop == loops->tree_root)
            {
              free_df ();
              loop_commit_inserts ();
              return;
            }
        }
      loop = loop->next;
    }
}

/* Fills the ALWAYS_EXECUTED_IN information for basic blocks of LOOP, i.e.
   for each such basic block bb records the outermost loop for which execution
   of its header implies execution of bb.  CONTAINS_CALL is the bitmap of
   blocks that contain a nonpure call.  */

static void
fill_always_executed_in (struct loop *loop, sbitmap contains_call)
{
  basic_block bb = NULL, *bbs, last = NULL;
  unsigned i;
  edge e;
  struct loop *inn_loop = loop;

  if (!loop->header->aux)
    {
      bbs = get_loop_body_in_dom_order (loop);

      for (i = 0; i < loop->num_nodes; i++)
        {
          edge_iterator ei;
          bb = bbs[i];

          if (dominated_by_p (CDI_DOMINATORS, loop->latch, bb))
            last = bb;

          if (TEST_BIT (contains_call, bb->index))
            break;

          FOR_EACH_EDGE (e, ei, bb->succs)
            {
              if (!flow_bb_inside_loop_p (loop, e->dest))
                break;
            }
          if (e)
            break;

          /* A loop might be infinite (TODO use simple loop analysis
             to disprove this if possible).  */
          if (bb->flags & BB_IRREDUCIBLE_LOOP)
            break;

          if (!flow_bb_inside_loop_p (inn_loop, bb))
            break;

          if (bb->loop_father->header == bb)
            {
              if (!dominated_by_p (CDI_DOMINATORS, loop->latch, bb))
                break;

              /* In a loop that is always entered we may proceed anyway.
                 But record that we entered it and stop once we leave it.  */
              inn_loop = bb->loop_father;
            }
        }
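
      /* Every block on the dominator chain from LAST up to the loop header
         is executed whenever the header is; record LOOP as the
         ALWAYS_EXECUTED_IN loop for all of them.  */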
      while (1)
        {
          last->aux = loop;
          if (last == loop->header)
            break;
          last = get_immediate_dominator (CDI_DOMINATORS, last);
        }

      free (bbs);
    }

  for (loop = loop->inner; loop; loop = loop->next)
    fill_always_executed_in (loop, contains_call);
}

/* Compute the global information needed by the loop invariant motion pass.
   LOOPS is the loop tree.  */

static void
tree_ssa_lim_initialize (struct loops *loops)
{
  sbitmap contains_call = sbitmap_alloc (last_basic_block);
  block_stmt_iterator bsi;
  struct loop *loop;
  basic_block bb;

  sbitmap_zero (contains_call);
  FOR_EACH_BB (bb)
    {
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
        {
          if (nonpure_call_p (bsi_stmt (bsi)))
            break;
        }

      if (!bsi_end_p (bsi))
        SET_BIT (contains_call, bb->index);
    }

  for (loop = loops->tree_root->inner; loop; loop = loop->next)
    fill_always_executed_in (loop, contains_call);

  sbitmap_free (contains_call);
}

/* Cleans up after the invariant motion pass.  */

static void
tree_ssa_lim_finalize (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    {
      bb->aux = NULL;
    }
}

/* Moves invariants from LOOPS.  Only "expensive" invariants are moved out --
   i.e. those that are likely to be a win regardless of the register
   pressure.  */

void
tree_ssa_lim (struct loops *loops)
{
  tree_ssa_lim_initialize (loops);

  /* For each statement determine the outermost loop in which it is
     invariant and the cost of computing the invariant.  */
  determine_invariantness ();

  /* For each memory reference determine whether it is possible to hoist it
     out of the loop.  Force the necessary invariants to be moved out of the
     loops as well.  */
  determine_lsm (loops);

  /* Move the expressions that are expensive enough.  */
  move_computations ();

  tree_ssa_lim_finalize ();
}