linux-unwind.h: Add exception clause to copyright.
[official-gcc.git] / gcc / tree-ssa-loop-im.c
blob65a2a5fd48ad39e74d59a4c8acf460af2094c4ae
1 /* Loop invariant motion.
2 Copyright (C) 2003, 2004, 2005 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by the
8 Free Software Foundation; either version 2, or (at your option) any
9 later version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING. If not, write to the Free
18 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
19 02111-1307, USA. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "tree.h"
26 #include "rtl.h"
27 #include "tm_p.h"
28 #include "hard-reg-set.h"
29 #include "basic-block.h"
30 #include "output.h"
31 #include "diagnostic.h"
32 #include "tree-flow.h"
33 #include "tree-dump.h"
34 #include "timevar.h"
35 #include "cfgloop.h"
36 #include "domwalk.h"
37 #include "params.h"
38 #include "tree-pass.h"
39 #include "flags.h"
41 /* A type for the list of statements that have to be moved in order to be able
42 to hoist an invariant computation. */
44 struct depend
46 tree stmt;
47 struct depend *next;
50 /* The auxiliary data kept for each statement. */
52 struct lim_aux_data
54 struct loop *max_loop; /* The outermost loop in that the statement
55 is invariant. */
57 struct loop *tgt_loop; /* The loop out of that we want to move the
58 invariant. */
60 struct loop *always_executed_in;
61 /* The outermost loop for that we are sure
62 the statement is executed if the loop
63 is entered. */
65 bool sm_done; /* True iff the store motion for a memory
66 reference in the statement has already
67 been executed. */
69 unsigned cost; /* Cost of the computation performed by the
70 statement. */
72 struct depend *depends; /* List of statements that must be also hoisted
73 out of the loop when this statement is
74 hoisted; i.e. those that define the operands
75 of the statement and are inside of the
76 MAX_LOOP loop. */
/* Accessor for the lim_aux_data stored in the statement annotation.
   PHI nodes never carry any, so NULL is returned for them.  */
#define LIM_DATA(STMT) (TREE_CODE (STMT) == PHI_NODE \
			? NULL \
			: (struct lim_aux_data *) (stmt_ann (STMT)->common.aux))
83 /* Description of a memory reference for store motion. */
85 struct mem_ref
87 tree *ref; /* The reference itself. */
88 tree stmt; /* The statement in that it occurs. */
89 struct mem_ref *next; /* Next use in the chain. */
/* Minimum cost of an expensive expression.  */
#define LIM_EXPENSIVE ((unsigned) PARAM_VALUE (PARAM_LIM_EXPENSIVE))

/* The outermost loop for that execution of the header guarantees that the
   block will be executed.  */
#define ALWAYS_EXECUTED_IN(BB) ((struct loop *) (BB)->aux)

/* Maximal uid of a statement.  Uids of phi nodes are derived from the
   versions of the ssa names they define (see get_stmt_uid).  */
static unsigned max_stmt_uid;
103 /* Returns uid of statement STMT. */
105 static unsigned
106 get_stmt_uid (tree stmt)
108 if (TREE_CODE (stmt) == PHI_NODE)
109 return SSA_NAME_VERSION (PHI_RESULT (stmt)) + max_stmt_uid;
111 return stmt_ann (stmt)->uid;
114 /* Calls CBCK for each index in memory reference ADDR_P. There are two
115 kinds situations handled; in each of these cases, the memory reference
116 and DATA are passed to the callback:
118 Access to an array: ARRAY_{RANGE_}REF (base, index). In this case we also
119 pass the pointer to the index to the callback.
121 Pointer dereference: INDIRECT_REF (addr). In this case we also pass the
122 pointer to addr to the callback.
124 If the callback returns false, the whole search stops and false is returned.
125 Otherwise the function returns true after traversing through the whole
126 reference *ADDR_P. */
128 bool
129 for_each_index (tree *addr_p, bool (*cbck) (tree, tree *, void *), void *data)
131 tree *nxt, *idx;
133 for (; ; addr_p = nxt)
135 switch (TREE_CODE (*addr_p))
137 case SSA_NAME:
138 return cbck (*addr_p, addr_p, data);
140 case MISALIGNED_INDIRECT_REF:
141 case ALIGN_INDIRECT_REF:
142 case INDIRECT_REF:
143 nxt = &TREE_OPERAND (*addr_p, 0);
144 return cbck (*addr_p, nxt, data);
146 case BIT_FIELD_REF:
147 case VIEW_CONVERT_EXPR:
148 case ARRAY_RANGE_REF:
149 case REALPART_EXPR:
150 case IMAGPART_EXPR:
151 nxt = &TREE_OPERAND (*addr_p, 0);
152 break;
154 case COMPONENT_REF:
155 /* If the component has varying offset, it behaves like index
156 as well. */
157 idx = &TREE_OPERAND (*addr_p, 2);
158 if (*idx
159 && !cbck (*addr_p, idx, data))
160 return false;
162 nxt = &TREE_OPERAND (*addr_p, 0);
163 break;
165 case ARRAY_REF:
166 nxt = &TREE_OPERAND (*addr_p, 0);
167 if (!cbck (*addr_p, &TREE_OPERAND (*addr_p, 1), data))
168 return false;
169 break;
171 case VAR_DECL:
172 case PARM_DECL:
173 case STRING_CST:
174 case RESULT_DECL:
175 return true;
177 default:
178 gcc_unreachable ();
183 /* If it is possible to hoist the statement STMT unconditionally,
184 returns MOVE_POSSIBLE.
185 If it is possible to hoist the statement STMT, but we must avoid making
186 it executed if it would not be executed in the original program (e.g.
187 because it may trap), return MOVE_PRESERVE_EXECUTION.
188 Otherwise return MOVE_IMPOSSIBLE. */
190 enum move_pos
191 movement_possibility (tree stmt)
193 tree lhs, rhs;
195 if (flag_unswitch_loops
196 && TREE_CODE (stmt) == COND_EXPR)
198 /* If we perform unswitching, force the operands of the invariant
199 condition to be moved out of the loop. */
200 get_stmt_operands (stmt);
202 return MOVE_POSSIBLE;
205 if (TREE_CODE (stmt) != MODIFY_EXPR)
206 return MOVE_IMPOSSIBLE;
208 if (stmt_ends_bb_p (stmt))
209 return MOVE_IMPOSSIBLE;
211 get_stmt_operands (stmt);
213 if (stmt_ann (stmt)->has_volatile_ops)
214 return MOVE_IMPOSSIBLE;
216 lhs = TREE_OPERAND (stmt, 0);
217 if (TREE_CODE (lhs) == SSA_NAME
218 && SSA_NAME_OCCURS_IN_ABNORMAL_PHI (lhs))
219 return MOVE_IMPOSSIBLE;
221 rhs = TREE_OPERAND (stmt, 1);
223 if (TREE_SIDE_EFFECTS (rhs))
224 return MOVE_IMPOSSIBLE;
226 if (TREE_CODE (lhs) != SSA_NAME
227 || tree_could_trap_p (rhs))
228 return MOVE_PRESERVE_EXECUTION;
230 return MOVE_POSSIBLE;
233 /* Suppose that operand DEF is used inside the LOOP. Returns the outermost
234 loop to that we could move the expression using DEF if it did not have
235 other operands, i.e. the outermost loop enclosing LOOP in that the value
236 of DEF is invariant. */
238 static struct loop *
239 outermost_invariant_loop (tree def, struct loop *loop)
241 tree def_stmt;
242 basic_block def_bb;
243 struct loop *max_loop;
245 if (TREE_CODE (def) != SSA_NAME)
246 return superloop_at_depth (loop, 1);
248 def_stmt = SSA_NAME_DEF_STMT (def);
249 def_bb = bb_for_stmt (def_stmt);
250 if (!def_bb)
251 return superloop_at_depth (loop, 1);
253 max_loop = find_common_loop (loop, def_bb->loop_father);
255 if (LIM_DATA (def_stmt) && LIM_DATA (def_stmt)->max_loop)
256 max_loop = find_common_loop (max_loop,
257 LIM_DATA (def_stmt)->max_loop->outer);
258 if (max_loop == loop)
259 return NULL;
260 max_loop = superloop_at_depth (loop, max_loop->depth + 1);
262 return max_loop;
265 /* Returns the outermost superloop of LOOP in that the expression EXPR is
266 invariant. */
268 static struct loop *
269 outermost_invariant_loop_expr (tree expr, struct loop *loop)
271 enum tree_code_class class = TREE_CODE_CLASS (TREE_CODE (expr));
272 unsigned i, nops;
273 struct loop *max_loop = superloop_at_depth (loop, 1), *aloop;
275 if (TREE_CODE (expr) == SSA_NAME
276 || TREE_CODE (expr) == INTEGER_CST
277 || is_gimple_min_invariant (expr))
278 return outermost_invariant_loop (expr, loop);
280 if (class != tcc_unary
281 && class != tcc_binary
282 && class != tcc_expression
283 && class != tcc_comparison)
284 return NULL;
286 nops = TREE_CODE_LENGTH (TREE_CODE (expr));
287 for (i = 0; i < nops; i++)
289 aloop = outermost_invariant_loop_expr (TREE_OPERAND (expr, i), loop);
290 if (!aloop)
291 return NULL;
293 if (flow_loop_nested_p (max_loop, aloop))
294 max_loop = aloop;
297 return max_loop;
300 /* DATA is a structure containing information associated with a statement
301 inside LOOP. DEF is one of the operands of this statement.
303 Find the outermost loop enclosing LOOP in that value of DEF is invariant
304 and record this in DATA->max_loop field. If DEF itself is defined inside
305 this loop as well (i.e. we need to hoist it out of the loop if we want
306 to hoist the statement represented by DATA), record the statement in that
307 DEF is defined to the DATA->depends list. Additionally if ADD_COST is true,
308 add the cost of the computation of DEF to the DATA->cost.
310 If DEF is not invariant in LOOP, return false. Otherwise return TRUE. */
312 static bool
313 add_dependency (tree def, struct lim_aux_data *data, struct loop *loop,
314 bool add_cost)
316 tree def_stmt = SSA_NAME_DEF_STMT (def);
317 basic_block def_bb = bb_for_stmt (def_stmt);
318 struct loop *max_loop;
319 struct depend *dep;
321 if (!def_bb)
322 return true;
324 max_loop = outermost_invariant_loop (def, loop);
325 if (!max_loop)
326 return false;
328 if (flow_loop_nested_p (data->max_loop, max_loop))
329 data->max_loop = max_loop;
331 if (!LIM_DATA (def_stmt))
332 return true;
334 if (add_cost
335 /* Only add the cost if the statement defining DEF is inside LOOP,
336 i.e. if it is likely that by moving the invariants dependent
337 on it, we will be able to avoid creating a new register for
338 it (since it will be only used in these dependent invariants). */
339 && def_bb->loop_father == loop)
340 data->cost += LIM_DATA (def_stmt)->cost;
342 dep = xmalloc (sizeof (struct depend));
343 dep->stmt = def_stmt;
344 dep->next = data->depends;
345 data->depends = dep;
347 return true;
350 /* Returns an estimate for a cost of statement STMT. TODO -- the values here
351 are just ad-hoc constants. The estimates should be based on target-specific
352 values. */
354 static unsigned
355 stmt_cost (tree stmt)
357 tree lhs, rhs;
358 unsigned cost = 1;
360 /* Always try to create possibilities for unswitching. */
361 if (TREE_CODE (stmt) == COND_EXPR)
362 return LIM_EXPENSIVE;
364 lhs = TREE_OPERAND (stmt, 0);
365 rhs = TREE_OPERAND (stmt, 1);
367 /* Hoisting memory references out should almost surely be a win. */
368 if (stmt_references_memory_p (stmt))
369 cost += 20;
371 switch (TREE_CODE (rhs))
373 case CALL_EXPR:
374 /* We should be hoisting calls if possible. */
376 /* Unless the call is a builtin_constant_p; this always folds to a
377 constant, so moving it is useless. */
378 rhs = get_callee_fndecl (rhs);
379 if (DECL_BUILT_IN_CLASS (rhs) == BUILT_IN_NORMAL
380 && DECL_FUNCTION_CODE (rhs) == BUILT_IN_CONSTANT_P)
381 return 0;
383 cost += 20;
384 break;
386 case MULT_EXPR:
387 case TRUNC_DIV_EXPR:
388 case CEIL_DIV_EXPR:
389 case FLOOR_DIV_EXPR:
390 case ROUND_DIV_EXPR:
391 case EXACT_DIV_EXPR:
392 case CEIL_MOD_EXPR:
393 case FLOOR_MOD_EXPR:
394 case ROUND_MOD_EXPR:
395 case TRUNC_MOD_EXPR:
396 /* Division and multiplication are usually expensive. */
397 cost += 20;
398 break;
400 default:
401 break;
404 return cost;
407 /* Determine the outermost loop to that it is possible to hoist a statement
408 STMT and store it to LIM_DATA (STMT)->max_loop. To do this we determine
409 the outermost loop in that the value computed by STMT is invariant.
410 If MUST_PRESERVE_EXEC is true, additionally choose such a loop that
411 we preserve the fact whether STMT is executed. It also fills other related
412 information to LIM_DATA (STMT).
414 The function returns false if STMT cannot be hoisted outside of the loop it
415 is defined in, and true otherwise. */
417 static bool
418 determine_max_movement (tree stmt, bool must_preserve_exec)
420 basic_block bb = bb_for_stmt (stmt);
421 struct loop *loop = bb->loop_father;
422 struct loop *level;
423 struct lim_aux_data *lim_data = LIM_DATA (stmt);
424 tree val;
425 ssa_op_iter iter;
427 if (must_preserve_exec)
428 level = ALWAYS_EXECUTED_IN (bb);
429 else
430 level = superloop_at_depth (loop, 1);
431 lim_data->max_loop = level;
433 FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_USE)
434 if (!add_dependency (val, lim_data, loop, true))
435 return false;
437 FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_VIRTUAL_USES | SSA_OP_VIRTUAL_KILLS)
438 if (!add_dependency (val, lim_data, loop, false))
439 return false;
441 lim_data->cost += stmt_cost (stmt);
443 return true;
446 /* Suppose that some statement in ORIG_LOOP is hoisted to the loop LEVEL,
447 and that one of the operands of this statement is computed by STMT.
448 Ensure that STMT (together with all the statements that define its
449 operands) is hoisted at least out of the loop LEVEL. */
451 static void
452 set_level (tree stmt, struct loop *orig_loop, struct loop *level)
454 struct loop *stmt_loop = bb_for_stmt (stmt)->loop_father;
455 struct depend *dep;
457 stmt_loop = find_common_loop (orig_loop, stmt_loop);
458 if (LIM_DATA (stmt) && LIM_DATA (stmt)->tgt_loop)
459 stmt_loop = find_common_loop (stmt_loop,
460 LIM_DATA (stmt)->tgt_loop->outer);
461 if (flow_loop_nested_p (stmt_loop, level))
462 return;
464 gcc_assert (LIM_DATA (stmt));
465 gcc_assert (level == LIM_DATA (stmt)->max_loop
466 || flow_loop_nested_p (LIM_DATA (stmt)->max_loop, level));
468 LIM_DATA (stmt)->tgt_loop = level;
469 for (dep = LIM_DATA (stmt)->depends; dep; dep = dep->next)
470 set_level (dep->stmt, orig_loop, level);
473 /* Determines an outermost loop from that we want to hoist the statement STMT.
474 For now we chose the outermost possible loop. TODO -- use profiling
475 information to set it more sanely. */
477 static void
478 set_profitable_level (tree stmt)
480 set_level (stmt, bb_for_stmt (stmt)->loop_father, LIM_DATA (stmt)->max_loop);
483 /* Returns true if STMT is not a pure call. */
485 static bool
486 nonpure_call_p (tree stmt)
488 tree call = get_call_expr_in (stmt);
490 if (!call)
491 return false;
493 return TREE_SIDE_EFFECTS (call) != 0;
496 /* Releases the memory occupied by DATA. */
498 static void
499 free_lim_aux_data (struct lim_aux_data *data)
501 struct depend *dep, *next;
503 for (dep = data->depends; dep; dep = next)
505 next = dep->next;
506 free (dep);
508 free (data);
511 /* Determine the outermost loops in that statements in basic block BB are
512 invariant, and record them to the LIM_DATA associated with the statements.
513 Callback for walk_dominator_tree. */
515 static void
516 determine_invariantness_stmt (struct dom_walk_data *dw_data ATTRIBUTE_UNUSED,
517 basic_block bb)
519 enum move_pos pos;
520 block_stmt_iterator bsi;
521 tree stmt;
522 bool maybe_never = ALWAYS_EXECUTED_IN (bb) == NULL;
523 struct loop *outermost = ALWAYS_EXECUTED_IN (bb);
525 if (!bb->loop_father->outer)
526 return;
528 if (dump_file && (dump_flags & TDF_DETAILS))
529 fprintf (dump_file, "Basic block %d (loop %d -- depth %d):\n\n",
530 bb->index, bb->loop_father->num, bb->loop_father->depth);
532 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
534 stmt = bsi_stmt (bsi);
536 pos = movement_possibility (stmt);
537 if (pos == MOVE_IMPOSSIBLE)
539 if (nonpure_call_p (stmt))
541 maybe_never = true;
542 outermost = NULL;
544 continue;
547 stmt_ann (stmt)->common.aux = xcalloc (1, sizeof (struct lim_aux_data));
548 LIM_DATA (stmt)->always_executed_in = outermost;
550 if (maybe_never && pos == MOVE_PRESERVE_EXECUTION)
551 continue;
553 if (!determine_max_movement (stmt, pos == MOVE_PRESERVE_EXECUTION))
555 LIM_DATA (stmt)->max_loop = NULL;
556 continue;
559 if (dump_file && (dump_flags & TDF_DETAILS))
561 print_generic_stmt_indented (dump_file, stmt, 0, 2);
562 fprintf (dump_file, " invariant up to level %d, cost %d.\n\n",
563 LIM_DATA (stmt)->max_loop->depth,
564 LIM_DATA (stmt)->cost);
567 if (LIM_DATA (stmt)->cost >= LIM_EXPENSIVE)
568 set_profitable_level (stmt);
572 /* For each statement determines the outermost loop in that it is invariant,
573 statements on whose motion it depends and the cost of the computation.
574 This information is stored to the LIM_DATA structure associated with
575 each statement. */
577 static void
578 determine_invariantness (void)
580 struct dom_walk_data walk_data;
582 memset (&walk_data, 0, sizeof (struct dom_walk_data));
583 walk_data.before_dom_children_before_stmts = determine_invariantness_stmt;
585 init_walk_dominator_tree (&walk_data);
586 walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
587 fini_walk_dominator_tree (&walk_data);
590 /* Commits edge insertions and updates loop structures. */
592 void
593 loop_commit_inserts (void)
595 unsigned old_last_basic_block, i;
596 basic_block bb;
598 old_last_basic_block = last_basic_block;
599 bsi_commit_edge_inserts ();
600 for (i = old_last_basic_block; i < (unsigned) last_basic_block; i++)
602 bb = BASIC_BLOCK (i);
603 add_bb_to_loop (bb,
604 find_common_loop (EDGE_SUCC (bb, 0)->dest->loop_father,
605 EDGE_PRED (bb, 0)->src->loop_father));
609 /* Hoist the statements in basic block BB out of the loops prescribed by
610 data stored in LIM_DATA structures associated with each statement. Callback
611 for walk_dominator_tree. */
613 static void
614 move_computations_stmt (struct dom_walk_data *dw_data ATTRIBUTE_UNUSED,
615 basic_block bb)
617 struct loop *level;
618 block_stmt_iterator bsi;
619 tree stmt;
620 unsigned cost = 0;
622 if (!bb->loop_father->outer)
623 return;
625 for (bsi = bsi_start (bb); !bsi_end_p (bsi); )
627 stmt = bsi_stmt (bsi);
629 if (!LIM_DATA (stmt))
631 bsi_next (&bsi);
632 continue;
635 cost = LIM_DATA (stmt)->cost;
636 level = LIM_DATA (stmt)->tgt_loop;
637 free_lim_aux_data (LIM_DATA (stmt));
638 stmt_ann (stmt)->common.aux = NULL;
640 if (!level)
642 bsi_next (&bsi);
643 continue;
646 /* We do not really want to move conditionals out of the loop; we just
647 placed it here to force its operands to be moved if necessary. */
648 if (TREE_CODE (stmt) == COND_EXPR)
649 continue;
651 if (dump_file && (dump_flags & TDF_DETAILS))
653 fprintf (dump_file, "Moving statement\n");
654 print_generic_stmt (dump_file, stmt, 0);
655 fprintf (dump_file, "(cost %u) out of loop %d.\n\n",
656 cost, level->num);
658 bsi_insert_on_edge (loop_preheader_edge (level), stmt);
659 bsi_remove (&bsi);
663 /* Hoist the statements out of the loops prescribed by data stored in
664 LIM_DATA structures associated with each statement.*/
666 static void
667 move_computations (void)
669 struct dom_walk_data walk_data;
671 memset (&walk_data, 0, sizeof (struct dom_walk_data));
672 walk_data.before_dom_children_before_stmts = move_computations_stmt;
674 init_walk_dominator_tree (&walk_data);
675 walk_dominator_tree (&walk_data, ENTRY_BLOCK_PTR);
676 fini_walk_dominator_tree (&walk_data);
678 loop_commit_inserts ();
679 rewrite_into_ssa (false);
680 if (!bitmap_empty_p (vars_to_rename))
682 /* The rewrite of ssa names may cause violation of loop closed ssa
683 form invariants. TODO -- avoid these rewrites completely.
684 Information in virtual phi nodes is sufficient for it. */
685 rewrite_into_loop_closed_ssa ();
687 bitmap_clear (vars_to_rename);
690 /* Checks whether the statement defining variable *INDEX can be hoisted
691 out of the loop passed in DATA. Callback for for_each_index. */
693 static bool
694 may_move_till (tree ref, tree *index, void *data)
696 struct loop *loop = data, *max_loop;
698 /* If REF is an array reference, check also that the step and the lower
699 bound is invariant in LOOP. */
700 if (TREE_CODE (ref) == ARRAY_REF)
702 tree step = array_ref_element_size (ref);
703 tree lbound = array_ref_low_bound (ref);
705 max_loop = outermost_invariant_loop_expr (step, loop);
706 if (!max_loop)
707 return false;
709 max_loop = outermost_invariant_loop_expr (lbound, loop);
710 if (!max_loop)
711 return false;
714 max_loop = outermost_invariant_loop (*index, loop);
715 if (!max_loop)
716 return false;
718 return true;
721 /* Forces statements defining (invariant) SSA names in expression EXPR to be
722 moved out of the LOOP. ORIG_LOOP is the loop in that EXPR is used. */
724 static void
725 force_move_till_expr (tree expr, struct loop *orig_loop, struct loop *loop)
727 enum tree_code_class class = TREE_CODE_CLASS (TREE_CODE (expr));
728 unsigned i, nops;
730 if (TREE_CODE (expr) == SSA_NAME)
732 tree stmt = SSA_NAME_DEF_STMT (expr);
733 if (IS_EMPTY_STMT (stmt))
734 return;
736 set_level (stmt, orig_loop, loop);
737 return;
740 if (class != tcc_unary
741 && class != tcc_binary
742 && class != tcc_expression
743 && class != tcc_comparison)
744 return;
746 nops = TREE_CODE_LENGTH (TREE_CODE (expr));
747 for (i = 0; i < nops; i++)
748 force_move_till_expr (TREE_OPERAND (expr, i), orig_loop, loop);
/* Forces statement defining invariants in REF (and *INDEX) to be moved out of
   the LOOP.  The reference REF is used in the loop ORIG_LOOP.  Callback for
   for_each_index.  */

struct fmt_data
{
  struct loop *loop;		/* The loop to move the invariants out of.  */
  struct loop *orig_loop;	/* The loop in that the reference is used.  */
};
761 static bool
762 force_move_till (tree ref, tree *index, void *data)
764 tree stmt;
765 struct fmt_data *fmt_data = data;
767 if (TREE_CODE (ref) == ARRAY_REF)
769 tree step = array_ref_element_size (ref);
770 tree lbound = array_ref_low_bound (ref);
772 force_move_till_expr (step, fmt_data->orig_loop, fmt_data->loop);
773 force_move_till_expr (lbound, fmt_data->orig_loop, fmt_data->loop);
776 if (TREE_CODE (*index) != SSA_NAME)
777 return true;
779 stmt = SSA_NAME_DEF_STMT (*index);
780 if (IS_EMPTY_STMT (stmt))
781 return true;
783 set_level (stmt, fmt_data->orig_loop, fmt_data->loop);
785 return true;
788 /* Records memory reference *REF (that occurs in statement STMT)
789 to the list MEM_REFS. */
791 static void
792 record_mem_ref (struct mem_ref **mem_refs, tree stmt, tree *ref)
794 struct mem_ref *aref = xmalloc (sizeof (struct mem_ref));
796 aref->stmt = stmt;
797 aref->ref = ref;
799 aref->next = *mem_refs;
800 *mem_refs = aref;
803 /* Releases list of memory references MEM_REFS. */
805 static void
806 free_mem_refs (struct mem_ref *mem_refs)
808 struct mem_ref *act;
810 while (mem_refs)
812 act = mem_refs;
813 mem_refs = mem_refs->next;
814 free (act);
818 /* If VAR is defined in LOOP and the statement it is defined in does not belong
819 to the set SEEN, add the statement to QUEUE of length IN_QUEUE and
820 to the set SEEN. */
822 static void
823 maybe_queue_var (tree var, struct loop *loop,
824 sbitmap seen, tree *queue, unsigned *in_queue)
826 tree stmt = SSA_NAME_DEF_STMT (var);
827 basic_block def_bb = bb_for_stmt (stmt);
829 if (!def_bb
830 || !flow_bb_inside_loop_p (loop, def_bb)
831 || TEST_BIT (seen, get_stmt_uid (stmt)))
832 return;
834 SET_BIT (seen, get_stmt_uid (stmt));
835 queue[(*in_queue)++] = stmt;
838 /* If COMMON_REF is NULL, set COMMON_REF to *OP and return true.
839 Otherwise return true if the memory reference *OP is equal to COMMON_REF.
840 Record the reference OP to list MEM_REFS. STMT is the statement in that
841 the reference occurs. */
843 struct sra_data
845 struct mem_ref **mem_refs;
846 tree common_ref;
847 tree stmt;
850 static bool
851 fem_single_reachable_address (tree *op, void *data)
853 struct sra_data *sra_data = data;
855 if (sra_data->common_ref
856 && !operand_equal_p (*op, sra_data->common_ref, 0))
857 return false;
858 sra_data->common_ref = *op;
860 record_mem_ref (sra_data->mem_refs, sra_data->stmt, op);
861 return true;
864 /* Runs CALLBACK for each operand of STMT that is a memory reference. DATA
865 is passed to the CALLBACK as well. The traversal stops if CALLBACK
866 returns false, for_each_memref then returns false as well. Otherwise
867 for_each_memref returns true. */
869 static bool
870 for_each_memref (tree stmt, bool (*callback)(tree *, void *), void *data)
872 tree *op;
874 if (TREE_CODE (stmt) == RETURN_EXPR)
875 stmt = TREE_OPERAND (stmt, 1);
877 if (TREE_CODE (stmt) == MODIFY_EXPR)
879 op = &TREE_OPERAND (stmt, 0);
880 if (TREE_CODE (*op) != SSA_NAME
881 && !callback (op, data))
882 return false;
884 op = &TREE_OPERAND (stmt, 1);
885 if (TREE_CODE (*op) != SSA_NAME
886 && is_gimple_lvalue (*op)
887 && !callback (op, data))
888 return false;
890 stmt = TREE_OPERAND (stmt, 1);
893 if (TREE_CODE (stmt) == WITH_SIZE_EXPR)
894 stmt = TREE_OPERAND (stmt, 0);
896 if (TREE_CODE (stmt) == CALL_EXPR)
898 tree args;
900 for (args = TREE_OPERAND (stmt, 1); args; args = TREE_CHAIN (args))
902 op = &TREE_VALUE (args);
904 if (TREE_CODE (*op) != SSA_NAME
905 && is_gimple_lvalue (*op)
906 && !callback (op, data))
907 return false;
911 return true;
914 /* Determine whether all memory references inside the LOOP that correspond
915 to virtual ssa names defined in statement STMT are equal.
916 If so, store the list of the references to MEM_REFS, and return one
917 of them. Otherwise store NULL to MEM_REFS and return NULL_TREE.
918 *SEEN_CALL_STMT is set to true if the virtual operands suggest
919 that the reference might be clobbered by a call inside the LOOP. */
921 static tree
922 single_reachable_address (struct loop *loop, tree stmt,
923 struct mem_ref **mem_refs,
924 bool *seen_call_stmt)
926 unsigned max_uid = max_stmt_uid + num_ssa_names;
927 tree *queue = xmalloc (sizeof (tree) * max_uid);
928 sbitmap seen = sbitmap_alloc (max_uid);
929 unsigned in_queue = 1;
930 dataflow_t df;
931 unsigned i, n;
932 struct sra_data sra_data;
933 tree call;
934 tree val;
935 ssa_op_iter iter;
937 sbitmap_zero (seen);
939 *mem_refs = NULL;
940 sra_data.mem_refs = mem_refs;
941 sra_data.common_ref = NULL_TREE;
943 queue[0] = stmt;
944 SET_BIT (seen, get_stmt_uid (stmt));
945 *seen_call_stmt = false;
947 while (in_queue)
949 stmt = queue[--in_queue];
950 sra_data.stmt = stmt;
952 if (LIM_DATA (stmt)
953 && LIM_DATA (stmt)->sm_done)
954 goto fail;
956 switch (TREE_CODE (stmt))
958 case MODIFY_EXPR:
959 case CALL_EXPR:
960 case RETURN_EXPR:
961 if (!for_each_memref (stmt, fem_single_reachable_address,
962 &sra_data))
963 goto fail;
965 /* If this is a function that may depend on the memory location,
966 record the fact. We cannot directly refuse call clobbered
967 operands here, since sra_data.common_ref did not have
968 to be set yet. */
969 call = get_call_expr_in (stmt);
970 if (call
971 && !(call_expr_flags (call) & ECF_CONST))
972 *seen_call_stmt = true;
974 /* Traverse also definitions of the VUSES (there may be other
975 distinct from the one we used to get to this statement). */
976 FOR_EACH_SSA_TREE_OPERAND (val, stmt, iter, SSA_OP_VIRTUAL_USES)
977 maybe_queue_var (val, loop, seen, queue, &in_queue);
979 break;
981 case PHI_NODE:
982 for (i = 0; i < (unsigned) PHI_NUM_ARGS (stmt); i++)
983 if (TREE_CODE (PHI_ARG_DEF (stmt, i)) == SSA_NAME)
984 maybe_queue_var (PHI_ARG_DEF (stmt, i), loop,
985 seen, queue, &in_queue);
986 break;
988 default:
989 goto fail;
992 /* Find uses of virtual names. */
993 df = get_immediate_uses (stmt);
994 n = num_immediate_uses (df);
996 for (i = 0; i < n; i++)
998 stmt = immediate_use (df, i);
1000 if (!flow_bb_inside_loop_p (loop, bb_for_stmt (stmt)))
1001 continue;
1003 if (TEST_BIT (seen, get_stmt_uid (stmt)))
1004 continue;
1005 SET_BIT (seen, get_stmt_uid (stmt));
1007 queue[in_queue++] = stmt;
1011 free (queue);
1012 sbitmap_free (seen);
1014 return sra_data.common_ref;
1016 fail:
1017 free_mem_refs (*mem_refs);
1018 *mem_refs = NULL;
1019 free (queue);
1020 sbitmap_free (seen);
1022 return NULL;
1025 /* Rewrites memory references in list MEM_REFS by variable TMP_VAR. */
1027 static void
1028 rewrite_mem_refs (tree tmp_var, struct mem_ref *mem_refs)
1030 tree var;
1031 ssa_op_iter iter;
1033 for (; mem_refs; mem_refs = mem_refs->next)
1035 FOR_EACH_SSA_TREE_OPERAND (var, mem_refs->stmt, iter, SSA_OP_ALL_VIRTUALS)
1037 var = SSA_NAME_VAR (var);
1038 bitmap_set_bit (vars_to_rename, var_ann (var)->uid);
1041 *mem_refs->ref = tmp_var;
1042 modify_stmt (mem_refs->stmt);
1046 /* Records request for store motion of memory reference REF from LOOP.
1047 MEM_REFS is the list of occurrences of the reference REF inside LOOP;
1048 these references are rewritten by a new temporary variable.
1049 Exits from the LOOP are stored in EXITS, there are N_EXITS of them.
1050 The initialization of the temporary variable is put to the preheader
1051 of the loop, and assignments to the reference from the temporary variable
1052 are emitted to exits. */
1054 static void
1055 schedule_sm (struct loop *loop, edge *exits, unsigned n_exits, tree ref,
1056 struct mem_ref *mem_refs)
1058 struct mem_ref *aref;
1059 tree tmp_var;
1060 unsigned i;
1061 tree load, store;
1062 struct fmt_data fmt_data;
1064 if (dump_file && (dump_flags & TDF_DETAILS))
1066 fprintf (dump_file, "Executing store motion of ");
1067 print_generic_expr (dump_file, ref, 0);
1068 fprintf (dump_file, " from loop %d\n", loop->num);
1071 tmp_var = make_rename_temp (TREE_TYPE (ref), "lsm_tmp");
1073 fmt_data.loop = loop;
1074 fmt_data.orig_loop = loop;
1075 for_each_index (&ref, force_move_till, &fmt_data);
1077 rewrite_mem_refs (tmp_var, mem_refs);
1078 for (aref = mem_refs; aref; aref = aref->next)
1079 if (LIM_DATA (aref->stmt))
1080 LIM_DATA (aref->stmt)->sm_done = true;
1082 /* Emit the load & stores. */
1083 load = build (MODIFY_EXPR, void_type_node, tmp_var, ref);
1084 get_stmt_ann (load)->common.aux = xcalloc (1, sizeof (struct lim_aux_data));
1085 LIM_DATA (load)->max_loop = loop;
1086 LIM_DATA (load)->tgt_loop = loop;
1088 /* Put this into the latch, so that we are sure it will be processed after
1089 all dependencies. */
1090 bsi_insert_on_edge (loop_latch_edge (loop), load);
1092 for (i = 0; i < n_exits; i++)
1094 store = build (MODIFY_EXPR, void_type_node,
1095 unshare_expr (ref), tmp_var);
1096 bsi_insert_on_edge (exits[i], store);
1100 /* Returns true if REF may be clobbered by calls. */
1102 static bool
1103 is_call_clobbered_ref (tree ref)
1105 tree base;
1107 base = get_base_address (ref);
1108 if (!base)
1109 return true;
1111 if (DECL_P (base))
1112 return is_call_clobbered (base);
1114 if (INDIRECT_REF_P (base))
1116 /* Check whether the alias tags associated with the pointer
1117 are call clobbered. */
1118 tree ptr = TREE_OPERAND (base, 0);
1119 struct ptr_info_def *pi = SSA_NAME_PTR_INFO (ptr);
1120 tree nmt = (pi) ? pi->name_mem_tag : NULL_TREE;
1121 tree tmt = var_ann (SSA_NAME_VAR (ptr))->type_mem_tag;
1123 if ((nmt && is_call_clobbered (nmt))
1124 || (tmt && is_call_clobbered (tmt)))
1125 return true;
1127 return false;
1130 gcc_unreachable ();
/* Determine whether all memory references inside LOOP corresponding to the
   virtual ssa name REG are equal to each other, and whether the address of
   this common reference can be hoisted outside of the loop.  If this is true,
   prepare the statements that load the value of the memory reference to a
   temporary variable in the loop preheader, store it back on the loop exits,
   and replace all the references inside LOOP by this temporary variable.
   LOOP has N_EXITS stored in EXITS.  */

static void
determine_lsm_reg (struct loop *loop, edge *exits, unsigned n_exits, tree reg)
{
  tree ref;
  struct mem_ref *mem_refs, *aref;
  struct loop *must_exec;
  bool sees_call;

  /* Only virtual operands are of interest; a real (gimple) register is not
     a memory reference.  */
  if (is_gimple_reg (reg))
    return;

  /* Walk the def-use chain of REG; REF is non-NULL only if every reference
     in the chain accesses the same address.  MEM_REFS collects the
     statements containing those references.  */
  ref = single_reachable_address (loop, SSA_NAME_DEF_STMT (reg), &mem_refs,
				  &sees_call);
  if (!ref)
    return;

  /* If we cannot create a ssa name for the result, give up.  */
  if (!is_gimple_reg_type (TREE_TYPE (ref))
      || TREE_THIS_VOLATILE (ref))
    goto fail;

  /* If there is a call that may use the location, give up as well.  */
  if (sees_call
      && is_call_clobbered_ref (ref))
    goto fail;

  /* All parts of the address (indices, bounds, ...) must be invariant so the
     whole reference can be computed in the preheader.  */
  if (!for_each_index (&ref, may_move_till, loop))
    goto fail;

  if (tree_could_trap_p (ref))
    {
      /* If the memory access is unsafe (i.e. it might trap), ensure that some
	 of the statements in that it occurs is always executed when the loop
	 is entered.  This way we know that by moving the load from the
	 reference out of the loop we will not cause the error that would not
	 occur otherwise.

	 TODO -- in fact we would like to check for anticipability of the
	 reference, i.e. that on each path from loop entry to loop exit at
	 least one of the statements containing the memory reference is
	 executed.  */

      for (aref = mem_refs; aref; aref = aref->next)
	{
	  if (!LIM_DATA (aref->stmt))
	    continue;

	  must_exec = LIM_DATA (aref->stmt)->always_executed_in;
	  if (!must_exec)
	    continue;

	  /* Found a reference that is executed whenever LOOP is entered;
	     the hoisted load cannot introduce a trap.  */
	  if (must_exec == loop
	      || flow_loop_nested_p (must_exec, loop))
	    break;
	}

      /* AREF is NULL iff no always-executed reference was found.  */
      if (!aref)
	goto fail;
    }

  schedule_sm (loop, exits, n_exits, ref, mem_refs);

  /* Both success and failure paths must release the reference list.  */
fail: ;
  free_mem_refs (mem_refs);
}
1207 /* Checks whether LOOP (with N_EXITS exits stored in EXITS array) is suitable
1208 for a store motion optimization (i.e. whether we can insert statement
1209 on its exits). */
1211 static bool
1212 loop_suitable_for_sm (struct loop *loop ATTRIBUTE_UNUSED, edge *exits,
1213 unsigned n_exits)
1215 unsigned i;
1217 for (i = 0; i < n_exits; i++)
1218 if (exits[i]->flags & EDGE_ABNORMAL)
1219 return false;
1221 return true;
1224 /* Try to perform store motion for all memory references modified inside
1225 LOOP. */
1227 static void
1228 determine_lsm_loop (struct loop *loop)
1230 tree phi;
1231 unsigned n_exits;
1232 edge *exits = get_loop_exit_edges (loop, &n_exits);
1234 if (!loop_suitable_for_sm (loop, exits, n_exits))
1236 free (exits);
1237 return;
1240 for (phi = phi_nodes (loop->header); phi; phi = PHI_CHAIN (phi))
1241 determine_lsm_reg (loop, exits, n_exits, PHI_RESULT (phi));
1243 free (exits);
/* Try to perform store motion for all memory references modified inside
   any of LOOPS.  */

static void
determine_lsm (struct loops *loops)
{
  struct loop *loop;
  basic_block bb;

  /* Nothing to do if the function contains no loops at all.  */
  if (!loops->tree_root->inner)
    return;

  /* Create a UID for each statement in the function.  Ordering of the
     UIDs is not important for this pass.  */
  max_stmt_uid = 0;
  FOR_EACH_BB (bb)
    {
      block_stmt_iterator bsi;

      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
	stmt_ann (bsi_stmt (bsi))->uid = max_stmt_uid++;
    }

  compute_immediate_uses (TDFA_USE_VOPS, NULL);

  /* Pass the loops from the outermost.  For each virtual operand loop phi node
     check whether all the references inside the loop correspond to a single
     address, and if so, move them.

     The walk below is a non-recursive depth-first traversal of the loop
     tree: descend into inner loops first, then move to siblings, and climb
     back up when a subtree is exhausted.  */
  loop = loops->tree_root->inner;
  while (1)
    {
      determine_lsm_loop (loop);

      /* Descend into the first inner loop, if any.  */
      if (loop->inner)
	{
	  loop = loop->inner;
	  continue;
	}
      /* No inner loop; climb until a sibling exists.  Reaching the tree
	 root means every loop has been processed -- clean up and stop.  */
      while (!loop->next)
	{
	  loop = loop->outer;
	  if (loop == loops->tree_root)
	    {
	      free_df ();
	      loop_commit_inserts ();
	      return;
	    }
	}

      loop = loop->next;
    }
}
/* Fills ALWAYS_EXECUTED_IN information for basic blocks of LOOP, i.e.
   for each such basic block bb records the outermost loop for that execution
   of its header implies execution of bb.  CONTAINS_CALL is the bitmap of
   blocks that contain a nonpure call.  */

static void
fill_always_executed_in (struct loop *loop, sbitmap contains_call)
{
  basic_block bb = NULL, *bbs, last = NULL;
  unsigned i;
  edge e;
  struct loop *inn_loop = loop;

  /* header->aux already set means this loop was handled by an outer
     invocation; only recurse into the children then.  */
  if (!loop->header->aux)
    {
      bbs = get_loop_body_in_dom_order (loop);

      /* Scan blocks in dominance order; LAST tracks the most recent block
	 that dominates the latch, i.e. is executed on every iteration.
	 Any of the breaks below stops the scan at the first block after
	 which execution is no longer guaranteed.  */
      for (i = 0; i < loop->num_nodes; i++)
	{
	  edge_iterator ei;
	  bb = bbs[i];

	  if (dominated_by_p (CDI_DOMINATORS, loop->latch, bb))
	    last = bb;

	  /* A nonpure call may not return (e.g. exit/longjmp), so nothing
	     after it is guaranteed to execute.  */
	  if (TEST_BIT (contains_call, bb->index))
	    break;

	  /* An edge leaving the loop means the blocks after BB may be
	     skipped.  E is NULL when the iterator was exhausted without
	     finding such an edge.  */
	  FOR_EACH_EDGE (e, ei, bb->succs)
	    if (!flow_bb_inside_loop_p (loop, e->dest))
	      break;
	  if (e)
	    break;

	  /* A loop might be infinite (TODO use simple loop analysis
	     to disprove this if possible).  */
	  if (bb->flags & BB_IRREDUCIBLE_LOOP)
	    break;

	  /* Once we left the inner loop we entered (see below), later
	     blocks are only conditionally executed.  */
	  if (!flow_bb_inside_loop_p (inn_loop, bb))
	    break;

	  if (bb->loop_father->header == bb)
	    {
	      if (!dominated_by_p (CDI_DOMINATORS, loop->latch, bb))
		break;

	      /* In a loop that is always entered we may proceed anyway.
		 But record that we entered it and stop once we leave it.  */
	      inn_loop = bb->loop_father;
	    }
	}

      /* Mark LAST and all its dominators up to the header as always
	 executed in LOOP.  */
      while (1)
	{
	  last->aux = loop;
	  if (last == loop->header)
	    break;
	  last = get_immediate_dominator (CDI_DOMINATORS, last);
	}

      free (bbs);
    }

  for (loop = loop->inner; loop; loop = loop->next)
    fill_always_executed_in (loop, contains_call);
}
1367 /* Compute the global information needed by the loop invariant motion pass.
1368 LOOPS is the loop tree. */
1370 static void
1371 tree_ssa_lim_initialize (struct loops *loops)
1373 sbitmap contains_call = sbitmap_alloc (last_basic_block);
1374 block_stmt_iterator bsi;
1375 struct loop *loop;
1376 basic_block bb;
1378 sbitmap_zero (contains_call);
1379 FOR_EACH_BB (bb)
1381 for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
1383 if (nonpure_call_p (bsi_stmt (bsi)))
1384 break;
1387 if (!bsi_end_p (bsi))
1388 SET_BIT (contains_call, bb->index);
1391 for (loop = loops->tree_root->inner; loop; loop = loop->next)
1392 fill_always_executed_in (loop, contains_call);
1394 sbitmap_free (contains_call);
1397 /* Cleans up after the invariant motion pass. */
1399 static void
1400 tree_ssa_lim_finalize (void)
1402 basic_block bb;
1404 FOR_EACH_BB (bb)
1406 bb->aux = NULL;
/* Moves invariants from LOOPS.  Only "expensive" invariants are moved out --
   i.e. those that are likely to be win regardless of the register pressure.  */

void
tree_ssa_lim (struct loops *loops)
{
  /* Phase 0: precompute ALWAYS_EXECUTED_IN for all basic blocks.  */
  tree_ssa_lim_initialize (loops);

  /* Phase 1: for every statement, find the outermost loop it is invariant
     in and estimate the cost of computing it.  */
  determine_invariantness ();

  /* Phase 2: decide which memory references can be hoisted out of their
     loops (store motion), forcing the invariants they need to move too.  */
  determine_lsm (loops);

  /* Phase 3: actually move the computations that are expensive enough.  */
  move_computations ();

  /* Phase 4: clear the per-block auxiliary data.  */
  tree_ssa_lim_finalize ();
}