gcc/store-motion.c
/* Store motion via Lazy Code Motion on the reverse CFG.
   Copyright (C) 1997-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "toplev.h"

#include "rtl.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "insn-config.h"
#include "recog.h"
#include "predict.h"
#include "function.h"
#include "dominance.h"
#include "cfg.h"
#include "cfgrtl.h"
#include "cfganal.h"
#include "lcm.h"
#include "cfgcleanup.h"
#include "basic-block.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "except.h"
#include "intl.h"
#include "tree-pass.h"
#include "df.h"
#include "dbgcnt.h"
#include "rtl-iter.h"
/* This pass implements downward store motion.
   As of May 1, 2009, the pass is not enabled by default on any target,
   but bootstrap completes on ia64 and x86_64 with the pass enabled.  */
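/* For illustration, a hypothetical example: when both arms of a diamond
   store to the same location, each store is replaced by a copy into a
   new pseudo-register and a single store is inserted where the paths
   meet, closer to the exit:

	if (c)                     if (c)
	  *p = a;                    r = a;
	else            ==>        else
	  *p = b;                    r = b;
	                           *p = r;

   The insertion and deletion points are computed by the LCM framework
   run on the reverse CFG (see one_store_motion_pass below).  */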
/* TODO:
   - remove_reachable_equiv_notes is an incomprehensible pile of goo and
     a compile time hog that needs a rewrite (maybe cache st_exprs to
     invalidate REG_EQUAL/REG_EQUIV notes for?).
   - pattern_regs in st_expr should be a regset (on its own obstack).
   - antic_stores and avail_stores should be VECs instead of lists.
   - store_motion_mems should be a vec instead of a list.
   - there should be an alloc pool for struct st_expr objects.
   - investigate whether it is helpful to make the address of an st_expr
     a cselib VALUE.
   - when GIMPLE alias information is exported, the effectiveness of this
     pass should be re-evaluated.
*/
/* This is a list of store expressions (MEMs).  The structure is used
   as an expression table to track stores which look interesting, and
   might be moveable towards the exit block.  */

struct st_expr
{
  /* Pattern of this mem.  */
  rtx pattern;
  /* List of registers mentioned by the mem.  */
  rtx pattern_regs;
  /* INSN list of stores that are locally anticipatable.  */
  rtx_insn_list *antic_stores;
  /* INSN list of stores that are locally available.  */
  rtx_insn_list *avail_stores;
  /* Next in the list.  */
  struct st_expr * next;
  /* Store ID in the dataflow bitmaps.  */
  int index;
  /* Hash value for the hash table.  */
  unsigned int hash_index;
  /* Register holding the stored expression when a store is moved.
     This field is also used as a cache in find_moveable_store, see
     LAST_AVAIL_CHECK_FAILURE below.  */
  rtx reaching_reg;
};

/* Head of the list of load/store memory refs.  */
static struct st_expr * store_motion_mems = NULL;

/* These bitmaps will hold the local dataflow properties per basic block.  */
static sbitmap *st_kill, *st_avloc, *st_antloc, *st_transp;

/* Nonzero for expressions which should be inserted on a specific edge.  */
static sbitmap *st_insert_map;

/* Nonzero for expressions which should be deleted in a specific block.  */
static sbitmap *st_delete_map;

/* Global holding the number of store expressions we are dealing with.  */
static int num_stores;

/* Contains the edge_list returned by pre_edge_rev_lcm.  */
static struct edge_list *edge_list;

/* Hashtable helpers.  */

struct st_expr_hasher : typed_noop_remove <st_expr>
{
  typedef st_expr *value_type;
  typedef st_expr *compare_type;
  static inline hashval_t hash (const st_expr *);
  static inline bool equal (const st_expr *, const st_expr *);
};

inline hashval_t
st_expr_hasher::hash (const st_expr *x)
{
  int do_not_record_p = 0;
  return hash_rtx (x->pattern, GET_MODE (x->pattern), &do_not_record_p, NULL, false);
}

inline bool
st_expr_hasher::equal (const st_expr *ptr1, const st_expr *ptr2)
{
  return exp_equiv_p (ptr1->pattern, ptr2->pattern, 0, true);
}

/* Hashtable for the load/store memory refs.  */
static hash_table<st_expr_hasher> *store_motion_mems_table;
/* This will search the st_expr list for a matching expression.  If it
   doesn't find one, we create one and initialize it.  */

static struct st_expr *
st_expr_entry (rtx x)
{
  int do_not_record_p = 0;
  struct st_expr * ptr;
  unsigned int hash;
  st_expr **slot;
  struct st_expr e;

  hash = hash_rtx (x, GET_MODE (x), &do_not_record_p,
		   NULL, /*have_reg_qty=*/false);

  e.pattern = x;
  slot = store_motion_mems_table->find_slot_with_hash (&e, hash, INSERT);
  if (*slot)
    return *slot;

  ptr = XNEW (struct st_expr);

  ptr->next = store_motion_mems;
  ptr->pattern = x;
  ptr->pattern_regs = NULL_RTX;
  ptr->antic_stores = NULL;
  ptr->avail_stores = NULL;
  ptr->reaching_reg = NULL_RTX;
  ptr->index = 0;
  ptr->hash_index = hash;
  store_motion_mems = ptr;
  *slot = ptr;

  return ptr;
}
/* Free up an individual st_expr entry.  */

static void
free_st_expr_entry (struct st_expr * ptr)
{
  free_INSN_LIST_list (& ptr->antic_stores);
  free_INSN_LIST_list (& ptr->avail_stores);

  free (ptr);
}

/* Free up all memory associated with the st_expr list.  */

static void
free_store_motion_mems (void)
{
  delete store_motion_mems_table;
  store_motion_mems_table = NULL;

  while (store_motion_mems)
    {
      struct st_expr * tmp = store_motion_mems;
      store_motion_mems = store_motion_mems->next;
      free_st_expr_entry (tmp);
    }
  store_motion_mems = NULL;
}
/* Assign each element of the list of mems a monotonically increasing value.  */

static int
enumerate_store_motion_mems (void)
{
  struct st_expr * ptr;
  int n = 0;

  for (ptr = store_motion_mems; ptr != NULL; ptr = ptr->next)
    ptr->index = n++;

  return n;
}

/* Return first item in the list.  */

static inline struct st_expr *
first_st_expr (void)
{
  return store_motion_mems;
}

/* Return the next item in the list after the specified one.  */

static inline struct st_expr *
next_st_expr (struct st_expr * ptr)
{
  return ptr->next;
}
/* Dump debugging info about the store_motion_mems list.  */

static void
print_store_motion_mems (FILE * file)
{
  struct st_expr * ptr;

  fprintf (file, "STORE_MOTION list of MEM exprs considered:\n");

  for (ptr = first_st_expr (); ptr != NULL; ptr = next_st_expr (ptr))
    {
      fprintf (file, "  Pattern (%3d): ", ptr->index);

      print_rtl (file, ptr->pattern);

      fprintf (file, "\n	 ANTIC stores : ");

      if (ptr->antic_stores)
	print_rtl (file, ptr->antic_stores);
      else
	fprintf (file, "(nil)");

      fprintf (file, "\n	 AVAIL stores : ");

      if (ptr->avail_stores)
	print_rtl (file, ptr->avail_stores);
      else
	fprintf (file, "(nil)");

      fprintf (file, "\n\n");
    }

  fprintf (file, "\n");
}
/* Return false if any of the registers in list X are killed
   due to the set of registers in REGS_SET.  */

static bool
store_ops_ok (const_rtx x, int *regs_set)
{
  const_rtx reg;

  for (; x; x = XEXP (x, 1))
    {
      reg = XEXP (x, 0);
      if (regs_set[REGNO (reg)])
	return false;
    }

  return true;
}
/* Returns a list of registers mentioned in X.
   FIXME: A regset would be prettier and less expensive.  */

static rtx_expr_list *
extract_mentioned_regs (rtx x)
{
  rtx_expr_list *mentioned_regs = NULL;
  subrtx_var_iterator::array_type array;
  FOR_EACH_SUBRTX_VAR (iter, array, x, NONCONST)
    {
      rtx x = *iter;
      if (REG_P (x))
	mentioned_regs = alloc_EXPR_LIST (0, x, mentioned_regs);
    }
  return mentioned_regs;
}
/* Check to see if the load X is aliased with STORE_PATTERN.
   AFTER is true if we are checking the case when STORE_PATTERN occurs
   after X.  */

static bool
load_kills_store (const_rtx x, const_rtx store_pattern, int after)
{
  if (after)
    return anti_dependence (x, store_pattern);
  else
    return true_dependence (store_pattern, GET_MODE (store_pattern), x);
}
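/* Illustrative example (hypothetical insns): with X = (mem:SI (reg q))
   and STORE_PATTERN = (mem:SI (reg p)), where p may alias q,

     AFTER != 0:   ... = *q;  *p = v;   anti dependence -- the load kills
       the store's anticipatability (the store cannot be regarded as
       already happening at the block entry, before the load);
     AFTER == 0:   *p = v;  ... = *q;   true dependence -- the load kills
       the store's availability (the store cannot be regarded as still
       pending at the block exit, after the load).  */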
/* Go through the entire rtx X, looking for any loads which might alias
   STORE_PATTERN.  Return true if found.
   AFTER is true if we are checking the case when STORE_PATTERN occurs
   after the insn X.  */

static bool
find_loads (const_rtx x, const_rtx store_pattern, int after)
{
  const char * fmt;
  int i, j;
  int ret = false;

  if (!x)
    return false;

  if (GET_CODE (x) == SET)
    x = SET_SRC (x);

  if (MEM_P (x))
    {
      if (load_kills_store (x, store_pattern, after))
	return true;
    }

  /* Recursively process the insn.  */
  fmt = GET_RTX_FORMAT (GET_CODE (x));

  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
    {
      if (fmt[i] == 'e')
	ret |= find_loads (XEXP (x, i), store_pattern, after);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  ret |= find_loads (XVECEXP (x, i, j), store_pattern, after);
    }
  return ret;
}
/* Go through pattern PAT looking for any loads which might kill the
   store in X.  Return true if found.
   AFTER is true if we are checking the case when the store X occurs
   after the insn for PAT.  */

static inline bool
store_killed_in_pat (const_rtx x, const_rtx pat, int after)
{
  if (GET_CODE (pat) == SET)
    {
      rtx dest = SET_DEST (pat);

      if (GET_CODE (dest) == ZERO_EXTRACT)
	dest = XEXP (dest, 0);

      /* Check for memory stores to aliased objects.  */
      if (MEM_P (dest)
	  && !exp_equiv_p (dest, x, 0, true))
	{
	  if (after)
	    {
	      if (output_dependence (dest, x))
		return true;
	    }
	  else
	    {
	      if (output_dependence (x, dest))
		return true;
	    }
	}
    }

  if (find_loads (pat, x, after))
    return true;

  return false;
}
/* Check if INSN kills the store pattern X (is aliased with it).
   AFTER is true if we are checking the case when store X occurs
   after the insn.  Return true if it does.  */

static bool
store_killed_in_insn (const_rtx x, const_rtx x_regs, const rtx_insn *insn, int after)
{
  const_rtx reg, note, pat;

  if (! NONDEBUG_INSN_P (insn))
    return false;

  if (CALL_P (insn))
    {
      /* A normal or pure call might read from pattern,
	 but a const call will not.  */
      if (!RTL_CONST_CALL_P (insn))
	return true;

      /* But even a const call reads its parameters.  Check whether the
	 base of any of the registers used in the mem is the stack
	 pointer.  */
      for (reg = x_regs; reg; reg = XEXP (reg, 1))
	if (may_be_sp_based_p (XEXP (reg, 0)))
	  return true;

      return false;
    }

  pat = PATTERN (insn);
  if (GET_CODE (pat) == SET)
    {
      if (store_killed_in_pat (x, pat, after))
	return true;
    }
  else if (GET_CODE (pat) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (pat, 0); i++)
	if (store_killed_in_pat (x, XVECEXP (pat, 0, i), after))
	  return true;
    }
  else if (find_loads (PATTERN (insn), x, after))
    return true;

  /* If this insn has a REG_EQUAL or REG_EQUIV note referencing a memory
     location aliased with X, then this insn kills X.  */
  note = find_reg_equal_equiv_note (insn);
  if (! note)
    return false;
  note = XEXP (note, 0);

  /* However, if the note represents a must alias rather than a may
     alias relationship, then it does not kill X.  */
  if (exp_equiv_p (note, x, 0, true))
    return false;

  /* See if there are any aliased loads in the note.  */
  return find_loads (note, x, after);
}
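/* A hypothetical example for the REG_EQUAL case above: an insn

     (set (reg 100) (reg 101))  with note  (expr_list:REG_EQUAL (mem:SI (reg 102)))

   loads nothing itself, but the note asserts that reg 100 holds the
   value of that memory location.  If the noted mem may alias X without
   being structurally equal to it, later passes could substitute the mem
   for reg 100, so the insn must conservatively be treated as killing X.  */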
/* Returns true if the expression X is loaded or clobbered on or after INSN
   within basic block BB.  REGS_SET_AFTER is bitmap of registers set in
   or after the insn.  X_REGS is list of registers mentioned in X.  If the
   store is killed, return the last insn that kills it in FAIL_INSN.  */

static bool
store_killed_after (const_rtx x, const_rtx x_regs, const rtx_insn *insn,
		    const_basic_block bb,
		    int *regs_set_after, rtx *fail_insn)
{
  rtx_insn *last = BB_END (bb), *act;

  if (!store_ops_ok (x_regs, regs_set_after))
    {
      /* We do not know where it will happen.  */
      if (fail_insn)
	*fail_insn = NULL_RTX;
      return true;
    }

  /* Scan from the end, so that fail_insn is determined correctly.  */
  for (act = last; act != PREV_INSN (insn); act = PREV_INSN (act))
    if (store_killed_in_insn (x, x_regs, act, false))
      {
	if (fail_insn)
	  *fail_insn = act;
	return true;
      }

  return false;
}
/* Returns true if the expression X is loaded or clobbered on or before INSN
   within basic block BB.  X_REGS is list of registers mentioned in X.
   REGS_SET_BEFORE is bitmap of registers set before or in this insn.  */

static bool
store_killed_before (const_rtx x, const_rtx x_regs, const rtx_insn *insn,
		     const_basic_block bb, int *regs_set_before)
{
  rtx_insn *first = BB_HEAD (bb);

  if (!store_ops_ok (x_regs, regs_set_before))
    return true;

  for ( ; insn != PREV_INSN (first); insn = PREV_INSN (insn))
    if (store_killed_in_insn (x, x_regs, insn, true))
      return true;

  return false;
}
/* The last insn in the basic block that compute_store_table is processing,
   where store_killed_after is true for X.
   Since we go through the basic block from BB_END to BB_HEAD, this is
   also the available store at the end of the basic block.  Therefore
   this is in effect a cache, to avoid calling store_killed_after for
   equivalent aliasing store expressions.
   This value is only meaningful during the computation of the store
   table.  We hi-jack the REACHING_REG field of struct st_expr to save
   a bit of memory.  */
#define LAST_AVAIL_CHECK_FAILURE(x) ((x)->reaching_reg)
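/* A worked example of the cache (hypothetical insns):

     insn 1:  *p = a
     insn 2:  ... = *q	; may alias *p
     insn 3:  *p = b

   Processing insn 1, store_killed_after scans down from BB_END and
   stops at insn 2, which is recorded in LAST_AVAIL_CHECK_FAILURE.
   Processing insn 3, find_moveable_store only has to walk from BB_END
   down to insn 3 to see that the cached failure point was not reached,
   so insn 3 is available without another aliasing scan.  */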
/* Determine whether INSN is a MEM store pattern that we will consider moving.
   REGS_SET_BEFORE is bitmap of registers set before (and including) the
   current insn, REGS_SET_AFTER is bitmap of registers set after (and
   including) the insn in this basic block.  We must be passing through BB from
   head to end, as we are using this fact to speed things up.

   The results are stored this way:

   -- the first anticipatable expression is added into ANTIC_STORES
   -- if the processed expression is not anticipatable, NULL_RTX is added
      there instead, so that we can use it as an indicator that no further
      expression of this type may be anticipatable
   -- if the expression is available, it is added as head of AVAIL_STORES;
      consequently, all of them but this head are dead and may be deleted.
   -- if the expression is not available, the insn due to which it fails to be
      available is stored in REACHING_REG (via LAST_AVAIL_CHECK_FAILURE).

   Things are complicated a bit by the fact that there may already be stores
   to the same MEM from other blocks; also, the caller must take care of the
   necessary cleanup of the temporary markers after the end of the basic
   block.  */
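/* A hypothetical example of the encoding: for a block containing

     *p = a	; first store to *p, nothing kills it earlier in the block
     *p = b	; last store to *p, nothing kills it later in the block

   the insn for "*p = a" is recorded in ANTIC_STORES and the insn for
   "*p = b" ends up at the head of AVAIL_STORES.  If instead a killing
   load preceded "*p = a", a NULL_RTX entry would mark the block so that
   no later store to *p in it is tested for anticipatability again.  */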
static void
find_moveable_store (rtx_insn *insn, int *regs_set_before, int *regs_set_after)
{
  struct st_expr * ptr;
  rtx dest, set;
  int check_anticipatable, check_available;
  basic_block bb = BLOCK_FOR_INSN (insn);

  set = single_set (insn);
  if (!set)
    return;

  dest = SET_DEST (set);

  if (! MEM_P (dest) || MEM_VOLATILE_P (dest)
      || GET_MODE (dest) == BLKmode)
    return;

  if (side_effects_p (dest))
    return;

  /* If we are handling exceptions, we must be careful with memory references
     that may trap.  If we are not, the behavior is undefined, so we may just
     continue.  */
  if (cfun->can_throw_non_call_exceptions && may_trap_p (dest))
    return;

  /* Even if the destination cannot trap, the source may.  In this case we'd
     need to handle updating the REG_EH_REGION note.  */
  if (find_reg_note (insn, REG_EH_REGION, NULL_RTX))
    return;

  /* Make sure that the SET_SRC of this store insn can be assigned to
     a register, or we will fail later on in replace_store_insn, which
     assumes that we can do this.  But sometimes the target machine has
     oddities like MEM read-modify-write instructions.  See for example
     PR24257.  */
  if (!can_assign_to_reg_without_clobbers_p (SET_SRC (set)))
    return;

  ptr = st_expr_entry (dest);
  if (!ptr->pattern_regs)
    ptr->pattern_regs = extract_mentioned_regs (dest);

  /* Do not check for anticipatability if we either found one anticipatable
     store already, or tested for one and found out that it was killed.  */
  check_anticipatable = 0;
  if (!ptr->antic_stores)
    check_anticipatable = 1;
  else
    {
      rtx_insn *tmp = ptr->antic_stores->insn ();
      if (tmp != NULL_RTX
	  && BLOCK_FOR_INSN (tmp) != bb)
	check_anticipatable = 1;
    }
  if (check_anticipatable)
    {
      rtx_insn *tmp;
      if (store_killed_before (dest, ptr->pattern_regs, insn, bb, regs_set_before))
	tmp = NULL;
      else
	tmp = insn;
      ptr->antic_stores = alloc_INSN_LIST (tmp, ptr->antic_stores);
    }

  /* It is not necessary to check whether the store is available if we did
     it successfully before; if we failed before, do not bother to check
     until we reach the insn that caused us to fail.  */
  check_available = 0;
  if (!ptr->avail_stores)
    check_available = 1;
  else
    {
      rtx_insn *tmp = ptr->avail_stores->insn ();
      if (BLOCK_FOR_INSN (tmp) != bb)
	check_available = 1;
    }
  if (check_available)
    {
      /* Check whether we have already reached the insn at which the check
	 failed last time.  */
      if (LAST_AVAIL_CHECK_FAILURE (ptr))
	{
	  rtx_insn *tmp;
	  for (tmp = BB_END (bb);
	       tmp != insn && tmp != LAST_AVAIL_CHECK_FAILURE (ptr);
	       tmp = PREV_INSN (tmp))
	    continue;
	  if (tmp == insn)
	    check_available = 0;
	}
      else
	check_available = store_killed_after (dest, ptr->pattern_regs, insn,
					      bb, regs_set_after,
					      &LAST_AVAIL_CHECK_FAILURE (ptr));
    }
  if (!check_available)
    ptr->avail_stores = alloc_INSN_LIST (insn, ptr->avail_stores);
}
/* Find available and anticipatable stores.  */

static int
compute_store_table (void)
{
  int ret;
  basic_block bb;
#ifdef ENABLE_CHECKING
  unsigned regno;
#endif
  rtx_insn *insn;
  rtx_insn *tmp;
  df_ref def;
  int *last_set_in, *already_set;
  struct st_expr * ptr, **prev_next_ptr_ptr;
  unsigned int max_gcse_regno = max_reg_num ();

  store_motion_mems = NULL;
  store_motion_mems_table = new hash_table<st_expr_hasher> (13);
  last_set_in = XCNEWVEC (int, max_gcse_regno);
  already_set = XNEWVEC (int, max_gcse_regno);

  /* Find all the stores we care about.  */
  FOR_EACH_BB_FN (bb, cfun)
    {
      /* First compute the registers set in this block.  */
      FOR_BB_INSNS (bb, insn)
	{
	  if (! NONDEBUG_INSN_P (insn))
	    continue;

	  FOR_EACH_INSN_DEF (def, insn)
	    last_set_in[DF_REF_REGNO (def)] = INSN_UID (insn);
	}

      /* Now find the stores.  */
      memset (already_set, 0, sizeof (int) * max_gcse_regno);
      FOR_BB_INSNS (bb, insn)
	{
	  if (! NONDEBUG_INSN_P (insn))
	    continue;

	  FOR_EACH_INSN_DEF (def, insn)
	    already_set[DF_REF_REGNO (def)] = INSN_UID (insn);

	  /* Now that we've marked regs, look for stores.  */
	  find_moveable_store (insn, already_set, last_set_in);

	  /* Unmark regs that are no longer set.  */
	  FOR_EACH_INSN_DEF (def, insn)
	    if (last_set_in[DF_REF_REGNO (def)] == INSN_UID (insn))
	      last_set_in[DF_REF_REGNO (def)] = 0;
	}
    }

#ifdef ENABLE_CHECKING
  /* last_set_in should now be all-zero.  */
  for (regno = 0; regno < max_gcse_regno; regno++)
    gcc_assert (!last_set_in[regno]);
#endif

  /* Clear temporary marks.  */
  for (ptr = first_st_expr (); ptr != NULL; ptr = next_st_expr (ptr))
    {
      LAST_AVAIL_CHECK_FAILURE (ptr) = NULL_RTX;
      if (ptr->antic_stores
	  && (tmp = ptr->antic_stores->insn ()) == NULL_RTX)
	ptr->antic_stores = ptr->antic_stores->next ();
    }

  /* Remove the stores that are not available anywhere, as there will
     be no opportunity to optimize them.  */
  for (ptr = store_motion_mems, prev_next_ptr_ptr = &store_motion_mems;
       ptr != NULL;
       ptr = *prev_next_ptr_ptr)
    {
      if (! ptr->avail_stores)
	{
	  *prev_next_ptr_ptr = ptr->next;
	  store_motion_mems_table->remove_elt_with_hash (ptr, ptr->hash_index);
	  free_st_expr_entry (ptr);
	}
      else
	prev_next_ptr_ptr = &ptr->next;
    }

  ret = enumerate_store_motion_mems ();

  if (dump_file)
    print_store_motion_mems (dump_file);

  free (last_set_in);
  free (already_set);
  return ret;
}
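/* A worked example of the two marker arrays (hypothetical insns):

     insn 1:  r5 = ...
     insn 2:  *(r5 + 4) = x
     insn 3:  r5 = ...

   When find_moveable_store processes insn 2, already_set marks r5 (set
   at or before insn 2), so the store is not anticipatable; and
   last_set_in still marks r5 because of insn 3 (set at or after insn 2),
   so the store is not available either.  Without insn 3, r5's entry in
   last_set_in is cleared as soon as insn 1 has been processed, and the
   store can be recorded as available.  */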
/* In all code following this, REACHING_REG has its original
   meaning again.  To avoid confusion, undef the accessor macro for
   the temporary marks usage in compute_store_table.  */
#undef LAST_AVAIL_CHECK_FAILURE
/* Insert an instruction at the beginning of a basic block, and update
   the BB_HEAD if needed.  */

static void
insert_insn_start_basic_block (rtx_insn *insn, basic_block bb)
{
  /* Insert at start of successor block.  */
  rtx_insn *prev = PREV_INSN (BB_HEAD (bb));
  rtx_insn *before = BB_HEAD (bb);
  while (before != 0)
    {
      if (! LABEL_P (before)
	  && !NOTE_INSN_BASIC_BLOCK_P (before))
	break;
      prev = before;
      if (prev == BB_END (bb))
	break;
      before = NEXT_INSN (before);
    }

  insn = emit_insn_after_noloc (insn, prev, bb);

  if (dump_file)
    {
      fprintf (dump_file, "STORE_MOTION  insert store at start of BB %d:\n",
	       bb->index);
      print_inline_rtx (dump_file, insn, 6);
      fprintf (dump_file, "\n");
    }
}
/* This routine will insert a store on an edge.  EXPR is the st_expr entry for
   the memory reference, and E is the edge to insert it on.  Returns nonzero
   if an edge insertion was performed.  */

static int
insert_store (struct st_expr * expr, edge e)
{
  rtx reg;
  rtx_insn *insn;
  basic_block bb;
  edge tmp;
  edge_iterator ei;

  /* We did all the deletions before this insert, so if we didn't delete a
     store, then we haven't set the reaching reg yet either.  */
  if (expr->reaching_reg == NULL_RTX)
    return 0;

  if (e->flags & EDGE_FAKE)
    return 0;

  reg = expr->reaching_reg;
  insn = gen_move_insn (copy_rtx (expr->pattern), reg);

  /* If we are inserting this expression on ALL predecessor edges of a BB,
     insert it at the start of the BB, and reset the insert bits on the other
     edges so we don't try to insert it on them as well.  */
  bb = e->dest;
  FOR_EACH_EDGE (tmp, ei, e->dest->preds)
    if (!(tmp->flags & EDGE_FAKE))
      {
	int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);

	gcc_assert (index != EDGE_INDEX_NO_EDGE);
	if (! bitmap_bit_p (st_insert_map[index], expr->index))
	  break;
      }

  /* If tmp is NULL, we found an insertion on every edge, blank the
     insertion vector for these edges, and insert at the start of the BB.  */
  if (!tmp && bb != EXIT_BLOCK_PTR_FOR_FN (cfun))
    {
      FOR_EACH_EDGE (tmp, ei, e->dest->preds)
	{
	  int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
	  bitmap_clear_bit (st_insert_map[index], expr->index);
	}
      insert_insn_start_basic_block (insn, bb);
      return 0;
    }

  /* We can't put stores in the front of blocks pointed to by abnormal
     edges since that may put a store where one was not before.  */
  gcc_assert (!(e->flags & EDGE_ABNORMAL));

  insert_insn_on_edge (insn, e);

  if (dump_file)
    {
      fprintf (dump_file, "STORE_MOTION  insert insn on edge (%d, %d):\n",
	       e->src->index, e->dest->index);
      print_inline_rtx (dump_file, insn, 6);
      fprintf (dump_file, "\n");
    }

  return 1;
}
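/* A small illustration of the two insertion strategies: if LCM asks for
   expression E on every incoming edge of block B,

	 e1 \	/ e2
	     [B]

   the store is emitted once at the start of B and E's bits for e1 and
   e2 are cleared in st_insert_map, avoiding two edge splits; if only
   e1 requires E, the store is queued on e1 with insert_insn_on_edge and
   materialized later by commit_edge_insertions.  */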
/* Remove any REG_EQUAL or REG_EQUIV notes containing a reference to the
   memory location in SMEXPR set in basic block BB.

   This could be rather expensive.  */
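/* The walk below is an iterative depth-first search over the blocks
   reachable from BB, driven by an explicit stack of edge iterators.  In
   each visited block the insn scan stops at the first store of the
   expression (found through ANTIC_STORES): notes beyond that point are
   reached by that store rather than by the moved definition, so they
   remain accurate.  */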
static void
remove_reachable_equiv_notes (basic_block bb, struct st_expr *smexpr)
{
  edge_iterator *stack, ei;
  int sp;
  edge act;
  sbitmap visited = sbitmap_alloc (last_basic_block_for_fn (cfun));
  rtx last, note;
  rtx_insn *insn;
  rtx mem = smexpr->pattern;

  stack = XNEWVEC (edge_iterator, n_basic_blocks_for_fn (cfun));
  sp = 0;
  ei = ei_start (bb->succs);

  bitmap_clear (visited);

  act = (EDGE_COUNT (ei_container (ei)) > 0 ? EDGE_I (ei_container (ei), 0) : NULL);
  while (1)
    {
      if (!act)
	{
	  if (!sp)
	    {
	      free (stack);
	      sbitmap_free (visited);
	      return;
	    }
	  act = ei_edge (stack[--sp]);
	}
      bb = act->dest;

      if (bb == EXIT_BLOCK_PTR_FOR_FN (cfun)
	  || bitmap_bit_p (visited, bb->index))
	{
	  if (!ei_end_p (ei))
	    ei_next (&ei);
	  act = (! ei_end_p (ei)) ? ei_edge (ei) : NULL;
	  continue;
	}
      bitmap_set_bit (visited, bb->index);

      if (bitmap_bit_p (st_antloc[bb->index], smexpr->index))
	{
	  for (last = smexpr->antic_stores;
	       BLOCK_FOR_INSN (XEXP (last, 0)) != bb;
	       last = XEXP (last, 1))
	    continue;
	  last = XEXP (last, 0);
	}
      else
	last = NEXT_INSN (BB_END (bb));

      for (insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn))
	if (NONDEBUG_INSN_P (insn))
	  {
	    note = find_reg_equal_equiv_note (insn);
	    if (!note || !exp_equiv_p (XEXP (note, 0), mem, 0, true))
	      continue;

	    if (dump_file)
	      fprintf (dump_file, "STORE_MOTION  drop REG_EQUAL note at insn %d:\n",
		       INSN_UID (insn));
	    remove_note (insn, note);
	  }

      if (!ei_end_p (ei))
	ei_next (&ei);
      act = (! ei_end_p (ei)) ? ei_edge (ei) : NULL;

      if (EDGE_COUNT (bb->succs) > 0)
	{
	  if (act)
	    stack[sp++] = ei;
	  ei = ei_start (bb->succs);
	  act = (EDGE_COUNT (ei_container (ei)) > 0 ? EDGE_I (ei_container (ei), 0) : NULL);
	}
    }
}
/* This routine will replace a store with a SET to a specified register.  */

static void
replace_store_insn (rtx reg, rtx_insn *del, basic_block bb,
		    struct st_expr *smexpr)
{
  rtx_insn *insn;
  rtx mem, note, set, ptr;

  mem = smexpr->pattern;
  insn = gen_move_insn (reg, SET_SRC (single_set (del)));

  for (ptr = smexpr->antic_stores; ptr; ptr = XEXP (ptr, 1))
    if (XEXP (ptr, 0) == del)
      {
	XEXP (ptr, 0) = insn;
	break;
      }

  /* Move the notes from the deleted insn to its replacement.  */
  REG_NOTES (insn) = REG_NOTES (del);

  /* Emit the insn AFTER all the notes are transferred.
     This is cheaper since we avoid df rescanning for the note change.  */
  insn = emit_insn_after (insn, del);

  if (dump_file)
    {
      fprintf (dump_file,
	       "STORE_MOTION  delete insn in BB %d:\n      ", bb->index);
      print_inline_rtx (dump_file, del, 6);
      fprintf (dump_file, "\nSTORE_MOTION  replaced with insn:\n      ");
      print_inline_rtx (dump_file, insn, 6);
      fprintf (dump_file, "\n");
    }

  delete_insn (del);

  /* Now we must handle REG_EQUAL notes whose contents are equal to the mem;
     they are no longer accurate if they are reached by this new definition,
     so drop them.  */
  for (; insn != NEXT_INSN (BB_END (bb)); insn = NEXT_INSN (insn))
    if (NONDEBUG_INSN_P (insn))
      {
	set = single_set (insn);
	if (!set)
	  continue;
	if (exp_equiv_p (SET_DEST (set), mem, 0, true))
	  return;
	note = find_reg_equal_equiv_note (insn);
	if (!note || !exp_equiv_p (XEXP (note, 0), mem, 0, true))
	  continue;

	if (dump_file)
	  fprintf (dump_file, "STORE_MOTION  drop REG_EQUAL note at insn %d:\n",
		   INSN_UID (insn));
	remove_note (insn, note);
      }
  remove_reachable_equiv_notes (bb, smexpr);
}
/* Delete a store, but copy the value that would have been stored into
   the reaching_reg for later storing.  */

static void
delete_store (struct st_expr * expr, basic_block bb)
{
  rtx reg;

  if (expr->reaching_reg == NULL_RTX)
    expr->reaching_reg = gen_reg_rtx_and_attrs (expr->pattern);

  reg = expr->reaching_reg;

  for (rtx_insn_list *i = expr->avail_stores; i; i = i->next ())
    {
      rtx_insn *del = i->insn ();
      if (BLOCK_FOR_INSN (del) == bb)
	{
	  /* We know there is only one since we deleted redundant
	     ones during the available computation.  */
	  replace_store_insn (reg, del, bb, expr);
	  break;
	}
    }
}
/* Fill in the available, anticipatable, transparent and kill vectors,
   based on the lists of available and anticipatable stores.  */

static void
build_store_vectors (void)
{
  basic_block bb;
  int *regs_set_in_block;
  rtx_insn *insn;
  rtx_insn_list *st;
  struct st_expr * ptr;
  unsigned int max_gcse_regno = max_reg_num ();

  /* Build the gen_vector.  This is any store in the table which is not killed
     by aliasing later in its block.  */
  st_avloc = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
				   num_stores);
  bitmap_vector_clear (st_avloc, last_basic_block_for_fn (cfun));

  st_antloc = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
				    num_stores);
  bitmap_vector_clear (st_antloc, last_basic_block_for_fn (cfun));

  for (ptr = first_st_expr (); ptr != NULL; ptr = next_st_expr (ptr))
    {
      for (st = ptr->avail_stores; st != NULL; st = st->next ())
	{
	  insn = st->insn ();
	  bb = BLOCK_FOR_INSN (insn);

	  /* If we've already seen an available expression in this block,
	     we can delete this one (it occurs earlier in the block).  We'll
	     copy the SRC expression to an unused register in case there
	     are any side effects.  */
	  if (bitmap_bit_p (st_avloc[bb->index], ptr->index))
	    {
	      rtx r = gen_reg_rtx_and_attrs (ptr->pattern);
	      if (dump_file)
		fprintf (dump_file, "Removing redundant store:\n");
	      replace_store_insn (r, st->insn (), bb, ptr);
	      continue;
	    }
	  bitmap_set_bit (st_avloc[bb->index], ptr->index);
	}

      for (st = ptr->antic_stores; st != NULL; st = st->next ())
	{
	  insn = st->insn ();
	  bb = BLOCK_FOR_INSN (insn);
	  bitmap_set_bit (st_antloc[bb->index], ptr->index);
	}
    }

  st_kill = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), num_stores);
  bitmap_vector_clear (st_kill, last_basic_block_for_fn (cfun));

  st_transp = sbitmap_vector_alloc (last_basic_block_for_fn (cfun), num_stores);
  bitmap_vector_clear (st_transp, last_basic_block_for_fn (cfun));
  regs_set_in_block = XNEWVEC (int, max_gcse_regno);

  FOR_EACH_BB_FN (bb, cfun)
    {
      memset (regs_set_in_block, 0, sizeof (int) * max_gcse_regno);

      FOR_BB_INSNS (bb, insn)
	if (NONDEBUG_INSN_P (insn))
	  {
	    df_ref def;
	    FOR_EACH_INSN_DEF (def, insn)
	      {
		unsigned int ref_regno = DF_REF_REGNO (def);
		if (ref_regno < max_gcse_regno)
		  regs_set_in_block[ref_regno] = 1;
	      }
	  }

      for (ptr = first_st_expr (); ptr != NULL; ptr = next_st_expr (ptr))
	{
	  if (store_killed_after (ptr->pattern, ptr->pattern_regs, BB_HEAD (bb),
				  bb, regs_set_in_block, NULL))
	    {
	      /* It should not be necessary to consider the expression
		 killed if it is both anticipatable and available.  */
	      if (!bitmap_bit_p (st_antloc[bb->index], ptr->index)
		  || !bitmap_bit_p (st_avloc[bb->index], ptr->index))
		bitmap_set_bit (st_kill[bb->index], ptr->index);
	    }
	  else
	    bitmap_set_bit (st_transp[bb->index], ptr->index);
	}
    }

  free (regs_set_in_block);

  if (dump_file)
    {
      dump_bitmap_vector (dump_file, "st_antloc", "", st_antloc,
			  last_basic_block_for_fn (cfun));
      dump_bitmap_vector (dump_file, "st_kill", "", st_kill,
			  last_basic_block_for_fn (cfun));
      dump_bitmap_vector (dump_file, "st_transp", "", st_transp,
			  last_basic_block_for_fn (cfun));
      dump_bitmap_vector (dump_file, "st_avloc", "", st_avloc,
			  last_basic_block_for_fn (cfun));
    }
}
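/* A worked example of the four local properties (hypothetical block):
   for a block B containing

     *p = a;  call foo ();  *p = b;	(foo may read or write *p)

   st_antloc[B] is set (the first store is not preceded by a kill),
   st_avloc[B] is set (the last store survives to the end of B),
   st_transp[B] is clear (the call kills *p inside B), and st_kill[B]
   stays clear because B is both anticipatable and available.  These
   vectors are the input to pre_edge_rev_lcm below.  */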
/* Free memory used by store motion.  */

static void
free_store_memory (void)
{
  free_store_motion_mems ();

  if (st_avloc)
    sbitmap_vector_free (st_avloc);
  if (st_kill)
    sbitmap_vector_free (st_kill);
  if (st_transp)
    sbitmap_vector_free (st_transp);
  if (st_antloc)
    sbitmap_vector_free (st_antloc);
  if (st_insert_map)
    sbitmap_vector_free (st_insert_map);
  if (st_delete_map)
    sbitmap_vector_free (st_delete_map);

  st_avloc = st_kill = st_transp = st_antloc = NULL;
  st_insert_map = st_delete_map = NULL;
}
/* Perform store motion.  Much like gcse, except we move expressions the
   other way by looking at the flowgraph in reverse.
   Return non-zero if transformations are performed by the pass.  */

static int
one_store_motion_pass (void)
{
  basic_block bb;
  int x;
  struct st_expr * ptr;
  int did_edge_inserts = 0;
  int n_stores_deleted = 0;
  int n_stores_created = 0;

  init_alias_analysis ();

  /* Find all the available and anticipatable stores.  */
  num_stores = compute_store_table ();
  if (num_stores == 0)
    {
      delete store_motion_mems_table;
      store_motion_mems_table = NULL;
      end_alias_analysis ();
      return 0;
    }

  /* Now compute kill & transp vectors.  */
  build_store_vectors ();
  add_noreturn_fake_exit_edges ();
  connect_infinite_loops_to_exit ();

  edge_list = pre_edge_rev_lcm (num_stores, st_transp, st_avloc,
				st_antloc, st_kill, &st_insert_map,
				&st_delete_map);

  /* Now we want to insert the new stores which are going to be needed.  */
  for (ptr = first_st_expr (); ptr != NULL; ptr = next_st_expr (ptr))
    {
      /* If any of the edges we have above are abnormal, we can't move this
	 store.  */
      for (x = NUM_EDGES (edge_list) - 1; x >= 0; x--)
	if (bitmap_bit_p (st_insert_map[x], ptr->index)
	    && (INDEX_EDGE (edge_list, x)->flags & EDGE_ABNORMAL))
	  break;

      if (x >= 0)
	{
	  if (dump_file != NULL)
	    fprintf (dump_file,
		     "Can't replace store %d: abnormal edge from %d to %d\n",
		     ptr->index, INDEX_EDGE (edge_list, x)->src->index,
		     INDEX_EDGE (edge_list, x)->dest->index);
	  continue;
	}

      /* Delete the stores that this expression subsumes, then insert the
	 replacement stores on the edges chosen by LCM.  */

      FOR_EACH_BB_FN (bb, cfun)
	if (bitmap_bit_p (st_delete_map[bb->index], ptr->index))
	  {
	    delete_store (ptr, bb);
	    n_stores_deleted++;
	  }

      for (x = 0; x < NUM_EDGES (edge_list); x++)
	if (bitmap_bit_p (st_insert_map[x], ptr->index))
	  {
	    did_edge_inserts |= insert_store (ptr, INDEX_EDGE (edge_list, x));
	    n_stores_created++;
	  }
    }

  if (did_edge_inserts)
    commit_edge_insertions ();

  free_store_memory ();
  free_edge_list (edge_list);
  remove_fake_exit_edges ();
  end_alias_analysis ();

  if (dump_file)
    {
      fprintf (dump_file, "STORE_MOTION of %s, %d basic blocks, ",
	       current_function_name (), n_basic_blocks_for_fn (cfun));
      fprintf (dump_file, "%d insns deleted, %d insns created\n",
	       n_stores_deleted, n_stores_created);
    }

  return (n_stores_deleted > 0 || n_stores_created > 0);
}
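/* To recapitulate, one_store_motion_pass works in five steps:
   compute_store_table collects the candidate MEMs, build_store_vectors
   derives the per-block ANTLOC/AVLOC/KILL/TRANSP bitmaps,
   pre_edge_rev_lcm turns those into st_insert_map and st_delete_map,
   delete_store and insert_store apply the two maps, and
   commit_edge_insertions materializes the stores queued on edges.  */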
static unsigned int
execute_rtl_store_motion (void)
{
  delete_unreachable_blocks ();
  df_analyze ();
  flag_rerun_cse_after_global_opts |= one_store_motion_pass ();
  return 0;
}
namespace {

const pass_data pass_data_rtl_store_motion =
{
  RTL_PASS, /* type */
  "store_motion", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_LSM, /* tv_id */
  PROP_cfglayout, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_rtl_store_motion : public rtl_opt_pass
{
public:
  pass_rtl_store_motion (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_rtl_store_motion, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *)
    {
      return execute_rtl_store_motion ();
    }

}; // class pass_rtl_store_motion

bool
pass_rtl_store_motion::gate (function *fun)
{
  return optimize > 0 && flag_gcse_sm
    && !fun->calls_setjmp
    && optimize_function_for_speed_p (fun)
    && dbg_cnt (store_motion);
}

} // anon namespace

rtl_opt_pass *
make_pass_rtl_store_motion (gcc::context *ctxt)
{
  return new pass_rtl_store_motion (ctxt);
}