/* Store motion via Lazy Code Motion on the reverse CFG.
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
   2006, 2007, 2008, 2009 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "expr.h"
#include "except.h"
#include "ggc.h"
#include "params.h"
#include "intl.h"
#include "timevar.h"
#include "tree-pass.h"
#include "hashtab.h"
#include "df.h"
#include "dbgcnt.h"
/* This pass implements downward store motion.
   As of May 1, 2009, the pass is not enabled by default on any target,
   but bootstrap completes on ia64 and x86_64 with the pass enabled.  */

/* TODO:
   - remove_reachable_equiv_notes is an incomprehensible pile of goo and
     a compile time hog that needs a rewrite (maybe cache st_exprs to
     invalidate REG_EQUAL/REG_EQUIV notes for?).
   - pattern_regs in st_expr should be a regset (on its own obstack).
   - antic_stores and avail_stores should be VECs instead of lists.
   - store_motion_mems should be a VEC instead of a list.
   - there should be an alloc pool for struct st_expr objects.
   - investigate whether it is helpful to make the address of an st_expr
     a cselib VALUE.
   - when GIMPLE alias information is exported, the effectiveness of this
     pass should be re-evaluated.
*/
/* This is a list of store expressions (MEMs).  The structure is used
   as an expression table to track stores which look interesting, and
   might be moveable towards the exit block.  */

struct st_expr
{
  /* Pattern of this mem.  */
  rtx pattern;
  /* List of registers mentioned by the mem.  */
  rtx pattern_regs;
  /* INSN list of stores that are locally anticipatable.  */
  rtx antic_stores;
  /* INSN list of stores that are locally available.  */
  rtx avail_stores;
  /* Next in the list.  */
  struct st_expr * next;
  /* Store ID in the dataflow bitmaps.  */
  int index;
  /* Hash value for the hash table.  */
  unsigned int hash_index;
  /* Register holding the stored expression when a store is moved.
     This field is also used as a cache in find_moveable_store, see
     LAST_AVAIL_CHECK_FAILURE below.  */
  rtx reaching_reg;
};
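
/* ANTIC_STORES and AVAIL_STORES above are INSN lists.  A minimal,
   illustrative sketch (kept under "#if 0", not compiled) of walking one
   the way the loops later in this file do; "ptr", "list" and
   "store_insn" are hypothetical locals.  */
#if 0
  rtx list, store_insn;
  for (list = ptr->avail_stores; list; list = XEXP (list, 1))
    {
      store_insn = XEXP (list, 0);  /* One locally available store.  */
      /* ... inspect or rewrite STORE_INSN here ...  */
    }
#endif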
/* Head of the list of load/store memory refs.  */
static struct st_expr * store_motion_mems = NULL;

/* Hashtable for the load/store memory refs.  */
static htab_t store_motion_mems_table = NULL;

/* These bitmaps will hold the local dataflow properties per basic block.  */
static sbitmap *st_kill, *st_avloc, *st_antloc, *st_transp;

/* Nonzero for expressions which should be inserted on a specific edge.  */
static sbitmap *st_insert_map;

/* Nonzero for expressions which should be deleted in a specific block.  */
static sbitmap *st_delete_map;

/* Global holding the number of store expressions we are dealing with.  */
static int num_stores;

/* Contains the edge_list returned by pre_edge_lcm.  */
static struct edge_list *edge_list;
static hashval_t
pre_st_expr_hash (const void *p)
{
  int do_not_record_p = 0;
  const struct st_expr *const x = (const struct st_expr *) p;
  return hash_rtx (x->pattern, GET_MODE (x->pattern), &do_not_record_p,
                   NULL, false);
}

static int
pre_st_expr_eq (const void *p1, const void *p2)
{
  const struct st_expr *const ptr1 = (const struct st_expr *) p1,
    *const ptr2 = (const struct st_expr *) p2;
  return exp_equiv_p (ptr1->pattern, ptr2->pattern, 0, true);
}
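
/* An illustrative sketch (not compiled) of how these two callbacks plug
   into libiberty's hashtab, mirroring the htab_create call in
   compute_store_table below; "some_mem" is a hypothetical MEM rtx.  */
#if 0
  htab_t table = htab_create (13, pre_st_expr_hash, pre_st_expr_eq, NULL);
  struct st_expr e, *found;
  e.pattern = some_mem;
  found = (struct st_expr *) htab_find (table, &e);
#endif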
/* This will search the st_expr list for a matching expression.  If it
   doesn't find one, we create one and initialize it.  */

static struct st_expr *
st_expr_entry (rtx x)
{
  int do_not_record_p = 0;
  struct st_expr * ptr;
  unsigned int hash;
  void **slot;
  struct st_expr e;

  hash = hash_rtx (x, GET_MODE (x), &do_not_record_p,
                   NULL, /*have_reg_qty=*/false);

  e.pattern = x;
  slot = htab_find_slot_with_hash (store_motion_mems_table, &e, hash, INSERT);
  if (*slot)
    return (struct st_expr *)*slot;

  ptr = XNEW (struct st_expr);

  ptr->next         = store_motion_mems;
  ptr->pattern      = x;
  ptr->pattern_regs = NULL_RTX;
  ptr->antic_stores = NULL_RTX;
  ptr->avail_stores = NULL_RTX;
  ptr->reaching_reg = NULL_RTX;
  ptr->index        = 0;
  ptr->hash_index   = hash;
  store_motion_mems = ptr;
  *slot = ptr;

  return ptr;
}
/* Free up an individual st_expr entry.  */

static void
free_st_expr_entry (struct st_expr * ptr)
{
  free_INSN_LIST_list (& ptr->antic_stores);
  free_INSN_LIST_list (& ptr->avail_stores);

  free (ptr);
}
/* Free up all memory associated with the st_expr list.  */

static void
free_store_motion_mems (void)
{
  if (store_motion_mems_table)
    htab_delete (store_motion_mems_table);
  store_motion_mems_table = NULL;

  while (store_motion_mems)
    {
      struct st_expr * tmp = store_motion_mems;
      store_motion_mems = store_motion_mems->next;
      free_st_expr_entry (tmp);
    }
  store_motion_mems = NULL;
}
/* Assign each element of the list of mems a monotonically increasing value.  */

static int
enumerate_store_motion_mems (void)
{
  struct st_expr * ptr;
  int n = 0;

  for (ptr = store_motion_mems; ptr != NULL; ptr = ptr->next)
    ptr->index = n++;

  return n;
}
/* Return first item in the list.  */

static inline struct st_expr *
first_st_expr (void)
{
  return store_motion_mems;
}

/* Return the next item in the list after the specified one.  */

static inline struct st_expr *
next_st_expr (struct st_expr * ptr)
{
  return ptr->next;
}
/* Dump debugging info about the store_motion_mems list to FILE.  */

static void
print_store_motion_mems (FILE * file)
{
  struct st_expr * ptr;

  fprintf (file, "STORE_MOTION list of MEM exprs considered:\n");

  for (ptr = first_st_expr (); ptr != NULL; ptr = next_st_expr (ptr))
    {
      fprintf (file, "  Pattern (%3d): ", ptr->index);

      print_rtl (file, ptr->pattern);

      fprintf (file, "\n         ANTIC stores : ");

      if (ptr->antic_stores)
        print_rtl (file, ptr->antic_stores);
      else
        fprintf (file, "(nil)");

      fprintf (file, "\n         AVAIL stores : ");

      if (ptr->avail_stores)
        print_rtl (file, ptr->avail_stores);
      else
        fprintf (file, "(nil)");

      fprintf (file, "\n\n");
    }

  fprintf (file, "\n");
}
/* Return false if any of the registers in list X is recorded as set in
   REGS_SET, i.e. the stored expression was killed; return true
   otherwise.  */

static bool
store_ops_ok (const_rtx x, int *regs_set)
{
  const_rtx reg;

  for (; x; x = XEXP (x, 1))
    {
      reg = XEXP (x, 0);
      if (regs_set[REGNO (reg)])
        return false;
    }

  return true;
}
/* Helper for extract_mentioned_regs.  */

static int
extract_mentioned_regs_1 (rtx *loc, void *data)
{
  rtx *mentioned_regs_p = (rtx *) data;

  if (REG_P (*loc))
    *mentioned_regs_p = alloc_EXPR_LIST (0, *loc, *mentioned_regs_p);

  return 0;
}

/* Returns a list of registers mentioned in X.
   FIXME: A regset would be prettier and less expensive.  */

static rtx
extract_mentioned_regs (rtx x)
{
  rtx mentioned_regs = NULL;
  for_each_rtx (&x, extract_mentioned_regs_1, &mentioned_regs);
  return mentioned_regs;
}
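
/* Sketch (not compiled) of how the extracted register list is consumed:
   store_killed_after below pairs it with an array of set registers via
   store_ops_ok.  "mem_pattern" and "regs_set_after" stand in for the
   real arguments.  */
#if 0
  rtx regs = extract_mentioned_regs (mem_pattern);
  if (!store_ops_ok (regs, regs_set_after))
    {
      /* An address register was overwritten somewhere in the range of
         interest; the store cannot be moved across it.  */
    }
#endif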
/* Check to see if the load X is aliased with STORE_PATTERN.
   AFTER is true if we are checking the case when STORE_PATTERN occurs
   after X.  */

static bool
load_kills_store (const_rtx x, const_rtx store_pattern, int after)
{
  if (after)
    return anti_dependence (x, store_pattern);
  else
    return true_dependence (store_pattern, GET_MODE (store_pattern), x,
                            rtx_addr_varies_p);
}
/* Go through the entire rtx X, looking for any loads which might alias
   STORE_PATTERN.  Return true if found.
   AFTER is true if we are checking the case when STORE_PATTERN occurs
   after the insn X.  */

static bool
find_loads (const_rtx x, const_rtx store_pattern, int after)
{
  const char * fmt;
  int i, j;
  int ret = false;

  if (!x)
    return false;

  if (GET_CODE (x) == SET)
    x = SET_SRC (x);

  if (MEM_P (x))
    {
      if (load_kills_store (x, store_pattern, after))
        return true;
    }

  /* Recursively process the insn.  */
  fmt = GET_RTX_FORMAT (GET_CODE (x));

  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
    {
      if (fmt[i] == 'e')
        ret |= find_loads (XEXP (x, i), store_pattern, after);
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          ret |= find_loads (XVECEXP (x, i, j), store_pattern, after);
    }
  return ret;
}
/* Go through pattern PAT looking for any loads which might kill the
   store in X.  Return true if found.
   AFTER is true if we are checking the case when the loads that would
   kill X occur after the insn for PAT.  */

static inline bool
store_killed_in_pat (const_rtx x, const_rtx pat, int after)
{
  if (GET_CODE (pat) == SET)
    {
      rtx dest = SET_DEST (pat);

      if (GET_CODE (dest) == ZERO_EXTRACT)
        dest = XEXP (dest, 0);

      /* Check for memory stores to aliased objects.  */
      if (MEM_P (dest)
          && !exp_equiv_p (dest, x, 0, true))
        {
          if (after)
            {
              if (output_dependence (dest, x))
                return true;
            }
          else
            {
              if (output_dependence (x, dest))
                return true;
            }
        }
    }

  if (find_loads (pat, x, after))
    return true;

  return false;
}
/* Check if INSN kills the store pattern X (is aliased with it).
   AFTER is true if we are checking the case when store X occurs
   after the insn.  Return true if it does.  */

static bool
store_killed_in_insn (const_rtx x, const_rtx x_regs, const_rtx insn, int after)
{
  const_rtx reg, base, note, pat;

  if (!INSN_P (insn))
    return false;

  if (CALL_P (insn))
    {
      /* A normal or pure call might read from pattern,
         but a const call will not.  */
      if (!RTL_CONST_CALL_P (insn))
        return true;

      /* But even a const call reads its parameters.  Check whether the
         base of any of the registers used in the mem is the stack
         pointer.  */
      for (reg = x_regs; reg; reg = XEXP (reg, 1))
        {
          base = find_base_term (XEXP (reg, 0));
          if (!base
              || (GET_CODE (base) == ADDRESS
                  && GET_MODE (base) == Pmode
                  && XEXP (base, 0) == stack_pointer_rtx))
            return true;
        }

      return false;
    }

  pat = PATTERN (insn);
  if (GET_CODE (pat) == SET)
    {
      if (store_killed_in_pat (x, pat, after))
        return true;
    }
  else if (GET_CODE (pat) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (pat, 0); i++)
        if (store_killed_in_pat (x, XVECEXP (pat, 0, i), after))
          return true;
    }
  else if (find_loads (PATTERN (insn), x, after))
    return true;

  /* If this insn has a REG_EQUAL or REG_EQUIV note referencing a memory
     location aliased with X, then this insn kills X.  */
  note = find_reg_equal_equiv_note (insn);
  if (! note)
    return false;
  note = XEXP (note, 0);

  /* However, if the note represents a must alias rather than a may
     alias relationship, then it does not kill X.  */
  if (exp_equiv_p (note, x, 0, true))
    return false;

  /* See if there are any aliased loads in the note.  */
  return find_loads (note, x, after);
}
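
/* An illustrative sketch (not compiled) of how the kill test above is
   used; it mirrors the backward scan in store_killed_after below.
   "mem", "mem_regs", "bb" and "walker" are hypothetical.  */
#if 0
  rtx walker;
  for (walker = BB_END (bb); walker != insn; walker = PREV_INSN (walker))
    if (store_killed_in_insn (mem, mem_regs, walker, false))
      break;  /* An aliasing load or store blocks sinking MEM.  */
#endif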
/* Returns true if the expression X is loaded or clobbered on or after INSN
   within basic block BB.  REGS_SET_AFTER is bitmap of registers set in
   or after the insn.  X_REGS is list of registers mentioned in X.  If the
   store is killed, return in FAIL_INSN the last insn in which that
   happens.  */

static bool
store_killed_after (const_rtx x, const_rtx x_regs, const_rtx insn,
                    const_basic_block bb,
                    int *regs_set_after, rtx *fail_insn)
{
  rtx last = BB_END (bb), act;

  if (!store_ops_ok (x_regs, regs_set_after))
    {
      /* We do not know where it will happen.  */
      if (fail_insn)
        *fail_insn = NULL_RTX;
      return true;
    }

  /* Scan from the end, so that fail_insn is determined correctly.  */
  for (act = last; act != PREV_INSN (insn); act = PREV_INSN (act))
    if (store_killed_in_insn (x, x_regs, act, false))
      {
        if (fail_insn)
          *fail_insn = act;
        return true;
      }

  return false;
}
/* Returns true if the expression X is loaded or clobbered on or before INSN
   within basic block BB.  X_REGS is list of registers mentioned in X.
   REGS_SET_BEFORE is bitmap of registers set before or in this insn.  */

static bool
store_killed_before (const_rtx x, const_rtx x_regs, const_rtx insn,
                     const_basic_block bb,
                     int *regs_set_before)
{
  rtx first = BB_HEAD (bb);

  if (!store_ops_ok (x_regs, regs_set_before))
    return true;

  for ( ; insn != PREV_INSN (first); insn = PREV_INSN (insn))
    if (store_killed_in_insn (x, x_regs, insn, true))
      return true;

  return false;
}
/* The last insn in the basic block that compute_store_table is processing,
   where store_killed_after is true for X.
   Since we go through the basic block from BB_END to BB_HEAD, this is
   also the available store at the end of the basic block.  Therefore
   this is in effect a cache, to avoid calling store_killed_after for
   equivalent aliasing store expressions.
   This value is only meaningful during the computation of the store
   table.  We hijack the REACHING_REG field of struct st_expr to save
   a bit of memory.  */
#define LAST_AVAIL_CHECK_FAILURE(x) ((x)->reaching_reg)
/* Determine whether INSN is a MEM store pattern that we will consider moving.
   REGS_SET_BEFORE is bitmap of registers set before (and including) the
   current insn, REGS_SET_AFTER is bitmap of registers set after (and
   including) the insn in this basic block.  We must be passing through BB
   from head to end, as we are using this fact to speed things up.

   The results are stored this way:

   -- the first anticipatable expression is added into ANTIC_STORES
   -- if the processed expression is not anticipatable, NULL_RTX is added
      there instead, so that we can use it as an indicator that no further
      expression of this type may be anticipatable
   -- if the expression is available, it is added as head of AVAIL_STORES;
      consequently, all of them but this head are dead and may be deleted.
   -- if the expression is not available, the insn due to which it fails to
      be available is stored in REACHING_REG (via LAST_AVAIL_CHECK_FAILURE).

   Things are complicated a bit by the fact that there may already be stores
   to the same MEM from other blocks; also, the caller must take care of the
   necessary cleanup of the temporary markers after the end of the basic
   block (a sketch of that cleanup follows this function).  */

static void
find_moveable_store (rtx insn, int *regs_set_before, int *regs_set_after)
{
  struct st_expr * ptr;
  rtx dest, set, tmp;
  int check_anticipatable, check_available;
  basic_block bb = BLOCK_FOR_INSN (insn);

  set = single_set (insn);
  if (!set)
    return;

  dest = SET_DEST (set);

  if (! MEM_P (dest) || MEM_VOLATILE_P (dest)
      || GET_MODE (dest) == BLKmode)
    return;

  if (side_effects_p (dest))
    return;

  /* If we are handling exceptions, we must be careful with memory references
     that may trap.  If we are not, the behavior is undefined, so we may just
     continue.  */
  if (flag_non_call_exceptions && may_trap_p (dest))
    return;

  /* Even if the destination cannot trap, the source may.  In this case we'd
     need to handle updating the REG_EH_REGION note.  */
  if (find_reg_note (insn, REG_EH_REGION, NULL_RTX))
    return;

  /* Make sure that the SET_SRC of this store insn can be assigned to
     a register, or we will fail later on in replace_store_insn, which
     assumes that we can do this.  But sometimes the target machine has
     oddities like MEM read-modify-write instructions.  See for example
     PR24257.  */
  if (!can_assign_to_reg_without_clobbers_p (SET_SRC (set)))
    return;

  ptr = st_expr_entry (dest);
  if (!ptr->pattern_regs)
    ptr->pattern_regs = extract_mentioned_regs (dest);

  /* Do not check for anticipatability if we either found one anticipatable
     store already, or tested for one and found out that it was killed.  */
  check_anticipatable = 0;
  if (!ptr->antic_stores)
    check_anticipatable = 1;
  else
    {
      tmp = XEXP (ptr->antic_stores, 0);
      if (tmp != NULL_RTX
          && BLOCK_FOR_INSN (tmp) != bb)
        check_anticipatable = 1;
    }
  if (check_anticipatable)
    {
      if (store_killed_before (dest, ptr->pattern_regs, insn, bb,
                               regs_set_before))
        tmp = NULL_RTX;
      else
        tmp = insn;
      ptr->antic_stores = alloc_INSN_LIST (tmp, ptr->antic_stores);
    }

  /* It is not necessary to check whether the store is available if we did
     it successfully before; if we failed before, do not bother to check
     until we reach the insn that caused us to fail.  */
  check_available = 0;
  if (!ptr->avail_stores)
    check_available = 1;
  else
    {
      tmp = XEXP (ptr->avail_stores, 0);
      if (BLOCK_FOR_INSN (tmp) != bb)
        check_available = 1;
    }
  if (check_available)
    {
      /* Check whether we have already reached the insn at which the check
         failed last time.  */
      if (LAST_AVAIL_CHECK_FAILURE (ptr))
        {
          for (tmp = BB_END (bb);
               tmp != insn && tmp != LAST_AVAIL_CHECK_FAILURE (ptr);
               tmp = PREV_INSN (tmp))
            continue;
          if (tmp == insn)
            check_available = 0;
        }
      else
        check_available = store_killed_after (dest, ptr->pattern_regs, insn,
                                              bb, regs_set_after,
                                              &LAST_AVAIL_CHECK_FAILURE (ptr));
    }
  if (!check_available)
    ptr->avail_stores = alloc_INSN_LIST (insn, ptr->avail_stores);
}
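
/* Sketch (not compiled) of the temporary marker scheme described above,
   as the caller's "Clear temporary marks" loop in compute_store_table
   undoes it: a NULL_RTX head in ANTIC_STORES means "no further store of
   this pattern in this block is anticipatable", and the hijacked
   LAST_AVAIL_CHECK_FAILURE slot must be reset after each block.  */
#if 0
  LAST_AVAIL_CHECK_FAILURE (ptr) = NULL_RTX;
  if (ptr->antic_stores && XEXP (ptr->antic_stores, 0) == NULL_RTX)
    ptr->antic_stores = XEXP (ptr->antic_stores, 1);
#endif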
/* Find available and anticipatable stores.  */

static int
compute_store_table (void)
{
  int ret;
  basic_block bb;
#ifdef ENABLE_CHECKING
  unsigned regno;
#endif
  rtx insn, tmp;
  df_ref *def_rec;
  int *last_set_in, *already_set;
  struct st_expr * ptr, **prev_next_ptr_ptr;
  unsigned int max_gcse_regno = max_reg_num ();

  store_motion_mems = NULL;
  store_motion_mems_table = htab_create (13, pre_st_expr_hash,
                                         pre_st_expr_eq, NULL);
  last_set_in = XCNEWVEC (int, max_gcse_regno);
  already_set = XNEWVEC (int, max_gcse_regno);

  /* Find all the stores we care about.  */
  FOR_EACH_BB (bb)
    {
      /* First compute the registers set in this block.  */
      FOR_BB_INSNS (bb, insn)
        {
          if (! INSN_P (insn))
            continue;

          for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
            last_set_in[DF_REF_REGNO (*def_rec)] = INSN_UID (insn);
        }

      /* Now find the stores.  */
      memset (already_set, 0, sizeof (int) * max_gcse_regno);
      FOR_BB_INSNS (bb, insn)
        {
          if (! INSN_P (insn))
            continue;

          for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
            already_set[DF_REF_REGNO (*def_rec)] = INSN_UID (insn);

          /* Now that we've marked regs, look for stores.  */
          find_moveable_store (insn, already_set, last_set_in);

          /* Unmark regs that are no longer set.  */
          for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
            if (last_set_in[DF_REF_REGNO (*def_rec)] == INSN_UID (insn))
              last_set_in[DF_REF_REGNO (*def_rec)] = 0;
        }

#ifdef ENABLE_CHECKING
      /* last_set_in should now be all-zero.  */
      for (regno = 0; regno < max_gcse_regno; regno++)
        gcc_assert (!last_set_in[regno]);
#endif

      /* Clear temporary marks.  */
      for (ptr = first_st_expr (); ptr != NULL; ptr = next_st_expr (ptr))
        {
          LAST_AVAIL_CHECK_FAILURE (ptr) = NULL_RTX;
          if (ptr->antic_stores
              && (tmp = XEXP (ptr->antic_stores, 0)) == NULL_RTX)
            ptr->antic_stores = XEXP (ptr->antic_stores, 1);
        }
    }

  /* Remove the stores that are not available anywhere, as there will
     be no opportunity to optimize them.  */
  for (ptr = store_motion_mems, prev_next_ptr_ptr = &store_motion_mems;
       ptr != NULL;
       ptr = *prev_next_ptr_ptr)
    {
      if (! ptr->avail_stores)
        {
          *prev_next_ptr_ptr = ptr->next;
          htab_remove_elt_with_hash (store_motion_mems_table,
                                     ptr, ptr->hash_index);
          free_st_expr_entry (ptr);
        }
      else
        prev_next_ptr_ptr = &ptr->next;
    }

  ret = enumerate_store_motion_mems ();

  if (dump_file)
    print_store_motion_mems (dump_file);

  free (last_set_in);
  free (already_set);
  return ret;
}
/* In all code following after this, REACHING_REG has its original
   meaning again.  Avoid confusion, and undef the accessor macro for
   the temporary marks usage in compute_store_table.  */
#undef LAST_AVAIL_CHECK_FAILURE
/* Insert an instruction at the beginning of a basic block, and update
   the BB_HEAD if needed.  */

static void
insert_insn_start_basic_block (rtx insn, basic_block bb)
{
  /* Insert at start of successor block.  */
  rtx prev = PREV_INSN (BB_HEAD (bb));
  rtx before = BB_HEAD (bb);
  while (before != 0)
    {
      if (! LABEL_P (before)
          && !NOTE_INSN_BASIC_BLOCK_P (before))
        break;
      prev = before;
      if (prev == BB_END (bb))
        break;
      before = NEXT_INSN (before);
    }

  insn = emit_insn_after_noloc (insn, prev, bb);

  if (dump_file)
    {
      fprintf (dump_file, "STORE_MOTION insert store at start of BB %d:\n",
               bb->index);
      print_inline_rtx (dump_file, insn, 6);
      fprintf (dump_file, "\n");
    }
}
/* This routine will insert a store on an edge.  EXPR is the st_expr entry for
   the memory reference, and E is the edge to insert it on.  Returns nonzero
   if an edge insertion was performed.  */

static int
insert_store (struct st_expr * expr, edge e)
{
  rtx reg, insn;
  basic_block bb;
  edge tmp;
  edge_iterator ei;

  /* We did all the deletions before this insert, so if we didn't delete a
     store, then we haven't set the reaching reg yet either.  */
  if (expr->reaching_reg == NULL_RTX)
    return 0;

  if (e->flags & EDGE_FAKE)
    return 0;

  reg = expr->reaching_reg;
  insn = gen_move_insn (copy_rtx (expr->pattern), reg);

  /* If we are inserting this expression on ALL predecessor edges of a BB,
     insert it at the start of the BB, and reset the insert bits on the other
     edges so we don't try to insert it on the other edges.  */
  bb = e->dest;
  FOR_EACH_EDGE (tmp, ei, e->dest->preds)
    if (!(tmp->flags & EDGE_FAKE))
      {
        int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);

        gcc_assert (index != EDGE_INDEX_NO_EDGE);
        if (! TEST_BIT (st_insert_map[index], expr->index))
          break;
      }

  /* If tmp is NULL, we found an insertion on every edge, blank the
     insertion vector for these edges, and insert at the start of the BB.  */
  if (!tmp && bb != EXIT_BLOCK_PTR)
    {
      FOR_EACH_EDGE (tmp, ei, e->dest->preds)
        {
          int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
          RESET_BIT (st_insert_map[index], expr->index);
        }
      insert_insn_start_basic_block (insn, bb);
      return 0;
    }

  /* We can't put stores in the front of blocks pointed to by abnormal
     edges since that may put a store where one didn't use to be.  */
  gcc_assert (!(e->flags & EDGE_ABNORMAL));

  insert_insn_on_edge (insn, e);

  if (dump_file)
    {
      fprintf (dump_file, "STORE_MOTION insert insn on edge (%d, %d):\n",
               e->src->index, e->dest->index);
      print_inline_rtx (dump_file, insn, 6);
      fprintf (dump_file, "\n");
    }

  return 1;
}
/* Remove any REG_EQUAL or REG_EQUIV notes containing a reference to the
   memory location in SMEXPR set in basic block BB.

   This could be rather expensive.  */

static void
remove_reachable_equiv_notes (basic_block bb, struct st_expr *smexpr)
{
  edge_iterator *stack, ei;
  int sp;
  edge act;
  sbitmap visited = sbitmap_alloc (last_basic_block);
  rtx last, insn, note;
  rtx mem = smexpr->pattern;

  stack = XNEWVEC (edge_iterator, n_basic_blocks);
  sp = 0;
  ei = ei_start (bb->succs);

  sbitmap_zero (visited);

  act = (EDGE_COUNT (ei_container (ei)) > 0 ? EDGE_I (ei_container (ei), 0) : NULL);
  while (1)
    {
      if (!act)
        {
          if (!sp)
            {
              free (stack);
              sbitmap_free (visited);
              return;
            }
          act = ei_edge (stack[--sp]);
        }
      bb = act->dest;

      if (bb == EXIT_BLOCK_PTR
          || TEST_BIT (visited, bb->index))
        {
          if (!ei_end_p (ei))
            ei_next (&ei);
          act = (! ei_end_p (ei)) ? ei_edge (ei) : NULL;
          continue;
        }
      SET_BIT (visited, bb->index);

      if (TEST_BIT (st_antloc[bb->index], smexpr->index))
        {
          for (last = smexpr->antic_stores;
               BLOCK_FOR_INSN (XEXP (last, 0)) != bb;
               last = XEXP (last, 1))
            continue;
          last = XEXP (last, 0);
        }
      else
        last = NEXT_INSN (BB_END (bb));

      for (insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn))
        if (INSN_P (insn))
          {
            note = find_reg_equal_equiv_note (insn);
            if (!note || !exp_equiv_p (XEXP (note, 0), mem, 0, true))
              continue;

            if (dump_file)
              fprintf (dump_file,
                       "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
                       INSN_UID (insn));
            remove_note (insn, note);
          }

      if (!ei_end_p (ei))
        ei_next (&ei);
      act = (! ei_end_p (ei)) ? ei_edge (ei) : NULL;

      if (EDGE_COUNT (bb->succs) > 0)
        {
          if (act)
            stack[sp++] = ei;
          ei = ei_start (bb->succs);
          act = (EDGE_COUNT (ei_container (ei)) > 0 ? EDGE_I (ei_container (ei), 0) : NULL);
        }
    }
}
/* This routine will replace a store with a SET to a specified register.  */

static void
replace_store_insn (rtx reg, rtx del, basic_block bb, struct st_expr *smexpr)
{
  rtx insn, mem, note, set, ptr;

  mem = smexpr->pattern;
  insn = gen_move_insn (reg, SET_SRC (single_set (del)));

  for (ptr = smexpr->antic_stores; ptr; ptr = XEXP (ptr, 1))
    if (XEXP (ptr, 0) == del)
      {
        XEXP (ptr, 0) = insn;
        break;
      }

  /* Move the notes from the deleted insn to its replacement.  */
  REG_NOTES (insn) = REG_NOTES (del);

  /* Emit the insn AFTER all the notes are transferred.
     This is cheaper since we avoid df rescanning for the note change.  */
  insn = emit_insn_after (insn, del);

  if (dump_file)
    {
      fprintf (dump_file,
               "STORE_MOTION delete insn in BB %d:\n ", bb->index);
      print_inline_rtx (dump_file, del, 6);
      fprintf (dump_file, "\nSTORE_MOTION replaced with insn:\n ");
      print_inline_rtx (dump_file, insn, 6);
      fprintf (dump_file, "\n");
    }

  delete_insn (del);

  /* Now we must handle REG_EQUAL notes whose content is equal to the mem;
     they are no longer accurate if they are reached by this definition,
     so drop them.  */
  for (; insn != NEXT_INSN (BB_END (bb)); insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        set = single_set (insn);
        if (!set)
          continue;
        if (exp_equiv_p (SET_DEST (set), mem, 0, true))
          return;
        note = find_reg_equal_equiv_note (insn);
        if (!note || !exp_equiv_p (XEXP (note, 0), mem, 0, true))
          continue;

        if (dump_file)
          fprintf (dump_file,
                   "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
                   INSN_UID (insn));
        remove_note (insn, note);
      }
  remove_reachable_equiv_notes (bb, smexpr);
}
/* Delete a store, but copy the value that would have been stored into
   the reaching_reg for later storing.  */

static void
delete_store (struct st_expr * expr, basic_block bb)
{
  rtx reg, i, del;

  if (expr->reaching_reg == NULL_RTX)
    expr->reaching_reg = gen_reg_rtx_and_attrs (expr->pattern);

  reg = expr->reaching_reg;

  for (i = expr->avail_stores; i; i = XEXP (i, 1))
    {
      del = XEXP (i, 0);
      if (BLOCK_FOR_INSN (del) == bb)
        {
          /* We know there is only one since we deleted redundant
             ones during the available computation.  */
          replace_store_insn (reg, del, bb, expr);
          break;
        }
    }
}
/* Fill in the available, anticipatable, transparent and kill vectors,
   based on the lists of available and anticipatable stores.  */

static void
build_store_vectors (void)
{
  basic_block bb;
  int *regs_set_in_block;
  rtx insn, st;
  struct st_expr * ptr;
  unsigned int max_gcse_regno = max_reg_num ();

  /* Build the gen_vector.  This is any store in the table which is not killed
     by aliasing later in its block.  */
  st_avloc = sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (st_avloc, last_basic_block);

  st_antloc = sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (st_antloc, last_basic_block);

  for (ptr = first_st_expr (); ptr != NULL; ptr = next_st_expr (ptr))
    {
      for (st = ptr->avail_stores; st != NULL; st = XEXP (st, 1))
        {
          insn = XEXP (st, 0);
          bb = BLOCK_FOR_INSN (insn);

          /* If we've already seen an available expression in this block,
             we can delete this one (it occurs earlier in the block).  We'll
             copy the SRC expression to an unused register in case there
             are any side effects.  */
          if (TEST_BIT (st_avloc[bb->index], ptr->index))
            {
              rtx r = gen_reg_rtx_and_attrs (ptr->pattern);
              if (dump_file)
                fprintf (dump_file, "Removing redundant store:\n");
              replace_store_insn (r, XEXP (st, 0), bb, ptr);
              continue;
            }
          SET_BIT (st_avloc[bb->index], ptr->index);
        }

      for (st = ptr->antic_stores; st != NULL; st = XEXP (st, 1))
        {
          insn = XEXP (st, 0);
          bb = BLOCK_FOR_INSN (insn);
          SET_BIT (st_antloc[bb->index], ptr->index);
        }
    }

  st_kill = sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (st_kill, last_basic_block);

  st_transp = sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (st_transp, last_basic_block);
  regs_set_in_block = XNEWVEC (int, max_gcse_regno);

  FOR_EACH_BB (bb)
    {
      /* XNEWVEC does not zero the array, and stale bits from a previous
         block must not leak into this one.  */
      memset (regs_set_in_block, 0, sizeof (int) * max_gcse_regno);

      FOR_BB_INSNS (bb, insn)
        if (INSN_P (insn))
          {
            df_ref *def_rec;
            for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
              {
                unsigned int ref_regno = DF_REF_REGNO (*def_rec);
                if (ref_regno < max_gcse_regno)
                  regs_set_in_block[DF_REF_REGNO (*def_rec)] = 1;
              }
          }

      for (ptr = first_st_expr (); ptr != NULL; ptr = next_st_expr (ptr))
        {
          if (store_killed_after (ptr->pattern, ptr->pattern_regs, BB_HEAD (bb),
                                  bb, regs_set_in_block, NULL))
            {
              /* It should not be necessary to consider the expression
                 killed if it is both anticipatable and available.  */
              if (!TEST_BIT (st_antloc[bb->index], ptr->index)
                  || !TEST_BIT (st_avloc[bb->index], ptr->index))
                SET_BIT (st_kill[bb->index], ptr->index);
            }
          else
            SET_BIT (st_transp[bb->index], ptr->index);
        }
    }

  free (regs_set_in_block);

  if (dump_file)
    {
      dump_sbitmap_vector (dump_file, "st_antloc", "", st_antloc, last_basic_block);
      dump_sbitmap_vector (dump_file, "st_kill", "", st_kill, last_basic_block);
      dump_sbitmap_vector (dump_file, "st_transp", "", st_transp, last_basic_block);
      dump_sbitmap_vector (dump_file, "st_avloc", "", st_avloc, last_basic_block);
    }
}
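
/* A worked example (illustrative C, kept under "#if 0") of what these
   vectors enable.  Both arms of the diamond below store to *P, so the
   store is locally anticipatable and available in each arm; LCM on the
   reverse CFG can then delete both stores, keep the values in the
   reaching register, and insert a single store at the join.  */
#if 0
  void
  example (int *p, int a, int b, int cond)
  {
    if (cond)
      *p = a;  /* delete_store: becomes "reaching_reg = a".  */
    else
      *p = b;  /* delete_store: becomes "reaching_reg = b".  */
    /* insert_store: one "*p = reaching_reg" is emitted here.  */
  }
#endif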
/* Free memory used by store motion.  */

static void
free_store_memory (void)
{
  free_store_motion_mems ();

  if (st_avloc)
    sbitmap_vector_free (st_avloc);
  if (st_kill)
    sbitmap_vector_free (st_kill);
  if (st_transp)
    sbitmap_vector_free (st_transp);
  if (st_antloc)
    sbitmap_vector_free (st_antloc);
  if (st_insert_map)
    sbitmap_vector_free (st_insert_map);
  if (st_delete_map)
    sbitmap_vector_free (st_delete_map);

  st_avloc = st_kill = st_transp = st_antloc = NULL;
  st_insert_map = st_delete_map = NULL;
}
/* Perform store motion.  Much like gcse, except we move expressions the
   other way by looking at the flowgraph in reverse.
   Return nonzero if transformations are performed by the pass.  */

static int
one_store_motion_pass (void)
{
  basic_block bb;
  int x;
  struct st_expr * ptr;
  int did_edge_inserts = 0;
  int n_stores_deleted = 0;
  int n_stores_created = 0;

  init_alias_analysis ();

  /* Find all the available and anticipatable stores.  */
  num_stores = compute_store_table ();
  if (num_stores == 0)
    {
      htab_delete (store_motion_mems_table);
      store_motion_mems_table = NULL;
      end_alias_analysis ();
      return 0;
    }

  /* Now compute kill & transp vectors.  */
  build_store_vectors ();
  add_noreturn_fake_exit_edges ();
  connect_infinite_loops_to_exit ();

  edge_list = pre_edge_rev_lcm (num_stores, st_transp, st_avloc,
                                st_antloc, st_kill, &st_insert_map,
                                &st_delete_map);

  /* Now we want to insert the new stores which are going to be needed.  */
  for (ptr = first_st_expr (); ptr != NULL; ptr = next_st_expr (ptr))
    {
      /* If any of the edges we have above are abnormal, we can't move this
         store.  */
      for (x = NUM_EDGES (edge_list) - 1; x >= 0; x--)
        if (TEST_BIT (st_insert_map[x], ptr->index)
            && (INDEX_EDGE (edge_list, x)->flags & EDGE_ABNORMAL))
          break;

      if (x >= 0)
        {
          if (dump_file != NULL)
            fprintf (dump_file,
                     "Can't replace store %d: abnormal edge from %d to %d\n",
                     ptr->index, INDEX_EDGE (edge_list, x)->src->index,
                     INDEX_EDGE (edge_list, x)->dest->index);
          continue;
        }

      /* Delete the stores this expression makes redundant, then insert
         the replacement stores on the edges computed by LCM.  */

      FOR_EACH_BB (bb)
        if (TEST_BIT (st_delete_map[bb->index], ptr->index))
          {
            delete_store (ptr, bb);
            n_stores_deleted++;
          }

      for (x = 0; x < NUM_EDGES (edge_list); x++)
        if (TEST_BIT (st_insert_map[x], ptr->index))
          {
            did_edge_inserts |= insert_store (ptr, INDEX_EDGE (edge_list, x));
            n_stores_created++;
          }
    }

  if (did_edge_inserts)
    commit_edge_insertions ();

  free_store_memory ();
  free_edge_list (edge_list);
  remove_fake_exit_edges ();
  end_alias_analysis ();

  if (dump_file)
    {
      fprintf (dump_file, "STORE_MOTION of %s, %d basic blocks, ",
               current_function_name (), n_basic_blocks);
      fprintf (dump_file, "%d insns deleted, %d insns created\n",
               n_stores_deleted, n_stores_created);
    }

  return (n_stores_deleted > 0 || n_stores_created > 0);
}
static bool
gate_rtl_store_motion (void)
{
  return optimize > 0 && flag_gcse_sm
    && !cfun->calls_setjmp
    && optimize_function_for_speed_p (cfun)
    && dbg_cnt (store_motion);
}
static unsigned int
execute_rtl_store_motion (void)
{
  delete_unreachable_blocks ();
  df_note_add_problem ();
  df_analyze ();
  flag_rerun_cse_after_global_opts |= one_store_motion_pass ();
  return 0;
}
struct rtl_opt_pass pass_rtl_store_motion =
{
 {
  RTL_PASS,
  "store_motion",                       /* name */
  gate_rtl_store_motion,                /* gate */
  execute_rtl_store_motion,             /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_LSM,                               /* tv_id */
  PROP_cfglayout,                       /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_dump_func |
  TODO_verify_flow | TODO_ggc_collect   /* todo_flags_finish */
 }
};