/* Store motion via Lazy Code Motion on the reverse CFG.
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005,
   2006, 2007, 2008, 2009, 2010 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "diagnostic-core.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "basic-block.h"
#include "tree-pass.h"
/* This pass implements downward store motion.
   As of May 1, 2009, the pass is not enabled by default on any target,
   but bootstrap completes on ia64 and x86_64 with the pass enabled.  */
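/* Illustrative sketch, not part of the pass itself: the kind of
   transformation this pass aims for.  A store executed on every
   iteration of a loop can be sunk towards the exit, keeping the value
   in a register until one final store (the names p, x and reg below
   are hypothetical):

       while (cond)                    while (cond)
         {                               {
           ...                             ...
           *p = x;            ==>          reg = x;
         }                               }
                                       *p = reg;

   The register on the right corresponds to the REACHING_REG field of
   struct st_expr below.  */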
/* TODO:
   - remove_reachable_equiv_notes is an incomprehensible pile of goo and
     a compile time hog that needs a rewrite (maybe cache st_exprs to
     invalidate REG_EQUAL/REG_EQUIV notes for?).
   - pattern_regs in st_expr should be a regset (on its own obstack).
   - antic_stores and avail_stores should be VECs instead of lists.
   - store_motion_mems should be a VEC instead of a list.
   - there should be an alloc pool for struct st_expr objects.
   - investigate whether it is helpful to make the address of an st_expr
     a cselib VALUE.
   - when GIMPLE alias information is exported, the effectiveness of this
     pass should be re-evaluated.
*/
/* This is a list of store expressions (MEMs).  The structure is used
   as an expression table to track stores which look interesting, and
   might be moveable towards the exit block.  */

struct st_expr
{
  /* Pattern of this mem.  */
  rtx pattern;
  /* List of registers mentioned by the mem.  */
  rtx pattern_regs;
  /* INSN list of stores that are locally anticipatable.  */
  rtx antic_stores;
  /* INSN list of stores that are locally available.  */
  rtx avail_stores;
  /* Next in the list.  */
  struct st_expr * next;
  /* Store ID in the dataflow bitmaps.  */
  int index;
  /* Hash value for the hash table.  */
  unsigned int hash_index;
  /* Register holding the stored expression when a store is moved.
     This field is also used as a cache in find_moveable_store, see
     LAST_AVAIL_CHECK_FAILURE below.  */
  rtx reaching_reg;
};
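/* For illustration (hypothetical RTL, not taken from a real dump): the
   PATTERN of an entry might be the stored-to MEM

       (mem:SI (plus:DI (reg/f:DI 20 frame) (const_int -4)))

   in which case PATTERN_REGS would be an EXPR_LIST containing
   (reg/f:DI 20 frame), i.e. the registers mentioned by the address.  */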
/* Head of the list of load/store memory refs.  */
static struct st_expr * store_motion_mems = NULL;

/* Hashtable for the load/store memory refs.  */
static htab_t store_motion_mems_table = NULL;

/* These bitmaps will hold the local dataflow properties per basic block.  */
static sbitmap *st_kill, *st_avloc, *st_antloc, *st_transp;

/* Nonzero for expressions which should be inserted on a specific edge.  */
static sbitmap *st_insert_map;

/* Nonzero for expressions which should be deleted in a specific block.  */
static sbitmap *st_delete_map;

/* Global holding the number of store expressions we are dealing with.  */
static int num_stores;

/* Contains the edge_list returned by pre_edge_lcm.  */
static struct edge_list *edge_list;
static hashval_t
pre_st_expr_hash (const void *p)
{
  int do_not_record_p = 0;
  const struct st_expr *const x = (const struct st_expr *) p;
  return hash_rtx (x->pattern, GET_MODE (x->pattern), &do_not_record_p, NULL, false);
}

static int
pre_st_expr_eq (const void *p1, const void *p2)
{
  const struct st_expr *const ptr1 = (const struct st_expr *) p1,
    *const ptr2 = (const struct st_expr *) p2;
  return exp_equiv_p (ptr1->pattern, ptr2->pattern, 0, true);
}
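/* Illustrative note, not part of the original code: the two callbacks
   above are meant to be paired in a libiberty hash table, the same way
   compute_store_table sets it up later in this file:

       htab_t table = htab_create (13, pre_st_expr_hash, pre_st_expr_eq, NULL);

   Lookups hash on the MEM pattern with hash_rtx and compare patterns
   with exp_equiv_p, so all stores to the same memory location share a
   single st_expr entry.  */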
/* This will search the st_expr list for a matching expression.  If it
   doesn't find one, we create one and initialize it.  */

static struct st_expr *
st_expr_entry (rtx x)
{
  int do_not_record_p = 0;
  struct st_expr * ptr;
  unsigned int hash;
  void **slot;
  struct st_expr e;

  hash = hash_rtx (x, GET_MODE (x), &do_not_record_p,
                   NULL, /*have_reg_qty=*/false);

  e.pattern = x;
  slot = htab_find_slot_with_hash (store_motion_mems_table, &e, hash, INSERT);
  if (*slot)
    return (struct st_expr *)*slot;

  ptr = XNEW (struct st_expr);

  ptr->next         = store_motion_mems;
  ptr->pattern      = x;
  ptr->pattern_regs = NULL_RTX;
  ptr->antic_stores = NULL_RTX;
  ptr->avail_stores = NULL_RTX;
  ptr->reaching_reg = NULL_RTX;
  ptr->index        = 0;
  ptr->hash_index   = hash;
  store_motion_mems = ptr;
  *slot = ptr;

  return ptr;
}
/* Free up an individual st_expr entry.  */

static void
free_st_expr_entry (struct st_expr * ptr)
{
  free_INSN_LIST_list (& ptr->antic_stores);
  free_INSN_LIST_list (& ptr->avail_stores);

  free (ptr);
}
/* Free up all memory associated with the st_expr list.  */

static void
free_store_motion_mems (void)
{
  if (store_motion_mems_table)
    htab_delete (store_motion_mems_table);
  store_motion_mems_table = NULL;

  while (store_motion_mems)
    {
      struct st_expr * tmp = store_motion_mems;
      store_motion_mems = store_motion_mems->next;
      free_st_expr_entry (tmp);
    }
  store_motion_mems = NULL;
}
/* Assign each element of the list of mems a monotonically increasing value.  */

static int
enumerate_store_motion_mems (void)
{
  struct st_expr * ptr;
  int n = 0;

  for (ptr = store_motion_mems; ptr != NULL; ptr = ptr->next)
    ptr->index = n++;

  return n;
}

/* Return first item in the list.  */

static inline struct st_expr *
first_st_expr (void)
{
  return store_motion_mems;
}

/* Return the next item in the list after the specified one.  */

static inline struct st_expr *
next_st_expr (struct st_expr * ptr)
{
  return ptr->next;
}
/* Dump debugging info about the store_motion_mems list.  */

static void
print_store_motion_mems (FILE * file)
{
  struct st_expr * ptr;

  fprintf (file, "STORE_MOTION list of MEM exprs considered:\n");

  for (ptr = first_st_expr (); ptr != NULL; ptr = next_st_expr (ptr))
    {
      fprintf (file, "  Pattern (%3d): ", ptr->index);

      print_rtl (file, ptr->pattern);

      fprintf (file, "\n         ANTIC stores : ");

      if (ptr->antic_stores)
        print_rtl (file, ptr->antic_stores);
      else
        fprintf (file, "(nil)");

      fprintf (file, "\n         AVAIL stores : ");

      if (ptr->avail_stores)
        print_rtl (file, ptr->avail_stores);
      else
        fprintf (file, "(nil)");

      fprintf (file, "\n\n");
    }

  fprintf (file, "\n");
}
/* Return zero if some of the registers in list X are killed
   due to the set of registers in bitmap REGS_SET.  */

static bool
store_ops_ok (const_rtx x, int *regs_set)
{
  const_rtx reg;

  for (; x; x = XEXP (x, 1))
    {
      reg = XEXP (x, 0);
      if (regs_set[REGNO (reg)])
        return false;
    }

  return true;
}
/* Helper for extract_mentioned_regs.  */

static int
extract_mentioned_regs_1 (rtx *loc, void *data)
{
  rtx *mentioned_regs_p = (rtx *) data;

  if (REG_P (*loc))
    *mentioned_regs_p = alloc_EXPR_LIST (0, *loc, *mentioned_regs_p);

  return 0;
}

/* Returns a list of registers mentioned in X.
   FIXME: A regset would be prettier and less expensive.  */

static rtx
extract_mentioned_regs (rtx x)
{
  rtx mentioned_regs = NULL;
  for_each_rtx (&x, extract_mentioned_regs_1, &mentioned_regs);
  return mentioned_regs;
}
/* Check to see if the load X is aliased with STORE_PATTERN.
   AFTER is true if we are checking the case when STORE_PATTERN occurs
   after X.  */

static bool
load_kills_store (const_rtx x, const_rtx store_pattern, int after)
{
  if (after)
    return anti_dependence (x, store_pattern);
  else
    return true_dependence (store_pattern, GET_MODE (store_pattern), x);
}
/* Go through the entire rtx X, looking for any loads which might alias
   STORE_PATTERN.  Return true if found.
   AFTER is true if we are checking the case when STORE_PATTERN occurs
   after the insn X.  */

static bool
find_loads (const_rtx x, const_rtx store_pattern, int after)
{
  const char * fmt;
  int i, j;
  bool ret = false;

  if (!x)
    return false;

  if (GET_CODE (x) == SET)
    x = SET_SRC (x);

  if (MEM_P (x))
    {
      if (load_kills_store (x, store_pattern, after))
        return true;
    }

  /* Recursively process the insn.  */
  fmt = GET_RTX_FORMAT (GET_CODE (x));

  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
    {
      if (fmt[i] == 'e')
        ret |= find_loads (XEXP (x, i), store_pattern, after);
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          ret |= find_loads (XVECEXP (x, i, j), store_pattern, after);
    }
  return ret;
}
/* Go through pattern PAT looking for any loads which might kill the
   store in X.  Return true if found.
   AFTER is true if we are checking the case when the loads that kill X
   occur after the insn for PAT.  */

static bool
store_killed_in_pat (const_rtx x, const_rtx pat, int after)
{
  if (GET_CODE (pat) == SET)
    {
      rtx dest = SET_DEST (pat);

      if (GET_CODE (dest) == ZERO_EXTRACT)
        dest = XEXP (dest, 0);

      /* Check for memory stores to aliased objects.  */
      if (MEM_P (dest)
          && !exp_equiv_p (dest, x, 0, true))
        {
          if (after)
            {
              if (output_dependence (dest, x))
                return true;
            }
          else
            {
              if (output_dependence (x, dest))
                return true;
            }
        }
    }

  if (find_loads (pat, x, after))
    return true;

  return false;
}
/* Check if INSN kills the store pattern X (is aliased with it).
   AFTER is true if we are checking the case when store X occurs
   after the insn.  Return true if it does.  */

static bool
store_killed_in_insn (const_rtx x, const_rtx x_regs, const_rtx insn, int after)
{
  const_rtx reg, base, note, pat;

  if (! NONDEBUG_INSN_P (insn))
    return false;

  if (CALL_P (insn))
    {
      /* A normal or pure call might read from pattern,
         but a const call will not.  */
      if (!RTL_CONST_CALL_P (insn))
        return true;

      /* But even a const call reads its parameters.  Check whether the
         base of some of the registers used in the mem is the stack
         pointer.  */
      for (reg = x_regs; reg; reg = XEXP (reg, 1))
        {
          base = find_base_term (XEXP (reg, 0));
          if (!base
              || (GET_CODE (base) == ADDRESS
                  && GET_MODE (base) == Pmode
                  && XEXP (base, 0) == stack_pointer_rtx))
            return true;
        }

      return false;
    }

  pat = PATTERN (insn);
  if (GET_CODE (pat) == SET)
    {
      if (store_killed_in_pat (x, pat, after))
        return true;
    }
  else if (GET_CODE (pat) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (pat, 0); i++)
        if (store_killed_in_pat (x, XVECEXP (pat, 0, i), after))
          return true;
    }
  else if (find_loads (PATTERN (insn), x, after))
    return true;

  /* If this insn has a REG_EQUAL or REG_EQUIV note referencing a memory
     location aliased with X, then this insn kills X.  */
  note = find_reg_equal_equiv_note (insn);
  if (! note)
    return false;
  note = XEXP (note, 0);

  /* However, if the note represents a must alias rather than a may
     alias relationship, then it does not kill X.  */
  if (exp_equiv_p (note, x, 0, true))
    return false;

  /* See if there are any aliased loads in the note.  */
  return find_loads (note, x, after);
}
/* Returns true if the expression X is loaded or clobbered on or after INSN
   within basic block BB.  REGS_SET_AFTER is bitmap of registers set in
   or after the insn.  X_REGS is list of registers mentioned in X.  If the
   store is killed, return the last insn in which it occurs in FAIL_INSN.  */

static bool
store_killed_after (const_rtx x, const_rtx x_regs, const_rtx insn,
                    const_basic_block bb,
                    int *regs_set_after, rtx *fail_insn)
{
  rtx last = BB_END (bb), act;

  if (!store_ops_ok (x_regs, regs_set_after))
    {
      /* We do not know where it will happen.  */
      if (fail_insn)
        *fail_insn = NULL_RTX;
      return true;
    }

  /* Scan from the end, so that fail_insn is determined correctly.  */
  for (act = last; act != PREV_INSN (insn); act = PREV_INSN (act))
    if (store_killed_in_insn (x, x_regs, act, false))
      {
        if (fail_insn)
          *fail_insn = act;
        return true;
      }

  return false;
}
/* Returns true if the expression X is loaded or clobbered on or before INSN
   within basic block BB.  X_REGS is list of registers mentioned in X.
   REGS_SET_BEFORE is bitmap of registers set before or in this insn.  */

static bool
store_killed_before (const_rtx x, const_rtx x_regs, const_rtx insn,
                     const_basic_block bb, int *regs_set_before)
{
  rtx first = BB_HEAD (bb);

  if (!store_ops_ok (x_regs, regs_set_before))
    return true;

  for ( ; insn != PREV_INSN (first); insn = PREV_INSN (insn))
    if (store_killed_in_insn (x, x_regs, insn, true))
      return true;

  return false;
}
/* The last insn in the basic block that compute_store_table is processing,
   where store_killed_after is true for X.
   Since we go through the basic block from BB_END to BB_HEAD, this is
   also the available store at the end of the basic block.  Therefore
   this is in effect a cache, to avoid calling store_killed_after for
   equivalent aliasing store expressions.
   This value is only meaningful during the computation of the store
   table.  We hi-jack the REACHING_REG field of struct st_expr to save
   a bit of memory.  */
#define LAST_AVAIL_CHECK_FAILURE(x) ((x)->reaching_reg)
/* Determine whether INSN is a MEM store pattern that we will consider moving.
   REGS_SET_BEFORE is bitmap of registers set before (and including) the
   current insn, REGS_SET_AFTER is bitmap of registers set after (and
   including) the insn in this basic block.  We must be passing through BB
   from head to end, as we are using this fact to speed things up.

   The results are stored this way:

   -- the first anticipatable expression is added into ANTIC_STORES
   -- if the processed expression is not anticipatable, NULL_RTX is added
      there instead, so that we can use it as an indicator that no further
      expression of this type may be anticipatable
   -- if the expression is available, it is added as head of AVAIL_STORES;
      consequently, all of them but this head are dead and may be deleted.
   -- if the expression is not available, the insn because of which it fails
      to be available is stored in REACHING_REG (via LAST_AVAIL_CHECK_FAILURE).

   Things are complicated a bit by the fact that there may already be stores
   to the same MEM from other blocks; also, the caller must take care of the
   necessary cleanup of the temporary markers after the end of the basic
   block.  */
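/* Worked example for the scheme above (illustrative only; p, a and b are
   hypothetical).  For a basic block containing

       *p = a;    (S1)
       *p = b;    (S2)

   S1 is the first anticipatable store of *p, so it ends up at the head of
   ANTIC_STORES for this block.  Both stores reach the end of the block
   unkilled, so AVAIL_STORES becomes (S2 S1) with S2 at its head; as noted
   above, everything but the head (here S1) is dead and may be deleted.  */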
static void
find_moveable_store (rtx insn, int *regs_set_before, int *regs_set_after)
{
  struct st_expr * ptr;
  rtx dest, set, tmp;
  int check_anticipatable, check_available;
  basic_block bb = BLOCK_FOR_INSN (insn);

  set = single_set (insn);
  if (!set)
    return;

  dest = SET_DEST (set);

  if (! MEM_P (dest) || MEM_VOLATILE_P (dest)
      || GET_MODE (dest) == BLKmode)
    return;

  if (side_effects_p (dest))
    return;

  /* If we are handling exceptions, we must be careful with memory references
     that may trap.  If we are not, the behavior is undefined, so we may just
     continue.  */
  if (cfun->can_throw_non_call_exceptions && may_trap_p (dest))
    return;

  /* Even if the destination cannot trap, the source may.  In this case we'd
     need to handle updating the REG_EH_REGION note.  */
  if (find_reg_note (insn, REG_EH_REGION, NULL_RTX))
    return;

  /* Make sure that the SET_SRC of this store insn can be assigned to
     a register, or we will fail later on in replace_store_insn, which
     assumes that we can do this.  But sometimes the target machine has
     oddities like MEM read-modify-write instruction.  See for example
     ...  */
  if (!can_assign_to_reg_without_clobbers_p (SET_SRC (set)))
    return;

  ptr = st_expr_entry (dest);
  if (!ptr->pattern_regs)
    ptr->pattern_regs = extract_mentioned_regs (dest);

  /* Do not check for anticipatability if we either found one anticipatable
     store already, or tested for one and found out that it was killed.  */
  check_anticipatable = 0;
  if (!ptr->antic_stores)
    check_anticipatable = 1;
  else
    {
      tmp = XEXP (ptr->antic_stores, 0);
      if (tmp != NULL_RTX
          && BLOCK_FOR_INSN (tmp) != bb)
        check_anticipatable = 1;
    }
  if (check_anticipatable)
    {
      if (store_killed_before (dest, ptr->pattern_regs, insn, bb, regs_set_before))
        tmp = NULL_RTX;
      else
        tmp = insn;
      ptr->antic_stores = alloc_INSN_LIST (tmp, ptr->antic_stores);
    }

  /* It is not necessary to check whether store is available if we did
     it successfully before; if we failed before, do not bother to check
     until we reach the insn that caused us to fail.  */
  check_available = 0;
  if (!ptr->avail_stores)
    check_available = 1;
  else
    {
      tmp = XEXP (ptr->avail_stores, 0);
      if (BLOCK_FOR_INSN (tmp) != bb)
        check_available = 1;
    }
  if (check_available)
    {
      /* Check that we have already reached the insn at which the check
         failed last time.  */
      if (LAST_AVAIL_CHECK_FAILURE (ptr))
        {
          for (tmp = BB_END (bb);
               tmp != insn && tmp != LAST_AVAIL_CHECK_FAILURE (ptr);
               tmp = PREV_INSN (tmp))
            continue;
          if (tmp == insn)
            check_available = 0;
        }
      else
        check_available = store_killed_after (dest, ptr->pattern_regs, insn,
                                              bb, regs_set_after,
                                              &LAST_AVAIL_CHECK_FAILURE (ptr));
    }
  if (!check_available)
    ptr->avail_stores = alloc_INSN_LIST (insn, ptr->avail_stores);
}
/* Find available and anticipatable stores.  */

static int
compute_store_table (void)
{
  int ret;
  basic_block bb;
#ifdef ENABLE_CHECKING
  unsigned regno;
#endif
  rtx insn, tmp;
  df_ref *def_rec;
  int *last_set_in, *already_set;
  struct st_expr * ptr, **prev_next_ptr_ptr;
  unsigned int max_gcse_regno = max_reg_num ();

  store_motion_mems = NULL;
  store_motion_mems_table = htab_create (13, pre_st_expr_hash,
                                         pre_st_expr_eq, NULL);
  last_set_in = XCNEWVEC (int, max_gcse_regno);
  already_set = XNEWVEC (int, max_gcse_regno);

  /* Find all the stores we care about.  */
  FOR_EACH_BB (bb)
    {
      /* First compute the registers set in this block.  */
      FOR_BB_INSNS (bb, insn)
        {
          if (! NONDEBUG_INSN_P (insn))
            continue;

          for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
            last_set_in[DF_REF_REGNO (*def_rec)] = INSN_UID (insn);
        }

      /* Now find the stores.  */
      memset (already_set, 0, sizeof (int) * max_gcse_regno);
      FOR_BB_INSNS (bb, insn)
        {
          if (! NONDEBUG_INSN_P (insn))
            continue;

          for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
            already_set[DF_REF_REGNO (*def_rec)] = INSN_UID (insn);

          /* Now that we've marked regs, look for stores.  */
          find_moveable_store (insn, already_set, last_set_in);

          /* Unmark regs that are no longer set.  */
          for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
            if (last_set_in[DF_REF_REGNO (*def_rec)] == INSN_UID (insn))
              last_set_in[DF_REF_REGNO (*def_rec)] = 0;
        }

#ifdef ENABLE_CHECKING
      /* last_set_in should now be all-zero.  */
      for (regno = 0; regno < max_gcse_regno; regno++)
        gcc_assert (!last_set_in[regno]);
#endif

      /* Clear temporary marks.  */
      for (ptr = first_st_expr (); ptr != NULL; ptr = next_st_expr (ptr))
        {
          LAST_AVAIL_CHECK_FAILURE (ptr) = NULL_RTX;
          if (ptr->antic_stores
              && (tmp = XEXP (ptr->antic_stores, 0)) == NULL_RTX)
            ptr->antic_stores = XEXP (ptr->antic_stores, 1);
        }
    }

  /* Remove the stores that are not available anywhere, as there will
     be no opportunity to optimize them.  */
  for (ptr = store_motion_mems, prev_next_ptr_ptr = &store_motion_mems;
       ptr != NULL;
       ptr = *prev_next_ptr_ptr)
    {
      if (! ptr->avail_stores)
        {
          *prev_next_ptr_ptr = ptr->next;
          htab_remove_elt_with_hash (store_motion_mems_table,
                                     ptr, ptr->hash_index);
          free_st_expr_entry (ptr);
        }
      else
        prev_next_ptr_ptr = &ptr->next;
    }

  ret = enumerate_store_motion_mems ();

  if (dump_file)
    print_store_motion_mems (dump_file);

  free (last_set_in);
  free (already_set);

  return ret;
}
/* In all code following this point, REACHING_REG has its original
   meaning again.  To avoid confusion, undef the accessor macro used
   for the temporary marks in compute_store_table.  */
#undef LAST_AVAIL_CHECK_FAILURE
/* Insert an instruction at the beginning of a basic block, and update
   the BB_HEAD if needed.  */

static void
insert_insn_start_basic_block (rtx insn, basic_block bb)
{
  /* Insert at start of successor block.  */
  rtx prev = PREV_INSN (BB_HEAD (bb));
  rtx before = BB_HEAD (bb);
  while (before != 0)
    {
      if (! LABEL_P (before)
          && !NOTE_INSN_BASIC_BLOCK_P (before))
        break;
      prev = before;
      if (prev == BB_END (bb))
        break;
      before = NEXT_INSN (before);
    }

  insn = emit_insn_after_noloc (insn, prev, bb);

  if (dump_file)
    {
      fprintf (dump_file, "STORE_MOTION insert store at start of BB %d:\n",
               bb->index);
      print_inline_rtx (dump_file, insn, 6);
      fprintf (dump_file, "\n");
    }
}
/* This routine will insert a store on an edge.  EXPR is the st_expr entry for
   the memory reference, and E is the edge to insert it on.  Returns nonzero
   if an edge insertion was performed.  */

static int
insert_store (struct st_expr * expr, edge e)
{
  rtx reg, insn;
  basic_block bb;
  edge tmp;
  edge_iterator ei;

  /* We did all the deletes before this insert, so if we didn't delete a
     store, then we haven't set the reaching reg yet either.  */
  if (expr->reaching_reg == NULL_RTX)
    return 0;

  if (e->flags & EDGE_FAKE)
    return 0;

  reg = expr->reaching_reg;
  insn = gen_move_insn (copy_rtx (expr->pattern), reg);

  /* If we are inserting this expression on ALL predecessor edges of a BB,
     insert it at the start of the BB, and reset the insert bits on the other
     edges so we don't try to insert it on the other edges.  */
  bb = e->dest;
  FOR_EACH_EDGE (tmp, ei, e->dest->preds)
    if (!(tmp->flags & EDGE_FAKE))
      {
        int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);

        gcc_assert (index != EDGE_INDEX_NO_EDGE);
        if (! TEST_BIT (st_insert_map[index], expr->index))
          break;
      }

  /* If tmp is NULL, we found an insertion on every edge, blank the
     insertion vector for these edges, and insert at the start of the BB.  */
  if (!tmp && bb != EXIT_BLOCK_PTR)
    {
      FOR_EACH_EDGE (tmp, ei, e->dest->preds)
        {
          int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
          RESET_BIT (st_insert_map[index], expr->index);
        }
      insert_insn_start_basic_block (insn, bb);
      return 0;
    }

  /* We can't put stores in the front of blocks pointed to by abnormal
     edges since that may put a store where one didn't used to be.  */
  gcc_assert (!(e->flags & EDGE_ABNORMAL));

  insert_insn_on_edge (insn, e);

  if (dump_file)
    {
      fprintf (dump_file, "STORE_MOTION insert insn on edge (%d, %d):\n",
               e->src->index, e->dest->index);
      print_inline_rtx (dump_file, insn, 6);
      fprintf (dump_file, "\n");
    }

  return 1;
}
/* Remove any REG_EQUAL or REG_EQUIV notes containing a reference to the
   memory location in SMEXPR set in basic block BB.

   This could be rather expensive.  */

static void
remove_reachable_equiv_notes (basic_block bb, struct st_expr *smexpr)
{
  edge_iterator *stack, ei;
  int sp;
  edge act;
  sbitmap visited = sbitmap_alloc (last_basic_block);
  rtx last, insn, note;
  rtx mem = smexpr->pattern;

  stack = XNEWVEC (edge_iterator, n_basic_blocks);
  sp = 0;
  ei = ei_start (bb->succs);

  sbitmap_zero (visited);

  act = (EDGE_COUNT (ei_container (ei)) > 0 ? EDGE_I (ei_container (ei), 0) : NULL);
  while (1)
    {
      if (!act)
        {
          if (!sp)
            {
              free (stack);
              sbitmap_free (visited);
              return;
            }
          act = ei_edge (stack[--sp]);
        }
      bb = act->dest;

      if (bb == EXIT_BLOCK_PTR
          || TEST_BIT (visited, bb->index))
        {
          if (!ei_end_p (ei))
            ei_next (&ei);
          act = (! ei_end_p (ei)) ? ei_edge (ei) : NULL;
          continue;
        }
      SET_BIT (visited, bb->index);

      if (TEST_BIT (st_antloc[bb->index], smexpr->index))
        {
          for (last = smexpr->antic_stores;
               BLOCK_FOR_INSN (XEXP (last, 0)) != bb;
               last = XEXP (last, 1))
            continue;
          last = XEXP (last, 0);
        }
      else
        last = NEXT_INSN (BB_END (bb));

      for (insn = BB_HEAD (bb); insn != last; insn = NEXT_INSN (insn))
        if (NONDEBUG_INSN_P (insn))
          {
            note = find_reg_equal_equiv_note (insn);
            if (!note || !exp_equiv_p (XEXP (note, 0), mem, 0, true))
              continue;

            if (dump_file)
              fprintf (dump_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
                       INSN_UID (insn));
            remove_note (insn, note);
          }

      if (!ei_end_p (ei))
        ei_next (&ei);
      act = (! ei_end_p (ei)) ? ei_edge (ei) : NULL;

      if (EDGE_COUNT (bb->succs) > 0)
        {
          if (act)
            stack[sp++] = ei;
          ei = ei_start (bb->succs);
          act = (EDGE_COUNT (ei_container (ei)) > 0 ? EDGE_I (ei_container (ei), 0) : NULL);
        }
    }
}
/* This routine will replace a store with a SET to a specified register.  */

static void
replace_store_insn (rtx reg, rtx del, basic_block bb, struct st_expr *smexpr)
{
  rtx insn, mem, note, set, ptr;

  mem = smexpr->pattern;
  insn = gen_move_insn (reg, SET_SRC (single_set (del)));

  for (ptr = smexpr->antic_stores; ptr; ptr = XEXP (ptr, 1))
    if (XEXP (ptr, 0) == del)
      {
        XEXP (ptr, 0) = insn;
        break;
      }

  /* Move the notes from the deleted insn to its replacement.  */
  REG_NOTES (insn) = REG_NOTES (del);

  /* Emit the insn AFTER all the notes are transferred.
     This is cheaper since we avoid df rescanning for the note change.  */
  insn = emit_insn_after (insn, del);

  if (dump_file)
    {
      fprintf (dump_file,
               "STORE_MOTION delete insn in BB %d:\n      ", bb->index);
      print_inline_rtx (dump_file, del, 6);
      fprintf (dump_file, "\nSTORE_MOTION replaced with insn:\n      ");
      print_inline_rtx (dump_file, insn, 6);
      fprintf (dump_file, "\n");
    }

  delete_insn (del);

  /* Now we must handle REG_EQUAL notes whose contents is equal to the mem;
     they are no longer accurate provided that they are reached by this
     definition, so drop them.  */
  for (; insn != NEXT_INSN (BB_END (bb)); insn = NEXT_INSN (insn))
    if (NONDEBUG_INSN_P (insn))
      {
        set = single_set (insn);
        if (!set)
          continue;
        if (exp_equiv_p (SET_DEST (set), mem, 0, true))
          return;
        note = find_reg_equal_equiv_note (insn);
        if (!note || !exp_equiv_p (XEXP (note, 0), mem, 0, true))
          continue;

        if (dump_file)
          fprintf (dump_file, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
                   INSN_UID (insn));
        remove_note (insn, note);
      }
  remove_reachable_equiv_notes (bb, smexpr);
}
/* Delete a store, but copy the value that would have been stored into
   the reaching_reg for later storing.  */

static void
delete_store (struct st_expr * expr, basic_block bb)
{
  rtx reg, i, del;

  if (expr->reaching_reg == NULL_RTX)
    expr->reaching_reg = gen_reg_rtx_and_attrs (expr->pattern);

  reg = expr->reaching_reg;
  for (i = expr->avail_stores; i; i = XEXP (i, 1))
    {
      del = XEXP (i, 0);
      if (BLOCK_FOR_INSN (del) == bb)
        {
          /* We know there is only one since we deleted redundant
             ones during the available computation.  */
          replace_store_insn (reg, del, bb, expr);
          break;
        }
    }
}
/* Fill in available, anticipatable, transparent and kill vectors in
   STORE_DATA, based on lists of available and anticipatable stores.  */

static void
build_store_vectors (void)
{
  basic_block bb;
  int *regs_set_in_block;
  rtx insn, st;
  struct st_expr * ptr;
  unsigned int max_gcse_regno = max_reg_num ();

  /* Build the gen_vector.  This is any store in the table which is not killed
     by aliasing later in its block.  */
  st_avloc = sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (st_avloc, last_basic_block);

  st_antloc = sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (st_antloc, last_basic_block);

  for (ptr = first_st_expr (); ptr != NULL; ptr = next_st_expr (ptr))
    {
      for (st = ptr->avail_stores; st != NULL; st = XEXP (st, 1))
        {
          insn = XEXP (st, 0);
          bb = BLOCK_FOR_INSN (insn);

          /* If we've already seen an available expression in this block,
             we can delete this one (It occurs earlier in the block).  We'll
             copy the SRC expression to an unused register in case there
             are any side effects.  */
          if (TEST_BIT (st_avloc[bb->index], ptr->index))
            {
              rtx r = gen_reg_rtx_and_attrs (ptr->pattern);
              if (dump_file)
                fprintf (dump_file, "Removing redundant store:\n");
              replace_store_insn (r, XEXP (st, 0), bb, ptr);
            }
          else
            SET_BIT (st_avloc[bb->index], ptr->index);
        }

      for (st = ptr->antic_stores; st != NULL; st = XEXP (st, 1))
        {
          insn = XEXP (st, 0);
          bb = BLOCK_FOR_INSN (insn);
          SET_BIT (st_antloc[bb->index], ptr->index);
        }
    }

  st_kill = sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (st_kill, last_basic_block);

  st_transp = sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (st_transp, last_basic_block);
  regs_set_in_block = XNEWVEC (int, max_gcse_regno);

  FOR_EACH_BB (bb)
    {
      memset (regs_set_in_block, 0, sizeof (int) * max_gcse_regno);

      FOR_BB_INSNS (bb, insn)
        if (NONDEBUG_INSN_P (insn))
          {
            df_ref *def_rec;
            for (def_rec = DF_INSN_DEFS (insn); *def_rec; def_rec++)
              {
                unsigned int ref_regno = DF_REF_REGNO (*def_rec);
                if (ref_regno < max_gcse_regno)
                  regs_set_in_block[DF_REF_REGNO (*def_rec)] = 1;
              }
          }

      for (ptr = first_st_expr (); ptr != NULL; ptr = next_st_expr (ptr))
        {
          if (store_killed_after (ptr->pattern, ptr->pattern_regs, BB_HEAD (bb),
                                  bb, regs_set_in_block, NULL))
            {
              /* It should not be necessary to consider the expression
                 killed if it is both anticipatable and available.  */
              if (!TEST_BIT (st_antloc[bb->index], ptr->index)
                  || !TEST_BIT (st_avloc[bb->index], ptr->index))
                SET_BIT (st_kill[bb->index], ptr->index);
            }
          else
            SET_BIT (st_transp[bb->index], ptr->index);
        }
    }

  free (regs_set_in_block);

  if (dump_file)
    {
      dump_sbitmap_vector (dump_file, "st_antloc", "", st_antloc, last_basic_block);
      dump_sbitmap_vector (dump_file, "st_kill", "", st_kill, last_basic_block);
      dump_sbitmap_vector (dump_file, "st_transp", "", st_transp, last_basic_block);
      dump_sbitmap_vector (dump_file, "st_avloc", "", st_avloc, last_basic_block);
    }
}
/* Free memory used by store motion.  */

static void
free_store_memory (void)
{
  free_store_motion_mems ();

  if (st_avloc)
    sbitmap_vector_free (st_avloc);
  if (st_kill)
    sbitmap_vector_free (st_kill);
  if (st_transp)
    sbitmap_vector_free (st_transp);
  if (st_antloc)
    sbitmap_vector_free (st_antloc);
  if (st_insert_map)
    sbitmap_vector_free (st_insert_map);
  if (st_delete_map)
    sbitmap_vector_free (st_delete_map);

  st_avloc = st_kill = st_transp = st_antloc = NULL;
  st_insert_map = st_delete_map = NULL;
}
/* Perform store motion.  Much like gcse, except we move expressions the
   other way by looking at the flowgraph in reverse.
   Return non-zero if transformations are performed by the pass.  */

static int
one_store_motion_pass (void)
{
  basic_block bb;
  int x;
  struct st_expr * ptr;
  int did_edge_inserts = 0;
  int n_stores_deleted = 0;
  int n_stores_created = 0;

  init_alias_analysis ();

  /* Find all the available and anticipatable stores.  */
  num_stores = compute_store_table ();
  if (num_stores == 0)
    {
      htab_delete (store_motion_mems_table);
      store_motion_mems_table = NULL;
      end_alias_analysis ();
      return 0;
    }

  /* Now compute kill & transp vectors.  */
  build_store_vectors ();
  add_noreturn_fake_exit_edges ();
  connect_infinite_loops_to_exit ();

  edge_list = pre_edge_rev_lcm (num_stores, st_transp, st_avloc,
                                st_antloc, st_kill, &st_insert_map,
                                &st_delete_map);

  /* Now we want to insert the new stores which are going to be needed.  */
  for (ptr = first_st_expr (); ptr != NULL; ptr = next_st_expr (ptr))
    {
      /* If any of the edges we have above are abnormal, we can't move this
         store.  */
      for (x = NUM_EDGES (edge_list) - 1; x >= 0; x--)
        if (TEST_BIT (st_insert_map[x], ptr->index)
            && (INDEX_EDGE (edge_list, x)->flags & EDGE_ABNORMAL))
          break;

      if (x >= 0)
        {
          if (dump_file != NULL)
            fprintf (dump_file,
                     "Can't replace store %d: abnormal edge from %d to %d\n",
                     ptr->index, INDEX_EDGE (edge_list, x)->src->index,
                     INDEX_EDGE (edge_list, x)->dest->index);
          continue;
        }

      /* Now delete the stores that this expression makes redundant, and
         insert the compensating stores computed by the LCM dataflow.  */

      FOR_EACH_BB (bb)
        if (TEST_BIT (st_delete_map[bb->index], ptr->index))
          {
            delete_store (ptr, bb);
            n_stores_deleted++;
          }

      for (x = 0; x < NUM_EDGES (edge_list); x++)
        if (TEST_BIT (st_insert_map[x], ptr->index))
          {
            did_edge_inserts |= insert_store (ptr, INDEX_EDGE (edge_list, x));
            n_stores_created++;
          }
    }

  if (did_edge_inserts)
    commit_edge_insertions ();

  free_store_memory ();
  free_edge_list (edge_list);
  remove_fake_exit_edges ();
  end_alias_analysis ();

  if (dump_file)
    {
      fprintf (dump_file, "STORE_MOTION of %s, %d basic blocks, ",
               current_function_name (), n_basic_blocks);
      fprintf (dump_file, "%d insns deleted, %d insns created\n",
               n_stores_deleted, n_stores_created);
    }

  return (n_stores_deleted > 0 || n_stores_created > 0);
}
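/* Illustrative note, not part of the original code: pre_edge_rev_lcm is
   ordinary lazy code motion run on the reverse CFG, so for each store
   expression ST_DELETE_MAP is indexed by basic block (delete_store turns
   the store there into a copy to REACHING_REG) and ST_INSERT_MAP is
   indexed by edge (insert_store emits the real store there).  For
   example, a store to *p performed in both arms of an if-else diamond
   may be deleted in the two arms and inserted on the join block's
   incoming edges; since all incoming edges are flagged, insert_store
   places a single store at the start of the join block instead.  */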
static bool
gate_rtl_store_motion (void)
{
  return optimize > 0 && flag_gcse_sm
    && !cfun->calls_setjmp
    && optimize_function_for_speed_p (cfun)
    && dbg_cnt (store_motion);
}

static unsigned int
execute_rtl_store_motion (void)
{
  delete_unreachable_blocks ();
  df_analyze ();
  flag_rerun_cse_after_global_opts |= one_store_motion_pass ();
  return 0;
}
struct rtl_opt_pass pass_rtl_store_motion =
{
 {
  RTL_PASS,
  "store_motion",                       /* name */
  gate_rtl_store_motion,                /* gate */
  execute_rtl_store_motion,             /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_LSM,                               /* tv_id */
  PROP_cfglayout,                       /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_verify_flow | TODO_ggc_collect   /* todo_flags_finish */
 }
};