/* RTL dead store elimination.
   Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

   Contributed by Richard Sandiford <rsandifor@codesourcery.com>
   and Kenneth Zadeck <zadeck@naturalbridge.com>

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "tree-pass.h"
#include "alloc-pool.h"
#include "insn-config.h"
/* This file contains three techniques for performing Dead Store
   Elimination (dse).

   * The first technique performs dse locally on any base address.  It
   is based on cselib, which is a local value numbering technique.
   This technique is local to a basic block but deals with fairly
   general addresses.

   * The second technique performs dse globally but is restricted to
   base addresses that are either constant or are relative to the
   frame pointer.

   * The third technique (which is only done after register allocation)
   processes the spill slots.  This differs from the second
   technique because it takes advantage of the fact that spilling is
   completely free from the effects of aliasing.

   Logically, dse is a backwards dataflow problem.  A store can be
   deleted if it cannot be reached in the backward direction by any
   use of the value being stored.  However, the local technique uses a
   forwards scan of the basic block because cselib requires that the
   block be processed in that order.
   The pass is logically broken into 7 steps:

   0) Initialization.

   1) The local algorithm, as well as scanning the insns for the two
   global algorithms.

   2) Analysis to see if the global algs are necessary.  In the case
   of stores based on a constant address, there must be at least two
   stores to that address, to make it possible to delete some of the
   stores.  In the case of stores off of the frame or spill related
   stores, only one store to an address is necessary because those
   stores die at the end of the function.

   3) Set up the global dataflow equations based on processing the
   info parsed in the first step.

   4) Solve the dataflow equations.

   5) Delete the insns that the global analysis has indicated are
   necessary to delete.

   6) Delete insns that store the same value as a preceding store
   where the earlier store couldn't be eliminated.

   7) Cleanup.
   This step uses cselib and canon_rtx to build the largest expression
   possible for each address.  This pass is a forwards pass through
   each basic block.  From the point of view of the global technique,
   the first pass could examine a block in either direction.  The
   forwards ordering is to accommodate cselib.

   We make a simplifying assumption: addresses fall into four broad
   categories:

   1) base has rtx_varies_p == false, offset is constant.
   2) base has rtx_varies_p == false, offset variable.
   3) base has rtx_varies_p == true, offset constant.
   4) base has rtx_varies_p == true, offset variable.

   The local passes are able to process all 4 kinds of addresses.  The
   global pass only handles (1).
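
   As an illustrative sketch (not part of the original text), the four
   categories correspond to addresses such as:

     1) (plus (reg fp) (const_int 8))    base does not vary, constant offset
     2) (plus (reg fp) (reg i))          base does not vary, variable offset
     3) (plus (reg p)  (const_int 8))    base varies (a pseudo), constant offset
     4) (plus (reg p)  (reg i))          base varies (a pseudo), variable offset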
   The global problem is formulated as follows:

     A store, S1, to address A, where A is not relative to the stack
     frame, can be eliminated if all paths from S1 to the end of the
     function contain another store to A before a read of A.

     If the address A is relative to the stack frame, a store S2 to A
     can be eliminated if there are no paths from S2 that reach the
     end of the function that read A before another store to A.  In
     this case S2 can be deleted if there are paths from S2 to the
     end of the function that have no reads or writes to A.  This
     second case allows stores to the stack frame to be deleted that
     would otherwise die when the function returns.  This cannot be
     done if stores_off_frame_dead_at_return is not true.  See the doc
     for that variable for when it is false.
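
     For example (an illustrative sketch, not part of the original
     text), with a non-frame address A:

       *A = r1;   <- S1 is dead: every path to the exit reaches the
       ...           second store before any read of *A
       *A = r2;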
   The global problem is formulated as a backwards set union
   dataflow problem where the stores are the gens and reads are the
   kills.  Set union problems are rare and require some special
   handling given our representation of bitmaps.  A straightforward
   implementation requires a lot of bitmaps filled with 1s.
   These are expensive and cumbersome in our bitmap formulation so
   care has been taken to avoid large vectors filled with 1s.  See
   the comments in bb_info and in the dataflow confluence functions
   for details.
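
   As a sketch of the resulting equations (a paraphrase, not text from
   the original sources), with GEN holding the store positions and KILL
   the read positions of a block BB:

     IN(BB)  = GEN(BB) U (OUT(BB) - KILL(BB))
     OUT(BB) = intersection over the successors S of BB of IN(S)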
   There are two places for further enhancements to this algorithm:

   1) The original dse which was embedded in a pass called flow also
   did local address forwarding.  For example in

   A <- r100
   ... <- A

   flow would replace the right hand side of the second insn with a
   reference to r100.  Most of the information is available to add this
   to this pass.  It has not been done because it is a lot of work in
   the case that either r100 is assigned to between the first and
   second insn and/or the second insn is a load of part of the value
   stored by the first insn.

   insn 5 in gcc.c-torture/compile/990203-1.c simple case.
   insn 15 in gcc.c-torture/execute/20001017-2.c simple case.
   insn 25 in gcc.c-torture/execute/20001026-1.c simple case.
   insn 44 in gcc.c-torture/execute/20010910-1.c simple case.
   2) The cleaning up of spill code is quite profitable.  It currently
   depends on reading tea leaves and chicken entrails left by reload.
   This pass depends on reload creating a singleton alias set for each
   spill slot and telling the next dse pass which of these alias sets
   are the singletons.  Rather than analyze the addresses of the
   spills, dse's spill processing just does analysis of the loads and
   stores that use those alias sets.  There are three cases where this
   falls short:

   a) Reload sometimes creates the slot for one mode of access, and
   then inserts loads and/or stores for a smaller mode.  In this
   case, the current code just punts on the slot.  The proper thing
   to do is to back out and use one bit vector position for each
   byte of the entity associated with the slot.  This depends on
   KNOWING that reload always generates the accesses for each of the
   bytes in some canonical (read: easy to understand several
   passes after reload happens) way.

   b) Reload sometimes decides that the spill slot it allocated was not
   large enough for the mode and goes back and allocates more slots
   with the same mode and alias set.  The backout in this case is a
   little more graceful than (a).  In this case the slot is unmarked
   as being a spill slot and if the final address comes out to be based
   off the frame pointer, the global algorithm handles this slot.

   c) For any pass that may prespill, there is currently no
   mechanism to tell the dse pass that the slot being used has the
   special properties that reload uses.  It may be that all that is
   required is to have those passes make the same calls that reload
   does, assuming that the alias sets can be manipulated in the same
   way.  */
/* There are limits to the size of constant offsets we model for the
   global problem.  There are certainly test cases that exceed this
   limit; however, it is unlikely that there are important programs
   that really have constant offsets this size.  */
#define MAX_OFFSET (64 * 1024)

static bitmap scratch = NULL;
/* This structure holds information about a candidate store.  */
struct store_info
{

  /* False means this is a clobber.  */
  bool is_set;

  /* False if a single HOST_WIDE_INT bitmap is used for positions_needed.  */
  bool is_large;

  /* The id of the mem group of the base address.  If rtx_varies_p is
     true, this is -1.  Otherwise, it is the index into the group
     table.  */
  int group_id;

  /* This is the cselib value.  */
  cselib_val *cse_base;
  /* The canonized mem.  */
  rtx mem;

  /* Canonized MEM address for use by canon_true_dependence.  */
  rtx mem_addr;

  /* If this is non-zero, it is the alias set of a spill location.  */
  alias_set_type alias_set;

  /* The offset of the first byte and of one past the last byte
     associated with the operation.  */
  HOST_WIDE_INT begin, end;
  union
    {
      /* A bitmask as wide as the number of bytes in the word that
         contains a 1 if the byte may be needed.  The store is unused if
         all of the bits are 0.  This is used if IS_LARGE is false.  */
      unsigned HOST_WIDE_INT small_bitmask;

      struct
        {
          /* A bitmap with one bit per byte.  Cleared bit means the position
             is needed.  Used if IS_LARGE is true.  */
          bitmap bmap;

          /* Number of set bits (i.e. unneeded bytes) in BITMAP.  If it is
             equal to END - BEGIN, the whole store is unused.  */
          int count;
        } large;
    } positions_needed;
  /* The next store info for this insn.  */
  struct store_info *next;
  /* The right hand side of the store.  This is used if there is a
     subsequent reload of the mem's address somewhere later in the
     insn.  */
  rtx rhs;

  /* If rhs is or holds a constant, this contains that constant,
     otherwise NULL.  */
  rtx const_rhs;
  /* Set if this store stores the same constant value as REDUNDANT_REASON
     insn stored.  These aren't eliminated early, because doing that
     might prevent the earlier larger store from being eliminated.  */
  struct insn_info *redundant_reason;
};
274 /* Return a bitmask with the first N low bits set. */
static unsigned HOST_WIDE_INT
lowpart_bitmask (int n)
{
  unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT) 0;
  return mask >> (HOST_BITS_PER_WIDE_INT - n);
}
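
/* An illustrative note (not in the original sources): with a 64-bit
   HOST_WIDE_INT, lowpart_bitmask (3) is 0x7 and lowpart_bitmask (8) is
   0xff, i.e. one set bit for each of the N low byte positions tracked
   in positions_needed.small_bitmask.  */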
typedef struct store_info *store_info_t;
static alloc_pool cse_store_info_pool;
static alloc_pool rtx_store_info_pool;
287 /* This structure holds information about a load. These are only
288 built for rtx bases. */
struct read_info
{
  /* The id of the mem group of the base address.  */
  int group_id;

  /* If this is non-zero, it is the alias set of a spill location.  */
  alias_set_type alias_set;

  /* The offset of the first byte and of the byte after the last byte
     associated with the operation.  If begin == end == 0, the read
     did not have a constant offset.  */
  int begin, end;

  /* The mem being read.  */
  rtx mem;

  /* The next read_info for this insn.  */
  struct read_info *next;
};
typedef struct read_info *read_info_t;
static alloc_pool read_info_pool;
312 /* One of these records is created for each insn. */
struct insn_info
{
  /* Set true if the insn contains a store but the insn itself cannot
     be deleted.  This is set if the insn is a parallel and there is
     more than one non dead output or if the insn is in some way
     volatile.  */
  bool cannot_delete;

  /* This field is only used by the global algorithm.  It is set true
     if the insn contains any read of mem except for a (1).  This is
     also set if the insn is a call or has a clobber mem.  If the insn
     contains a wild read, the use_rec will be null.  */
  bool wild_read;
328 /* This field is only used for the processing of const functions.
329 These functions cannot read memory, but they can read the stack
330 because that is where they may get their parms. We need to be
331 this conservative because, like the store motion pass, we don't
332 consider CALL_INSN_FUNCTION_USAGE when processing call insns.
333 Moreover, we need to distinguish two cases:
334 1. Before reload (register elimination), the stores related to
335 outgoing arguments are stack pointer based and thus deemed
336 of non-constant base in this pass. This requires special
337 handling but also means that the frame pointer based stores
338 need not be killed upon encountering a const function call.
339 2. After reload, the stores related to outgoing arguments can be
340 either stack pointer or hard frame pointer based. This means
341 that we have no other choice than also killing all the frame
342 pointer based stores upon encountering a const function call.
343 This field is set after reload for const function calls. Having
344 this set is less severe than a wild read, it just means that all
345 the frame related stores are killed rather than all the stores. */
348 /* This field is only used for the processing of const functions.
349 It is set if the insn may contain a stack pointer based store. */
  bool stack_pointer_based;

  /* This is true if any of the sets within the store contains a
     cselib base.  Such stores can only be deleted by the local
     algorithm.  */
  bool contains_cselib_groups;

  /* The insn.  */
  rtx insn;
  /* The list of mem sets or mem clobbers that are contained in this
     insn.  If the insn is deletable, it contains only one mem set.
     But it could also contain clobbers.  Insns that contain more than
     one mem set are not deletable, but each of those mems is here in
     order to provide info to delete other insns.  */
  store_info_t store_rec;

  /* The linked list of mem uses in this insn.  Only the reads from
     rtx bases are listed here.  The reads to cselib bases are
     completely processed during the first scan and so are never
     created.  */
  read_info_t read_rec;
  /* The prev insn in the basic block.  */
  struct insn_info * prev_insn;

  /* The linked list of insns that are in consideration for removal in
     the forwards pass through the basic block.  This pointer may be
     trash as it is not cleared when a wild read occurs.  The only
     time it is guaranteed to be correct is when the traversal starts
     at active_local_stores.  */
  struct insn_info * next_local_store;
};

typedef struct insn_info *insn_info_t;
static alloc_pool insn_info_pool;
/* The linked list of stores that are under consideration in this
   basic block.  */
static insn_info_t active_local_stores;

struct bb_info
{

  /* Pointer to the insn info for the last insn in the block.  These
     are linked so this is how all of the insns are reached.  During
     scanning this is the current insn being scanned.  */
  insn_info_t last_insn;
  /* The info for the global dataflow problem.  */

  /* This is set if the transfer function should "and" in the wild_read
     bitmap before applying the kill and gen sets.  That vector knocks
     out most of the bits in the bitmap and thus speeds up the
     analysis.  */
  bool apply_wild_read;
  /* The following 4 bitvectors hold information about which positions
     of which stores are live or dead.  They are indexed by the
     positions assigned to the stores in the global bitmaps.  */

  /* The set of store positions that exist in this block before a wild read.  */
  bitmap gen;

  /* The set of load positions that exist in this block above the
     same position of a store.  */
  bitmap kill;

  /* The set of stores that reach the top of the block without being
     killed by a read.

     Do not represent the in if it is all ones.  Note that this is
     what the bitvector should logically be initialized to for a set
     intersection problem.  However, like the kill set, this is too
     expensive.  So initially, the in set will only be created for the
     exit block and any block that contains a wild read.  */
  bitmap in;

  /* The set of stores that reach the bottom of the block from its
     successors.

     Do not represent the out if it is all ones.  Note that this is
     what the bitvector should logically be initialized to for a set
     intersection problem.  However, like the kill and in set, this is
     too expensive.  So what is done is that the confluence operator
     just initializes the vector from one of the out sets of the
     successors of the block.  */
  bitmap out;
  /* The following bitvector is indexed by the reg number.  It
     contains the set of regs that are live at the current instruction
     being processed.  While it contains info for all of the
     registers, only the hard registers are actually examined.  It is
     used to assure that shift sequences that are inserted do not
     accidentally clobber live hard regs.  */
  bitmap regs_live;
};
typedef struct bb_info *bb_info_t;
static alloc_pool bb_info_pool;

/* Table to hold all bb_infos.  */
static bb_info_t *bb_table;
/* There is a group_info for each rtx base that is used to reference
   memory.  There are also not many of the rtx bases because they are
   very limited in scope.  */

struct group_info
{
  /* The actual base of the address.  */
  rtx rtx_base;

  /* The sequential id of the base.  This allows us to have a
     canonical ordering of these that is not based on addresses.  */
  int id;

  /* True if there are any positions that are to be processed
     globally.  */
  bool process_globally;
  /* True if the base of this group is either the frame_pointer or
     hard_frame_pointer.  */
  bool frame_related;

  /* A mem wrapped around the base pointer for the group in order to do
     read dependency.  It must be given BLKmode in order to encompass all
     the possible offsets from the base.  */
  rtx base_mem;

  /* Canonized version of base_mem's address.  */
  rtx canon_base_addr;
484 /* These two sets of two bitmaps are used to keep track of how many
485 stores are actually referencing that position from this base. We
486 only do this for rtx bases as this will be used to assign
487 positions in the bitmaps for the global problem. Bit N is set in
488 store1 on the first store for offset N. Bit N is set in store2
489 for the second store to offset N. This is all we need since we
490 only care about offsets that have two or more stores for them.
492 The "_n" suffix is for offsets less than 0 and the "_p" suffix is
493 for 0 and greater offsets.
     There is one special case here, for stores into the stack frame,
     we will or store1 into store2 before deciding which stores look
     at globally.  This is because stores to the stack frame that have
     no other reads before the end of the function can also be
     deleted.  */
  bitmap store1_n, store1_p, store2_n, store2_p;
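
  /* An illustrative note (not from the original text): after two
     stores to frame offset 8 and one store to frame offset 12,
     store1_p has bits 8 and 12 set while store2_p has only bit 8 set,
     so only offset 8 would qualify for the global problem, unless the
     frame special case above ORs store1 into store2.  */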
  /* The positions in this bitmap have the same assignments as the in,
     out, gen and kill bitmaps.  This bitmap is all zeros except for
     the positions that are occupied by stores for this group.  */
  bitmap group_kill;

  /* The offset_map is used to map the offsets from this base into
     positions in the global bitmaps.  It is only created after all of
     the stores have been scanned and we know which ones we care
     about.  */
  int *offset_map_n, *offset_map_p;
  int offset_map_size_n, offset_map_size_p;
};
typedef struct group_info *group_info_t;
typedef const struct group_info *const_group_info_t;
static alloc_pool rtx_group_info_pool;

/* Tables of group_info structures, hashed by base value.  */
static htab_t rtx_group_table;
/* Index into the rtx_group_vec.  */
static int rtx_group_next_id;

DEF_VEC_P(group_info_t);
DEF_VEC_ALLOC_P(group_info_t,heap);

static VEC(group_info_t,heap) *rtx_group_vec;


/* This structure holds the set of changes that are being deferred
   when removing a read operation.  See replace_read.  */
struct deferred_change
{

  /* The mem that is being replaced.  */
  rtx *loc;

  /* The reg it is being replaced with.  */
  rtx reg;

  struct deferred_change *next;
};

typedef struct deferred_change *deferred_change_t;
static alloc_pool deferred_change_pool;

static deferred_change_t deferred_change_list = NULL;
/* These are used to hold the alias sets of spill variables.  Since
   these are never aliased and there may be a lot of them, it makes
   sense to treat them specially.  This bitvector is only allocated in
   calls from dse_record_singleton_alias_set which currently is only
   made during reload1.  So when dse is called before reload this
   mechanism does nothing.  */
static bitmap clear_alias_sets = NULL;

/* The set of clear_alias_sets that have been disqualified because
   there are loads or stores using a different mode than the alias set
   was registered with.  */
static bitmap disqualified_clear_alias_sets = NULL;

/* The group that holds all of the clear_alias_sets.  */
static group_info_t clear_alias_group;

/* The modes of the clear_alias_sets.  */
static htab_t clear_alias_mode_table;

/* Hash table element to look up the mode for an alias set.  */
struct clear_alias_mode_holder
{
  alias_set_type alias_set;
  enum machine_mode mode;
};

static alloc_pool clear_alias_mode_pool;

/* This is true except if cfun->stdarg -- i.e. we cannot do
   this for vararg functions because they play games with the frame.  */
static bool stores_off_frame_dead_at_return;

/* Counters for stats.  */
static int globally_deleted;
static int locally_deleted;
static int spill_deleted;

static bitmap all_blocks;

/* The number of bits used in the global bitmaps.  */
static unsigned int current_position;
static bool gate_dse (void);
static bool gate_dse1 (void);
static bool gate_dse2 (void);

/*----------------------------------------------------------------------------
   Zeroth step.

   Initialization.
----------------------------------------------------------------------------*/
604 /* Hashtable callbacks for maintaining the "bases" field of
605 store_group_info, given that the addresses are function invariants. */
608 clear_alias_mode_eq (const void *p1
, const void *p2
)
610 const struct clear_alias_mode_holder
* h1
611 = (const struct clear_alias_mode_holder
*) p1
;
612 const struct clear_alias_mode_holder
* h2
613 = (const struct clear_alias_mode_holder
*) p2
;
614 return h1
->alias_set
== h2
->alias_set
;
619 clear_alias_mode_hash (const void *p
)
621 const struct clear_alias_mode_holder
*holder
622 = (const struct clear_alias_mode_holder
*) p
;
623 return holder
->alias_set
;
627 /* Find the entry associated with ALIAS_SET. */
629 static struct clear_alias_mode_holder
*
630 clear_alias_set_lookup (alias_set_type alias_set
)
632 struct clear_alias_mode_holder tmp_holder
;
635 tmp_holder
.alias_set
= alias_set
;
636 slot
= htab_find_slot (clear_alias_mode_table
, &tmp_holder
, NO_INSERT
);
639 return (struct clear_alias_mode_holder
*) *slot
;
643 /* Hashtable callbacks for maintaining the "bases" field of
644 store_group_info, given that the addresses are function invariants. */
647 invariant_group_base_eq (const void *p1
, const void *p2
)
649 const_group_info_t gi1
= (const_group_info_t
) p1
;
650 const_group_info_t gi2
= (const_group_info_t
) p2
;
651 return rtx_equal_p (gi1
->rtx_base
, gi2
->rtx_base
);
656 invariant_group_base_hash (const void *p
)
658 const_group_info_t gi
= (const_group_info_t
) p
;
660 return hash_rtx (gi
->rtx_base
, Pmode
, &do_not_record
, NULL
, false);
664 /* Get the GROUP for BASE. Add a new group if it is not there. */
667 get_group_info (rtx base
)
669 struct group_info tmp_gi
;
675 /* Find the store_base_info structure for BASE, creating a new one
677 tmp_gi
.rtx_base
= base
;
678 slot
= htab_find_slot (rtx_group_table
, &tmp_gi
, INSERT
);
679 gi
= (group_info_t
) *slot
;
683 if (!clear_alias_group
)
685 clear_alias_group
= gi
=
686 (group_info_t
) pool_alloc (rtx_group_info_pool
);
687 memset (gi
, 0, sizeof (struct group_info
));
688 gi
->id
= rtx_group_next_id
++;
689 gi
->store1_n
= BITMAP_ALLOC (NULL
);
690 gi
->store1_p
= BITMAP_ALLOC (NULL
);
691 gi
->store2_n
= BITMAP_ALLOC (NULL
);
692 gi
->store2_p
= BITMAP_ALLOC (NULL
);
693 gi
->group_kill
= BITMAP_ALLOC (NULL
);
694 gi
->process_globally
= false;
695 gi
->offset_map_size_n
= 0;
696 gi
->offset_map_size_p
= 0;
697 gi
->offset_map_n
= NULL
;
698 gi
->offset_map_p
= NULL
;
699 VEC_safe_push (group_info_t
, heap
, rtx_group_vec
, gi
);
701 return clear_alias_group
;
706 *slot
= gi
= (group_info_t
) pool_alloc (rtx_group_info_pool
);
708 gi
->id
= rtx_group_next_id
++;
709 gi
->base_mem
= gen_rtx_MEM (BLKmode
, base
);
710 gi
->canon_base_addr
= canon_rtx (base
);
711 gi
->store1_n
= BITMAP_ALLOC (NULL
);
712 gi
->store1_p
= BITMAP_ALLOC (NULL
);
713 gi
->store2_n
= BITMAP_ALLOC (NULL
);
714 gi
->store2_p
= BITMAP_ALLOC (NULL
);
715 gi
->group_kill
= BITMAP_ALLOC (NULL
);
716 gi
->process_globally
= false;
718 (base
== frame_pointer_rtx
) || (base
== hard_frame_pointer_rtx
);
719 gi
->offset_map_size_n
= 0;
720 gi
->offset_map_size_p
= 0;
721 gi
->offset_map_n
= NULL
;
722 gi
->offset_map_p
= NULL
;
723 VEC_safe_push (group_info_t
, heap
, rtx_group_vec
, gi
);
730 /* Initialization of data structures. */
736 globally_deleted
= 0;
739 scratch
= BITMAP_ALLOC (NULL
);
742 = create_alloc_pool ("rtx_store_info_pool",
743 sizeof (struct store_info
), 100);
745 = create_alloc_pool ("read_info_pool",
746 sizeof (struct read_info
), 100);
748 = create_alloc_pool ("insn_info_pool",
749 sizeof (struct insn_info
), 100);
751 = create_alloc_pool ("bb_info_pool",
752 sizeof (struct bb_info
), 100);
754 = create_alloc_pool ("rtx_group_info_pool",
755 sizeof (struct group_info
), 100);
757 = create_alloc_pool ("deferred_change_pool",
758 sizeof (struct deferred_change
), 10);
760 rtx_group_table
= htab_create (11, invariant_group_base_hash
,
761 invariant_group_base_eq
, NULL
);
763 bb_table
= XCNEWVEC (bb_info_t
, last_basic_block
);
764 rtx_group_next_id
= 0;
766 stores_off_frame_dead_at_return
= !cfun
->stdarg
;
768 init_alias_analysis ();
770 if (clear_alias_sets
)
771 clear_alias_group
= get_group_info (NULL
);
773 clear_alias_group
= NULL
;
778 /*----------------------------------------------------------------------------
781 Scan all of the insns. Any random ordering of the blocks is fine.
782 Each block is scanned in forward order to accommodate cselib which
783 is used to remove stores with non-constant bases.
784 ----------------------------------------------------------------------------*/
786 /* Delete all of the store_info recs from INSN_INFO. */
789 free_store_info (insn_info_t insn_info
)
791 store_info_t store_info
= insn_info
->store_rec
;
794 store_info_t next
= store_info
->next
;
795 if (store_info
->is_large
)
796 BITMAP_FREE (store_info
->positions_needed
.large
.bmap
);
797 if (store_info
->cse_base
)
798 pool_free (cse_store_info_pool
, store_info
);
800 pool_free (rtx_store_info_pool
, store_info
);
804 insn_info
->cannot_delete
= true;
805 insn_info
->contains_cselib_groups
= false;
806 insn_info
->store_rec
= NULL
;
809 /* Callback for for_each_inc_dec that emits an INSN that sets DEST to
810 SRC + SRCOFF before insn ARG. */
813 emit_inc_dec_insn_before (rtx mem ATTRIBUTE_UNUSED
,
814 rtx op ATTRIBUTE_UNUSED
,
815 rtx dest
, rtx src
, rtx srcoff
, void *arg
)
820 src
= gen_rtx_PLUS (GET_MODE (src
), src
, srcoff
);
822 /* We can reuse all operands without copying, because we are about
823 to delete the insn that contained it. */
825 emit_insn_before (gen_rtx_SET (VOIDmode
, dest
, src
), insn
);
830 /* Before we delete INSN, make sure that the auto inc/dec, if it is
831 there, is split into a separate insn. */
834 check_for_inc_dec (rtx insn
)
836 rtx note
= find_reg_note (insn
, REG_INC
, NULL_RTX
);
838 for_each_inc_dec (&insn
, emit_inc_dec_insn_before
, insn
);
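
/* An illustrative example (not from the original sources): before a
   dead store such as

     (set (mem:SI (post_inc:SI (reg:SI r1))) (reg:SI r2))

   is deleted, the side effect on r1 is preserved by emitting

     (set (reg:SI r1) (plus:SI (reg:SI r1) (const_int 4)))

   in front of it, which is what emit_inc_dec_insn_before produces.  */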
842 /* Delete the insn and free all of the fields inside INSN_INFO. */
845 delete_dead_store_insn (insn_info_t insn_info
)
847 read_info_t read_info
;
852 check_for_inc_dec (insn_info
->insn
);
855 fprintf (dump_file
, "Locally deleting insn %d ",
856 INSN_UID (insn_info
->insn
));
857 if (insn_info
->store_rec
->alias_set
)
858 fprintf (dump_file
, "alias set %d\n",
859 (int) insn_info
->store_rec
->alias_set
);
861 fprintf (dump_file
, "\n");
864 free_store_info (insn_info
);
865 read_info
= insn_info
->read_rec
;
869 read_info_t next
= read_info
->next
;
870 pool_free (read_info_pool
, read_info
);
873 insn_info
->read_rec
= NULL
;
875 delete_insn (insn_info
->insn
);
877 insn_info
->insn
= NULL
;
879 insn_info
->wild_read
= false;
/* Set the store* bitmaps offset_map_size* fields in GROUP based on
   OFFSET and WIDTH.  */
887 set_usage_bits (group_info_t group
, HOST_WIDE_INT offset
, HOST_WIDE_INT width
)
891 if (offset
> -MAX_OFFSET
&& offset
+ width
< MAX_OFFSET
)
892 for (i
=offset
; i
<offset
+width
; i
++)
899 store1
= group
->store1_n
;
900 store2
= group
->store2_n
;
905 store1
= group
->store1_p
;
906 store2
= group
->store2_p
;
910 if (!bitmap_set_bit (store1
, ai
))
911 bitmap_set_bit (store2
, ai
);
916 if (group
->offset_map_size_n
< ai
)
917 group
->offset_map_size_n
= ai
;
921 if (group
->offset_map_size_p
< ai
)
922 group
->offset_map_size_p
= ai
;
929 /* Set the BB_INFO so that the last insn is marked as a wild read. */
932 add_wild_read (bb_info_t bb_info
)
934 insn_info_t insn_info
= bb_info
->last_insn
;
935 read_info_t
*ptr
= &insn_info
->read_rec
;
939 read_info_t next
= (*ptr
)->next
;
940 if ((*ptr
)->alias_set
== 0)
942 pool_free (read_info_pool
, *ptr
);
948 insn_info
->wild_read
= true;
949 active_local_stores
= NULL
;
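
/* An illustrative note (not from the original text): wild reads are
   recorded for things like volatile or barrier mems and for addresses
   that canon_address cannot make sense of; they conservatively kill
   every store on the active_local_stores list.  */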
953 /* Return true if X is a constant or one of the registers that behave
954 as a constant over the life of a function. This is equivalent to
955 !rtx_varies_p for memory addresses. */
958 const_or_frame_p (rtx x
)
960 switch (GET_CODE (x
))
971 /* Note that we have to test for the actual rtx used for the frame
972 and arg pointers and not just the register number in case we have
973 eliminated the frame and/or arg pointer and are using it
975 if (x
== frame_pointer_rtx
|| x
== hard_frame_pointer_rtx
976 /* The arg pointer varies if it is not a fixed register. */
977 || (x
== arg_pointer_rtx
&& fixed_regs
[ARG_POINTER_REGNUM
])
978 || x
== pic_offset_table_rtx
)
987 /* Take all reasonable action to put the address of MEM into the form
988 that we can do analysis on.
990 The gold standard is to get the address into the form: address +
991 OFFSET where address is something that rtx_varies_p considers a
992 constant. When we can get the address in this form, we can do
993 global analysis on it. Note that for constant bases, address is
994 not actually returned, only the group_id. The address can be
997 If that fails, we try cselib to get a value we can at least use
998 locally. If that fails we return false.
1000 The GROUP_ID is set to -1 for cselib bases and the index of the
1001 group for non_varying bases.
1003 FOR_READ is true if this is a mem read and false if not. */
1006 canon_address (rtx mem
,
1007 alias_set_type
*alias_set_out
,
1009 HOST_WIDE_INT
*offset
,
1012 enum machine_mode address_mode
1013 = targetm
.addr_space
.address_mode (MEM_ADDR_SPACE (mem
));
1014 rtx mem_address
= XEXP (mem
, 0);
1015 rtx expanded_address
, address
;
1018 /* Make sure that cselib is has initialized all of the operands of
1019 the address before asking it to do the subst. */
1021 if (clear_alias_sets
)
1023 /* If this is a spill, do not do any further processing. */
1024 alias_set_type alias_set
= MEM_ALIAS_SET (mem
);
1026 fprintf (dump_file
, "found alias set %d\n", (int) alias_set
);
1027 if (bitmap_bit_p (clear_alias_sets
, alias_set
))
1029 struct clear_alias_mode_holder
*entry
1030 = clear_alias_set_lookup (alias_set
);
1032 /* If the modes do not match, we cannot process this set. */
1033 if (entry
->mode
!= GET_MODE (mem
))
1037 "disqualifying alias set %d, (%s) != (%s)\n",
1038 (int) alias_set
, GET_MODE_NAME (entry
->mode
),
1039 GET_MODE_NAME (GET_MODE (mem
)));
1041 bitmap_set_bit (disqualified_clear_alias_sets
, alias_set
);
1045 *alias_set_out
= alias_set
;
1046 *group_id
= clear_alias_group
->id
;
1053 cselib_lookup (mem_address
, address_mode
, 1, GET_MODE (mem
));
1057 fprintf (dump_file
, " mem: ");
1058 print_inline_rtx (dump_file
, mem_address
, 0);
1059 fprintf (dump_file
, "\n");
1062 /* First see if just canon_rtx (mem_address) is const or frame,
1063 if not, try cselib_expand_value_rtx and call canon_rtx on that. */
1065 for (expanded
= 0; expanded
< 2; expanded
++)
1069 /* Use cselib to replace all of the reg references with the full
1070 expression. This will take care of the case where we have
1072 r_x = base + offset;
1077 val = *(base + offset); */
1079 expanded_address
= cselib_expand_value_rtx (mem_address
,
1082 /* If this fails, just go with the address from first
1084 if (!expanded_address
)
1088 expanded_address
= mem_address
;
1090 /* Split the address into canonical BASE + OFFSET terms. */
1091 address
= canon_rtx (expanded_address
);
1099 fprintf (dump_file
, "\n after cselib_expand address: ");
1100 print_inline_rtx (dump_file
, expanded_address
, 0);
1101 fprintf (dump_file
, "\n");
1104 fprintf (dump_file
, "\n after canon_rtx address: ");
1105 print_inline_rtx (dump_file
, address
, 0);
1106 fprintf (dump_file
, "\n");
1109 if (GET_CODE (address
) == CONST
)
1110 address
= XEXP (address
, 0);
1112 if (GET_CODE (address
) == PLUS
1113 && CONST_INT_P (XEXP (address
, 1)))
1115 *offset
= INTVAL (XEXP (address
, 1));
1116 address
= XEXP (address
, 0);
1119 if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (mem
))
1120 && const_or_frame_p (address
))
1122 group_info_t group
= get_group_info (address
);
1125 fprintf (dump_file
, " gid=%d offset=%d \n",
1126 group
->id
, (int)*offset
);
1128 *group_id
= group
->id
;
1133 *base
= cselib_lookup (address
, address_mode
, true, GET_MODE (mem
));
1139 fprintf (dump_file
, " no cselib val - should be a wild read.\n");
1143 fprintf (dump_file
, " varying cselib base=%u:%u offset = %d\n",
1144 (*base
)->uid
, (*base
)->hash
, (int)*offset
);
1149 /* Clear the rhs field from the active_local_stores array. */
1152 clear_rhs_from_active_local_stores (void)
1154 insn_info_t ptr
= active_local_stores
;
1158 store_info_t store_info
= ptr
->store_rec
;
1159 /* Skip the clobbers. */
1160 while (!store_info
->is_set
)
1161 store_info
= store_info
->next
;
1163 store_info
->rhs
= NULL
;
1164 store_info
->const_rhs
= NULL
;
1166 ptr
= ptr
->next_local_store
;
/* Mark the byte at offset POS from the beginning of store S_INFO as unneeded.  */
1174 set_position_unneeded (store_info_t s_info
, int pos
)
1176 if (__builtin_expect (s_info
->is_large
, false))
1178 if (bitmap_set_bit (s_info
->positions_needed
.large
.bmap
, pos
))
1179 s_info
->positions_needed
.large
.count
++;
1182 s_info
->positions_needed
.small_bitmask
1183 &= ~(((unsigned HOST_WIDE_INT
) 1) << pos
);
1186 /* Mark the whole store S_INFO as unneeded. */
1189 set_all_positions_unneeded (store_info_t s_info
)
1191 if (__builtin_expect (s_info
->is_large
, false))
1193 int pos
, end
= s_info
->end
- s_info
->begin
;
1194 for (pos
= 0; pos
< end
; pos
++)
1195 bitmap_set_bit (s_info
->positions_needed
.large
.bmap
, pos
);
1196 s_info
->positions_needed
.large
.count
= end
;
1199 s_info
->positions_needed
.small_bitmask
= (unsigned HOST_WIDE_INT
) 0;
1202 /* Return TRUE if any bytes from S_INFO store are needed. */
1205 any_positions_needed_p (store_info_t s_info
)
1207 if (__builtin_expect (s_info
->is_large
, false))
1208 return (s_info
->positions_needed
.large
.count
1209 < s_info
->end
- s_info
->begin
);
1211 return (s_info
->positions_needed
.small_bitmask
1212 != (unsigned HOST_WIDE_INT
) 0);
1215 /* Return TRUE if all bytes START through START+WIDTH-1 from S_INFO
1216 store are needed. */
1219 all_positions_needed_p (store_info_t s_info
, int start
, int width
)
1221 if (__builtin_expect (s_info
->is_large
, false))
1223 int end
= start
+ width
;
1225 if (bitmap_bit_p (s_info
->positions_needed
.large
.bmap
, start
++))
1231 unsigned HOST_WIDE_INT mask
= lowpart_bitmask (width
) << start
;
1232 return (s_info
->positions_needed
.small_bitmask
& mask
) == mask
;
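
/* Illustrative note (not in the original sources): for a 4 byte store
   with all bytes still needed, small_bitmask is 0xf; asking about
   START == 1, WIDTH == 2 builds mask == 0x6, and (0xf & 0x6) == 0x6,
   so the answer is true.  */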
1237 static rtx
get_stored_val (store_info_t
, enum machine_mode
, HOST_WIDE_INT
,
1238 HOST_WIDE_INT
, basic_block
, bool);
1241 /* BODY is an instruction pattern that belongs to INSN. Return 1 if
1242 there is a candidate store, after adding it to the appropriate
1243 local store group if so. */
1246 record_store (rtx body
, bb_info_t bb_info
)
1248 rtx mem
, rhs
, const_rhs
, mem_addr
;
1249 HOST_WIDE_INT offset
= 0;
1250 HOST_WIDE_INT width
= 0;
1251 alias_set_type spill_alias_set
;
1252 insn_info_t insn_info
= bb_info
->last_insn
;
1253 store_info_t store_info
= NULL
;
1255 cselib_val
*base
= NULL
;
1256 insn_info_t ptr
, last
, redundant_reason
;
1257 bool store_is_unused
;
1259 if (GET_CODE (body
) != SET
&& GET_CODE (body
) != CLOBBER
)
1262 mem
= SET_DEST (body
);
1264 /* If this is not used, then this cannot be used to keep the insn
1265 from being deleted. On the other hand, it does provide something
1266 that can be used to prove that another store is dead. */
1268 = (find_reg_note (insn_info
->insn
, REG_UNUSED
, mem
) != NULL
);
1270 /* Check whether that value is a suitable memory location. */
  /* If the set or clobber is unused, then it does not affect our
     ability to get rid of the entire insn.  */
1275 if (!store_is_unused
)
1276 insn_info
->cannot_delete
= true;
1280 /* At this point we know mem is a mem. */
1281 if (GET_MODE (mem
) == BLKmode
)
1283 if (GET_CODE (XEXP (mem
, 0)) == SCRATCH
)
1286 fprintf (dump_file
, " adding wild read for (clobber (mem:BLK (scratch))\n");
1287 add_wild_read (bb_info
);
1288 insn_info
->cannot_delete
= true;
1291 /* Handle (set (mem:BLK (addr) [... S36 ...]) (const_int 0))
1292 as memset (addr, 0, 36); */
1293 else if (!MEM_SIZE (mem
)
1294 || !CONST_INT_P (MEM_SIZE (mem
))
1295 || GET_CODE (body
) != SET
1296 || INTVAL (MEM_SIZE (mem
)) <= 0
1297 || INTVAL (MEM_SIZE (mem
)) > MAX_OFFSET
1298 || !CONST_INT_P (SET_SRC (body
)))
1300 if (!store_is_unused
)
      /* If the set or clobber is unused, then it does not affect our
         ability to get rid of the entire insn.  */
1304 insn_info
->cannot_delete
= true;
1305 clear_rhs_from_active_local_stores ();
1311 /* We can still process a volatile mem, we just cannot delete it. */
1312 if (MEM_VOLATILE_P (mem
))
1313 insn_info
->cannot_delete
= true;
1315 if (!canon_address (mem
, &spill_alias_set
, &group_id
, &offset
, &base
))
1317 clear_rhs_from_active_local_stores ();
1321 if (GET_MODE (mem
) == BLKmode
)
1322 width
= INTVAL (MEM_SIZE (mem
));
1325 width
= GET_MODE_SIZE (GET_MODE (mem
));
1326 gcc_assert ((unsigned) width
<= HOST_BITS_PER_WIDE_INT
);
1329 if (spill_alias_set
)
1331 bitmap store1
= clear_alias_group
->store1_p
;
1332 bitmap store2
= clear_alias_group
->store2_p
;
1334 gcc_assert (GET_MODE (mem
) != BLKmode
);
1336 if (!bitmap_set_bit (store1
, spill_alias_set
))
1337 bitmap_set_bit (store2
, spill_alias_set
);
1339 if (clear_alias_group
->offset_map_size_p
< spill_alias_set
)
1340 clear_alias_group
->offset_map_size_p
= spill_alias_set
;
1342 store_info
= (store_info_t
) pool_alloc (rtx_store_info_pool
);
1345 fprintf (dump_file
, " processing spill store %d(%s)\n",
1346 (int) spill_alias_set
, GET_MODE_NAME (GET_MODE (mem
)));
1348 else if (group_id
>= 0)
1350 /* In the restrictive case where the base is a constant or the
1351 frame pointer we can do global analysis. */
1354 = VEC_index (group_info_t
, rtx_group_vec
, group_id
);
1356 store_info
= (store_info_t
) pool_alloc (rtx_store_info_pool
);
1357 set_usage_bits (group
, offset
, width
);
1360 fprintf (dump_file
, " processing const base store gid=%d[%d..%d)\n",
1361 group_id
, (int)offset
, (int)(offset
+width
));
1365 rtx base_term
= find_base_term (XEXP (mem
, 0));
1367 || (GET_CODE (base_term
) == ADDRESS
1368 && GET_MODE (base_term
) == Pmode
1369 && XEXP (base_term
, 0) == stack_pointer_rtx
))
1370 insn_info
->stack_pointer_based
= true;
1371 insn_info
->contains_cselib_groups
= true;
1373 store_info
= (store_info_t
) pool_alloc (cse_store_info_pool
);
1377 fprintf (dump_file
, " processing cselib store [%d..%d)\n",
1378 (int)offset
, (int)(offset
+width
));
1381 const_rhs
= rhs
= NULL_RTX
;
1382 if (GET_CODE (body
) == SET
1383 /* No place to keep the value after ra. */
1384 && !reload_completed
1385 && (REG_P (SET_SRC (body
))
1386 || GET_CODE (SET_SRC (body
)) == SUBREG
1387 || CONSTANT_P (SET_SRC (body
)))
1388 && !MEM_VOLATILE_P (mem
)
      /* Sometimes the store and reload is used for truncation and
         rounding.  */
1391 && !(FLOAT_MODE_P (GET_MODE (mem
)) && (flag_float_store
)))
1393 rhs
= SET_SRC (body
);
1394 if (CONSTANT_P (rhs
))
1396 else if (body
== PATTERN (insn_info
->insn
))
1398 rtx tem
= find_reg_note (insn_info
->insn
, REG_EQUAL
, NULL_RTX
);
1399 if (tem
&& CONSTANT_P (XEXP (tem
, 0)))
1400 const_rhs
= XEXP (tem
, 0);
1402 if (const_rhs
== NULL_RTX
&& REG_P (rhs
))
1404 rtx tem
= cselib_expand_value_rtx (rhs
, scratch
, 5);
1406 if (tem
&& CONSTANT_P (tem
))
  /* Check to see if this store causes some other stores to be
     dead.  */
1413 ptr
= active_local_stores
;
1415 redundant_reason
= NULL
;
1416 mem
= canon_rtx (mem
);
1417 /* For alias_set != 0 canon_true_dependence should be never called. */
1418 if (spill_alias_set
)
1419 mem_addr
= NULL_RTX
;
1423 mem_addr
= base
->val_rtx
;
1427 = VEC_index (group_info_t
, rtx_group_vec
, group_id
);
1428 mem_addr
= group
->canon_base_addr
;
1431 mem_addr
= plus_constant (mem_addr
, offset
);
1436 insn_info_t next
= ptr
->next_local_store
;
1437 store_info_t s_info
= ptr
->store_rec
;
      /* Skip the clobbers.  We delete the active insn if this insn
         shadows the set.  To have been put on the active list, it
         has exactly one set.  */
1443 while (!s_info
->is_set
)
1444 s_info
= s_info
->next
;
1446 if (s_info
->alias_set
!= spill_alias_set
)
1448 else if (s_info
->alias_set
)
1450 struct clear_alias_mode_holder
*entry
1451 = clear_alias_set_lookup (s_info
->alias_set
);
          /* Generally, spills cannot be processed if any of the
             references to the slot have a different mode.  But if
             we are in the same block and mode is exactly the same
             between this store and one before in the same block,
             we can still delete it.  */
1457 if ((GET_MODE (mem
) == GET_MODE (s_info
->mem
))
1458 && (GET_MODE (mem
) == entry
->mode
))
1461 set_all_positions_unneeded (s_info
);
1464 fprintf (dump_file
, " trying spill store in insn=%d alias_set=%d\n",
1465 INSN_UID (ptr
->insn
), (int) s_info
->alias_set
);
1467 else if ((s_info
->group_id
== group_id
)
1468 && (s_info
->cse_base
== base
))
1472 fprintf (dump_file
, " trying store in insn=%d gid=%d[%d..%d)\n",
1473 INSN_UID (ptr
->insn
), s_info
->group_id
,
1474 (int)s_info
->begin
, (int)s_info
->end
);
1476 /* Even if PTR won't be eliminated as unneeded, if both
1477 PTR and this insn store the same constant value, we might
1478 eliminate this insn instead. */
1479 if (s_info
->const_rhs
1481 && offset
>= s_info
->begin
1482 && offset
+ width
<= s_info
->end
1483 && all_positions_needed_p (s_info
, offset
- s_info
->begin
,
1486 if (GET_MODE (mem
) == BLKmode
)
1488 if (GET_MODE (s_info
->mem
) == BLKmode
1489 && s_info
->const_rhs
== const_rhs
)
1490 redundant_reason
= ptr
;
1492 else if (s_info
->const_rhs
== const0_rtx
1493 && const_rhs
== const0_rtx
)
1494 redundant_reason
= ptr
;
1499 val
= get_stored_val (s_info
, GET_MODE (mem
),
1500 offset
, offset
+ width
,
1501 BLOCK_FOR_INSN (insn_info
->insn
),
1503 if (get_insns () != NULL
)
1506 if (val
&& rtx_equal_p (val
, const_rhs
))
1507 redundant_reason
= ptr
;
1511 for (i
= MAX (offset
, s_info
->begin
);
1512 i
< offset
+ width
&& i
< s_info
->end
;
1514 set_position_unneeded (s_info
, i
- s_info
->begin
);
1516 else if (s_info
->rhs
)
1517 /* Need to see if it is possible for this store to overwrite
1518 the value of store_info. If it is, set the rhs to NULL to
1519 keep it from being used to remove a load. */
1521 if (canon_true_dependence (s_info
->mem
,
1522 GET_MODE (s_info
->mem
),
1524 mem
, mem_addr
, rtx_varies_p
))
1527 s_info
->const_rhs
= NULL
;
1531 /* An insn can be deleted if every position of every one of
1532 its s_infos is zero. */
1533 if (any_positions_needed_p (s_info
)
1534 || ptr
->cannot_delete
)
1539 insn_info_t insn_to_delete
= ptr
;
1542 last
->next_local_store
= ptr
->next_local_store
;
1544 active_local_stores
= ptr
->next_local_store
;
1546 delete_dead_store_insn (insn_to_delete
);
1554 /* Finish filling in the store_info. */
1555 store_info
->next
= insn_info
->store_rec
;
1556 insn_info
->store_rec
= store_info
;
1557 store_info
->mem
= mem
;
1558 store_info
->alias_set
= spill_alias_set
;
1559 store_info
->mem_addr
= mem_addr
;
1560 store_info
->cse_base
= base
;
1561 if (width
> HOST_BITS_PER_WIDE_INT
)
1563 store_info
->is_large
= true;
1564 store_info
->positions_needed
.large
.count
= 0;
1565 store_info
->positions_needed
.large
.bmap
= BITMAP_ALLOC (NULL
);
1569 store_info
->is_large
= false;
1570 store_info
->positions_needed
.small_bitmask
= lowpart_bitmask (width
);
1572 store_info
->group_id
= group_id
;
1573 store_info
->begin
= offset
;
1574 store_info
->end
= offset
+ width
;
1575 store_info
->is_set
= GET_CODE (body
) == SET
;
1576 store_info
->rhs
= rhs
;
1577 store_info
->const_rhs
= const_rhs
;
1578 store_info
->redundant_reason
= redundant_reason
;
  /* If this is a clobber, we return 0.  We will only be able to
     delete this insn if there is only one USED store, but we
     can use the clobber to delete other stores earlier.  */
1583 return store_info
->is_set
? 1 : 0;
1588 dump_insn_info (const char * start
, insn_info_t insn_info
)
1590 fprintf (dump_file
, "%s insn=%d %s\n", start
,
1591 INSN_UID (insn_info
->insn
),
1592 insn_info
->store_rec
? "has store" : "naked");
1596 /* If the modes are different and the value's source and target do not
1597 line up, we need to extract the value from lower part of the rhs of
1598 the store, shift it, and then put it into a form that can be shoved
1599 into the read_insn. This function generates a right SHIFT of a
1600 value that is at least ACCESS_SIZE bytes wide of READ_MODE. The
1601 shift sequence is returned or NULL if we failed to find a
1605 find_shift_sequence (int access_size
,
1606 store_info_t store_info
,
1607 enum machine_mode read_mode
,
1608 int shift
, bool speed
, bool require_cst
)
1610 enum machine_mode store_mode
= GET_MODE (store_info
->mem
);
1611 enum machine_mode new_mode
;
1612 rtx read_reg
= NULL
;
  /* Some machines like the x86 have shift insns for each size of
     operand.  Other machines like the ppc or the ia-64 may only have
     shift insns that shift values within 32 or 64 bit registers.
     This loop tries to find the smallest shift insn that will right
     justify the value we want to read but is available in one insn on
     the machine.  */
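
  /* An illustrative example (not from the original sources): a
     little-endian DImode store whose upper 4 bytes are later read in
     SImode needs a right shift of 32 bits; ACCESS_SIZE is then 8, so
     the loop starts at the narrowest mode of at least 8 bytes and
     widens it if the needed truncation or shift is unavailable or too
     costly in that mode.  */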
1621 for (new_mode
= smallest_mode_for_size (access_size
* BITS_PER_UNIT
,
1623 GET_MODE_BITSIZE (new_mode
) <= BITS_PER_WORD
;
1624 new_mode
= GET_MODE_WIDER_MODE (new_mode
))
1626 rtx target
, new_reg
, shift_seq
, insn
, new_lhs
;
1629 /* If a constant was stored into memory, try to simplify it here,
1630 otherwise the cost of the shift might preclude this optimization
1631 e.g. at -Os, even when no actual shift will be needed. */
1632 if (store_info
->const_rhs
)
1634 unsigned int byte
= subreg_lowpart_offset (new_mode
, store_mode
);
1635 rtx ret
= simplify_subreg (new_mode
, store_info
->const_rhs
,
1637 if (ret
&& CONSTANT_P (ret
))
1639 ret
= simplify_const_binary_operation (LSHIFTRT
, new_mode
,
1640 ret
, GEN_INT (shift
));
1641 if (ret
&& CONSTANT_P (ret
))
1643 byte
= subreg_lowpart_offset (read_mode
, new_mode
);
1644 ret
= simplify_subreg (read_mode
, ret
, new_mode
, byte
);
1645 if (ret
&& CONSTANT_P (ret
)
1646 && rtx_cost (ret
, SET
, speed
) <= COSTS_N_INSNS (1))
1655 /* Try a wider mode if truncating the store mode to NEW_MODE
1656 requires a real instruction. */
1657 if (GET_MODE_BITSIZE (new_mode
) < GET_MODE_BITSIZE (store_mode
)
1658 && !TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (new_mode
),
1659 GET_MODE_BITSIZE (store_mode
)))
1662 /* Also try a wider mode if the necessary punning is either not
1663 desirable or not possible. */
1664 if (!CONSTANT_P (store_info
->rhs
)
1665 && !MODES_TIEABLE_P (new_mode
, store_mode
))
1668 new_reg
= gen_reg_rtx (new_mode
);
1672 /* In theory we could also check for an ashr. Ian Taylor knows
1673 of one dsp where the cost of these two was not the same. But
1674 this really is a rare case anyway. */
1675 target
= expand_binop (new_mode
, lshr_optab
, new_reg
,
1676 GEN_INT (shift
), new_reg
, 1, OPTAB_DIRECT
);
1678 shift_seq
= get_insns ();
1681 if (target
!= new_reg
|| shift_seq
== NULL
)
1685 for (insn
= shift_seq
; insn
!= NULL_RTX
; insn
= NEXT_INSN (insn
))
1687 cost
+= insn_rtx_cost (PATTERN (insn
), speed
);
1689 /* The computation up to here is essentially independent
1690 of the arguments and could be precomputed. It may
1691 not be worth doing so. We could precompute if
1692 worthwhile or at least cache the results. The result
1693 technically depends on both SHIFT and ACCESS_SIZE,
1694 but in practice the answer will depend only on ACCESS_SIZE. */
1696 if (cost
> COSTS_N_INSNS (1))
1699 new_lhs
= extract_low_bits (new_mode
, store_mode
,
1700 copy_rtx (store_info
->rhs
));
1701 if (new_lhs
== NULL_RTX
)
      /* We found an acceptable shift.  Generate a move to
         take the value from the store and put it into the
         shift pseudo, then shift it, then generate another
         move to put it into the target of the read.  */
1708 emit_move_insn (new_reg
, new_lhs
);
1709 emit_insn (shift_seq
);
1710 read_reg
= extract_low_bits (read_mode
, new_mode
, new_reg
);
1718 /* Call back for note_stores to find the hard regs set or clobbered by
1719 insn. Data is a bitmap of the hardregs set so far. */
1722 look_for_hardregs (rtx x
, const_rtx pat ATTRIBUTE_UNUSED
, void *data
)
1724 bitmap regs_set
= (bitmap
) data
;
1727 && REGNO (x
) < FIRST_PSEUDO_REGISTER
)
1729 int regno
= REGNO (x
);
1730 int n
= hard_regno_nregs
[regno
][GET_MODE (x
)];
1732 bitmap_set_bit (regs_set
, regno
+ n
);
/* Helper function for replace_read and record_store.
   Attempt to return a value stored in STORE_INFO, from READ_BEGIN
   to one before READ_END bytes read in READ_MODE.  Return NULL
   if not successful.  If REQUIRE_CST is true, always return a constant.  */
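
/* As an illustrative example (not from the original sources): for a
   little-endian SImode store of (const_int 0x12345678) with BEGIN == 0,
   a QImode read with READ_BEGIN == 1 yields (const_int 0x56), i.e. the
   stored value shifted right by one byte and truncated to the read
   mode.  */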
1742 get_stored_val (store_info_t store_info
, enum machine_mode read_mode
,
1743 HOST_WIDE_INT read_begin
, HOST_WIDE_INT read_end
,
1744 basic_block bb
, bool require_cst
)
1746 enum machine_mode store_mode
= GET_MODE (store_info
->mem
);
1748 int access_size
; /* In bytes. */
1751 /* To get here the read is within the boundaries of the write so
1752 shift will never be negative. Start out with the shift being in
1754 if (store_mode
== BLKmode
)
1756 else if (BYTES_BIG_ENDIAN
)
1757 shift
= store_info
->end
- read_end
;
1759 shift
= read_begin
- store_info
->begin
;
1761 access_size
= shift
+ GET_MODE_SIZE (read_mode
);
1763 /* From now on it is bits. */
1764 shift
*= BITS_PER_UNIT
;
1767 read_reg
= find_shift_sequence (access_size
, store_info
, read_mode
, shift
,
1768 optimize_bb_for_speed_p (bb
),
1770 else if (store_mode
== BLKmode
)
1772 /* The store is a memset (addr, const_val, const_size). */
1773 gcc_assert (CONST_INT_P (store_info
->rhs
));
1774 store_mode
= int_mode_for_mode (read_mode
);
1775 if (store_mode
== BLKmode
)
1776 read_reg
= NULL_RTX
;
1777 else if (store_info
->rhs
== const0_rtx
)
1778 read_reg
= extract_low_bits (read_mode
, store_mode
, const0_rtx
);
1779 else if (GET_MODE_BITSIZE (store_mode
) > HOST_BITS_PER_WIDE_INT
1780 || BITS_PER_UNIT
>= HOST_BITS_PER_WIDE_INT
)
1781 read_reg
= NULL_RTX
;
1784 unsigned HOST_WIDE_INT c
1785 = INTVAL (store_info
->rhs
)
1786 & (((HOST_WIDE_INT
) 1 << BITS_PER_UNIT
) - 1);
1787 int shift
= BITS_PER_UNIT
;
1788 while (shift
< HOST_BITS_PER_WIDE_INT
)
1793 read_reg
= GEN_INT (trunc_int_for_mode (c
, store_mode
));
1794 read_reg
= extract_low_bits (read_mode
, store_mode
, read_reg
);
1797 else if (store_info
->const_rhs
1799 || GET_MODE_CLASS (read_mode
) != GET_MODE_CLASS (store_mode
)))
1800 read_reg
= extract_low_bits (read_mode
, store_mode
,
1801 copy_rtx (store_info
->const_rhs
));
1803 read_reg
= extract_low_bits (read_mode
, store_mode
,
1804 copy_rtx (store_info
->rhs
));
1805 if (require_cst
&& read_reg
&& !CONSTANT_P (read_reg
))
1806 read_reg
= NULL_RTX
;
1810 /* Take a sequence of:
1833 Depending on the alignment and the mode of the store and
1837 The STORE_INFO and STORE_INSN are for the store and READ_INFO
1838 and READ_INSN are for the read. Return true if the replacement
1842 replace_read (store_info_t store_info
, insn_info_t store_insn
,
1843 read_info_t read_info
, insn_info_t read_insn
, rtx
*loc
,
1846 enum machine_mode store_mode
= GET_MODE (store_info
->mem
);
1847 enum machine_mode read_mode
= GET_MODE (read_info
->mem
);
1848 rtx insns
, this_insn
, read_reg
;
1854 /* Create a sequence of instructions to set up the read register.
1855 This sequence goes immediately before the store and its result
1856 is read by the load.
1858 We need to keep this in perspective. We are replacing a read
1859 with a sequence of insns, but the read will almost certainly be
1860 in cache, so it is not going to be an expensive one. Thus, we
1861 are not willing to do a multi insn shift or worse a subroutine
1862 call to get rid of the read. */
1864 fprintf (dump_file
, "trying to replace %smode load in insn %d"
1865 " from %smode store in insn %d\n",
1866 GET_MODE_NAME (read_mode
), INSN_UID (read_insn
->insn
),
1867 GET_MODE_NAME (store_mode
), INSN_UID (store_insn
->insn
));
1869 bb
= BLOCK_FOR_INSN (read_insn
->insn
);
1870 read_reg
= get_stored_val (store_info
,
1871 read_mode
, read_info
->begin
, read_info
->end
,
1873 if (read_reg
== NULL_RTX
)
1877 fprintf (dump_file
, " -- could not extract bits of stored value\n");
1880 /* Force the value into a new register so that it won't be clobbered
1881 between the store and the load. */
1882 read_reg
= copy_to_mode_reg (read_mode
, read_reg
);
1883 insns
= get_insns ();
1886 if (insns
!= NULL_RTX
)
      /* Now we have to scan the set of new instructions to see if the
         sequence contains any sets of hardregs that happened to be
         live at this point.  For instance, this can happen if one of
         the insns sets the CC and the CC happened to be live at that
         point.  This does occasionally happen, see PR 37922.  */
1893 bitmap regs_set
= BITMAP_ALLOC (NULL
);
1895 for (this_insn
= insns
; this_insn
!= NULL_RTX
; this_insn
= NEXT_INSN (this_insn
))
1896 note_stores (PATTERN (this_insn
), look_for_hardregs
, regs_set
);
1898 bitmap_and_into (regs_set
, regs_live
);
1899 if (!bitmap_empty_p (regs_set
))
1904 "abandoning replacement because sequence clobbers live hardregs:");
1905 df_print_regset (dump_file
, regs_set
);
1908 BITMAP_FREE (regs_set
);
1911 BITMAP_FREE (regs_set
);
1914 if (validate_change (read_insn
->insn
, loc
, read_reg
, 0))
1916 deferred_change_t deferred_change
=
1917 (deferred_change_t
) pool_alloc (deferred_change_pool
);
1919 /* Insert this right before the store insn where it will be safe
1920 from later insns that might change it before the read. */
1921 emit_insn_before (insns
, store_insn
->insn
);
1923 /* And now for the kludge part: cselib croaks if you just
1924 return at this point. There are two reasons for this:
1926 1) Cselib has an idea of how many pseudos there are and
1927 that does not include the new ones we just added.
1929 2) Cselib does not know about the move insn we added
1930 above the store_info, and there is no way to tell it
1931 about it, because it has "moved on".
1933 Problem (1) is fixable with a certain amount of engineering.
         Problem (2) requires starting the bb from scratch.  This
1937 So we are just going to have to lie. The move/extraction
1938 insns are not really an issue, cselib did not see them. But
1939 the use of the new pseudo read_insn is a real problem because
1940 cselib has not scanned this insn. The way that we solve this
1941 problem is that we are just going to put the mem back for now
1942 and when we are finished with the block, we undo this. We
1943 keep a table of mems to get rid of. At the end of the basic
1944 block we can put them back. */
1946 *loc
= read_info
->mem
;
1947 deferred_change
->next
= deferred_change_list
;
1948 deferred_change_list
= deferred_change
;
1949 deferred_change
->loc
= loc
;
1950 deferred_change
->reg
= read_reg
;
1952 /* Get rid of the read_info, from the point of view of the
1953 rest of dse, play like this read never happened. */
1954 read_insn
->read_rec
= read_info
->next
;
1955 pool_free (read_info_pool
, read_info
);
1958 fprintf (dump_file
, " -- replaced the loaded MEM with ");
1959 print_simple_rtl (dump_file
, read_reg
);
1960 fprintf (dump_file
, "\n");
1968 fprintf (dump_file
, " -- replacing the loaded MEM with ");
1969 print_simple_rtl (dump_file
, read_reg
);
1970 fprintf (dump_file
, " led to an invalid instruction\n");
/* A for_each_rtx callback in which DATA is the bb_info.  Check to see
   if LOC is a mem and if it is look at the address and kill any
   appropriate stores that may be active.  */

static int
check_mem_read_rtx (rtx *loc, void *data)
{
  rtx mem = *loc, mem_addr;
  bb_info_t bb_info;
  insn_info_t insn_info;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT width = 0;
  alias_set_type spill_alias_set = 0;
  cselib_val *base = NULL;
  int group_id;
  read_info_t read_info;

  if (!mem || !MEM_P (mem))
    return 0;

  bb_info = (bb_info_t) data;
  insn_info = bb_info->last_insn;

  if ((MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
      || (MEM_VOLATILE_P (mem)))
    {
      if (dump_file)
	fprintf (dump_file, " adding wild read, volatile or barrier.\n");
      add_wild_read (bb_info);
      insn_info->cannot_delete = true;
      return 0;
    }

  /* If it is reading readonly mem, then there can be no conflict with
     another write.  */
  if (MEM_READONLY_P (mem))
    return 0;

  if (!canon_address (mem, &spill_alias_set, &group_id, &offset, &base))
    {
      if (dump_file)
	fprintf (dump_file, " adding wild read, canon_address failure.\n");
      add_wild_read (bb_info);
      return 0;
    }

  if (GET_MODE (mem) == BLKmode)
    width = -1;
  else
    width = GET_MODE_SIZE (GET_MODE (mem));

  read_info = (read_info_t) pool_alloc (read_info_pool);
  read_info->group_id = group_id;
  read_info->mem = mem;
  read_info->alias_set = spill_alias_set;
  read_info->begin = offset;
  read_info->end = offset + width;
  read_info->next = insn_info->read_rec;
  insn_info->read_rec = read_info;

  /* For alias_set != 0 canon_true_dependence should be never called.  */
  if (spill_alias_set)
    mem_addr = NULL_RTX;
  else
    {
      if (group_id < 0)
	mem_addr = base->val_rtx;
      else
	{
	  group_info_t group
	    = VEC_index (group_info_t, rtx_group_vec, group_id);
	  mem_addr = group->canon_base_addr;
	}
      if (offset)
	mem_addr = plus_constant (mem_addr, offset);
    }

  /* We ignore the clobbers in store_info.  This is mildly aggressive,
     but there really should not be a clobber followed by a read.  */

  if (spill_alias_set)
    {
      insn_info_t i_ptr = active_local_stores;
      insn_info_t last = NULL;

      if (dump_file)
	fprintf (dump_file, " processing spill load %d\n",
		 (int) spill_alias_set);

      while (i_ptr)
	{
	  store_info_t store_info = i_ptr->store_rec;

	  /* Skip the clobbers.  */
	  while (!store_info->is_set)
	    store_info = store_info->next;

	  if (store_info->alias_set == spill_alias_set)
	    {
	      if (dump_file)
		dump_insn_info ("removing from active", i_ptr);

	      if (last)
		last->next_local_store = i_ptr->next_local_store;
	      else
		active_local_stores = i_ptr->next_local_store;
	    }
	  else
	    last = i_ptr;
	  i_ptr = i_ptr->next_local_store;
	}
    }
  else if (group_id >= 0)
    {
      /* This is the restricted case where the base is a constant or
	 the frame pointer and offset is a constant.  */
      insn_info_t i_ptr = active_local_stores;
      insn_info_t last = NULL;

      if (dump_file)
	{
	  if (width == -1)
	    fprintf (dump_file, " processing const load gid=%d[BLK]\n",
		     group_id);
	  else
	    fprintf (dump_file, " processing const load gid=%d[%d..%d)\n",
		     group_id, (int)offset, (int)(offset+width));
	}

      while (i_ptr)
	{
	  bool remove = false;
	  store_info_t store_info = i_ptr->store_rec;

	  /* Skip the clobbers.  */
	  while (!store_info->is_set)
	    store_info = store_info->next;

	  /* There are three cases here.  */
	  if (store_info->group_id < 0)
	    /* We have a cselib store followed by a read from a
	       const base.  */
	    remove
	      = canon_true_dependence (store_info->mem,
				       GET_MODE (store_info->mem),
				       store_info->mem_addr,
				       mem, mem_addr, rtx_varies_p);

	  else if (group_id == store_info->group_id)
	    {
	      /* This is a block mode load.  We may get lucky and
		 canon_true_dependence may save the day.  */
	      if (width == -1)
		remove
		  = canon_true_dependence (store_info->mem,
					   GET_MODE (store_info->mem),
					   store_info->mem_addr,
					   mem, mem_addr, rtx_varies_p);

	      /* If this read is just reading back something that we just
		 stored, rewrite the read.  */
	      else
		{
		  if (store_info->rhs
		      && offset >= store_info->begin
		      && offset + width <= store_info->end
		      && all_positions_needed_p (store_info,
						 offset - store_info->begin,
						 width)
		      && replace_read (store_info, i_ptr, read_info,
				       insn_info, loc, bb_info->regs_live))
		    return 0;

		  /* The bases are the same, just see if the offsets
		     overlap.  */
		  if ((offset < store_info->end)
		      && (offset + width > store_info->begin))
		    remove = true;
		}
	    }

	  /* else
	     The else case that is missing here is that the
	     bases are constant but different.  There is nothing
	     to do here because there is no overlap.  */

	  if (remove)
	    {
	      if (dump_file)
		dump_insn_info ("removing from active", i_ptr);

	      if (last)
		last->next_local_store = i_ptr->next_local_store;
	      else
		active_local_stores = i_ptr->next_local_store;
	    }
	  else
	    last = i_ptr;
	  i_ptr = i_ptr->next_local_store;
	}
    }
  else
    {
      insn_info_t i_ptr = active_local_stores;
      insn_info_t last = NULL;

      if (dump_file)
	{
	  fprintf (dump_file, " processing cselib load mem:");
	  print_inline_rtx (dump_file, mem, 0);
	  fprintf (dump_file, "\n");
	}

      while (i_ptr)
	{
	  bool remove = false;
	  store_info_t store_info = i_ptr->store_rec;

	  if (dump_file)
	    fprintf (dump_file, " processing cselib load against insn %d\n",
		     INSN_UID (i_ptr->insn));

	  /* Skip the clobbers.  */
	  while (!store_info->is_set)
	    store_info = store_info->next;

	  /* If this read is just reading back something that we just
	     stored, rewrite the read.  */
	  if (store_info->rhs
	      && store_info->group_id == -1
	      && store_info->cse_base == base
	      && width != -1
	      && offset >= store_info->begin
	      && offset + width <= store_info->end
	      && all_positions_needed_p (store_info,
					 offset - store_info->begin, width)
	      && replace_read (store_info, i_ptr, read_info, insn_info, loc,
			       bb_info->regs_live))
	    return 0;

	  if (!store_info->alias_set)
	    remove = canon_true_dependence (store_info->mem,
					    GET_MODE (store_info->mem),
					    store_info->mem_addr,
					    mem, mem_addr, rtx_varies_p);

	  if (remove)
	    {
	      if (dump_file)
		dump_insn_info ("removing from active", i_ptr);

	      if (last)
		last->next_local_store = i_ptr->next_local_store;
	      else
		active_local_stores = i_ptr->next_local_store;
	    }
	  else
	    last = i_ptr;
	  i_ptr = i_ptr->next_local_store;
	}
    }
  return 0;
}
/* A for_each_rtx callback in which DATA points to the bb_info, as
   for check_mem_read_rtx.  Nullify the pointer if i_m_r_m_r returns
   true for any part of *LOC.  */

static void
check_mem_read_use (rtx *loc, void *data)
{
  for_each_rtx (loc, check_mem_read_rtx, data);
}
/* Get arguments passed to CALL_INSN.  Return TRUE if successful.
   So far it only handles arguments passed in registers.  */

static bool
get_call_args (rtx call_insn, tree fn, rtx *args, int nargs)
{
  CUMULATIVE_ARGS args_so_far;
  tree arg;
  int idx;

  INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);

  arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
  for (idx = 0;
       arg != void_list_node && idx < nargs;
       arg = TREE_CHAIN (arg), idx++)
    {
      enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
      rtx reg, link, tmp;

      reg = targetm.calls.function_arg (&args_so_far, mode, NULL_TREE, true);
      if (!reg || !REG_P (reg) || GET_MODE (reg) != mode
	  || GET_MODE_CLASS (mode) != MODE_INT)
	return false;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn);
	   link;
	   link = XEXP (link, 1))
	if (GET_CODE (XEXP (link, 0)) == USE)
	  {
	    args[idx] = XEXP (XEXP (link, 0), 0);
	    if (REG_P (args[idx])
		&& REGNO (args[idx]) == REGNO (reg)
		&& (GET_MODE (args[idx]) == mode
		    || (GET_MODE_CLASS (GET_MODE (args[idx])) == MODE_INT
			&& (GET_MODE_SIZE (GET_MODE (args[idx]))
			    <= UNITS_PER_WORD)
			&& (GET_MODE_SIZE (GET_MODE (args[idx]))
			    > GET_MODE_SIZE (mode)))))
	      break;
	  }
      if (!link)
	return false;

      tmp = cselib_expand_value_rtx (args[idx], scratch, 5);
      if (GET_MODE (args[idx]) != mode)
	{
	  if (!tmp || !CONST_INT_P (tmp))
	    return false;
	  tmp = GEN_INT (trunc_int_for_mode (INTVAL (tmp), mode));
	}
      if (tmp)
	args[idx] = tmp;

      targetm.calls.function_arg_advance (&args_so_far, mode, NULL_TREE, true);
    }
  if (arg != void_list_node || idx != nargs)
    return false;
  return true;
}
/* Apply record_store to all candidate stores in INSN.  Mark INSN
   if some part of it is not a candidate store and assigns to a
   non-register target.  */

static void
scan_insn (bb_info_t bb_info, rtx insn)
{
  rtx body;
  insn_info_t insn_info = (insn_info_t) pool_alloc (insn_info_pool);
  int mems_found = 0;
  memset (insn_info, 0, sizeof (struct insn_info));

  if (dump_file)
    fprintf (dump_file, "\n**scanning insn=%d\n",
	     INSN_UID (insn));

  insn_info->prev_insn = bb_info->last_insn;
  insn_info->insn = insn;
  bb_info->last_insn = insn_info;

  if (DEBUG_INSN_P (insn))
    {
      insn_info->cannot_delete = true;
      return;
    }

  /* Cselib clears the table for this case, so we have to essentially
     do the same.  */
  if (NONJUMP_INSN_P (insn)
      && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
      && MEM_VOLATILE_P (PATTERN (insn)))
    {
      add_wild_read (bb_info);
      insn_info->cannot_delete = true;
      return;
    }

  /* Look at all of the uses in the insn.  */
  note_uses (&PATTERN (insn), check_mem_read_use, bb_info);

  if (CALL_P (insn))
    {
      bool const_call;
      tree memset_call = NULL_TREE;

      insn_info->cannot_delete = true;

      /* Const functions cannot do anything bad i.e. read memory,
	 however, they can read their parameters which may have
	 been pushed onto the stack.
	 memset and bzero don't read memory either.  */
      const_call = RTL_CONST_CALL_P (insn);
      if (!const_call)
	{
	  rtx call = PATTERN (insn);
	  if (GET_CODE (call) == PARALLEL)
	    call = XVECEXP (call, 0, 0);
	  if (GET_CODE (call) == SET)
	    call = SET_SRC (call);
	  if (GET_CODE (call) == CALL
	      && MEM_P (XEXP (call, 0))
	      && GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
	    {
	      rtx symbol = XEXP (XEXP (call, 0), 0);
	      if (SYMBOL_REF_DECL (symbol)
		  && TREE_CODE (SYMBOL_REF_DECL (symbol)) == FUNCTION_DECL)
		{
		  if ((DECL_BUILT_IN_CLASS (SYMBOL_REF_DECL (symbol))
		       == BUILT_IN_NORMAL
		       && (DECL_FUNCTION_CODE (SYMBOL_REF_DECL (symbol))
			   == BUILT_IN_MEMSET))
		      || SYMBOL_REF_DECL (symbol) == block_clear_fn)
		    memset_call = SYMBOL_REF_DECL (symbol);
		}
	    }
	}

      if (const_call || memset_call)
	{
	  insn_info_t i_ptr = active_local_stores;
	  insn_info_t last = NULL;

	  if (dump_file)
	    fprintf (dump_file, "%s call %d\n",
		     const_call ? "const" : "memset", INSN_UID (insn));

	  /* See the head comment of the frame_read field.  */
	  if (reload_completed)
	    insn_info->frame_read = true;

	  /* Loop over the active stores and remove those which are
	     killed by the const function call.  */
	  while (i_ptr)
	    {
	      bool remove_store = false;

	      /* The stack pointer based stores are always killed.  */
	      if (i_ptr->stack_pointer_based)
		remove_store = true;

	      /* If the frame is read, the frame related stores are killed.  */
	      else if (insn_info->frame_read)
		{
		  store_info_t store_info = i_ptr->store_rec;

		  /* Skip the clobbers.  */
		  while (!store_info->is_set)
		    store_info = store_info->next;

		  if (store_info->group_id >= 0
		      && VEC_index (group_info_t, rtx_group_vec,
				    store_info->group_id)->frame_related)
		    remove_store = true;
		}

	      if (remove_store)
		{
		  if (dump_file)
		    dump_insn_info ("removing from active", i_ptr);

		  if (last)
		    last->next_local_store = i_ptr->next_local_store;
		  else
		    active_local_stores = i_ptr->next_local_store;
		}
	      else
		last = i_ptr;

	      i_ptr = i_ptr->next_local_store;
	    }

	  if (memset_call)
	    {
	      rtx args[3];
	      if (get_call_args (insn, memset_call, args, 3)
		  && CONST_INT_P (args[1])
		  && CONST_INT_P (args[2])
		  && INTVAL (args[2]) > 0)
		{
		  rtx mem = gen_rtx_MEM (BLKmode, args[0]);
		  set_mem_size (mem, args[2]);
		  body = gen_rtx_SET (VOIDmode, mem, args[1]);
		  mems_found += record_store (body, bb_info);
		  if (dump_file)
		    fprintf (dump_file, "handling memset as BLKmode store\n");
		  if (mems_found == 1)
		    {
		      insn_info->next_local_store = active_local_stores;
		      active_local_stores = insn_info;
		    }
		}
	    }
	}
      else
	/* Every other call, including pure functions, may read memory.  */
	add_wild_read (bb_info);

      return;
    }

  /* Assuming that there are sets in these insns, we cannot delete
     them.  */
  if ((GET_CODE (PATTERN (insn)) == CLOBBER)
      || volatile_refs_p (PATTERN (insn))
      || insn_could_throw_p (insn)
      || (RTX_FRAME_RELATED_P (insn))
      || find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX))
    insn_info->cannot_delete = true;

  body = PATTERN (insn);
  if (GET_CODE (body) == PARALLEL)
    {
      int i;
      for (i = 0; i < XVECLEN (body, 0); i++)
	mems_found += record_store (XVECEXP (body, 0, i), bb_info);
    }
  else
    mems_found += record_store (body, bb_info);

  if (dump_file)
    fprintf (dump_file, "mems_found = %d, cannot_delete = %s\n",
	     mems_found, insn_info->cannot_delete ? "true" : "false");

  /* If we found some sets of mems, add it into the active_local_stores so
     that it can be locally deleted if found dead or used for
     replace_read and redundant constant store elimination.  Otherwise mark
     it as cannot delete.  This simplifies the processing later.  */
  if (mems_found == 1)
    {
      insn_info->next_local_store = active_local_stores;
      active_local_stores = insn_info;
    }
  else
    insn_info->cannot_delete = true;
}
/* Remove BASE from the set of active_local_stores.  This is a
   callback from cselib that is used to get rid of the stores in
   active_local_stores.  */

static void
remove_useless_values (cselib_val *base)
{
  insn_info_t insn_info = active_local_stores;
  insn_info_t last = NULL;

  while (insn_info)
    {
      store_info_t store_info = insn_info->store_rec;
      bool del = false;

      /* If ANY of the store_infos match the cselib group that is
	 being deleted, then the insn can not be deleted.  */
      while (store_info)
	{
	  if ((store_info->group_id == -1)
	      && (store_info->cse_base == base))
	    {
	      del = true;
	      break;
	    }
	  store_info = store_info->next;
	}

      if (del)
	{
	  if (last)
	    last->next_local_store = insn_info->next_local_store;
	  else
	    active_local_stores = insn_info->next_local_store;
	  free_store_info (insn_info);
	}
      else
	last = insn_info;

      insn_info = insn_info->next_local_store;
    }
}
/* Do all of step 1.  */

static void
dse_step1 (void)
{
  basic_block bb;
  bitmap regs_live = BITMAP_ALLOC (NULL);

  all_blocks = BITMAP_ALLOC (NULL);
  bitmap_set_bit (all_blocks, ENTRY_BLOCK);
  bitmap_set_bit (all_blocks, EXIT_BLOCK);

  FOR_ALL_BB (bb)
    {
      insn_info_t ptr;
      bb_info_t bb_info = (bb_info_t) pool_alloc (bb_info_pool);

      memset (bb_info, 0, sizeof (struct bb_info));
      bitmap_set_bit (all_blocks, bb->index);
      bb_info->regs_live = regs_live;

      bitmap_copy (regs_live, DF_LR_IN (bb));
      df_simulate_initialize_forwards (bb, regs_live);

      bb_table[bb->index] = bb_info;
      cselib_discard_hook = remove_useless_values;

      if (bb->index >= NUM_FIXED_BLOCKS)
	{
	  rtx insn;

	  cse_store_info_pool
	    = create_alloc_pool ("cse_store_info_pool",
				 sizeof (struct store_info), 100);
	  active_local_stores = NULL;
	  cselib_clear_table ();

	  /* Scan the insns.  */
	  FOR_BB_INSNS (bb, insn)
	    {
	      if (INSN_P (insn))
		scan_insn (bb_info, insn);
	      cselib_process_insn (insn);
	      if (INSN_P (insn))
		df_simulate_one_insn_forwards (bb, insn, regs_live);
	    }

	  /* This is something of a hack, because the global algorithm
	     is supposed to take care of the case where stores go dead
	     at the end of the function.  However, the global
	     algorithm must take a more conservative view of block
	     mode reads than the local alg does.  So to get the case
	     where you have a store to the frame followed by a non
	     overlapping block mode read, we look at the active local
	     stores at the end of the function and delete all of the
	     frame and spill based ones.  */
	  if (stores_off_frame_dead_at_return
	      && (EDGE_COUNT (bb->succs) == 0
		  || (single_succ_p (bb)
		      && single_succ (bb) == EXIT_BLOCK_PTR
		      && ! crtl->calls_eh_return)))
	    {
	      insn_info_t i_ptr = active_local_stores;
	      while (i_ptr)
		{
		  store_info_t store_info = i_ptr->store_rec;

		  /* Skip the clobbers.  */
		  while (!store_info->is_set)
		    store_info = store_info->next;
		  if (store_info->alias_set && !i_ptr->cannot_delete)
		    delete_dead_store_insn (i_ptr);
		  else if (store_info->group_id >= 0)
		    {
		      group_info_t group
			= VEC_index (group_info_t, rtx_group_vec,
				     store_info->group_id);
		      if (group->frame_related && !i_ptr->cannot_delete)
			delete_dead_store_insn (i_ptr);
		    }

		  i_ptr = i_ptr->next_local_store;
		}
	    }

	  /* Get rid of the loads that were discovered in
	     replace_read.  Cselib is finished with this block.  */
	  while (deferred_change_list)
	    {
	      deferred_change_t next = deferred_change_list->next;

	      /* There is no reason to validate this change.  That was
		 done earlier.  */
	      *deferred_change_list->loc = deferred_change_list->reg;
	      pool_free (deferred_change_pool, deferred_change_list);
	      deferred_change_list = next;
	    }

	  /* Get rid of all of the cselib based store_infos in this
	     block and mark the containing insns as not being
	     deletable.  */
	  ptr = bb_info->last_insn;
	  while (ptr)
	    {
	      if (ptr->contains_cselib_groups)
		{
		  store_info_t s_info = ptr->store_rec;
		  while (s_info && !s_info->is_set)
		    s_info = s_info->next;
		  if (s_info
		      && s_info->redundant_reason
		      && s_info->redundant_reason->insn
		      && !ptr->cannot_delete)
		    {
		      if (dump_file)
			fprintf (dump_file, "Locally deleting insn %d "
					    "because insn %d stores the "
					    "same value and couldn't be "
					    "eliminated\n",
				 INSN_UID (ptr->insn),
				 INSN_UID (s_info->redundant_reason->insn));
		      delete_dead_store_insn (ptr);
		    }
		  if (s_info)
		    s_info->redundant_reason = NULL;
		  free_store_info (ptr);
		}
	      else
		{
		  store_info_t s_info;

		  /* Free at least positions_needed bitmaps.  */
		  for (s_info = ptr->store_rec; s_info; s_info = s_info->next)
		    if (s_info->is_large)
		      {
			BITMAP_FREE (s_info->positions_needed.large.bmap);
			s_info->is_large = false;
		      }
		}
	      ptr = ptr->prev_insn;
	    }

	  free_alloc_pool (cse_store_info_pool);
	}
      bb_info->regs_live = NULL;
    }

  BITMAP_FREE (regs_live);
  cselib_finish ();
  htab_empty (rtx_group_table);
}
/*----------------------------------------------------------------------------
   Second step.

   Assign each byte position in the stores that we are going to
   analyze globally to a position in the bitmaps.  Returns true if
   there are any bit positions assigned.
----------------------------------------------------------------------------*/

static void
dse_step2_init (void)
{
  unsigned int i;
  group_info_t group;

  FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
    {
      /* For all non stack related bases, we only consider a store to
	 be deletable if there are two or more stores for that
	 position.  This is because it takes one store to make the
	 other store redundant.  However, for the stores that are
	 stack related, we consider them if there is only one store
	 for the position.  We do this because the stack related
	 stores can be deleted if there is no read between them and
	 the end of the function.

	 To make this work in the current framework, we take the stack
	 related bases and add all of the bits from store1 into store2.
	 This has the effect of making them eligible even if there is
	 only one store.  */

      if (stores_off_frame_dead_at_return && group->frame_related)
	{
	  bitmap_ior_into (group->store2_n, group->store1_n);
	  bitmap_ior_into (group->store2_p, group->store1_p);
	  if (dump_file)
	    fprintf (dump_file, "group %d is frame related ", i);
	}

      group->offset_map_size_n++;
      group->offset_map_n = XNEWVEC (int, group->offset_map_size_n);
      group->offset_map_size_p++;
      group->offset_map_p = XNEWVEC (int, group->offset_map_size_p);
      group->process_globally = false;
      if (dump_file)
	{
	  fprintf (dump_file, "group %d(%d+%d): ", i,
		   (int)bitmap_count_bits (group->store2_n),
		   (int)bitmap_count_bits (group->store2_p));
	  bitmap_print (dump_file, group->store2_n, "n ", " ");
	  bitmap_print (dump_file, group->store2_p, "p ", "\n");
	}
    }
}
/* Init the offset tables for the normal case.  */

static bool
dse_step2_nospill (void)
{
  unsigned int i;
  group_info_t group;

  /* Position 0 is unused because 0 is used in the maps to mean
     unused.  */
  current_position = 1;

  FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
    {
      bitmap_iterator bi;
      unsigned int j;

      if (group == clear_alias_group)
	continue;

      memset (group->offset_map_n, 0, sizeof(int) * group->offset_map_size_n);
      memset (group->offset_map_p, 0, sizeof(int) * group->offset_map_size_p);
      bitmap_clear (group->group_kill);

      EXECUTE_IF_SET_IN_BITMAP (group->store2_n, 0, j, bi)
	{
	  bitmap_set_bit (group->group_kill, current_position);
	  group->offset_map_n[j] = current_position++;
	  group->process_globally = true;
	}
      EXECUTE_IF_SET_IN_BITMAP (group->store2_p, 0, j, bi)
	{
	  bitmap_set_bit (group->group_kill, current_position);
	  group->offset_map_p[j] = current_position++;
	  group->process_globally = true;
	}
    }
  return current_position != 1;
}
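/* Illustrative sketch only (kept out of the build with #if 0): the
   offset-to-bit-position assignment performed above, reduced to plain
   arrays.  Offsets that were stored at least twice get consecutive
   nonzero positions; position 0 means "not tracked globally".  The
   toy_* names are invented for this example.  */
#if 0
#include <stdio.h>

#define TOY_MAP_SIZE 8

/* Mirror of the EXECUTE_IF_SET_IN_BITMAP loop: assign positions,
   starting at 1, to every offset marked in STORED_TWICE.  Returns
   nonzero if any position was assigned.  */
static int
toy_assign_positions (const int stored_twice[TOY_MAP_SIZE],
                      int offset_map[TOY_MAP_SIZE])
{
  int current_position = 1;
  int j;
  for (j = 0; j < TOY_MAP_SIZE; j++)
    offset_map[j] = stored_twice[j] ? current_position++ : 0;
  return current_position != 1;
}

int
main (void)
{
  /* Offsets 0, 1, 4 and 5 of this group were stored more than once.  */
  int stored_twice[TOY_MAP_SIZE] = { 1, 1, 0, 0, 1, 1, 0, 0 };
  int map[TOY_MAP_SIZE];
  int j;

  toy_assign_positions (stored_twice, map);
  for (j = 0; j < TOY_MAP_SIZE; j++)
    printf ("offset %d -> bit position %d\n", j, map[j]);
  return 0;
}
#endif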
/* Init the offset tables for the spill case.  */

static bool
dse_step2_spill (void)
{
  unsigned int j;
  group_info_t group = clear_alias_group;
  bitmap_iterator bi;

  /* Position 0 is unused because 0 is used in the maps to mean
     unused.  */
  current_position = 1;

  if (dump_file)
    {
      bitmap_print (dump_file, clear_alias_sets,
		    "clear alias sets              ", "\n");
      bitmap_print (dump_file, disqualified_clear_alias_sets,
		    "disqualified clear alias sets ", "\n");
    }

  memset (group->offset_map_n, 0, sizeof(int) * group->offset_map_size_n);
  memset (group->offset_map_p, 0, sizeof(int) * group->offset_map_size_p);
  bitmap_clear (group->group_kill);

  /* Remove the disqualified positions from the store2_p set.  */
  bitmap_and_compl_into (group->store2_p, disqualified_clear_alias_sets);

  /* We do not need to process the store2_n set because
     alias_sets are always positive.  */
  EXECUTE_IF_SET_IN_BITMAP (group->store2_p, 0, j, bi)
    {
      bitmap_set_bit (group->group_kill, current_position);
      group->offset_map_p[j] = current_position++;
      group->process_globally = true;
    }

  return current_position != 1;
}
/*----------------------------------------------------------------------------
   Third step.

   Build the bit vectors for the transfer functions.
----------------------------------------------------------------------------*/


/* Note that this is NOT a general purpose function.  Any mem that has
   an alias set registered here is expected to be COMPLETELY unaliased:
   i.e. its addresses are not and need not be examined.

   It is known that all references to this address will have this
   alias set and there are NO other references to this address in the
   function.

   Currently the only place that is known to be clean enough to use
   this interface is the code that assigns the spill locations.

   All of the mems that have alias_sets registered are subjected to a
   very powerful form of dse where function calls, volatile reads and
   writes, and reads from random locations are not taken into account.

   It is also assumed that these locations go dead when the function
   returns.  This assumption could be relaxed if there were found to
   be places that this assumption was not correct.

   The MODE is passed in and saved.  The mode of each load or store to
   a mem with ALIAS_SET is checked against MEM.  If the size of that
   load or store is different from MODE, processing is halted on this
   alias set.  For the vast majority of alias sets, all of the loads
   and stores will use the same mode.  But vectors are treated
   differently: the alias set is established for the entire vector,
   but reload will insert loads and stores for individual elements and
   we do not necessarily have the information to track those separate
   elements.  So when we see a mode mismatch, we just bail.  */

void
dse_record_singleton_alias_set (alias_set_type alias_set,
				enum machine_mode mode)
{
  struct clear_alias_mode_holder tmp_holder;
  struct clear_alias_mode_holder *entry;
  void **slot;

  /* If we are not going to run dse, we need to return now or there
     will be problems with allocating the bitmaps.  */
  if ((!gate_dse()) || !alias_set)
    return;

  if (!clear_alias_sets)
    {
      clear_alias_sets = BITMAP_ALLOC (NULL);
      disqualified_clear_alias_sets = BITMAP_ALLOC (NULL);
      clear_alias_mode_table = htab_create (11, clear_alias_mode_hash,
					    clear_alias_mode_eq, NULL);
      clear_alias_mode_pool = create_alloc_pool ("clear_alias_mode_pool",
						 sizeof (struct clear_alias_mode_holder), 100);
    }

  bitmap_set_bit (clear_alias_sets, alias_set);

  tmp_holder.alias_set = alias_set;

  slot = htab_find_slot (clear_alias_mode_table, &tmp_holder, INSERT);
  gcc_assert (*slot == NULL);

  *slot = entry =
    (struct clear_alias_mode_holder *) pool_alloc (clear_alias_mode_pool);
  entry->alias_set = alias_set;
  entry->mode = mode;
}


/* Remove ALIAS_SET from the sets of stack slots being considered.  */

void
dse_invalidate_singleton_alias_set (alias_set_type alias_set)
{
  if ((!gate_dse()) || !alias_set)
    return;

  bitmap_clear_bit (clear_alias_sets, alias_set);
}
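/* Illustrative sketch only (kept out of the build with #if 0): how a
   caller that manages completely unaliased stack slots might use the
   two entry points above.  The toy_* function names are invented for
   this example; the real registration happens in the code that assigns
   spill locations.  */
#if 0
/* Give SLOT_MEM a fresh, private alias set and register it so that the
   spill instance of dse may track loads and stores to it.  */
static void
toy_register_spill_slot (rtx slot_mem)
{
  alias_set_type set = new_alias_set ();
  set_mem_alias_set (slot_mem, set);
  dse_record_singleton_alias_set (set, GET_MODE (slot_mem));
}

/* Withdraw SLOT_MEM from consideration, e.g. when the slot is shared
   or its address escapes.  */
static void
toy_unregister_spill_slot (rtx slot_mem)
{
  dse_invalidate_singleton_alias_set (MEM_ALIAS_SET (slot_mem));
}
#endif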
/* Look up the bitmap index for OFFSET in GROUP_INFO.  If it is not
   there, return 0.  */

static int
get_bitmap_index (group_info_t group_info, HOST_WIDE_INT offset)
{
  if (offset < 0)
    {
      HOST_WIDE_INT offset_p = -offset;
      if (offset_p >= group_info->offset_map_size_n)
	return 0;
      return group_info->offset_map_n[offset_p];
    }
  else
    {
      if (offset >= group_info->offset_map_size_p)
	return 0;
      return group_info->offset_map_p[offset];
    }
}
/* Process the STORE_INFOs into the bitmaps GEN and KILL.  KILL
   may be NULL.  */

static void
scan_stores_nospill (store_info_t store_info, bitmap gen, bitmap kill)
{
  while (store_info)
    {
      HOST_WIDE_INT i;
      group_info_t group_info
	= VEC_index (group_info_t, rtx_group_vec, store_info->group_id);
      if (group_info->process_globally)
	for (i = store_info->begin; i < store_info->end; i++)
	  {
	    int index = get_bitmap_index (group_info, i);
	    if (index != 0)
	      {
		bitmap_set_bit (gen, index);
		if (kill)
		  bitmap_clear_bit (kill, index);
	      }
	  }
      store_info = store_info->next;
    }
}


/* Process the STORE_INFOs into the bitmaps GEN and KILL.  KILL
   may be NULL.  */

static void
scan_stores_spill (store_info_t store_info, bitmap gen, bitmap kill)
{
  while (store_info)
    {
      if (store_info->alias_set)
	{
	  int index = get_bitmap_index (clear_alias_group,
					store_info->alias_set);
	  if (index != 0)
	    {
	      bitmap_set_bit (gen, index);
	      if (kill)
		bitmap_clear_bit (kill, index);
	    }
	}
      store_info = store_info->next;
    }
}
/* Process the READ_INFOs into the bitmaps GEN and KILL.  KILL
   may be NULL.  */

static void
scan_reads_nospill (insn_info_t insn_info, bitmap gen, bitmap kill)
{
  int i;
  group_info_t group;
  read_info_t read_info = insn_info->read_rec;

  /* If this insn reads the frame, kill all the frame related stores.  */
  if (insn_info->frame_read)
    {
      FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
	if (group->process_globally && group->frame_related)
	  {
	    if (kill)
	      bitmap_ior_into (kill, group->group_kill);
	    bitmap_and_compl_into (gen, group->group_kill);
	  }
    }

  while (read_info)
    {
      FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
	{
	  if (group->process_globally)
	    {
	      if (i == read_info->group_id)
		{
		  if (read_info->begin > read_info->end)
		    {
		      /* Begin > end for block mode reads.  */
		      if (kill)
			bitmap_ior_into (kill, group->group_kill);
		      bitmap_and_compl_into (gen, group->group_kill);
		    }
		  else
		    {
		      /* The groups are the same, just process the
			 offsets.  */
		      HOST_WIDE_INT j;
		      for (j = read_info->begin; j < read_info->end; j++)
			{
			  int index = get_bitmap_index (group, j);
			  if (index != 0)
			    {
			      if (kill)
				bitmap_set_bit (kill, index);
			      bitmap_clear_bit (gen, index);
			    }
			}
		    }
		}
	      else
		{
		  /* The groups are different, if the alias sets
		     conflict, clear the entire group.  We only need
		     to apply this test if the read_info is a cselib
		     read.  Anything with a constant base cannot alias
		     something else with a different constant
		     base.  */
		  if ((read_info->group_id < 0)
		      && canon_true_dependence (group->base_mem,
						GET_MODE (group->base_mem),
						group->canon_base_addr,
						read_info->mem, NULL_RTX,
						rtx_varies_p))
		    {
		      if (kill)
			bitmap_ior_into (kill, group->group_kill);
		      bitmap_and_compl_into (gen, group->group_kill);
		    }
		}
	    }
	}

      read_info = read_info->next;
    }
}


/* Process the READ_INFOs into the bitmaps GEN and KILL.  KILL
   may be NULL.  */

static void
scan_reads_spill (read_info_t read_info, bitmap gen, bitmap kill)
{
  while (read_info)
    {
      if (read_info->alias_set)
	{
	  int index = get_bitmap_index (clear_alias_group,
					read_info->alias_set);
	  if (index != 0)
	    {
	      if (kill)
		bitmap_set_bit (kill, index);
	      bitmap_clear_bit (gen, index);
	    }
	}

      read_info = read_info->next;
    }
}
/* Return the insn in BB_INFO before the first wild read or if there
   are no wild reads in the block, return the last insn.  */

static insn_info_t
find_insn_before_first_wild_read (bb_info_t bb_info)
{
  insn_info_t insn_info = bb_info->last_insn;
  insn_info_t last_wild_read = NULL;

  while (insn_info)
    {
      if (insn_info->wild_read)
	{
	  last_wild_read = insn_info->prev_insn;
	  /* Block starts with wild read.  */
	  if (!last_wild_read)
	    return NULL;
	}

      insn_info = insn_info->prev_insn;
    }

  if (last_wild_read)
    return last_wild_read;
  else
    return bb_info->last_insn;
}
/* Scan the insns in BB_INFO starting at PTR and going to the top of
   the block in order to build the gen and kill sets for the block.
   We start at ptr which may be the last insn in the block or may be
   the first insn with a wild read.  In the latter case we are able to
   skip the rest of the block because it just does not matter:
   anything that happens is hidden by the wild read.  */

static void
dse_step3_scan (bool for_spills, basic_block bb)
{
  bb_info_t bb_info = bb_table[bb->index];
  insn_info_t insn_info;

  if (for_spills)
    /* There are no wild reads in the spill case.  */
    insn_info = bb_info->last_insn;
  else
    insn_info = find_insn_before_first_wild_read (bb_info);

  /* In the spill case or in the no_spill case if there is no wild
     read in the block, we will need a kill set.  */
  if (insn_info == bb_info->last_insn)
    {
      if (bb_info->kill)
	bitmap_clear (bb_info->kill);
      else
	bb_info->kill = BITMAP_ALLOC (NULL);
    }
  else if (bb_info->kill)
    BITMAP_FREE (bb_info->kill);

  while (insn_info)
    {
      /* There may have been code deleted by the dce pass run before
	 this phase.  */
      if (insn_info->insn && INSN_P (insn_info->insn))
	{
	  /* Process the read(s) last.  */
	  if (for_spills)
	    {
	      scan_stores_spill (insn_info->store_rec, bb_info->gen, bb_info->kill);
	      scan_reads_spill (insn_info->read_rec, bb_info->gen, bb_info->kill);
	    }
	  else
	    {
	      scan_stores_nospill (insn_info->store_rec, bb_info->gen, bb_info->kill);
	      scan_reads_nospill (insn_info, bb_info->gen, bb_info->kill);
	    }
	}

      insn_info = insn_info->prev_insn;
    }
}
/* Set the gen set of the exit block, and also any block with no
   successors that does not have a wild read.  */

static void
dse_step3_exit_block_scan (bb_info_t bb_info)
{
  /* The gen set is all 0's for the exit block except for the
     frame_pointer_group.  */

  if (stores_off_frame_dead_at_return)
    {
      unsigned int i;
      group_info_t group;

      FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
	{
	  if (group->process_globally && group->frame_related)
	    bitmap_ior_into (bb_info->gen, group->group_kill);
	}
    }
}
/* Find all of the blocks that are not backwards reachable from the
   exit block or any block with no successors (BB).  These are the
   infinite loops or infinite self loops.  These blocks will still
   have their bits set in UNREACHABLE_BLOCKS.  */

static void
mark_reachable_blocks (sbitmap unreachable_blocks, basic_block bb)
{
  edge e;
  edge_iterator ei;

  if (TEST_BIT (unreachable_blocks, bb->index))
    {
      RESET_BIT (unreachable_blocks, bb->index);
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  mark_reachable_blocks (unreachable_blocks, e->src);
	}
    }
}
/* Build the transfer functions for the function.  */

static void
dse_step3 (bool for_spills)
{
  basic_block bb;
  sbitmap unreachable_blocks = sbitmap_alloc (last_basic_block);
  sbitmap_iterator sbi;
  bitmap all_ones = NULL;
  unsigned int i;

  sbitmap_ones (unreachable_blocks);

  FOR_ALL_BB (bb)
    {
      bb_info_t bb_info = bb_table[bb->index];
      if (bb_info->gen)
	bitmap_clear (bb_info->gen);
      else
	bb_info->gen = BITMAP_ALLOC (NULL);

      if (bb->index == ENTRY_BLOCK)
	;
      else if (bb->index == EXIT_BLOCK)
	dse_step3_exit_block_scan (bb_info);
      else
	dse_step3_scan (for_spills, bb);
      if (EDGE_COUNT (bb->succs) == 0)
	mark_reachable_blocks (unreachable_blocks, bb);

      /* If this is the second time dataflow is run, delete the old
	 sets.  */
      if (bb_info->in)
	BITMAP_FREE (bb_info->in);
      if (bb_info->out)
	BITMAP_FREE (bb_info->out);
    }

  /* For any block in an infinite loop, we must initialize the out set
     to all ones.  This could be expensive, but almost never occurs in
     practice.  However, it is common in regression tests.  */
  EXECUTE_IF_SET_IN_SBITMAP (unreachable_blocks, 0, i, sbi)
    {
      if (bitmap_bit_p (all_blocks, i))
	{
	  bb_info_t bb_info = bb_table[i];
	  if (!all_ones)
	    {
	      unsigned int j;
	      group_info_t group;

	      all_ones = BITMAP_ALLOC (NULL);
	      FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, j, group)
		bitmap_ior_into (all_ones, group->group_kill);
	    }
	  if (!bb_info->out)
	    {
	      bb_info->out = BITMAP_ALLOC (NULL);
	      bitmap_copy (bb_info->out, all_ones);
	    }
	}
    }

  if (all_ones)
    BITMAP_FREE (all_ones);
  sbitmap_free (unreachable_blocks);
}
/*----------------------------------------------------------------------------
   Fourth step.

   Solve the bitvector equations.
----------------------------------------------------------------------------*/


/* Confluence function for blocks with no successors.  Create an out
   set from the gen set of the exit block.  This block logically has
   the exit block as a successor.  */

static void
dse_confluence_0 (basic_block bb)
{
  bb_info_t bb_info = bb_table[bb->index];

  if (bb->index == EXIT_BLOCK)
    return;

  if (!bb_info->out)
    {
      bb_info->out = BITMAP_ALLOC (NULL);
      bitmap_copy (bb_info->out, bb_table[EXIT_BLOCK]->gen);
    }
}
/* Propagate the information from the in set of the dest of E to the
   out set of the src of E.  If the various in or out sets are not
   there, that means they are all ones.  */

static void
dse_confluence_n (edge e)
{
  bb_info_t src_info = bb_table[e->src->index];
  bb_info_t dest_info = bb_table[e->dest->index];

  if (dest_info->in)
    {
      if (src_info->out)
	bitmap_and_into (src_info->out, dest_info->in);
      else
	{
	  src_info->out = BITMAP_ALLOC (NULL);
	  bitmap_copy (src_info->out, dest_info->in);
	}
    }
}
/* Propagate the info from the out to the in set of BB_INDEX's basic
   block.  There are three cases:

   1) The block has no kill set.  In this case the kill set is all
   ones.  It does not matter what the out set of the block is, none of
   the info can reach the top.  The only thing that reaches the top is
   the gen set and we just copy the set.

   2) There is a kill set but no out set and bb has successors.  In
   this case we just return.  Eventually an out set will be created and
   it is better to wait than to create a set of ones.

   3) There is both a kill and out set.  We apply the obvious transfer
   function.  */

static bool
dse_transfer_function (int bb_index)
{
  bb_info_t bb_info = bb_table[bb_index];

  if (bb_info->kill)
    {
      if (bb_info->out)
	{
	  /* Case 3 above.  */
	  if (bb_info->in)
	    return bitmap_ior_and_compl (bb_info->in, bb_info->gen,
					 bb_info->out, bb_info->kill);
	  else
	    {
	      bb_info->in = BITMAP_ALLOC (NULL);
	      bitmap_ior_and_compl (bb_info->in, bb_info->gen,
				    bb_info->out, bb_info->kill);
	      return true;
	    }
	}
      else
	/* Case 2 above.  */
	return false;
    }
  else
    {
      /* Case 1 above.  If there is already an in set, nothing
	 happens.  */
      if (bb_info->in)
	return false;
      else
	{
	  bb_info->in = BITMAP_ALLOC (NULL);
	  bitmap_copy (bb_info->in, bb_info->gen);
	  return true;
	}
    }
}
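/* Illustrative sketch only (kept out of the build with #if 0): the
   backward dataflow problem of step 4 on toy 8-bit sets.  A set bit
   means "this byte position is overwritten again before any read";
   IN = GEN | (OUT & ~KILL), and OUT of a block is the intersection of
   its successors' IN sets (here a single successor, so a copy).  All
   names are invented for this example.  */
#if 0
#include <stdio.h>

struct toy_bb
{
  unsigned gen, kill;   /* transfer function of the block */
  unsigned in, out;     /* dataflow solution */
};

/* One transfer step; returns nonzero if IN changed.  */
static int
toy_transfer (struct toy_bb *bb)
{
  unsigned new_in = bb->gen | (bb->out & ~bb->kill);
  int changed = new_in != bb->in;
  bb->in = new_in;
  return changed;
}

int
main (void)
{
  /* Chain b0 -> b1 -> exit.  Bit 1 is stored in both blocks and never
     read, so it survives into b0's OUT set: the store in b0 is dead.  */
  struct toy_bb b1 = { 0x2 /* gen */, 0x0 /* kill */, 0, 0x0 /* out */ };
  struct toy_bb b0 = { 0x2 /* gen */, 0x0 /* kill */, 0, 0 };

  toy_transfer (&b1);       /* b1.in = 0x2 */
  b0.out = b1.in;           /* confluence over the single successor */
  toy_transfer (&b0);       /* b0.in = 0x2 */

  printf ("b1.in = %#x  b0.out = %#x  b0.in = %#x\n", b1.in, b0.out, b0.in);
  return 0;
}
#endif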
/* Solve the dataflow equations.  */

static void
dse_step4 (void)
{
  df_simple_dataflow (DF_BACKWARD, NULL, dse_confluence_0,
		      dse_confluence_n, dse_transfer_function,
		      all_blocks, df_get_postorder (DF_BACKWARD),
		      df_get_n_blocks (DF_BACKWARD));
  if (dump_file)
    {
      basic_block bb;

      fprintf (dump_file, "\n\n*** Global dataflow info after analysis.\n");
      FOR_ALL_BB (bb)
	{
	  bb_info_t bb_info = bb_table[bb->index];

	  df_print_bb_index (bb, dump_file);
	  if (bb_info->in)
	    bitmap_print (dump_file, bb_info->in, "  in:   ", "\n");
	  else
	    fprintf (dump_file, "  in:   *MISSING*\n");
	  if (bb_info->gen)
	    bitmap_print (dump_file, bb_info->gen, "  gen:  ", "\n");
	  else
	    fprintf (dump_file, "  gen:  *MISSING*\n");
	  if (bb_info->kill)
	    bitmap_print (dump_file, bb_info->kill, "  kill: ", "\n");
	  else
	    fprintf (dump_file, "  kill: *MISSING*\n");
	  if (bb_info->out)
	    bitmap_print (dump_file, bb_info->out, "  out:  ", "\n");
	  else
	    fprintf (dump_file, "  out:  *MISSING*\n\n");
	}
    }
}
/*----------------------------------------------------------------------------
   Fifth step.

   Delete the stores that can only be deleted using the global information.
----------------------------------------------------------------------------*/

static void
dse_step5_nospill (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      bb_info_t bb_info = bb_table[bb->index];
      insn_info_t insn_info = bb_info->last_insn;
      bitmap v = bb_info->out;

      while (insn_info)
	{
	  bool deleted = false;
	  if (dump_file && insn_info->insn)
	    {
	      fprintf (dump_file, "starting to process insn %d\n",
		       INSN_UID (insn_info->insn));
	      bitmap_print (dump_file, v, "  v:  ", "\n");
	    }

	  /* There may have been code deleted by the dce pass run before
	     this phase.  */
	  if (insn_info->insn
	      && INSN_P (insn_info->insn)
	      && (!insn_info->cannot_delete)
	      && (!bitmap_empty_p (v)))
	    {
	      store_info_t store_info = insn_info->store_rec;

	      /* Try to delete the current insn.  */
	      deleted = true;

	      /* Skip the clobbers.  */
	      while (!store_info->is_set)
		store_info = store_info->next;

	      if (store_info->alias_set)
		deleted = false;
	      else
		{
		  HOST_WIDE_INT i;
		  group_info_t group_info
		    = VEC_index (group_info_t, rtx_group_vec, store_info->group_id);

		  for (i = store_info->begin; i < store_info->end; i++)
		    {
		      int index = get_bitmap_index (group_info, i);

		      if (dump_file)
			fprintf (dump_file, "i = %d, index = %d\n", (int)i, index);
		      if (index == 0 || !bitmap_bit_p (v, index))
			{
			  if (dump_file)
			    fprintf (dump_file, "failing at i = %d\n", (int)i);
			  deleted = false;
			  break;
			}
		    }
		}
	      if (deleted)
		{
		  if (dbg_cnt (dse))
		    {
		      check_for_inc_dec (insn_info->insn);
		      delete_insn (insn_info->insn);
		      insn_info->insn = NULL;
		      globally_deleted++;
		    }
		}
	    }
	  /* We do want to process the local info if the insn was
	     deleted.  For instance, if the insn did a wild read, we
	     no longer need to trash the info.  */
	  if (insn_info->insn
	      && INSN_P (insn_info->insn)
	      && (!deleted))
	    {
	      scan_stores_nospill (insn_info->store_rec, v, NULL);
	      if (insn_info->wild_read)
		{
		  if (dump_file)
		    fprintf (dump_file, "wild read\n");
		  bitmap_clear (v);
		}
	      else if (insn_info->read_rec)
		{
		  if (dump_file)
		    fprintf (dump_file, "regular read\n");
		  scan_reads_nospill (insn_info, v, NULL);
		}
	    }

	  insn_info = insn_info->prev_insn;
	}
    }
}
static void
dse_step5_spill (void)
{
  basic_block bb;
  FOR_EACH_BB (bb)
    {
      bb_info_t bb_info = bb_table[bb->index];
      insn_info_t insn_info = bb_info->last_insn;
      bitmap v = bb_info->out;

      while (insn_info)
	{
	  bool deleted = false;
	  /* There may have been code deleted by the dce pass run before
	     this phase.  */
	  if (insn_info->insn
	      && INSN_P (insn_info->insn)
	      && (!insn_info->cannot_delete)
	      && (!bitmap_empty_p (v)))
	    {
	      /* Try to delete the current insn.  */
	      store_info_t store_info = insn_info->store_rec;
	      deleted = true;

	      while (store_info)
		{
		  if (store_info->alias_set)
		    {
		      int index = get_bitmap_index (clear_alias_group,
						    store_info->alias_set);
		      if (index == 0 || !bitmap_bit_p (v, index))
			{
			  deleted = false;
			  break;
			}
		    }
		  else
		    deleted = false;
		  store_info = store_info->next;
		}
	      if (deleted && dbg_cnt (dse))
		{
		  if (dump_file)
		    fprintf (dump_file, "Spill deleting insn %d\n",
			     INSN_UID (insn_info->insn));
		  check_for_inc_dec (insn_info->insn);
		  delete_insn (insn_info->insn);
		  spill_deleted++;
		  insn_info->insn = NULL;
		}
	    }

	  if (insn_info->insn
	      && INSN_P (insn_info->insn)
	      && (!deleted))
	    {
	      scan_stores_spill (insn_info->store_rec, v, NULL);
	      scan_reads_spill (insn_info->read_rec, v, NULL);
	    }

	  insn_info = insn_info->prev_insn;
	}
    }
}
/*----------------------------------------------------------------------------
   Sixth step.

   Delete stores made redundant by earlier stores (which store the same
   value) that couldn't be eliminated.
----------------------------------------------------------------------------*/

static void
dse_step6 (void)
{
  basic_block bb;

  FOR_ALL_BB (bb)
    {
      bb_info_t bb_info = bb_table[bb->index];
      insn_info_t insn_info = bb_info->last_insn;

      while (insn_info)
	{
	  /* There may have been code deleted by the dce pass run before
	     this phase.  */
	  if (insn_info->insn
	      && INSN_P (insn_info->insn)
	      && !insn_info->cannot_delete)
	    {
	      store_info_t s_info = insn_info->store_rec;

	      while (s_info && !s_info->is_set)
		s_info = s_info->next;
	      if (s_info
		  && s_info->redundant_reason
		  && s_info->redundant_reason->insn
		  && INSN_P (s_info->redundant_reason->insn))
		{
		  rtx rinsn = s_info->redundant_reason->insn;
		  if (dump_file)
		    fprintf (dump_file, "Locally deleting insn %d "
					"because insn %d stores the "
					"same value and couldn't be "
					"eliminated\n",
			     INSN_UID (insn_info->insn),
			     INSN_UID (rinsn));
		  delete_dead_store_insn (insn_info);
		}
	    }
	  insn_info = insn_info->prev_insn;
	}
    }
}
/*----------------------------------------------------------------------------
   Seventh step.

   Destroy everything left standing.
----------------------------------------------------------------------------*/

static void
dse_step7 (bool global_done)
{
  unsigned int i;
  group_info_t group;
  basic_block bb;

  FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
    {
      free (group->offset_map_n);
      free (group->offset_map_p);
      BITMAP_FREE (group->store1_n);
      BITMAP_FREE (group->store1_p);
      BITMAP_FREE (group->store2_n);
      BITMAP_FREE (group->store2_p);
      BITMAP_FREE (group->group_kill);
    }

  if (global_done)
    FOR_ALL_BB (bb)
      {
	bb_info_t bb_info = bb_table[bb->index];
	BITMAP_FREE (bb_info->gen);
	if (bb_info->kill)
	  BITMAP_FREE (bb_info->kill);
	if (bb_info->in)
	  BITMAP_FREE (bb_info->in);
	if (bb_info->out)
	  BITMAP_FREE (bb_info->out);
      }

  if (clear_alias_sets)
    {
      BITMAP_FREE (clear_alias_sets);
      BITMAP_FREE (disqualified_clear_alias_sets);
      free_alloc_pool (clear_alias_mode_pool);
      htab_delete (clear_alias_mode_table);
    }

  end_alias_analysis ();
  htab_delete (rtx_group_table);
  VEC_free (group_info_t, heap, rtx_group_vec);
  BITMAP_FREE (all_blocks);
  BITMAP_FREE (scratch);

  free_alloc_pool (rtx_store_info_pool);
  free_alloc_pool (read_info_pool);
  free_alloc_pool (insn_info_pool);
  free_alloc_pool (bb_info_pool);
  free_alloc_pool (rtx_group_info_pool);
  free_alloc_pool (deferred_change_pool);
}
/* ------------------------------------------------------------------------- */

/* Callback for running pass_rtl_dse.  */

static unsigned int
rest_of_handle_dse (void)
{
  bool did_global = false;

  df_set_flags (DF_DEFER_INSN_RESCAN);

  /* Need the notes since we must track live hardregs in the forwards
     direction.  */
  df_note_add_problem ();
  df_analyze ();

  dse_step0 ();
  dse_step1 ();
  dse_step2_init ();
  if (dse_step2_nospill ())
    {
      df_set_flags (DF_LR_RUN_DCE);
      df_analyze ();
      did_global = true;
      if (dump_file)
	fprintf (dump_file, "doing global processing\n");
      dse_step3 (false);
      dse_step4 ();
      dse_step5_nospill ();
    }

  /* For the instance of dse that runs after reload, we make a special
     pass to process the spills.  These are special in that they are
     totally transparent, i.e, there is no aliasing issues that need
     to be considered.  This means that the wild reads that kill
     everything else do not apply here.  */
  if (clear_alias_sets && dse_step2_spill ())
    {
      if (!did_global)
	{
	  df_set_flags (DF_LR_RUN_DCE);
	  df_analyze ();
	}
      did_global = true;
      if (dump_file)
	fprintf (dump_file, "doing global spill processing\n");
      dse_step3 (true);
      dse_step4 ();
      dse_step5_spill ();
    }

  dse_step6 ();
  dse_step7 (did_global);

  if (dump_file)
    fprintf (dump_file, "dse: local deletions = %d, global deletions = %d, spill deletions = %d\n",
	     locally_deleted, globally_deleted, spill_deleted);
  return 0;
}

static bool
gate_dse (void)
{
  return gate_dse1 () || gate_dse2 ();
}

static bool
gate_dse1 (void)
{
  return optimize > 0 && flag_dse
    && dbg_cnt (dse1);
}

static bool
gate_dse2 (void)
{
  return optimize > 0 && flag_dse
    && dbg_cnt (dse2);
}

struct rtl_opt_pass pass_rtl_dse1 =
{
 {
  RTL_PASS,
  "dse1",                               /* name */
  gate_dse1,                            /* gate */
  rest_of_handle_dse,                   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DSE1,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};

struct rtl_opt_pass pass_rtl_dse2 =
{
 {
  RTL_PASS,
  "dse2",                               /* name */
  gate_dse2,                            /* gate */
  rest_of_handle_dse,                   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DSE2,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};