/* RTL dead store elimination.
   Copyright (C) 2005-2024 Free Software Foundation, Inc.

   Contributed by Richard Sandiford <rsandifor@codesourcery.com>
   and Kenneth Zadeck <zadeck@naturalbridge.com>

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "gimple-ssa.h"
#include "stor-layout.h"
#include "tree-pass.h"
#include "cfgcleanup.h"
/* This file contains three techniques for performing Dead Store
   Elimination (dse):

   * The first technique performs dse locally on any base address.  It
   is based on cselib, which is a local value numbering technique.
   This technique is local to a basic block but deals with a fairly
   general set of addresses.

   * The second technique performs dse globally but is restricted to
   base addresses that are either constant or are relative to the
   frame pointer.

   * The third technique (which is only done after register allocation)
   processes the spill slots.  This differs from the second
   technique because it takes advantage of the fact that spilling is
   completely free from the effects of aliasing.

   Logically, dse is a backwards dataflow problem.  A store can be
   deleted if it cannot be reached in the backward direction by any
   use of the value being stored.  However, the local technique uses a
   forwards scan of the basic block because cselib requires that the
   block be processed in that order.
   The pass is logically broken into the following steps:

   0) Initialization.

   1) The local algorithm, as well as scanning the insns for the two
   global algorithms.

   2) Analysis to see if the global algorithms are necessary.  In the
   case of stores based on a constant address, there must be at least
   two stores to that address to make it possible to delete some of
   the stores.  In the case of stores off of the frame or spill
   related stores, only one store to an address is necessary because
   those stores die at the end of the function.

   3) Set up the global dataflow equations based on processing the
   info parsed in the first step.

   4) Solve the dataflow equations.

   5) Delete the insns that the global analysis has indicated are
   necessary to delete.

   6) Delete insns that store the same value as a preceding store
   where the earlier store couldn't be eliminated.

   7) Cleanup.

   The first step uses cselib and canon_rtx to build the largest
   expression possible for each address.  That scan is a forwards pass
   through each basic block.  From the point of view of the global
   technique, the first pass could examine a block in either
   direction.  The forwards ordering is to accommodate cselib.
   We make a simplifying assumption: addresses fall into four broad
   categories:

   1) base has rtx_varies_p == false, offset is constant.
   2) base has rtx_varies_p == false, offset variable.
   3) base has rtx_varies_p == true, offset constant.
   4) base has rtx_varies_p == true, offset variable.

   The local passes are able to process all 4 kinds of addresses.  The
   global pass only handles 1).
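   For instance (purely illustrative, not exhaustive): an address of the
   form (plus (reg frame_pointer) (const_int 8)) has a non-varying base
   and a constant offset, so it is of kind 1) and is a candidate for the
   global pass, whereas (plus (reg r100) (reg r101)) has a varying base
   and a variable offset, so it is of kind 4) and can only be handled by
   the local, cselib-based technique.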
   The global problem is formulated as follows:

     A store, S1, to address A, where A is not relative to the stack
     frame, can be eliminated if all paths from S1 to the end of the
     function contain another store to A before a read of A.

     If the address A is relative to the stack frame, a store S2 to A
     can be eliminated if there are no paths from S2 that reach the
     end of the function and read A before another store to A.  In
     this case S2 can be deleted if there are paths from S2 to the
     end of the function that have no reads or writes to A.  This
     second case allows stores to the stack frame to be deleted that
     would otherwise die when the function returns.  This cannot be
     done if stores_off_frame_dead_at_return is not true.  See the
     documentation of that variable for when it is false.
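   A purely illustrative C-level sketch of the second case, where
   COMPUTE stands for any call whose result ends up in a frame slot:

     void
     f (void)
     {
       int buf[4];
       buf[0] = compute ();
     }

   BUF is never read after the store, so every path from that store to
   the end of F is free of reads of the slot; the store dies when F
   returns and is a candidate for deletion (provided BUF's address does
   not escape).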
   The global problem is formulated as a backwards set union
   dataflow problem where the stores are the gens and reads are the
   kills.  Set union problems are rare and require some special
   handling given our representation of bitmaps.  A straightforward
   implementation requires a lot of bitmaps filled with 1s.
   These are expensive and cumbersome in our bitmap formulation so
   care has been taken to avoid large vectors filled with 1s.  See
   the comments in bb_info and in the dataflow confluence functions
   for details.
   There are two places for further enhancements to this algorithm:

   1) The original dse which was embedded in a pass called flow also
   did local address forwarding.  For example in

   A <- r100
   ... <- A

   flow would replace the right hand side of the second insn with a
   reference to r100.  Most of the information is available to add this
   to this pass.  It has not been done because it is a lot of work in
   the case that either r100 is assigned to between the first and
   second insn and/or the second insn is a load of part of the value
   stored by the first insn.

   insn 5 in gcc.c-torture/compile/990203-1.c simple case.
   insn 15 in gcc.c-torture/execute/20001017-2.c simple case.
   insn 25 in gcc.c-torture/execute/20001026-1.c simple case.
   insn 44 in gcc.c-torture/execute/20010910-1.c simple case.
   2) The cleaning up of spill code is quite profitable.  It currently
   depends on reading tea leaves and chicken entrails left by reload.
   This pass depends on reload creating a singleton alias set for each
   spill slot and telling the next dse pass which of these alias sets
   are the singletons.  Rather than analyze the addresses of the
   spills, dse's spill processing just does analysis of the loads and
   stores that use those alias sets.  There are three cases where this
   breaks down:

   a) Reload sometimes creates the slot for one mode of access, and
   then inserts loads and/or stores for a smaller mode.  In this
   case, the current code just punts on the slot.  The proper thing
   to do is to back out and use one bit vector position for each
   byte of the entity associated with the slot.  This depends on
   KNOWING that reload always generates the accesses for each of the
   bytes in some canonical (read that easy to understand several
   passes after reload happens) way.

   b) Reload sometimes decides that a spill slot it allocated was not
   large enough for the mode and goes back and allocates more slots
   with the same mode and alias set.  The backout in this case is a
   little more graceful than (a).  In this case the slot is unmarked
   as being a spill slot and if the final address comes out to be based
   off the frame pointer, the global algorithm handles this slot.

   c) For any pass that may prespill, there is currently no
   mechanism to tell the dse pass that the slot being used has the
   special properties that reload uses.  It may be that all that is
   required is to have those passes make the same calls that reload
   does, assuming that the alias sets can be manipulated in the same
   way.  */
/* There are limits to the size of constant offsets we model for the
   global problem.  There are certainly test cases that exceed this
   limit, but it is unlikely that there are important programs that
   really have constant offsets this size.  */
#define MAX_OFFSET (64 * 1024)
/* Obstack for the DSE dataflow bitmaps.  We don't want to put these
   on the default obstack because these bitmaps can grow quite large
   (~2GB for the small (!) test case of PR54146) and we'll hold on to
   all that memory until the end of the compiler run.
   As a bonus, delete_tree_live_info can destroy all the bitmaps by just
   releasing the whole obstack.  */
static bitmap_obstack dse_bitmap_obstack;

/* Obstack for other data.  As for above: Kinda nice to be able to
   throw it all away at the end in one big sweep.  */
static struct obstack dse_obstack;

/* Scratch bitmap for cselib's cselib_expand_value_rtx.  */
static bitmap scratch = NULL;

struct insn_info_type;
/* This structure holds information about a candidate store.  */
class store_info
{
public:

  /* False means this is a clobber.  */
  bool is_set;

  /* False if a single HOST_WIDE_INT bitmap is used for positions_needed.  */
  bool is_large;

  /* The id of the mem group of the base address.  If rtx_varies_p is
     true, this is -1.  Otherwise, it is the index into the group
     table.  */
  int group_id;

  /* This is the cselib value.  */
  cselib_val *cse_base;

  /* This canonized mem.  */
  rtx mem;

  /* Canonized MEM address for use by canon_true_dependence.  */
  rtx mem_addr;

  /* The offset of the first byte associated with the operation.  */
  poly_int64 offset;

  /* The number of bytes covered by the operation.  This is always exact
     and known (rather than -1).  */
  poly_int64 width;

  /* The address space that the memory reference uses.  */
  unsigned char addrspace;

  union
    {
      /* A bitmask as wide as the number of bytes in the word that
	 contains a 1 if the byte may be needed.  The store is unused if
	 all of the bits are 0.  This is used if IS_LARGE is false.  */
      unsigned HOST_WIDE_INT small_bitmask;

      struct
	{
	  /* A bitmap with one bit per byte, or null if the number of
	     bytes isn't known at compile time.  A cleared bit means
	     the position is needed.  Used if IS_LARGE is true.  */
	  bitmap bmap;

	  /* When BITMAP is nonnull, this counts the number of set bits
	     (i.e. unneeded bytes) in the bitmap.  If it is equal to
	     WIDTH, the whole store is unused.

	     When BITMAP is null:
	     - the store is definitely not needed when COUNT == 1
	     - all the store is needed when COUNT == 0 and RHS is nonnull
	     - otherwise we don't know which parts of the store are needed.  */
	  int count;
	} large;
    } positions_needed;

  /* The next store info for this insn.  */
  class store_info *next;

  /* The right hand side of the store.  This is used if there is a
     subsequent reload of the mem's address somewhere later in the
     basic block.  */
  rtx rhs;

  /* If rhs is or holds a constant, this contains that constant,
     otherwise NULL.  */
  rtx const_rhs;

  /* Set if this store stores the same constant value as the insn
     REDUNDANT_REASON refers to.  Such stores aren't eliminated early,
     because doing that might prevent the earlier, larger store from
     being eliminated.  */
  struct insn_info_type *redundant_reason;
};
/* Return a bitmask with the first N low bits set.  */

static unsigned HOST_WIDE_INT
lowpart_bitmask (int n)
{
  unsigned HOST_WIDE_INT mask = HOST_WIDE_INT_M1U;
  return mask >> (HOST_BITS_PER_WIDE_INT - n);
}
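/* For instance, lowpart_bitmask (4) yields 0xf: MASK starts as all ones
   and is shifted right by HOST_BITS_PER_WIDE_INT - 4, leaving exactly
   the four low bits set.  record_store uses this to initialize
   positions_needed.small_bitmask for a store of known constant width.  */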
static object_allocator<store_info> cse_store_info_pool ("cse_store_info_pool");

static object_allocator<store_info> rtx_store_info_pool ("rtx_store_info_pool");
/* This structure holds information about a load.  These are only
   built for rtx bases.  */
class read_info_type
{
public:
  /* The id of the mem group of the base address.  */
  int group_id;

  /* The offset of the first byte associated with the operation.  */
  poly_int64 offset;

  /* The number of bytes covered by the operation, or -1 if not known.  */
  poly_int64 width;

  /* The mem being read.  */
  rtx mem;

  /* The next read_info for this insn.  */
  class read_info_type *next;
};
typedef class read_info_type *read_info_t;

static object_allocator<read_info_type> read_info_type_pool ("read_info_pool");
/* One of these records is created for each insn.  */

struct insn_info_type
{
  /* Set true if the insn contains a store but the insn itself cannot
     be deleted.  This is set if the insn is a parallel and there is
     more than one non dead output or if the insn is in some way
     volatile.  */
  bool cannot_delete;

  /* This field is only used by the global algorithm.  It is set true
     if the insn contains any read of mem except for a (1).  This is
     also set if the insn is a call or has a clobber mem.  If the insn
     contains a wild read, the use_rec will be null.  */
  bool wild_read;

  /* This is true only for CALL instructions which could potentially read
     any non-frame memory location.  This field is used by the global
     algorithm.  */
  bool non_frame_wild_read;

  /* This field is only used for the processing of const functions.
     These functions cannot read memory, but they can read the stack
     because that is where they may get their parms.  We need to be
     this conservative because, like the store motion pass, we don't
     consider CALL_INSN_FUNCTION_USAGE when processing call insns.
     Moreover, we need to distinguish two cases:
     1. Before reload (register elimination), the stores related to
	outgoing arguments are stack pointer based and thus deemed
	of non-constant base in this pass.  This requires special
	handling but also means that the frame pointer based stores
	need not be killed upon encountering a const function call.
     2. After reload, the stores related to outgoing arguments can be
	either stack pointer or hard frame pointer based.  This means
	that we have no other choice than also killing all the frame
	pointer based stores upon encountering a const function call.
     This field is set after reload for const function calls and before
     reload for const tail function calls on targets where arg pointer
     is the frame pointer.  Having this set is less severe than a wild
     read, it just means that all the frame related stores are killed
     rather than all the stores.  */
  bool frame_read;

  /* This field is only used for the processing of const functions.
     It is set if the insn may contain a stack pointer based store.  */
  bool stack_pointer_based;

  /* This is true if any of the sets within the store contains a
     cselib base.  Such stores can only be deleted by the local
     algorithm.  */
  bool contains_cselib_groups;

  /* The insn.  */
  rtx_insn *insn;

  /* The list of mem sets or mem clobbers that are contained in this
     insn.  If the insn is deletable, it contains only one mem set.
     But it could also contain clobbers.  Insns that contain more than
     one mem set are not deletable, but each of those mems is here in
     order to provide info to delete other insns.  */
  store_info *store_rec;

  /* The linked list of mem uses in this insn.  Only the reads from
     rtx bases are listed here.  The reads to cselib bases are
     completely processed during the first scan and so are never
     created.  */
  read_info_t read_rec;

  /* The live fixed registers.  We assume only fixed registers can
     cause trouble by being clobbered from an expanded pattern;
     storing only the live fixed registers (rather than all registers)
     means less memory needs to be allocated / copied for the individual
     stores.  */
  regset fixed_regs_live;

  /* The prev insn in the basic block.  */
  struct insn_info_type * prev_insn;

  /* The linked list of insns that are in consideration for removal in
     the forwards pass through the basic block.  This pointer may be
     trash as it is not cleared when a wild read occurs.  The only
     time it is guaranteed to be correct is when the traversal starts
     at active_local_stores.  */
  struct insn_info_type * next_local_store;
};
typedef struct insn_info_type *insn_info_t;

static object_allocator<insn_info_type> insn_info_type_pool ("insn_info_pool");
/* The linked list of stores that are under consideration in this
   basic block.  */
static insn_info_t active_local_stores;
static int active_local_stores_len;
struct dse_bb_info_type
{
  /* Pointer to the insn info for the last insn in the block.  These
     are linked so this is how all of the insns are reached.  During
     scanning this is the current insn being scanned.  */
  insn_info_t last_insn;

  /* The info for the global dataflow problem.  */

  /* This is set if the transfer function should AND in the wild_read
     bitmap before applying the kill and gen sets.  That vector knocks
     out most of the bits in the bitmap and thus speeds up the
     operations.  */
  bool apply_wild_read;

  /* The following 4 bitvectors hold information about which positions
     of which stores are live or dead.  They are indexed by
     get_bitmap_index.  */

  /* The set of store positions that exist in this block before a wild read.  */
  bitmap gen;

  /* The set of load positions that exist in this block above the
     same position of a store.  */
  bitmap kill;

  /* The set of stores that reach the top of the block without being
     killed by a read.

     Do not represent the in if it is all ones.  Note that this is
     what the bitvector should logically be initialized to for a set
     intersection problem.  However, like the kill set, this is too
     expensive.  So initially, the in set will only be created for the
     exit block and any block that contains a wild read.  */
  bitmap in;

  /* The set of stores that reach the bottom of the block from its
     successors.

     Do not represent the in if it is all ones.  Note that this is
     what the bitvector should logically be initialized to for a set
     intersection problem.  However, like the kill and in set, this is
     too expensive.  So what is done is that the confluence operator
     just initializes the vector from one of the out sets of the
     successors of the block.  */
  bitmap out;

  /* The following bitvector is indexed by the reg number.  It
     contains the set of regs that are live at the current instruction
     being processed.  While it contains info for all of the
     registers, only the hard registers are actually examined.  It is used
     to assure that shift and/or add sequences that are inserted do not
     accidentally clobber live hard regs.  */
  bitmap regs_live;
};

typedef struct dse_bb_info_type *bb_info_t;

static object_allocator<dse_bb_info_type> dse_bb_info_type_pool
  ("bb_info_pool");

/* Table to hold all bb_infos.  */
static bb_info_t *bb_table;
/* There is a group_info for each rtx base that is used to reference
   memory.  There are also not many of the rtx bases because they are
   very limited in scope.  */

struct group_info
{
  /* The actual base of the address.  */
  rtx rtx_base;

  /* The sequential id of the base.  This allows us to have a
     canonical ordering of these that is not based on addresses.  */
  int id;

  /* True if there are any positions that are to be processed
     globally.  */
  bool process_globally;

  /* True if the base of this group is either the frame_pointer or
     hard_frame_pointer.  */
  bool frame_related;

  /* A mem wrapped around the base pointer for the group in order to do
     read dependency.  It must be given BLKmode in order to encompass all
     the possible offsets from the base.  */
  rtx base_mem;

  /* Canonized version of base_mem's address.  */
  rtx canon_base_addr;

  /* These two sets of two bitmaps are used to keep track of how many
     stores are actually referencing that position from this base.  We
     only do this for rtx bases as this will be used to assign
     positions in the bitmaps for the global problem.  Bit N is set in
     store1 on the first store for offset N.  Bit N is set in store2
     for the second store to offset N.  This is all we need since we
     only care about offsets that have two or more stores for them.

     The "_n" suffix is for offsets less than 0 and the "_p" suffix is
     for 0 and greater offsets.

     There is one special case here, for stores into the stack frame,
     we will or store1 into store2 before deciding which stores look
     at globally.  This is because stores to the stack frame that have
     no other reads before the end of the function can also be
     deleted.  */
  bitmap store1_n, store1_p, store2_n, store2_p;

  /* These bitmaps keep track of which offsets in this group escape
     this function.  An offset escapes if it corresponds to a named
     variable whose addressable flag is set.  */
  bitmap escaped_n, escaped_p;

  /* The positions in this bitmap have the same assignments as the in,
     out, gen and kill bitmaps.  This bitmap is all zeros except for
     the positions that are occupied by stores for this group.  */
  bitmap group_kill;

  /* The offset_map is used to map the offsets from this base into
     positions in the global bitmaps.  It is only created after all of
     the stores have been scanned and we know which ones we care
     about.  */
  int *offset_map_n, *offset_map_p;
  int offset_map_size_n, offset_map_size_p;
};

static object_allocator<group_info> group_info_pool ("rtx_group_info_pool");
/* Index into the rtx_group_vec.  */
static int rtx_group_next_id;

static vec<group_info *> rtx_group_vec;
/* This structure holds the set of changes that are being deferred
   when removing a read operation.  See replace_read.  */
struct deferred_change
{
  /* The mem that is being replaced.  */
  rtx *loc;

  /* The reg it is being replaced with.  */
  rtx reg;

  struct deferred_change *next;
};

static object_allocator<deferred_change> deferred_change_pool
  ("deferred_change_pool");

static deferred_change *deferred_change_list = NULL;
/* This is true except if cfun->stdarg -- i.e. we cannot do
   this for vararg functions because they play games with the frame.  */
static bool stores_off_frame_dead_at_return;

/* Counter for stats.  */
static int globally_deleted;
static int locally_deleted;

static bitmap all_blocks;

/* Locations that are killed by calls in the global phase.  */
static bitmap kill_on_calls;

/* The number of bits used in the global bitmaps.  */
static unsigned int current_position;
/* Print offset range [OFFSET, OFFSET + WIDTH) to FILE.  */

static void
print_range (FILE *file, poly_int64 offset, poly_int64 width)
{
  fprintf (file, "[");
  print_dec (offset, file, SIGNED);
  fprintf (file, "..");
  print_dec (offset + width, file, SIGNED);
  fprintf (file, ")");
}
/*----------------------------------------------------------------------------
   Zeroth step.

   Initialization.
----------------------------------------------------------------------------*/


/* Hashtable callbacks for maintaining the "bases" field of
   store_group_info, given that the addresses are function invariants.  */

struct invariant_group_base_hasher : nofree_ptr_hash <group_info>
{
  static inline hashval_t hash (const group_info *);
  static inline bool equal (const group_info *, const group_info *);
};

inline bool
invariant_group_base_hasher::equal (const group_info *gi1,
				    const group_info *gi2)
{
  return rtx_equal_p (gi1->rtx_base, gi2->rtx_base);
}

inline hashval_t
invariant_group_base_hasher::hash (const group_info *gi)
{
  int do_not_record;
  return hash_rtx (gi->rtx_base, Pmode, &do_not_record, NULL, false);
}

/* Tables of group_info structures, hashed by base value.  */
static hash_table<invariant_group_base_hasher> *rtx_group_table;
/* Get the GROUP for BASE.  Add a new group if it is not there.  */

static group_info *
get_group_info (rtx base)
{
  struct group_info tmp_gi;
  group_info *gi;
  group_info **slot;

  gcc_assert (base != NULL_RTX);

  /* Find the store_base_info structure for BASE, creating a new one
     if necessary.  */
  tmp_gi.rtx_base = base;
  slot = rtx_group_table->find_slot (&tmp_gi, INSERT);
  gi = *slot;

  if (gi == NULL)
    {
      *slot = gi = group_info_pool.allocate ();
      gi->rtx_base = base;
      gi->id = rtx_group_next_id++;
      gi->base_mem = gen_rtx_MEM (BLKmode, base);
      gi->canon_base_addr = canon_rtx (base);
      gi->store1_n = BITMAP_ALLOC (&dse_bitmap_obstack);
      gi->store1_p = BITMAP_ALLOC (&dse_bitmap_obstack);
      gi->store2_n = BITMAP_ALLOC (&dse_bitmap_obstack);
      gi->store2_p = BITMAP_ALLOC (&dse_bitmap_obstack);
      gi->escaped_p = BITMAP_ALLOC (&dse_bitmap_obstack);
      gi->escaped_n = BITMAP_ALLOC (&dse_bitmap_obstack);
      gi->group_kill = BITMAP_ALLOC (&dse_bitmap_obstack);
      gi->process_globally = false;
      gi->frame_related =
	(base == frame_pointer_rtx) || (base == hard_frame_pointer_rtx)
	|| (base == arg_pointer_rtx && fixed_regs [ARG_POINTER_REGNUM]);
      gi->offset_map_size_n = 0;
      gi->offset_map_size_p = 0;
      gi->offset_map_n = NULL;
      gi->offset_map_p = NULL;
      rtx_group_vec.safe_push (gi);
    }

  return gi;
}
/* Initialization of data structures.  */

static void
dse_step0 (void)
{
  locally_deleted = 0;
  globally_deleted = 0;

  bitmap_obstack_initialize (&dse_bitmap_obstack);
  gcc_obstack_init (&dse_obstack);

  scratch = BITMAP_ALLOC (&reg_obstack);
  kill_on_calls = BITMAP_ALLOC (&dse_bitmap_obstack);

  rtx_group_table = new hash_table<invariant_group_base_hasher> (11);

  bb_table = XNEWVEC (bb_info_t, last_basic_block_for_fn (cfun));
  rtx_group_next_id = 0;

  stores_off_frame_dead_at_return = !cfun->stdarg;

  init_alias_analysis ();
}
/*----------------------------------------------------------------------------
   First step.

   Scan all of the insns.  Any random ordering of the blocks is fine.
   Each block is scanned in forward order to accommodate cselib which
   is used to remove stores with non-constant bases.
----------------------------------------------------------------------------*/
733 /* Delete all of the store_info recs from INSN_INFO. */
736 free_store_info (insn_info_t insn_info
)
738 store_info
*cur
= insn_info
->store_rec
;
741 store_info
*next
= cur
->next
;
743 BITMAP_FREE (cur
->positions_needed
.large
.bmap
);
745 cse_store_info_pool
.remove (cur
);
747 rtx_store_info_pool
.remove (cur
);
751 insn_info
->cannot_delete
= true;
752 insn_info
->contains_cselib_groups
= false;
753 insn_info
->store_rec
= NULL
;
756 struct note_add_store_info
758 rtx_insn
*first
, *current
;
759 regset fixed_regs_live
;
763 /* Callback for emit_inc_dec_insn_before via note_stores.
764 Check if a register is clobbered which is live afterwards. */
767 note_add_store (rtx loc
, const_rtx expr ATTRIBUTE_UNUSED
, void *data
)
770 note_add_store_info
*info
= (note_add_store_info
*) data
;
775 /* If this register is referenced by the current or an earlier insn,
776 that's OK. E.g. this applies to the register that is being incremented
777 with this addition. */
778 for (insn
= info
->first
;
779 insn
!= NEXT_INSN (info
->current
);
780 insn
= NEXT_INSN (insn
))
781 if (reg_referenced_p (loc
, PATTERN (insn
)))
784 /* If we come here, we have a clobber of a register that's only OK
785 if that register is not live. If we don't have liveness information
786 available, fail now. */
787 if (!info
->fixed_regs_live
)
789 info
->failure
= true;
792 /* Now check if this is a live fixed register. */
793 unsigned int end_regno
= END_REGNO (loc
);
794 for (unsigned int regno
= REGNO (loc
); regno
< end_regno
; ++regno
)
795 if (REGNO_REG_SET_P (info
->fixed_regs_live
, regno
))
796 info
->failure
= true;
799 /* Callback for for_each_inc_dec that emits an INSN that sets DEST to
800 SRC + SRCOFF before insn ARG. */
803 emit_inc_dec_insn_before (rtx mem ATTRIBUTE_UNUSED
,
804 rtx op ATTRIBUTE_UNUSED
,
805 rtx dest
, rtx src
, rtx srcoff
, void *arg
)
807 insn_info_t insn_info
= (insn_info_t
) arg
;
808 rtx_insn
*insn
= insn_info
->insn
, *new_insn
, *cur
;
809 note_add_store_info info
;
811 /* We can reuse all operands without copying, because we are about
812 to delete the insn that contained it. */
816 emit_insn (gen_add3_insn (dest
, src
, srcoff
));
817 new_insn
= get_insns ();
821 new_insn
= gen_move_insn (dest
, src
);
822 info
.first
= new_insn
;
823 info
.fixed_regs_live
= insn_info
->fixed_regs_live
;
824 info
.failure
= false;
825 for (cur
= new_insn
; cur
; cur
= NEXT_INSN (cur
))
828 note_stores (cur
, note_add_store
, &info
);
831 /* If a failure was flagged above, return 1 so that for_each_inc_dec will
832 return it immediately, communicating the failure to its caller. */
836 emit_insn_before (new_insn
, insn
);
841 /* Before we delete INSN_INFO->INSN, make sure that the auto inc/dec, if it
842 is there, is split into a separate insn.
843 Return true on success (or if there was nothing to do), false on failure. */
846 check_for_inc_dec_1 (insn_info_t insn_info
)
848 rtx_insn
*insn
= insn_info
->insn
;
849 rtx note
= find_reg_note (insn
, REG_INC
, NULL_RTX
);
851 return for_each_inc_dec (PATTERN (insn
), emit_inc_dec_insn_before
,
854 /* Punt on stack pushes, those don't have REG_INC notes and we are
855 unprepared to deal with distribution of REG_ARGS_SIZE notes etc. */
856 subrtx_iterator::array_type array
;
857 FOR_EACH_SUBRTX (iter
, array
, PATTERN (insn
), NONCONST
)
860 if (GET_RTX_CLASS (GET_CODE (x
)) == RTX_AUTOINC
)
868 /* Entry point for postreload. If you work on reload_cse, or you need this
869 anywhere else, consider if you can provide register liveness information
870 and add a parameter to this function so that it can be passed down in
871 insn_info.fixed_regs_live. */
873 check_for_inc_dec (rtx_insn
*insn
)
875 insn_info_type insn_info
;
878 insn_info
.insn
= insn
;
879 insn_info
.fixed_regs_live
= NULL
;
880 note
= find_reg_note (insn
, REG_INC
, NULL_RTX
);
882 return for_each_inc_dec (PATTERN (insn
), emit_inc_dec_insn_before
,
885 /* Punt on stack pushes, those don't have REG_INC notes and we are
886 unprepared to deal with distribution of REG_ARGS_SIZE notes etc. */
887 subrtx_iterator::array_type array
;
888 FOR_EACH_SUBRTX (iter
, array
, PATTERN (insn
), NONCONST
)
891 if (GET_RTX_CLASS (GET_CODE (x
)) == RTX_AUTOINC
)
898 /* Delete the insn and free all of the fields inside INSN_INFO. */
901 delete_dead_store_insn (insn_info_t insn_info
)
903 read_info_t read_info
;
908 if (!check_for_inc_dec_1 (insn_info
))
910 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
911 fprintf (dump_file
, "Locally deleting insn %d\n",
912 INSN_UID (insn_info
->insn
));
914 free_store_info (insn_info
);
915 read_info
= insn_info
->read_rec
;
919 read_info_t next
= read_info
->next
;
920 read_info_type_pool
.remove (read_info
);
923 insn_info
->read_rec
= NULL
;
925 delete_insn (insn_info
->insn
);
927 insn_info
->insn
= NULL
;
929 insn_info
->wild_read
= false;
/* Return whether DECL, a local variable, can possibly escape the current
   function scope.  */
936 local_variable_can_escape (tree decl
)
938 if (TREE_ADDRESSABLE (decl
))
941 /* If this is a partitioned variable, we need to consider all the variables
942 in the partition. This is necessary because a store into one of them can
943 be replaced with a store into another and this may not change the outcome
944 of the escape analysis. */
945 if (cfun
->gimple_df
->decls_to_pointers
!= NULL
)
947 tree
*namep
= cfun
->gimple_df
->decls_to_pointers
->get (decl
);
949 return TREE_ADDRESSABLE (*namep
);
955 /* Return whether EXPR can possibly escape the current function scope. */
958 can_escape (tree expr
)
963 base
= get_base_address (expr
);
965 && !may_be_aliased (base
)
967 && !DECL_EXTERNAL (base
)
968 && !TREE_STATIC (base
)
969 && local_variable_can_escape (base
)))
974 /* Set the store* bitmaps offset_map_size* fields in GROUP based on
978 set_usage_bits (group_info
*group
, poly_int64 offset
, poly_int64 width
,
981 /* Non-constant offsets and widths act as global kills, so there's no point
982 trying to use them to derive global DSE candidates. */
983 HOST_WIDE_INT i
, const_offset
, const_width
;
984 bool expr_escapes
= can_escape (expr
);
985 if (offset
.is_constant (&const_offset
)
986 && width
.is_constant (&const_width
)
987 && const_offset
> -MAX_OFFSET
988 && const_offset
+ const_width
< MAX_OFFSET
)
989 for (i
= const_offset
; i
< const_offset
+ const_width
; ++i
)
997 store1
= group
->store1_n
;
998 store2
= group
->store2_n
;
999 escaped
= group
->escaped_n
;
1004 store1
= group
->store1_p
;
1005 store2
= group
->store2_p
;
1006 escaped
= group
->escaped_p
;
1010 if (!bitmap_set_bit (store1
, ai
))
1011 bitmap_set_bit (store2
, ai
);
1016 if (group
->offset_map_size_n
< ai
)
1017 group
->offset_map_size_n
= ai
;
1021 if (group
->offset_map_size_p
< ai
)
1022 group
->offset_map_size_p
= ai
;
1026 bitmap_set_bit (escaped
, ai
);
1031 reset_active_stores (void)
1033 active_local_stores
= NULL
;
1034 active_local_stores_len
= 0;
1037 /* Free all READ_REC of the LAST_INSN of BB_INFO. */
1040 free_read_records (bb_info_t bb_info
)
1042 insn_info_t insn_info
= bb_info
->last_insn
;
1043 read_info_t
*ptr
= &insn_info
->read_rec
;
1046 read_info_t next
= (*ptr
)->next
;
1047 read_info_type_pool
.remove (*ptr
);
1052 /* Set the BB_INFO so that the last insn is marked as a wild read. */
1055 add_wild_read (bb_info_t bb_info
)
1057 insn_info_t insn_info
= bb_info
->last_insn
;
1058 insn_info
->wild_read
= true;
1059 free_read_records (bb_info
);
1060 reset_active_stores ();
1063 /* Set the BB_INFO so that the last insn is marked as a wild read of
1064 non-frame locations. */
1067 add_non_frame_wild_read (bb_info_t bb_info
)
1069 insn_info_t insn_info
= bb_info
->last_insn
;
1070 insn_info
->non_frame_wild_read
= true;
1071 free_read_records (bb_info
);
1072 reset_active_stores ();
1075 /* Return true if X is a constant or one of the registers that behave
1076 as a constant over the life of a function. This is equivalent to
1077 !rtx_varies_p for memory addresses. */
1080 const_or_frame_p (rtx x
)
1085 if (GET_CODE (x
) == REG
)
1087 /* Note that we have to test for the actual rtx used for the frame
1088 and arg pointers and not just the register number in case we have
1089 eliminated the frame and/or arg pointer and are using it
1091 if (x
== frame_pointer_rtx
|| x
== hard_frame_pointer_rtx
1092 /* The arg pointer varies if it is not a fixed register. */
1093 || (x
== arg_pointer_rtx
&& fixed_regs
[ARG_POINTER_REGNUM
])
1094 || x
== pic_offset_table_rtx
)
1102 /* Take all reasonable action to put the address of MEM into the form
1103 that we can do analysis on.
1105 The gold standard is to get the address into the form: address +
1106 OFFSET where address is something that rtx_varies_p considers a
1107 constant. When we can get the address in this form, we can do
1108 global analysis on it. Note that for constant bases, address is
1109 not actually returned, only the group_id. The address can be
1112 If that fails, we try cselib to get a value we can at least use
1113 locally. If that fails we return false.
1115 The GROUP_ID is set to -1 for cselib bases and the index of the
1116 group for non_varying bases.
1118 FOR_READ is true if this is a mem read and false if not. */
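/* A purely illustrative example of the "gold standard" form: an address
   such as (plus (reg frame_pointer) (const_int 16)) splits into a base
   that rtx_varies_p treats as constant (the frame pointer) and OFFSET 16,
   so it receives a group_id and can take part in the global problem,
   while an address based on an arbitrary pseudo only gets a cselib value
   and is handled locally.  */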
1121 canon_address (rtx mem
,
1126 machine_mode address_mode
= get_address_mode (mem
);
1127 rtx mem_address
= XEXP (mem
, 0);
1128 rtx expanded_address
, address
;
1131 cselib_lookup (mem_address
, address_mode
, 1, GET_MODE (mem
));
1133 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1135 fprintf (dump_file
, " mem: ");
1136 print_inline_rtx (dump_file
, mem_address
, 0);
1137 fprintf (dump_file
, "\n");
1140 /* First see if just canon_rtx (mem_address) is const or frame,
1141 if not, try cselib_expand_value_rtx and call canon_rtx on that. */
1143 for (expanded
= 0; expanded
< 2; expanded
++)
1147 /* Use cselib to replace all of the reg references with the full
1148 expression. This will take care of the case where we have
1150 r_x = base + offset;
1155 val = *(base + offset); */
1157 expanded_address
= cselib_expand_value_rtx (mem_address
,
1160 /* If this fails, just go with the address from first
1162 if (!expanded_address
)
1166 expanded_address
= mem_address
;
1168 /* Split the address into canonical BASE + OFFSET terms. */
1169 address
= canon_rtx (expanded_address
);
1173 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1177 fprintf (dump_file
, "\n after cselib_expand address: ");
1178 print_inline_rtx (dump_file
, expanded_address
, 0);
1179 fprintf (dump_file
, "\n");
1182 fprintf (dump_file
, "\n after canon_rtx address: ");
1183 print_inline_rtx (dump_file
, address
, 0);
1184 fprintf (dump_file
, "\n");
1187 if (GET_CODE (address
) == CONST
)
1188 address
= XEXP (address
, 0);
1190 address
= strip_offset_and_add (address
, offset
);
1192 if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (mem
))
1193 && const_or_frame_p (address
))
1195 group_info
*group
= get_group_info (address
);
1197 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1199 fprintf (dump_file
, " gid=%d offset=", group
->id
);
1200 print_dec (*offset
, dump_file
);
1201 fprintf (dump_file
, "\n");
1204 *group_id
= group
->id
;
1209 *base
= cselib_lookup (address
, address_mode
, true, GET_MODE (mem
));
1214 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1215 fprintf (dump_file
, " no cselib val - should be a wild read.\n");
1218 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1220 fprintf (dump_file
, " varying cselib base=%u:%u offset = ",
1221 (*base
)->uid
, (*base
)->hash
);
1222 print_dec (*offset
, dump_file
);
1223 fprintf (dump_file
, "\n");
1229 /* Clear the rhs field from the active_local_stores array. */
1232 clear_rhs_from_active_local_stores (void)
1234 insn_info_t ptr
= active_local_stores
;
1238 store_info
*store_info
= ptr
->store_rec
;
1239 /* Skip the clobbers. */
1240 while (!store_info
->is_set
)
1241 store_info
= store_info
->next
;
1243 store_info
->rhs
= NULL
;
1244 store_info
->const_rhs
= NULL
;
1246 ptr
= ptr
->next_local_store
;
/* Mark the byte at offset POS from the beginning of store S_INFO as unneeded.  */
1254 set_position_unneeded (store_info
*s_info
, int pos
)
1256 if (UNLIKELY (s_info
->is_large
))
1258 if (bitmap_set_bit (s_info
->positions_needed
.large
.bmap
, pos
))
1259 s_info
->positions_needed
.large
.count
++;
1262 s_info
->positions_needed
.small_bitmask
1263 &= ~(HOST_WIDE_INT_1U
<< pos
);
1266 /* Mark the whole store S_INFO as unneeded. */
1269 set_all_positions_unneeded (store_info
*s_info
)
1271 if (UNLIKELY (s_info
->is_large
))
1273 HOST_WIDE_INT width
;
1274 if (s_info
->width
.is_constant (&width
))
1276 bitmap_set_range (s_info
->positions_needed
.large
.bmap
, 0, width
);
1277 s_info
->positions_needed
.large
.count
= width
;
1281 gcc_checking_assert (!s_info
->positions_needed
.large
.bmap
);
1282 s_info
->positions_needed
.large
.count
= 1;
1286 s_info
->positions_needed
.small_bitmask
= HOST_WIDE_INT_0U
;
1289 /* Return TRUE if any bytes from S_INFO store are needed. */
1292 any_positions_needed_p (store_info
*s_info
)
1294 if (UNLIKELY (s_info
->is_large
))
1296 HOST_WIDE_INT width
;
1297 if (s_info
->width
.is_constant (&width
))
1299 gcc_checking_assert (s_info
->positions_needed
.large
.bmap
);
1300 return s_info
->positions_needed
.large
.count
< width
;
1304 gcc_checking_assert (!s_info
->positions_needed
.large
.bmap
);
1305 return s_info
->positions_needed
.large
.count
== 0;
1309 return (s_info
->positions_needed
.small_bitmask
!= HOST_WIDE_INT_0U
);
1312 /* Return TRUE if all bytes START through START+WIDTH-1 from S_INFO
1313 store are known to be needed. */
1316 all_positions_needed_p (store_info
*s_info
, poly_int64 start
,
1319 gcc_assert (s_info
->rhs
);
1320 if (!s_info
->width
.is_constant ())
1322 gcc_assert (s_info
->is_large
1323 && !s_info
->positions_needed
.large
.bmap
);
1324 return s_info
->positions_needed
.large
.count
== 0;
1327 /* Otherwise, if START and WIDTH are non-constant, we're asking about
1328 a non-constant region of a constant-sized store. We can't say for
1329 sure that all positions are needed. */
1330 HOST_WIDE_INT const_start
, const_width
;
1331 if (!start
.is_constant (&const_start
)
1332 || !width
.is_constant (&const_width
))
1335 if (UNLIKELY (s_info
->is_large
))
1337 for (HOST_WIDE_INT i
= const_start
; i
< const_start
+ const_width
; ++i
)
1338 if (bitmap_bit_p (s_info
->positions_needed
.large
.bmap
, i
))
1344 unsigned HOST_WIDE_INT mask
1345 = lowpart_bitmask (const_width
) << const_start
;
1346 return (s_info
->positions_needed
.small_bitmask
& mask
) == mask
;
1351 static rtx
get_stored_val (store_info
*, machine_mode
, poly_int64
,
1352 poly_int64
, basic_block
, bool);
1355 /* BODY is an instruction pattern that belongs to INSN. Return 1 if
1356 there is a candidate store, after adding it to the appropriate
1357 local store group if so. */
1360 record_store (rtx body
, bb_info_t bb_info
)
1362 rtx mem
, rhs
, const_rhs
, mem_addr
;
1363 poly_int64 offset
= 0;
1364 poly_int64 width
= 0;
1365 insn_info_t insn_info
= bb_info
->last_insn
;
1366 store_info
*store_info
= NULL
;
1368 cselib_val
*base
= NULL
;
1369 insn_info_t ptr
, last
, redundant_reason
;
1370 bool store_is_unused
;
1372 if (GET_CODE (body
) != SET
&& GET_CODE (body
) != CLOBBER
)
1375 mem
= SET_DEST (body
);
1377 /* If this is not used, then this cannot be used to keep the insn
1378 from being deleted. On the other hand, it does provide something
1379 that can be used to prove that another store is dead. */
1381 = (find_reg_note (insn_info
->insn
, REG_UNUSED
, mem
) != NULL
);
1383 /* Check whether that value is a suitable memory location. */
/* If the set or clobber is unused, then it does not affect our
   ability to get rid of the entire insn.  */
1388 if (!store_is_unused
)
1389 insn_info
->cannot_delete
= true;
1393 /* At this point we know mem is a mem. */
1394 if (GET_MODE (mem
) == BLKmode
)
1396 HOST_WIDE_INT const_size
;
1397 if (GET_CODE (XEXP (mem
, 0)) == SCRATCH
)
1399 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1400 fprintf (dump_file
, " adding wild read for (clobber (mem:BLK (scratch))\n");
1401 add_wild_read (bb_info
);
1402 insn_info
->cannot_delete
= true;
1405 /* Handle (set (mem:BLK (addr) [... S36 ...]) (const_int 0))
1406 as memset (addr, 0, 36); */
1407 else if (!MEM_SIZE_KNOWN_P (mem
)
1408 || maybe_le (MEM_SIZE (mem
), 0)
1409 /* This is a limit on the bitmap size, which is only relevant
1410 for constant-sized MEMs. */
1411 || (MEM_SIZE (mem
).is_constant (&const_size
)
1412 && const_size
> MAX_OFFSET
)
1413 || GET_CODE (body
) != SET
1414 || !CONST_INT_P (SET_SRC (body
)))
1416 if (!store_is_unused
)
/* If the set or clobber is unused, then it does not affect our
   ability to get rid of the entire insn.  */
1420 insn_info
->cannot_delete
= true;
1421 clear_rhs_from_active_local_stores ();
1427 /* We can still process a volatile mem, we just cannot delete it. */
1428 if (MEM_VOLATILE_P (mem
))
1429 insn_info
->cannot_delete
= true;
1431 if (!canon_address (mem
, &group_id
, &offset
, &base
))
1433 clear_rhs_from_active_local_stores ();
1437 if (GET_MODE (mem
) == BLKmode
)
1438 width
= MEM_SIZE (mem
);
1440 width
= GET_MODE_SIZE (GET_MODE (mem
));
1442 if (!endpoint_representable_p (offset
, width
))
1444 clear_rhs_from_active_local_stores ();
1448 if (known_eq (width
, 0))
1453 /* In the restrictive case where the base is a constant or the
1454 frame pointer we can do global analysis. */
1457 = rtx_group_vec
[group_id
];
1458 tree expr
= MEM_EXPR (mem
);
1460 store_info
= rtx_store_info_pool
.allocate ();
1461 set_usage_bits (group
, offset
, width
, expr
);
1463 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1465 fprintf (dump_file
, " processing const base store gid=%d",
1467 print_range (dump_file
, offset
, width
);
1468 fprintf (dump_file
, "\n");
1473 if (may_be_sp_based_p (XEXP (mem
, 0)))
1474 insn_info
->stack_pointer_based
= true;
1475 insn_info
->contains_cselib_groups
= true;
1477 store_info
= cse_store_info_pool
.allocate ();
1480 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1482 fprintf (dump_file
, " processing cselib store ");
1483 print_range (dump_file
, offset
, width
);
1484 fprintf (dump_file
, "\n");
1488 const_rhs
= rhs
= NULL_RTX
;
1489 if (GET_CODE (body
) == SET
1490 /* No place to keep the value after ra. */
1491 && !reload_completed
1492 && (REG_P (SET_SRC (body
))
1493 || GET_CODE (SET_SRC (body
)) == SUBREG
1494 || CONSTANT_P (SET_SRC (body
)))
1495 && !MEM_VOLATILE_P (mem
)
1496 /* Sometimes the store and reload is used for truncation and
1498 && !(FLOAT_MODE_P (GET_MODE (mem
)) && (flag_float_store
)))
1500 rhs
= SET_SRC (body
);
1501 if (CONSTANT_P (rhs
))
1503 else if (body
== PATTERN (insn_info
->insn
))
1505 rtx tem
= find_reg_note (insn_info
->insn
, REG_EQUAL
, NULL_RTX
);
1506 if (tem
&& CONSTANT_P (XEXP (tem
, 0)))
1507 const_rhs
= XEXP (tem
, 0);
1509 if (const_rhs
== NULL_RTX
&& REG_P (rhs
))
1511 rtx tem
= cselib_expand_value_rtx (rhs
, scratch
, 5);
1513 if (tem
&& CONSTANT_P (tem
))
1517 /* If RHS is set only once to a constant, set CONST_RHS
1519 rtx def_src
= df_find_single_def_src (rhs
);
1520 if (def_src
!= nullptr && CONSTANT_P (def_src
))
1521 const_rhs
= def_src
;
/* Check to see if this store causes some other stores to be dead.  */
1528 ptr
= active_local_stores
;
1530 redundant_reason
= NULL
;
1531 unsigned char addrspace
= MEM_ADDR_SPACE (mem
);
1532 mem
= canon_rtx (mem
);
1535 mem_addr
= base
->val_rtx
;
1538 group_info
*group
= rtx_group_vec
[group_id
];
1539 mem_addr
= group
->canon_base_addr
;
1541 if (maybe_ne (offset
, 0))
1542 mem_addr
= plus_constant (get_address_mode (mem
), mem_addr
, offset
);
1546 insn_info_t next
= ptr
->next_local_store
;
1547 class store_info
*s_info
= ptr
->store_rec
;
/* Skip the clobbers.  We delete the active insn if this insn
   shadows the set.  To have been put on the active list, it
   has exactly one set.  */
1553 while (!s_info
->is_set
)
1554 s_info
= s_info
->next
;
1556 if (s_info
->group_id
== group_id
1557 && s_info
->cse_base
== base
1558 && s_info
->addrspace
== addrspace
)
1561 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1563 fprintf (dump_file
, " trying store in insn=%d gid=%d",
1564 INSN_UID (ptr
->insn
), s_info
->group_id
);
1565 print_range (dump_file
, s_info
->offset
, s_info
->width
);
1566 fprintf (dump_file
, "\n");
1569 /* Even if PTR won't be eliminated as unneeded, if both
1570 PTR and this insn store the same constant value, we might
1571 eliminate this insn instead. */
1572 if (s_info
->const_rhs
1574 && known_subrange_p (offset
, width
,
1575 s_info
->offset
, s_info
->width
)
1576 && all_positions_needed_p (s_info
, offset
- s_info
->offset
,
/* We can only remove the later store if the earlier store
   aliases at least all accesses of the later one.  */
1580 && mems_same_for_tbaa_p (s_info
->mem
, mem
))
1582 if (GET_MODE (mem
) == BLKmode
)
1584 if (GET_MODE (s_info
->mem
) == BLKmode
1585 && s_info
->const_rhs
== const_rhs
)
1586 redundant_reason
= ptr
;
1588 else if (s_info
->const_rhs
== const0_rtx
1589 && const_rhs
== const0_rtx
)
1590 redundant_reason
= ptr
;
1595 val
= get_stored_val (s_info
, GET_MODE (mem
), offset
, width
,
1596 BLOCK_FOR_INSN (insn_info
->insn
),
1598 if (get_insns () != NULL
)
1601 if (val
&& rtx_equal_p (val
, const_rhs
))
1602 redundant_reason
= ptr
;
1606 HOST_WIDE_INT begin_unneeded
, const_s_width
, const_width
;
1607 if (known_subrange_p (s_info
->offset
, s_info
->width
, offset
, width
))
1608 /* The new store touches every byte that S_INFO does. */
1609 set_all_positions_unneeded (s_info
);
1610 else if ((offset
- s_info
->offset
).is_constant (&begin_unneeded
)
1611 && s_info
->width
.is_constant (&const_s_width
)
1612 && width
.is_constant (&const_width
))
1614 HOST_WIDE_INT end_unneeded
= begin_unneeded
+ const_width
;
1615 begin_unneeded
= MAX (begin_unneeded
, 0);
1616 end_unneeded
= MIN (end_unneeded
, const_s_width
);
1617 for (i
= begin_unneeded
; i
< end_unneeded
; ++i
)
1618 set_position_unneeded (s_info
, i
);
1622 /* We don't know which parts of S_INFO are needed and
1623 which aren't, so invalidate the RHS. */
1625 s_info
->const_rhs
= NULL
;
1628 else if (s_info
->rhs
)
1629 /* Need to see if it is possible for this store to overwrite
1630 the value of store_info. If it is, set the rhs to NULL to
1631 keep it from being used to remove a load. */
1633 if (canon_output_dependence (s_info
->mem
, true,
1634 mem
, GET_MODE (mem
),
1638 s_info
->const_rhs
= NULL
;
1642 /* An insn can be deleted if every position of every one of
1643 its s_infos is zero. */
1644 if (any_positions_needed_p (s_info
))
1649 insn_info_t insn_to_delete
= ptr
;
1651 active_local_stores_len
--;
1653 last
->next_local_store
= ptr
->next_local_store
;
1655 active_local_stores
= ptr
->next_local_store
;
1657 if (!insn_to_delete
->cannot_delete
)
1658 delete_dead_store_insn (insn_to_delete
);
1666 /* Finish filling in the store_info. */
1667 store_info
->next
= insn_info
->store_rec
;
1668 insn_info
->store_rec
= store_info
;
1669 store_info
->mem
= mem
;
1670 store_info
->mem_addr
= mem_addr
;
1671 store_info
->cse_base
= base
;
1672 HOST_WIDE_INT const_width
;
1673 if (!width
.is_constant (&const_width
))
1675 store_info
->is_large
= true;
1676 store_info
->positions_needed
.large
.count
= 0;
1677 store_info
->positions_needed
.large
.bmap
= NULL
;
1679 else if (const_width
> HOST_BITS_PER_WIDE_INT
)
1681 store_info
->is_large
= true;
1682 store_info
->positions_needed
.large
.count
= 0;
1683 store_info
->positions_needed
.large
.bmap
= BITMAP_ALLOC (&dse_bitmap_obstack
);
1687 store_info
->is_large
= false;
1688 store_info
->positions_needed
.small_bitmask
1689 = lowpart_bitmask (const_width
);
1691 store_info
->group_id
= group_id
;
1692 store_info
->offset
= offset
;
1693 store_info
->width
= width
;
1694 store_info
->is_set
= GET_CODE (body
) == SET
;
1695 store_info
->rhs
= rhs
;
1696 store_info
->const_rhs
= const_rhs
;
1697 store_info
->redundant_reason
= redundant_reason
;
1698 store_info
->addrspace
= addrspace
;
/* If this is a clobber, we return 0.  We will only be able to
   delete this insn if there is only one USED store, but we
   can use the clobber to delete other stores earlier.  */
1703 return store_info
->is_set
? 1 : 0;
1708 dump_insn_info (const char * start
, insn_info_t insn_info
)
1710 fprintf (dump_file
, "%s insn=%d %s\n", start
,
1711 INSN_UID (insn_info
->insn
),
1712 insn_info
->store_rec
? "has store" : "naked");
/* If the modes are different and the value's source and target do not
   line up, we need to extract the value from the lower part of the rhs
   of the store, shift it, and then put it into a form that can be shoved
   into the read_insn.  This function generates a right SHIFT of a
   value that is at least ACCESS_SIZE bytes wide of READ_MODE.  The
   shift sequence is returned or NULL if we failed to find a
   shift.  */
1725 find_shift_sequence (poly_int64 access_size
,
1726 store_info
*store_info
,
1727 machine_mode read_mode
,
1728 poly_int64 shift
, bool speed
, bool require_cst
)
1730 machine_mode store_mode
= GET_MODE (store_info
->mem
);
1731 scalar_int_mode new_mode
;
1732 rtx read_reg
= NULL
;
1734 /* If a constant was stored into memory, try to simplify it here,
1735 otherwise the cost of the shift might preclude this optimization
1736 e.g. at -Os, even when no actual shift will be needed. */
1737 if (store_info
->const_rhs
1738 && known_le (access_size
, GET_MODE_SIZE (MAX_MODE_INT
)))
1740 auto new_mode
= smallest_int_mode_for_size (access_size
* BITS_PER_UNIT
);
1741 auto byte
= subreg_lowpart_offset (new_mode
, store_mode
);
1743 = simplify_subreg (new_mode
, store_info
->const_rhs
, store_mode
, byte
);
1744 if (ret
&& CONSTANT_P (ret
))
1746 rtx shift_rtx
= gen_int_shift_amount (new_mode
, shift
);
1747 ret
= simplify_const_binary_operation (LSHIFTRT
, new_mode
, ret
,
1749 if (ret
&& CONSTANT_P (ret
))
1751 byte
= subreg_lowpart_offset (read_mode
, new_mode
);
1752 ret
= simplify_subreg (read_mode
, ret
, new_mode
, byte
);
1753 if (ret
&& CONSTANT_P (ret
)
1754 && (set_src_cost (ret
, read_mode
, speed
)
1755 <= COSTS_N_INSNS (1)))
1764 /* Some machines like the x86 have shift insns for each size of
1765 operand. Other machines like the ppc or the ia-64 may only have
1766 shift insns that shift values within 32 or 64 bit registers.
1767 This loop tries to find the smallest shift insn that will right
1768 justify the value we want to read but is available in one insn on
1771 opt_scalar_int_mode new_mode_iter
;
1772 FOR_EACH_MODE_IN_CLASS (new_mode_iter
, MODE_INT
)
1774 rtx target
, new_reg
, new_lhs
;
1775 rtx_insn
*shift_seq
, *insn
;
1778 new_mode
= new_mode_iter
.require ();
1779 if (GET_MODE_BITSIZE (new_mode
) > BITS_PER_WORD
)
1781 if (maybe_lt (GET_MODE_SIZE (new_mode
), GET_MODE_SIZE (read_mode
)))
1784 /* Try a wider mode if truncating the store mode to NEW_MODE
1785 requires a real instruction. */
1786 if (maybe_lt (GET_MODE_SIZE (new_mode
), GET_MODE_SIZE (store_mode
))
1787 && !TRULY_NOOP_TRUNCATION_MODES_P (new_mode
, store_mode
))
1790 /* Also try a wider mode if the necessary punning is either not
1791 desirable or not possible. */
1792 if (!CONSTANT_P (store_info
->rhs
)
1793 && !targetm
.modes_tieable_p (new_mode
, store_mode
))
1796 if (multiple_p (shift
, GET_MODE_BITSIZE (new_mode
))
1797 && known_le (GET_MODE_SIZE (new_mode
), GET_MODE_SIZE (store_mode
)))
1799 /* Try to implement the shift using a subreg. */
1801 = subreg_offset_from_lsb (new_mode
, store_mode
, shift
);
1802 rtx rhs_subreg
= simplify_gen_subreg (new_mode
, store_info
->rhs
,
1803 store_mode
, offset
);
1807 = extract_low_bits (read_mode
, new_mode
, copy_rtx (rhs_subreg
));
1812 if (maybe_lt (GET_MODE_SIZE (new_mode
), access_size
))
1815 new_reg
= gen_reg_rtx (new_mode
);
1819 /* In theory we could also check for an ashr. Ian Taylor knows
1820 of one dsp where the cost of these two was not the same. But
1821 this really is a rare case anyway. */
1822 target
= expand_binop (new_mode
, lshr_optab
, new_reg
,
1823 gen_int_shift_amount (new_mode
, shift
),
1824 new_reg
, 1, OPTAB_DIRECT
);
1826 shift_seq
= get_insns ();
1829 if (target
!= new_reg
|| shift_seq
== NULL
)
1833 for (insn
= shift_seq
; insn
!= NULL_RTX
; insn
= NEXT_INSN (insn
))
1835 cost
+= insn_cost (insn
, speed
);
1837 /* The computation up to here is essentially independent
1838 of the arguments and could be precomputed. It may
1839 not be worth doing so. We could precompute if
1840 worthwhile or at least cache the results. The result
1841 technically depends on both SHIFT and ACCESS_SIZE,
1842 but in practice the answer will depend only on ACCESS_SIZE. */
1844 if (cost
> COSTS_N_INSNS (1))
1847 new_lhs
= extract_low_bits (new_mode
, store_mode
,
1848 copy_rtx (store_info
->rhs
));
1849 if (new_lhs
== NULL_RTX
)
/* We found an acceptable shift.  Generate a move to
   take the value from the store and put it into the
   shift pseudo, then shift it, then generate another
   move to put it into the target of the read.  */
1856 emit_move_insn (new_reg
, new_lhs
);
1857 emit_insn (shift_seq
);
1858 read_reg
= extract_low_bits (read_mode
, new_mode
, new_reg
);
1866 /* Call back for note_stores to find the hard regs set or clobbered by
1867 insn. Data is a bitmap of the hardregs set so far. */
1870 look_for_hardregs (rtx x
, const_rtx pat ATTRIBUTE_UNUSED
, void *data
)
1872 bitmap regs_set
= (bitmap
) data
;
1875 && HARD_REGISTER_P (x
))
1876 bitmap_set_range (regs_set
, REGNO (x
), REG_NREGS (x
));
1879 /* Helper function for replace_read and record_store.
1880 Attempt to return a value of mode READ_MODE stored in STORE_INFO,
1881 consisting of READ_WIDTH bytes starting from READ_OFFSET. Return NULL
1882 if not successful. If REQUIRE_CST is true, return always constant. */
1885 get_stored_val (store_info
*store_info
, machine_mode read_mode
,
1886 poly_int64 read_offset
, poly_int64 read_width
,
1887 basic_block bb
, bool require_cst
)
1889 machine_mode store_mode
= GET_MODE (store_info
->mem
);
1893 /* To get here the read is within the boundaries of the write so
1894 shift will never be negative. Start out with the shift being in
1896 if (store_mode
== BLKmode
)
1898 else if (BYTES_BIG_ENDIAN
)
1899 gap
= ((store_info
->offset
+ store_info
->width
)
1900 - (read_offset
+ read_width
));
1902 gap
= read_offset
- store_info
->offset
;
1904 if (maybe_ne (gap
, 0))
1906 if (!gap
.is_constant ())
1909 poly_int64 shift
= gap
* BITS_PER_UNIT
;
1910 poly_int64 access_size
= GET_MODE_SIZE (read_mode
) + gap
;
1911 read_reg
= find_shift_sequence (access_size
, store_info
, read_mode
,
1912 shift
, optimize_bb_for_speed_p (bb
),
1915 else if (store_mode
== BLKmode
)
1917 /* The store is a memset (addr, const_val, const_size). */
1918 gcc_assert (CONST_INT_P (store_info
->rhs
));
1919 scalar_int_mode int_store_mode
;
1920 if (!int_mode_for_mode (read_mode
).exists (&int_store_mode
))
1921 read_reg
= NULL_RTX
;
1922 else if (store_info
->rhs
== const0_rtx
)
1923 read_reg
= extract_low_bits (read_mode
, int_store_mode
, const0_rtx
);
1924 else if (GET_MODE_BITSIZE (int_store_mode
) > HOST_BITS_PER_WIDE_INT
1925 || BITS_PER_UNIT
>= HOST_BITS_PER_WIDE_INT
)
1926 read_reg
= NULL_RTX
;
1929 unsigned HOST_WIDE_INT c
1930 = INTVAL (store_info
->rhs
)
1931 & ((HOST_WIDE_INT_1
<< BITS_PER_UNIT
) - 1);
1932 int shift
= BITS_PER_UNIT
;
1933 while (shift
< HOST_BITS_PER_WIDE_INT
)
1938 read_reg
= gen_int_mode (c
, int_store_mode
);
1939 read_reg
= extract_low_bits (read_mode
, int_store_mode
, read_reg
);
1942 else if (store_info
->const_rhs
1944 || GET_MODE_CLASS (read_mode
) != GET_MODE_CLASS (store_mode
)))
1945 read_reg
= extract_low_bits (read_mode
, store_mode
,
1946 copy_rtx (store_info
->const_rhs
));
1947 else if (VECTOR_MODE_P (read_mode
) && VECTOR_MODE_P (store_mode
)
1948 && known_le (GET_MODE_BITSIZE (read_mode
), GET_MODE_BITSIZE (store_mode
))
1949 && targetm
.modes_tieable_p (read_mode
, store_mode
))
1950 read_reg
= gen_lowpart (read_mode
, copy_rtx (store_info
->rhs
));
1952 read_reg
= extract_low_bits (read_mode
, store_mode
,
1953 copy_rtx (store_info
->rhs
));
1954 if (require_cst
&& read_reg
&& !CONSTANT_P (read_reg
))
1955 read_reg
= NULL_RTX
;
/* Take a sequence of:

     A <- r1
     ...
     ... <- A

   and change it into
     r2 <- r1
     A <- r1
     ...
     ... <- r2

   or, when a shift and extraction are needed,
     r3 <- extract (r1)
     r3 <- r3 >> shift
     r2 <- extract (r3)
     ... <- r2

   or
     r2 <- extract (r1)
     ... <- r2

   Depending on the alignment and the mode of the store and
   subsequent load, different numbers of moves and shifts are needed.

   The STORE_INFO and STORE_INSN are for the store and READ_INFO
   and READ_INSN are for the read.  Return true if the replacement
   went ok.  */

static bool
replace_read (store_info *store_info, insn_info_t store_insn,
	      read_info_t read_info, insn_info_t read_insn, rtx *loc)
{
  machine_mode store_mode = GET_MODE (store_info->mem);
  machine_mode read_mode = GET_MODE (read_info->mem);
  rtx_insn *insns, *this_insn;
  rtx read_reg;
  basic_block bb;
  /* Create a sequence of instructions to set up the read register.
     This sequence goes immediately before the store and its result
     is read by the load.

     We need to keep this in perspective.  We are replacing a read
     with a sequence of insns, but the read will almost certainly be
     in cache, so it is not going to be an expensive one.  Thus, we
     are not willing to do a multi insn shift or worse a subroutine
     call to get rid of the read.  */
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "trying to replace %smode load in insn %d"
	     " from %smode store in insn %d\n",
	     GET_MODE_NAME (read_mode), INSN_UID (read_insn->insn),
	     GET_MODE_NAME (store_mode), INSN_UID (store_insn->insn));
  bb = BLOCK_FOR_INSN (read_insn->insn);
  read_reg = get_stored_val (store_info,
			     read_mode, read_info->offset, read_info->width,
			     bb, false);
  if (read_reg == NULL_RTX)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " -- could not extract bits of stored value\n");
      return false;
    }
  /* Force the value into a new register so that it won't be clobbered
     between the store and the load.  */
  if (WORD_REGISTER_OPERATIONS
      && GET_CODE (read_reg) == SUBREG
      && REG_P (SUBREG_REG (read_reg))
      && GET_MODE (SUBREG_REG (read_reg)) == word_mode)
    {
      /* For WORD_REGISTER_OPERATIONS with subreg of word_mode register
	 force SUBREG_REG into a new register rather than the SUBREG.  */
      rtx r = copy_to_mode_reg (word_mode, SUBREG_REG (read_reg));
      read_reg = shallow_copy_rtx (read_reg);
      SUBREG_REG (read_reg) = r;
    }
  else
    read_reg = copy_to_mode_reg (read_mode, read_reg);
  insns = get_insns ();

  if (insns != NULL_RTX)
    {
      /* Now we have to scan the set of new instructions to see if the
	 sequence contains any sets of hardregs that happened to be
	 live at this point.  For instance, this can happen if one of
	 the insns sets the CC and the CC happened to be live at that
	 point.  This does occasionally happen, see PR 37922.  */
      bitmap regs_set = BITMAP_ALLOC (&reg_obstack);

      for (this_insn = insns;
	   this_insn != NULL_RTX; this_insn = NEXT_INSN (this_insn))
	{
	  if (insn_invalid_p (this_insn, false))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, " -- replacing the loaded MEM with ");
		  print_simple_rtl (dump_file, read_reg);
		  fprintf (dump_file, " led to an invalid instruction\n");
		}
	      BITMAP_FREE (regs_set);
	      return false;
	    }
	  note_stores (this_insn, look_for_hardregs, regs_set);
	}

      if (store_insn->fixed_regs_live)
	bitmap_and_into (regs_set, store_insn->fixed_regs_live);
      if (!bitmap_empty_p (regs_set))
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "abandoning replacement because sequence "
		       "clobbers live hardregs:");
	      df_print_regset (dump_file, regs_set);
	    }
	  BITMAP_FREE (regs_set);
	  return false;
	}
      BITMAP_FREE (regs_set);
    }
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, *loc, NONCONST)
    {
      const_rtx x = *iter;
      if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, " -- replacing the MEM failed due to address "
		     "side effects\n");
	  return false;
	}
    }

  if (validate_change (read_insn->insn, loc, read_reg, 0))
    {
      deferred_change *change = deferred_change_pool.allocate ();

      /* Insert this right before the store insn where it will be safe
	 from later insns that might change it before the read.  */
      emit_insn_before (insns, store_insn->insn);

      /* And now for the kludge part: cselib croaks if you just
	 return at this point.  There are two reasons for this:

	 1) Cselib has an idea of how many pseudos there are and
	 that does not include the new ones we just added.

	 2) Cselib does not know about the move insn we added
	 above the store_info, and there is no way to tell it
	 about it, because it has "moved on".

	 Problem (1) is fixable with a certain amount of engineering.
	 Problem (2) requires starting the bb from scratch.  This
	 could be expensive.

	 So we are just going to have to lie.  The move/extraction
	 insns are not really an issue, cselib did not see them.  But
	 the use of the new pseudo read_insn is a real problem because
	 cselib has not scanned this insn.  The way that we solve this
	 problem is that we are just going to put the mem back for now
	 and when we are finished with the block, we undo this.  We
	 keep a table of mems to get rid of.  At the end of the basic
	 block we can put them back.  */

      *loc = read_info->mem;
      change->next = deferred_change_list;
      deferred_change_list = change;
      change->loc = loc;
      change->reg = read_reg;

      /* Get rid of the read_info; from the point of view of the
	 rest of dse, pretend this read never happened.  */
      read_insn->read_rec = read_info->next;
      read_info_type_pool.remove (read_info);
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, " -- replaced the loaded MEM with ");
	  print_simple_rtl (dump_file, read_reg);
	  fprintf (dump_file, "\n");
	}
      return true;
    }
  else
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, " -- replacing the loaded MEM with ");
	  print_simple_rtl (dump_file, read_reg);
	  fprintf (dump_file, " led to an invalid instruction\n");
	}
      return false;
    }
}
/* Check the address of MEM *LOC and kill any appropriate stores that may
   be active.  */

static void
check_mem_read_rtx (rtx *loc, bb_info_t bb_info, bool used_in_call = false)
{
  rtx mem = *loc, mem_addr;
  insn_info_t insn_info;
  poly_int64 offset = 0;
  poly_int64 width = 0;
  cselib_val *base = NULL;
  int group_id;
  read_info_t read_info;

  insn_info = bb_info->last_insn;

  if ((MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
      || MEM_VOLATILE_P (mem))
    {
      if (crtl->stack_protect_guard
	  && (MEM_EXPR (mem) == crtl->stack_protect_guard
	      || (crtl->stack_protect_guard_decl
		  && MEM_EXPR (mem) == crtl->stack_protect_guard_decl))
	  && MEM_VOLATILE_P (mem))
	{
	  /* This is either the stack protector canary on the stack,
	     which ought to be written by a MEM_VOLATILE_P store and
	     thus shouldn't be deleted and is read at the very end of
	     the function, but shouldn't conflict with any other store.
	     Or it is __stack_chk_guard variable or TLS or whatever else
	     MEM holding the canary value, which really shouldn't be
	     ever modified in -fstack-protector* protected functions,
	     otherwise the prologue store wouldn't match the epilogue
	     checks.  */
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, " stack protector canary read ignored.\n");
	  insn_info->cannot_delete = true;
	  return;
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " adding wild read, volatile or barrier.\n");
      add_wild_read (bb_info);
      insn_info->cannot_delete = true;
      return;
    }
  /* If it is reading readonly mem, then there can be no conflict with
     another write.  */
  if (MEM_READONLY_P (mem))
    return;

  if (!canon_address (mem, &group_id, &offset, &base))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " adding wild read, canon_address failure.\n");
      add_wild_read (bb_info);
      return;
    }

  if (GET_MODE (mem) == BLKmode)
    width = -1;
  else
    width = GET_MODE_SIZE (GET_MODE (mem));

  if (!endpoint_representable_p (offset, known_eq (width, -1) ? 1 : width))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " adding wild read, due to overflow.\n");
      add_wild_read (bb_info);
      return;
    }

  read_info = read_info_type_pool.allocate ();
  read_info->group_id = group_id;
  read_info->mem = mem;
  read_info->offset = offset;
  read_info->width = width;
  read_info->next = insn_info->read_rec;
  insn_info->read_rec = read_info;
  if (group_id < 0)
    mem_addr = base->val_rtx;
  else
    {
      group_info *group = rtx_group_vec[group_id];
      mem_addr = group->canon_base_addr;
    }
  if (maybe_ne (offset, 0))
    mem_addr = plus_constant (get_address_mode (mem), mem_addr, offset);

  /* Avoid passing VALUE RTXen as mem_addr to canon_true_dependence
     which will over and over re-create proper RTL and re-apply the
     offset above.  See PR80960 where we almost allocate 1.6GB of PLUS
     RTXen that way.  */
  mem_addr = get_addr (mem_addr);
  if (group_id >= 0)
    {
      /* This is the restricted case where the base is a constant or
	 the frame pointer and offset is a constant.  */
      insn_info_t i_ptr = active_local_stores;
      insn_info_t last = NULL;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  if (!known_size_p (width))
	    fprintf (dump_file, " processing const load gid=%d[BLK]\n",
		     group_id);
	  else
	    {
	      fprintf (dump_file, " processing const load gid=%d", group_id);
	      print_range (dump_file, offset, width);
	      fprintf (dump_file, "\n");
	    }
	}

      while (i_ptr)
	{
	  bool remove = false;
	  store_info *store_info = i_ptr->store_rec;

	  /* Skip the clobbers.  */
	  while (!store_info->is_set)
	    store_info = store_info->next;

	  /* There are three cases here.  */
	  if (store_info->group_id < 0)
	    /* We have a cselib store followed by a read from a
	       const base.  */
	    remove
	      = canon_true_dependence (store_info->mem,
				       GET_MODE (store_info->mem),
				       store_info->mem_addr,
				       mem, mem_addr);

	  else if (group_id == store_info->group_id)
	    {
	      /* This is a block mode load.  We may get lucky and
		 canon_true_dependence may save the day.  */
	      if (!known_size_p (width))
		remove
		  = canon_true_dependence (store_info->mem,
					   GET_MODE (store_info->mem),
					   store_info->mem_addr,
					   mem, mem_addr);

	      /* If this read is just reading back something that we just
		 stored, rewrite the read.  */
	      else
		{
		  if (!used_in_call
		      && store_info->rhs
		      && known_subrange_p (offset, width, store_info->offset,
					   store_info->width)
		      && all_positions_needed_p (store_info,
						 offset - store_info->offset,
						 width)
		      && replace_read (store_info, i_ptr, read_info,
				       insn_info, loc))
		    return;

		  /* The bases are the same, just see if the offsets
		     could overlap.  */
		  if (ranges_maybe_overlap_p (offset, width,
					      store_info->offset,
					      store_info->width))
		    remove = true;
		}
	    }

	  /* else
	     The else case that is missing here is that the
	     bases are constant but different.  There is nothing
	     to do here because there is no overlap.  */

	  if (remove)
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		dump_insn_info ("removing from active", i_ptr);

	      active_local_stores_len--;
	      if (last)
		last->next_local_store = i_ptr->next_local_store;
	      else
		active_local_stores = i_ptr->next_local_store;
	    }
	  else
	    last = i_ptr;
	  i_ptr = i_ptr->next_local_store;
	}
    }
  else
    {
      insn_info_t i_ptr = active_local_stores;
      insn_info_t last = NULL;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, " processing cselib load mem:");
	  print_inline_rtx (dump_file, mem, 0);
	  fprintf (dump_file, "\n");
	}

      while (i_ptr)
	{
	  bool remove = false;
	  store_info *store_info = i_ptr->store_rec;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, " processing cselib load against insn %d\n",
		     INSN_UID (i_ptr->insn));

	  /* Skip the clobbers.  */
	  while (!store_info->is_set)
	    store_info = store_info->next;

	  /* If this read is just reading back something that we just
	     stored, rewrite the read.  */
	  if (!used_in_call
	      && store_info->rhs
	      && store_info->group_id == -1
	      && store_info->cse_base == base
	      && known_subrange_p (offset, width, store_info->offset,
				   store_info->width)
	      && all_positions_needed_p (store_info,
					 offset - store_info->offset, width)
	      && replace_read (store_info, i_ptr, read_info, insn_info, loc))
	    return;

	  remove = canon_true_dependence (store_info->mem,
					  GET_MODE (store_info->mem),
					  store_info->mem_addr,
					  mem, mem_addr);
	  if (remove)
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		dump_insn_info ("removing from active", i_ptr);

	      active_local_stores_len--;
	      if (last)
		last->next_local_store = i_ptr->next_local_store;
	      else
		active_local_stores = i_ptr->next_local_store;
	    }
	  else
	    last = i_ptr;
	  i_ptr = i_ptr->next_local_store;
	}
    }
}
/* A note_uses callback in which DATA points to the bb_info for
   check_mem_read_rtx.  Nullify the pointer if i_m_r_m_r returns
   true for any part of *LOC.  */

static void
check_mem_read_use (rtx *loc, void *data)
{
  subrtx_ptr_iterator::array_type array;
  FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
    {
      rtx *loc = *iter;
      if (MEM_P (*loc))
	check_mem_read_rtx (loc, (bb_info_t) data);
    }
}
/* Get arguments passed to CALL_INSN.  Return TRUE if successful.
   So far it only handles arguments passed in registers.  */

static bool
get_call_args (rtx call_insn, tree fn, rtx *args, int nargs)
{
  CUMULATIVE_ARGS args_so_far_v;
  cumulative_args_t args_so_far;
  tree arg;
  int idx;

  INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
  args_so_far = pack_cumulative_args (&args_so_far_v);

  arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
  for (idx = 0;
       arg != void_list_node && idx < nargs;
       arg = TREE_CHAIN (arg), idx++)
    {
      scalar_int_mode mode;
      rtx reg, link, tmp;

      if (!is_int_mode (TYPE_MODE (TREE_VALUE (arg)), &mode))
	return false;

      function_arg_info arg (mode, /*named=*/true);
      reg = targetm.calls.function_arg (args_so_far, arg);
      if (!reg || !REG_P (reg) || GET_MODE (reg) != mode)
	return false;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn);
	   link;
	   link = XEXP (link, 1))
	if (GET_CODE (XEXP (link, 0)) == USE)
	  {
	    scalar_int_mode arg_mode;
	    args[idx] = XEXP (XEXP (link, 0), 0);
	    if (REG_P (args[idx])
		&& REGNO (args[idx]) == REGNO (reg)
		&& (GET_MODE (args[idx]) == mode
		    || (is_int_mode (GET_MODE (args[idx]), &arg_mode)
			&& (GET_MODE_SIZE (arg_mode) <= UNITS_PER_WORD)
			&& (GET_MODE_SIZE (arg_mode) > GET_MODE_SIZE (mode)))))
	      break;
	  }
      if (!link)
	return false;

      tmp = cselib_expand_value_rtx (args[idx], scratch, 5);
      if (GET_MODE (args[idx]) != mode)
	{
	  if (!tmp || !CONST_INT_P (tmp))
	    return false;
	  tmp = gen_int_mode (INTVAL (tmp), mode);
	}
      if (tmp)
	args[idx] = tmp;

      targetm.calls.function_arg_advance (args_so_far, arg);
    }
  if (arg != void_list_node || idx != nargs)
    return false;
  return true;
}
/* Return a bitmap of the fixed registers contained in IN.  */

static bitmap
copy_fixed_regs (const_bitmap in)
{
  bitmap ret;

  ret = ALLOC_REG_SET (NULL);
  bitmap_and (ret, in, bitmap_view<HARD_REG_SET> (fixed_reg_set));
  return ret;
}
/* Apply record_store to all candidate stores in INSN.  Mark INSN
   if some part of it is not a candidate store and assigns to a
   non-register target.  */

static void
scan_insn (bb_info_t bb_info, rtx_insn *insn, int max_active_local_stores)
{
  rtx body;
  insn_info_type *insn_info = insn_info_type_pool.allocate ();
  int mems_found = 0;
  memset (insn_info, 0, sizeof (struct insn_info_type));

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n**scanning insn=%d\n",
	     INSN_UID (insn));

  insn_info->prev_insn = bb_info->last_insn;
  insn_info->insn = insn;
  bb_info->last_insn = insn_info;

  if (DEBUG_INSN_P (insn))
    {
      insn_info->cannot_delete = true;
      return;
    }

  /* Look at all of the uses in the insn.  */
  note_uses (&PATTERN (insn), check_mem_read_use, bb_info);
  if (CALL_P (insn))
    {
      bool const_call;
      rtx call, sym;
      tree memset_call = NULL_TREE;

      insn_info->cannot_delete = true;

      /* Const functions cannot do anything bad i.e. read memory,
	 however, they can read their parameters which may have
	 been pushed onto the stack.
	 memset and bzero don't read memory either.  */
      const_call = RTL_CONST_CALL_P (insn);
      if (!const_call
	  && (call = get_call_rtx_from (insn))
	  && (sym = XEXP (XEXP (call, 0), 0))
	  && GET_CODE (sym) == SYMBOL_REF
	  && SYMBOL_REF_DECL (sym)
	  && TREE_CODE (SYMBOL_REF_DECL (sym)) == FUNCTION_DECL
	  && fndecl_built_in_p (SYMBOL_REF_DECL (sym), BUILT_IN_MEMSET))
	memset_call = SYMBOL_REF_DECL (sym);

      if (const_call || memset_call)
	{
	  insn_info_t i_ptr = active_local_stores;
	  insn_info_t last = NULL;

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "%s call %d\n",
		     const_call ? "const" : "memset", INSN_UID (insn));

	  /* See the head comment of the frame_read field.  */
	  if (reload_completed
	      /* Tail calls are storing their arguments using
		 the arg pointer.  If it is a frame pointer on the target,
		 even before reload we need to kill frame pointer based
		 stores.  */
	      || (SIBLING_CALL_P (insn)
		  && HARD_FRAME_POINTER_IS_ARG_POINTER))
	    insn_info->frame_read = true;
	  /* Loop over the active stores and remove those which are
	     killed by the const function call.  */
	  while (i_ptr)
	    {
	      bool remove_store = false;

	      /* The stack pointer based stores are always killed.  */
	      if (i_ptr->stack_pointer_based)
		remove_store = true;

	      /* If the frame is read, the frame related stores are killed.  */
	      else if (insn_info->frame_read)
		{
		  store_info *store_info = i_ptr->store_rec;

		  /* Skip the clobbers.  */
		  while (!store_info->is_set)
		    store_info = store_info->next;

		  if (store_info->group_id >= 0
		      && rtx_group_vec[store_info->group_id]->frame_related)
		    remove_store = true;
		}

	      if (remove_store)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    dump_insn_info ("removing from active", i_ptr);

		  active_local_stores_len--;
		  if (last)
		    last->next_local_store = i_ptr->next_local_store;
		  else
		    active_local_stores = i_ptr->next_local_store;
		}
	      else
		last = i_ptr;

	      i_ptr = i_ptr->next_local_store;
	    }
	  if (memset_call)
	    {
	      rtx args[3];
	      if (get_call_args (insn, memset_call, args, 3)
		  && CONST_INT_P (args[1])
		  && CONST_INT_P (args[2])
		  && INTVAL (args[2]) > 0)
		{
		  rtx mem = gen_rtx_MEM (BLKmode, args[0]);
		  set_mem_size (mem, INTVAL (args[2]));
		  body = gen_rtx_SET (mem, args[1]);
		  mems_found += record_store (body, bb_info);
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "handling memset as BLKmode store\n");
		  if (mems_found == 1)
		    {
		      if (active_local_stores_len++ >= max_active_local_stores)
			{
			  active_local_stores_len = 1;
			  active_local_stores = NULL;
			}
		      insn_info->fixed_regs_live
			= copy_fixed_regs (bb_info->regs_live);
		      insn_info->next_local_store = active_local_stores;
		      active_local_stores = insn_info;
		    }
		}
	      else
		clear_rhs_from_active_local_stores ();
	    }
	}
      else if (SIBLING_CALL_P (insn)
	       && (reload_completed || HARD_FRAME_POINTER_IS_ARG_POINTER))
	/* Arguments for a sibling call that are pushed to memory are passed
	   using the incoming argument pointer of the current function.  After
	   reload that might be (and likely is) frame pointer based.  And, if
	   it is a frame pointer on the target, even before reload we need to
	   kill frame pointer based stores.  */
	add_wild_read (bb_info);
      else
	/* Every other call, including pure functions, may read any memory
	   that is not relative to the frame.  */
	add_non_frame_wild_read (bb_info);

      for (rtx link = CALL_INSN_FUNCTION_USAGE (insn);
	   link != NULL_RTX;
	   link = XEXP (link, 1))
	if (GET_CODE (XEXP (link, 0)) == USE && MEM_P (XEXP (XEXP (link, 0), 0)))
	  check_mem_read_rtx (&XEXP (XEXP (link, 0), 0), bb_info, true);

      return;
    }
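  /* As an illustration of the memset handling above: a call such as
     memset (p, 1, 16) is modeled as the artificial BLKmode store
     (set (mem:BLK (p) [16 bytes]) (const_int 1)), so record_store and
     later replace_read can treat it like an ordinary tracked store
     (the concrete call here is only an example).  */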
  /* Assuming that there are sets in these insns, we cannot delete
     them.  */
  if ((GET_CODE (PATTERN (insn)) == CLOBBER)
      || volatile_refs_p (PATTERN (insn))
      || (!cfun->can_delete_dead_exceptions && !insn_nothrow_p (insn))
      || (RTX_FRAME_RELATED_P (insn))
      || find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX))
    insn_info->cannot_delete = true;

  body = PATTERN (insn);
  if (GET_CODE (body) == PARALLEL)
    {
      int i;
      for (i = 0; i < XVECLEN (body, 0); i++)
	mems_found += record_store (XVECEXP (body, 0, i), bb_info);
    }
  else
    mems_found += record_store (body, bb_info);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "mems_found = %d, cannot_delete = %s\n",
	     mems_found, insn_info->cannot_delete ? "true" : "false");

  /* If we found some sets of mems, add the insn into the active_local_stores
     so that it can be locally deleted if found dead or used for
     replace_read and redundant constant store elimination.  Otherwise mark
     it as cannot delete.  This simplifies the processing later.  */
  if (mems_found == 1)
    {
      if (active_local_stores_len++ >= max_active_local_stores)
	{
	  active_local_stores_len = 1;
	  active_local_stores = NULL;
	}
      insn_info->fixed_regs_live = copy_fixed_regs (bb_info->regs_live);
      insn_info->next_local_store = active_local_stores;
      active_local_stores = insn_info;
    }
  else
    insn_info->cannot_delete = true;
}
/* Remove BASE from the set of active_local_stores.  This is a
   callback from cselib that is used to get rid of the stores in
   active_local_stores.  */

static void
remove_useless_values (cselib_val *base)
{
  insn_info_t insn_info = active_local_stores;
  insn_info_t last = NULL;

  while (insn_info)
    {
      store_info *store_info = insn_info->store_rec;
      bool del = false;

      /* If ANY of the store_infos match the cselib group that is
	 being deleted, then the insn cannot be deleted.  */
      while (store_info)
	{
	  if ((store_info->group_id == -1)
	      && (store_info->cse_base == base))
	    {
	      del = true;
	      break;
	    }
	  store_info = store_info->next;
	}

      if (del)
	{
	  active_local_stores_len--;
	  if (last)
	    last->next_local_store = insn_info->next_local_store;
	  else
	    active_local_stores = insn_info->next_local_store;
	  free_store_info (insn_info);
	}
      else
	last = insn_info;

      insn_info = insn_info->next_local_store;
    }
}
/* Do all of step 1.  */

static void
dse_step1 (void)
{
  basic_block bb;
  bitmap regs_live = BITMAP_ALLOC (&reg_obstack);

  all_blocks = BITMAP_ALLOC (NULL);
  bitmap_set_bit (all_blocks, ENTRY_BLOCK);
  bitmap_set_bit (all_blocks, EXIT_BLOCK);

  /* For -O1 reduce the maximum number of active local stores for RTL DSE
     since this can consume huge amounts of memory (PR89115).  */
  int max_active_local_stores = param_max_dse_active_local_stores;
  if (!flag_expensive_optimizations)
    max_active_local_stores /= 10;
  FOR_ALL_BB_FN (bb, cfun)
    {
      insn_info_t ptr;
      bb_info_t bb_info = dse_bb_info_type_pool.allocate ();

      memset (bb_info, 0, sizeof (dse_bb_info_type));
      bitmap_set_bit (all_blocks, bb->index);
      bb_info->regs_live = regs_live;

      bitmap_copy (regs_live, DF_LR_IN (bb));
      df_simulate_initialize_forwards (bb, regs_live);

      bb_table[bb->index] = bb_info;
      cselib_discard_hook = remove_useless_values;

      if (bb->index >= NUM_FIXED_BLOCKS)
	{
	  rtx_insn *insn;

	  active_local_stores = NULL;
	  active_local_stores_len = 0;
	  cselib_clear_table ();

	  /* Scan the insns.  */
	  FOR_BB_INSNS (bb, insn)
	    {
	      if (INSN_P (insn))
		scan_insn (bb_info, insn, max_active_local_stores);
	      cselib_process_insn (insn);
	      if (INSN_P (insn))
		df_simulate_one_insn_forwards (bb, insn, regs_live);
	    }
	  /* This is something of a hack, because the global algorithm
	     is supposed to take care of the case where stores go dead
	     at the end of the function.  However, the global
	     algorithm must take a more conservative view of block
	     mode reads than the local alg does.  So to get the case
	     where you have a store to the frame followed by a non
	     overlapping block mode read, we look at the active local
	     stores at the end of the function and delete all of the
	     frame and spill based ones.  */
	  if (stores_off_frame_dead_at_return
	      && (EDGE_COUNT (bb->succs) == 0
		  || (single_succ_p (bb)
		      && single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun)
		      && ! crtl->calls_eh_return)))
	    {
	      insn_info_t i_ptr = active_local_stores;
	      while (i_ptr)
		{
		  store_info *store_info = i_ptr->store_rec;

		  /* Skip the clobbers.  */
		  while (!store_info->is_set)
		    store_info = store_info->next;
		  if (store_info->group_id >= 0)
		    {
		      group_info *group = rtx_group_vec[store_info->group_id];
		      if (group->frame_related && !i_ptr->cannot_delete)
			delete_dead_store_insn (i_ptr);
		    }

		  i_ptr = i_ptr->next_local_store;
		}
	    }
	  /* Get rid of the loads that were discovered in
	     replace_read.  Cselib is finished with this block.  */
	  while (deferred_change_list)
	    {
	      deferred_change *next = deferred_change_list->next;

	      /* There is no reason to validate this change.  That was
		 done earlier.  */
	      *deferred_change_list->loc = deferred_change_list->reg;
	      deferred_change_pool.remove (deferred_change_list);
	      deferred_change_list = next;
	    }
	  /* Get rid of all of the cselib based store_infos in this
	     block and mark the containing insns as not being
	     deletable.  */
	  ptr = bb_info->last_insn;
	  while (ptr)
	    {
	      if (ptr->contains_cselib_groups)
		{
		  store_info *s_info = ptr->store_rec;
		  while (s_info && !s_info->is_set)
		    s_info = s_info->next;
		  if (s_info
		      && s_info->redundant_reason
		      && s_info->redundant_reason->insn
		      && !ptr->cannot_delete)
		    {
		      if (dump_file && (dump_flags & TDF_DETAILS))
			fprintf (dump_file, "Locally deleting insn %d "
				 "because insn %d stores the "
				 "same value and couldn't be "
				 "eliminated\n",
				 INSN_UID (ptr->insn),
				 INSN_UID (s_info->redundant_reason->insn));
		      delete_dead_store_insn (ptr);
		    }
		  else
		    free_store_info (ptr);
		}
	      else
		{
		  store_info *s_info;

		  /* Free at least positions_needed bitmaps.  */
		  for (s_info = ptr->store_rec; s_info; s_info = s_info->next)
		    if (s_info->is_large)
		      {
			BITMAP_FREE (s_info->positions_needed.large.bmap);
			s_info->is_large = false;
		      }
		}
	      ptr = ptr->prev_insn;
	    }

	  cse_store_info_pool.release ();
	}
      bb_info->regs_live = NULL;
    }

  BITMAP_FREE (regs_live);
  rtx_group_table->empty ();
}
/*----------------------------------------------------------------------------
   Second step.

   Assign each byte position in the stores that we are going to
   analyze globally to a position in the bitmaps.  Returns true if
   there are any bit positions assigned.
----------------------------------------------------------------------------*/
static void
dse_step2_init (void)
{
  unsigned int i;
  group_info *group;

  FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
    {
      /* For all non stack related bases, we only consider a store to
	 be deletable if there are two or more stores for that
	 position.  This is because it takes one store to make the
	 other store redundant.  However, for the stores that are
	 stack related, we consider them if there is only one store
	 for the position.  We do this because the stack related
	 stores can be deleted if there is no read between them and
	 the end of the function.

	 To make this work in the current framework, we take the stack
	 related bases and add all of the bits from store1 into store2.
	 This has the effect of making them eligible even if there is
	 only one store.  */

      if (stores_off_frame_dead_at_return && group->frame_related)
	{
	  bitmap_ior_into (group->store2_n, group->store1_n);
	  bitmap_ior_into (group->store2_p, group->store1_p);
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "group %d is frame related ", i);
	}

      group->offset_map_size_n++;
      group->offset_map_n = XOBNEWVEC (&dse_obstack, int,
				       group->offset_map_size_n);
      group->offset_map_size_p++;
      group->offset_map_p = XOBNEWVEC (&dse_obstack, int,
				       group->offset_map_size_p);
      group->process_globally = false;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "group %d(%d+%d): ", i,
		   (int)bitmap_count_bits (group->store2_n),
		   (int)bitmap_count_bits (group->store2_p));
	  bitmap_print (dump_file, group->store2_n, "n ", " ");
	  bitmap_print (dump_file, group->store2_p, "p ", "\n");
	}
    }
}
/* Init the offset tables.  */

static bool
dse_step2 (void)
{
  unsigned int i;
  group_info *group;

  /* Position 0 is unused because 0 is used in the maps to mean
     unused.  */
  current_position = 1;
  FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
    {
      bitmap_iterator bi;
      unsigned int j;

      memset (group->offset_map_n, 0, sizeof (int) * group->offset_map_size_n);
      memset (group->offset_map_p, 0, sizeof (int) * group->offset_map_size_p);
      bitmap_clear (group->group_kill);

      EXECUTE_IF_SET_IN_BITMAP (group->store2_n, 0, j, bi)
	{
	  bitmap_set_bit (group->group_kill, current_position);
	  if (bitmap_bit_p (group->escaped_n, j))
	    bitmap_set_bit (kill_on_calls, current_position);
	  group->offset_map_n[j] = current_position++;
	  group->process_globally = true;
	}
      EXECUTE_IF_SET_IN_BITMAP (group->store2_p, 0, j, bi)
	{
	  bitmap_set_bit (group->group_kill, current_position);
	  if (bitmap_bit_p (group->escaped_p, j))
	    bitmap_set_bit (kill_on_calls, current_position);
	  group->offset_map_p[j] = current_position++;
	  group->process_globally = true;
	}
    }
  return current_position != 1;
}
/*----------------------------------------------------------------------------
   Third step.

   Build the bit vectors for the transfer functions.
----------------------------------------------------------------------------*/

/* Look up the bitmap index for OFFSET in GROUP_INFO.  If it is not
   there, return 0.  */

static int
get_bitmap_index (group_info *group_info, HOST_WIDE_INT offset)
{
  if (offset < 0)
    {
      HOST_WIDE_INT offset_p = -offset;
      if (offset_p >= group_info->offset_map_size_n)
	return 0;
      return group_info->offset_map_n[offset_p];
    }
  else
    {
      if (offset >= group_info->offset_map_size_p)
	return 0;
      return group_info->offset_map_p[offset];
    }
}
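/* For instance (with purely illustrative numbers): if a group has
   tracked stores at byte offsets -4 and 8, dse_step2 might assign
   offset_map_n[4] = 1 and offset_map_p[8] = 2, so
   get_bitmap_index (group, -4) returns 1, get_bitmap_index (group, 8)
   returns 2, and any offset without a tracked store returns 0, the
   "unused" position.  */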
/* Process the STORE_INFOs into the bitmaps into GEN and KILL.  KILL
   may be NULL.  */

static void
scan_stores (store_info *store_info, bitmap gen, bitmap kill)
{
  while (store_info)
    {
      HOST_WIDE_INT i, offset, width;
      group_info *group_info
	= rtx_group_vec[store_info->group_id];
      /* We can (conservatively) ignore stores whose bounds aren't known;
	 they simply don't generate new global dse opportunities.  */
      if (group_info->process_globally
	  && store_info->offset.is_constant (&offset)
	  && store_info->width.is_constant (&width))
	{
	  HOST_WIDE_INT end = offset + width;
	  for (i = offset; i < end; i++)
	    {
	      int index = get_bitmap_index (group_info, i);
	      if (index != 0)
		{
		  bitmap_set_bit (gen, index);
		  if (kill)
		    bitmap_clear_bit (kill, index);
		}
	    }
	}
      store_info = store_info->next;
    }
}
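/* Sketch of the effect: a 4-byte store to some group at byte offset 8
   sets the gen bits for positions 8..11 of that group and clears the
   same bits in kill; because those bytes are overwritten here, an
   earlier store of them is dead at this point regardless of what the
   rest of the block or the successor blocks do.  */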
/* Process the READ_INFOs into the bitmaps into GEN and KILL.  KILL
   may be NULL.  */

static void
scan_reads (insn_info_t insn_info, bitmap gen, bitmap kill)
{
  read_info_t read_info = insn_info->read_rec;
  int i;
  group_info *group;

  /* If this insn reads the frame, kill all the frame related stores.  */
  if (insn_info->frame_read)
    {
      FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
	if (group->process_globally && group->frame_related)
	  {
	    if (kill)
	      bitmap_ior_into (kill, group->group_kill);
	    bitmap_and_compl_into (gen, group->group_kill);
	  }
    }
  if (insn_info->non_frame_wild_read)
    {
      /* Kill all non-frame related stores.  Kill all stores of variables that
	 escape.  */
      if (kill)
	bitmap_ior_into (kill, kill_on_calls);
      bitmap_and_compl_into (gen, kill_on_calls);
      FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
	if (group->process_globally && !group->frame_related)
	  {
	    if (kill)
	      bitmap_ior_into (kill, group->group_kill);
	    bitmap_and_compl_into (gen, group->group_kill);
	  }
    }
  while (read_info)
    {
      FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
	{
	  if (group->process_globally)
	    {
	      if (i == read_info->group_id)
		{
		  HOST_WIDE_INT offset, width;
		  /* Reads with non-constant size kill all DSE opportunities
		     in the group.  */
		  if (!read_info->offset.is_constant (&offset)
		      || !read_info->width.is_constant (&width)
		      || !known_size_p (width))
		    {
		      /* Handle block mode reads.  */
		      if (kill)
			bitmap_ior_into (kill, group->group_kill);
		      bitmap_and_compl_into (gen, group->group_kill);
		    }
		  else
		    {
		      /* The groups are the same, just process the
			 offsets.  */
		      HOST_WIDE_INT j;
		      HOST_WIDE_INT end = offset + width;
		      for (j = offset; j < end; j++)
			{
			  int index = get_bitmap_index (group, j);
			  if (index != 0)
			    {
			      if (kill)
				bitmap_set_bit (kill, index);
			      bitmap_clear_bit (gen, index);
			    }
			}
		    }
		}
	      else
		{
		  /* The groups are different, if the alias sets
		     conflict, clear the entire group.  We only need
		     to apply this test if the read_info is a cselib
		     read.  Anything with a constant base cannot alias
		     something else with a different constant
		     base.  */
		  if ((read_info->group_id < 0)
		      && canon_true_dependence (group->base_mem,
						GET_MODE (group->base_mem),
						group->canon_base_addr,
						read_info->mem, NULL_RTX))
		    {
		      if (kill)
			bitmap_ior_into (kill, group->group_kill);
		      bitmap_and_compl_into (gen, group->group_kill);
		    }
		}
	    }
	}

      read_info = read_info->next;
    }
}
/* Return the insn in BB_INFO before the first wild read or if there
   are no wild reads in the block, return the last insn.  */

static insn_info_t
find_insn_before_first_wild_read (bb_info_t bb_info)
{
  insn_info_t insn_info = bb_info->last_insn;
  insn_info_t last_wild_read = NULL;

  while (insn_info)
    {
      if (insn_info->wild_read)
	{
	  last_wild_read = insn_info->prev_insn;
	  /* Block starts with wild read.  */
	  if (!last_wild_read)
	    return NULL;
	}

      insn_info = insn_info->prev_insn;
    }

  if (last_wild_read)
    return last_wild_read;
  else
    return bb_info->last_insn;
}
/* Scan the insns in BB_INFO starting at PTR and going to the top of
   the block in order to build the gen and kill sets for the block.
   We start at ptr which may be the last insn in the block or may be
   the first insn with a wild read.  In the latter case we are able to
   skip the rest of the block because it just does not matter:
   anything that happens is hidden by the wild read.  */

static void
dse_step3_scan (basic_block bb)
{
  bb_info_t bb_info = bb_table[bb->index];
  insn_info_t insn_info;

  insn_info = find_insn_before_first_wild_read (bb_info);

  /* In the spill case or in the no_spill case if there is no wild
     read in the block, we will need a kill set.  */
  if (insn_info == bb_info->last_insn)
    {
      if (bb_info->kill)
	bitmap_clear (bb_info->kill);
      else
	bb_info->kill = BITMAP_ALLOC (&dse_bitmap_obstack);
    }
  else if (bb_info->kill)
    BITMAP_FREE (bb_info->kill);

  while (insn_info)
    {
      /* There may have been code deleted by the dce pass run before
	 this phase.  */
      if (insn_info->insn && INSN_P (insn_info->insn))
	{
	  scan_stores (insn_info->store_rec, bb_info->gen, bb_info->kill);
	  scan_reads (insn_info, bb_info->gen, bb_info->kill);
	}

      insn_info = insn_info->prev_insn;
    }
}
/* Set the gen set of the exit block, and also any block with no
   successors that does not have a wild read.  */

static void
dse_step3_exit_block_scan (bb_info_t bb_info)
{
  /* The gen set is all 0's for the exit block except for the
     frame_pointer_group.  */

  if (stores_off_frame_dead_at_return)
    {
      unsigned int i;
      group_info *group;

      FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
	{
	  if (group->process_globally && group->frame_related)
	    bitmap_ior_into (bb_info->gen, group->group_kill);
	}
    }
}
/* Find all of the blocks that are not backwards reachable from the
   exit block or any block with no successors (BB).  These are the
   infinite loops or infinite self loops.  These blocks will still
   have their bits set in UNREACHABLE_BLOCKS.  */

static void
mark_reachable_blocks (sbitmap unreachable_blocks, basic_block bb)
{
  edge e;
  edge_iterator ei;

  if (bitmap_bit_p (unreachable_blocks, bb->index))
    {
      bitmap_clear_bit (unreachable_blocks, bb->index);
      FOR_EACH_EDGE (e, ei, bb->preds)
	{
	  mark_reachable_blocks (unreachable_blocks, e->src);
	}
    }
}
/* Build the transfer functions for the function.  */

static void
dse_step3 ()
{
  basic_block bb;
  sbitmap_iterator sbi;
  bitmap all_ones = NULL;
  unsigned int i;

  auto_sbitmap unreachable_blocks (last_basic_block_for_fn (cfun));
  bitmap_ones (unreachable_blocks);

  FOR_ALL_BB_FN (bb, cfun)
    {
      bb_info_t bb_info = bb_table[bb->index];
      if (bb_info->gen)
	bitmap_clear (bb_info->gen);
      else
	bb_info->gen = BITMAP_ALLOC (&dse_bitmap_obstack);

      if (bb->index == ENTRY_BLOCK)
	;
      else if (bb->index == EXIT_BLOCK)
	dse_step3_exit_block_scan (bb_info);
      else
	dse_step3_scan (bb);
      if (EDGE_COUNT (bb->succs) == 0)
	mark_reachable_blocks (unreachable_blocks, bb);

      /* If this is the second time dataflow is run, delete the old
	 sets.  */
      if (bb_info->in)
	BITMAP_FREE (bb_info->in);
      if (bb_info->out)
	BITMAP_FREE (bb_info->out);
    }

  /* For any block in an infinite loop, we must initialize the out set
     to all ones.  This could be expensive, but almost never occurs in
     practice.  However, it is common in regression tests.  */
  EXECUTE_IF_SET_IN_BITMAP (unreachable_blocks, 0, i, sbi)
    {
      if (bitmap_bit_p (all_blocks, i))
	{
	  bb_info_t bb_info = bb_table[i];
	  if (!all_ones)
	    {
	      unsigned int j;
	      group_info *group;

	      all_ones = BITMAP_ALLOC (&dse_bitmap_obstack);
	      FOR_EACH_VEC_ELT (rtx_group_vec, j, group)
		bitmap_ior_into (all_ones, group->group_kill);
	    }
	  if (!bb_info->out)
	    {
	      bb_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
	      bitmap_copy (bb_info->out, all_ones);
	    }
	}
    }

  if (all_ones)
    BITMAP_FREE (all_ones);
}
/*----------------------------------------------------------------------------
   Fourth step.

   Solve the bitvector equations.
----------------------------------------------------------------------------*/

/* Confluence function for blocks with no successors.  Create an out
   set from the gen set of the exit block.  This block logically has
   the exit block as a successor.  */

static void
dse_confluence_0 (basic_block bb)
{
  bb_info_t bb_info = bb_table[bb->index];

  if (bb->index == EXIT_BLOCK)
    return;

  if (!bb_info->out)
    {
      bb_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
      bitmap_copy (bb_info->out, bb_table[EXIT_BLOCK]->gen);
    }
}
/* Propagate the information from the in set of the dest of E to the
   out set of the src of E.  If the various in or out sets are not
   there, that means they are all ones.  */

static bool
dse_confluence_n (edge e)
{
  bb_info_t src_info = bb_table[e->src->index];
  bb_info_t dest_info = bb_table[e->dest->index];

  if (dest_info->in)
    {
      if (src_info->out)
	bitmap_and_into (src_info->out, dest_info->in);
      else
	{
	  src_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
	  bitmap_copy (src_info->out, dest_info->in);
	}
    }
  return true;
}
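/* In equation form this confluence is out(src) &= in(dest) over every
   edge src->dest: a byte position stays "dead" at the end of SRC only
   if it is also dead at the start of every successor, i.e. it is
   overwritten before being read along every outgoing path.  */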
/* Propagate the info from the out to the in set of BB_INDEX's basic
   block.  There are three cases:

   1) The block has no kill set.  In this case the kill set is all
   ones.  It does not matter what the out set of the block is, none of
   the info can reach the top.  The only thing that reaches the top is
   the gen set and we just copy the set.

   2) There is a kill set but no out set and bb has successors.  In
   this case we just return.  Eventually an out set will be created and
   it is better to wait than to create a set of ones.

   3) There is both a kill and out set.  We apply the obvious transfer
   function.  */

static bool
dse_transfer_function (int bb_index)
{
  bb_info_t bb_info = bb_table[bb_index];

  if (bb_info->kill)
    {
      if (bb_info->out)
	{
	  /* Case 3 above.  */
	  if (bb_info->in)
	    return bitmap_ior_and_compl (bb_info->in, bb_info->gen,
					 bb_info->out, bb_info->kill);
	  else
	    {
	      bb_info->in = BITMAP_ALLOC (&dse_bitmap_obstack);
	      bitmap_ior_and_compl (bb_info->in, bb_info->gen,
				    bb_info->out, bb_info->kill);
	      return true;
	    }
	}
      else
	/* Case 2 above.  */
	return false;
    }
  else
    {
      /* Case 1 above.  If there is already an in set, nothing
	 happens.  */
      if (bb_info->in)
	return false;
      else
	{
	  bb_info->in = BITMAP_ALLOC (&dse_bitmap_obstack);
	  bitmap_copy (bb_info->in, bb_info->gen);
	  return true;
	}
    }
}
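/* The case 3 transfer above is, in bitmap terms, in = gen | (out & ~kill):
   a byte position is dead at the top of the block if the block itself
   overwrites it, or if it is dead at the bottom and nothing in the
   block reads it.  */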
/* Solve the dataflow equations.  */

static void
dse_step4 (void)
{
  df_simple_dataflow (DF_BACKWARD, NULL, dse_confluence_0,
		      dse_confluence_n, dse_transfer_function,
		      all_blocks, df_get_postorder (DF_BACKWARD),
		      df_get_n_blocks (DF_BACKWARD));
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      basic_block bb;

      fprintf (dump_file, "\n\n*** Global dataflow info after analysis.\n");
      FOR_ALL_BB_FN (bb, cfun)
	{
	  bb_info_t bb_info = bb_table[bb->index];

	  df_print_bb_index (bb, dump_file);
	  if (bb_info->in)
	    bitmap_print (dump_file, bb_info->in, " in: ", "\n");
	  else
	    fprintf (dump_file, " in: *MISSING*\n");
	  if (bb_info->gen)
	    bitmap_print (dump_file, bb_info->gen, " gen: ", "\n");
	  else
	    fprintf (dump_file, " gen: *MISSING*\n");
	  if (bb_info->kill)
	    bitmap_print (dump_file, bb_info->kill, " kill: ", "\n");
	  else
	    fprintf (dump_file, " kill: *MISSING*\n");
	  if (bb_info->out)
	    bitmap_print (dump_file, bb_info->out, " out: ", "\n");
	  else
	    fprintf (dump_file, " out: *MISSING*\n\n");
	}
    }
}
/*----------------------------------------------------------------------------
   Fifth step.

   Delete the stores that can only be deleted using the global information.
----------------------------------------------------------------------------*/

static void
dse_step5 (void)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, cfun)
    {
      bb_info_t bb_info = bb_table[bb->index];
      insn_info_t insn_info = bb_info->last_insn;
      bitmap v = bb_info->out;

      while (insn_info)
	{
	  bool deleted = false;
	  if (dump_file && insn_info->insn)
	    {
	      fprintf (dump_file, "starting to process insn %d\n",
		       INSN_UID (insn_info->insn));
	      bitmap_print (dump_file, v, " v: ", "\n");
	    }

	  /* There may have been code deleted by the dce pass run before
	     this phase.  */
	  if (insn_info->insn
	      && INSN_P (insn_info->insn)
	      && (!insn_info->cannot_delete)
	      && (!bitmap_empty_p (v)))
	    {
	      store_info *store_info = insn_info->store_rec;

	      /* Try to delete the current insn.  */
	      deleted = true;

	      /* Skip the clobbers.  */
	      while (!store_info->is_set)
		store_info = store_info->next;

	      HOST_WIDE_INT i, offset, width;
	      group_info *group_info = rtx_group_vec[store_info->group_id];

	      if (!store_info->offset.is_constant (&offset)
		  || !store_info->width.is_constant (&width))
		deleted = false;
	      else
		{
		  HOST_WIDE_INT end = offset + width;
		  for (i = offset; i < end; i++)
		    {
		      int index = get_bitmap_index (group_info, i);
		      if (dump_file && (dump_flags & TDF_DETAILS))
			fprintf (dump_file, "i = %d, index = %d\n",
				 (int) i, index);
		      if (index == 0 || !bitmap_bit_p (v, index))
			{
			  if (dump_file && (dump_flags & TDF_DETAILS))
			    fprintf (dump_file, "failing at i = %d\n",
				     (int) i);
			  deleted = false;
			  break;
			}
		    }
		}
	      if (deleted)
		{
		  if (dbg_cnt (dse)
		      && check_for_inc_dec_1 (insn_info))
		    {
		      delete_insn (insn_info->insn);
		      insn_info->insn = NULL;
		      globally_deleted++;
		    }
		}
	    }
	  /* We do want to process the local info if the insn was
	     deleted.  For instance, if the insn did a wild read, we
	     no longer need to trash the info.  */
	  if (insn_info->insn
	      && INSN_P (insn_info->insn)
	      && (!deleted))
	    {
	      scan_stores (insn_info->store_rec, v, NULL);
	      if (insn_info->wild_read)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "wild read\n");
		  bitmap_clear (v);
		}
	      else if (insn_info->read_rec
		       || insn_info->non_frame_wild_read
		       || insn_info->frame_read)
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      if (!insn_info->non_frame_wild_read
			  && !insn_info->frame_read)
			fprintf (dump_file, "regular read\n");
		      if (insn_info->non_frame_wild_read)
			fprintf (dump_file, "non-frame wild read\n");
		      if (insn_info->frame_read)
			fprintf (dump_file, "frame read\n");
		    }
		  scan_reads (insn_info, v, NULL);
		}
	    }

	  insn_info = insn_info->prev_insn;
	}
    }
}
/*----------------------------------------------------------------------------
   Sixth step.

   Delete stores made redundant by earlier stores (which store the same
   value) that couldn't be eliminated.
----------------------------------------------------------------------------*/

static void
dse_step6 (void)
{
  basic_block bb;

  FOR_ALL_BB_FN (bb, cfun)
    {
      bb_info_t bb_info = bb_table[bb->index];
      insn_info_t insn_info = bb_info->last_insn;

      while (insn_info)
	{
	  /* There may have been code deleted by the dce pass run before
	     this phase.  */
	  if (insn_info->insn
	      && INSN_P (insn_info->insn)
	      && !insn_info->cannot_delete)
	    {
	      store_info *s_info = insn_info->store_rec;

	      while (s_info && !s_info->is_set)
		s_info = s_info->next;
	      if (s_info
		  && s_info->redundant_reason
		  && s_info->redundant_reason->insn
		  && INSN_P (s_info->redundant_reason->insn))
		{
		  rtx_insn *rinsn = s_info->redundant_reason->insn;
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    fprintf (dump_file, "Locally deleting insn %d "
			     "because insn %d stores the "
			     "same value and couldn't be "
			     "eliminated\n",
			     INSN_UID (insn_info->insn),
			     INSN_UID (rinsn));
		  delete_dead_store_insn (insn_info);
		}
	    }
	  insn_info = insn_info->prev_insn;
	}
    }
}
/*----------------------------------------------------------------------------
   Seventh step.

   Destroy everything left standing.
----------------------------------------------------------------------------*/

static void
dse_step7 (void)
{
  bitmap_obstack_release (&dse_bitmap_obstack);
  obstack_free (&dse_obstack, NULL);

  end_alias_analysis ();
  delete rtx_group_table;
  rtx_group_table = NULL;
  rtx_group_vec.release ();
  BITMAP_FREE (all_blocks);
  BITMAP_FREE (scratch);

  rtx_store_info_pool.release ();
  read_info_type_pool.release ();
  insn_info_type_pool.release ();
  dse_bb_info_type_pool.release ();
  group_info_pool.release ();
  deferred_change_pool.release ();
}
/* -------------------------------------------------------------------------
   ------------------------------------------------------------------------- */

/* Callback for running pass_rtl_dse.  */

static unsigned int
rest_of_handle_dse (void)
{
  df_set_flags (DF_DEFER_INSN_RESCAN);

  /* Need the notes since we must track live hardregs in the forwards
     direction.  */
  df_note_add_problem ();
  df_analyze ();

  dse_step0 ();
  dse_step1 ();

  /* DSE can eliminate potentially-trapping MEMs.
     Remove any EH edges associated with them, since otherwise
     DF_LR_RUN_DCE will complain later.  */
  if ((locally_deleted || globally_deleted)
      && cfun->can_throw_non_call_exceptions
      && purge_all_dead_edges ())
    {
      free_dominance_info (CDI_DOMINATORS);
      delete_unreachable_blocks ();
    }

  dse_step2_init ();
  if (dse_step2 ())
    {
      df_set_flags (DF_LR_RUN_DCE);
      df_analyze ();
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, "doing global processing\n");
      dse_step3 ();
      dse_step4 ();
      dse_step5 ();
    }

  dse_step6 ();
  dse_step7 ();

  if (dump_file)
    fprintf (dump_file, "dse: local deletions = %d, global deletions = %d\n",
	     locally_deleted, globally_deleted);

  /* DSE can eliminate potentially-trapping MEMs.
     Remove any EH edges associated with them.  */
  if ((locally_deleted || globally_deleted)
      && cfun->can_throw_non_call_exceptions
      && purge_all_dead_edges ())
    {
      free_dominance_info (CDI_DOMINATORS);
      delete_unreachable_blocks ();
    }

  return 0;
}
const pass_data pass_data_rtl_dse1 =
{
  RTL_PASS, /* type */
  "dse1", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_DSE1, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_rtl_dse1 : public rtl_opt_pass
{
public:
  pass_rtl_dse1 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_rtl_dse1, ctxt)
  {}

  /* opt_pass methods: */
  bool gate (function *) final override
    {
      return optimize > 0 && flag_dse && dbg_cnt (dse1);
    }

  unsigned int execute (function *) final override
    {
      return rest_of_handle_dse ();
    }

}; // class pass_rtl_dse1

rtl_opt_pass *
make_pass_rtl_dse1 (gcc::context *ctxt)
{
  return new pass_rtl_dse1 (ctxt);
}
const pass_data pass_data_rtl_dse2 =
{
  RTL_PASS, /* type */
  "dse2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_DSE2, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_rtl_dse2 : public rtl_opt_pass
{
public:
  pass_rtl_dse2 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_rtl_dse2, ctxt)
  {}

  /* opt_pass methods: */
  bool gate (function *) final override
    {
      return optimize > 0 && flag_dse && dbg_cnt (dse2);
    }

  unsigned int execute (function *) final override
    {
      return rest_of_handle_dse ();
    }

}; // class pass_rtl_dse2

rtl_opt_pass *
make_pass_rtl_dse2 (gcc::context *ctxt)
{
  return new pass_rtl_dse2 (ctxt);
}