/* RTL dead store elimination.
   Copyright (C) 2005-2019 Free Software Foundation, Inc.

   Contributed by Richard Sandiford <rsandifor@codesourcery.com>
   and Kenneth Zadeck <zadeck@naturalbridge.com>

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "gimple-ssa.h"
#include "stor-layout.h"
#include "tree-pass.h"
#include "cfgcleanup.h"
/* This file contains three techniques for performing Dead Store
   Elimination (dse).

   * The first technique performs dse locally on any base address.  It
   is based on cselib, which is a local value numbering technique.
   This technique is local to a basic block but deals with fairly
   general addresses.

   * The second technique performs dse globally but is restricted to
   base addresses that are either constant or are relative to the
   frame pointer.

   * The third technique (which is only done after register allocation)
   processes the spill slots.  This differs from the second
   technique because it takes advantage of the fact that spilling is
   completely free from the effects of aliasing.

   Logically, dse is a backwards dataflow problem.  A store can be
   deleted if it cannot be reached in the backward direction by any
   use of the value being stored.  However, the local technique uses a
   forwards scan of the basic block because cselib requires that the
   block be processed in that order.
   The pass is logically broken into 7 steps:

   1) The local algorithm, as well as scanning the insns for the two
   global algorithms.

   2) Analysis to see if the global algorithms are necessary.  In the
   case of stores based on a constant address, there must be at least
   two stores to that address to make it possible to delete some of the
   stores.  In the case of stores off of the frame or spill related
   stores, only one store to an address is necessary because those
   stores die at the end of the function.

   3) Set up the global dataflow equations based on processing the
   info parsed in the first step.

   4) Solve the dataflow equations.

   5) Delete the insns that the global analysis has indicated are
   unnecessary.

   6) Delete insns that store the same value as a preceding store
   where the earlier store couldn't be eliminated.

   The first step uses cselib and canon_rtx to build the largest
   expression possible for each address.  It is a forwards pass through
   each basic block.  From the point of view of the global technique,
   this step could examine a block in either direction; the forwards
   ordering is to accommodate cselib.
   We make a simplifying assumption: addresses fall into four broad
   categories:

   1) base has rtx_varies_p == false, offset is constant.
   2) base has rtx_varies_p == false, offset is variable.
   3) base has rtx_varies_p == true, offset is constant.
   4) base has rtx_varies_p == true, offset is variable.

   The local passes are able to process all 4 kinds of addresses.  The
   global pass only handles 1).
   The global problem is formulated as follows:

     A store, S1, to address A, where A is not relative to the stack
     frame, can be eliminated if all paths from S1 to the end of the
     function contain another store to A before a read of A.

     If the address A is relative to the stack frame, a store S2 to A
     can be eliminated if there are no paths from S2 that reach the
     end of the function and read A before another store to A.  In
     this case S2 can be deleted even if there are paths from S2 to the
     end of the function that have no reads or writes to A.  This
     second case allows stores to the stack frame to be deleted that
     would otherwise die when the function returns.  This cannot be
     done if stores_off_frame_dead_at_return is not true.  See the doc
     for that variable for when it is false.
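
     For instance (an illustrative sketch, not taken from the original
     comment; the function and callee names are made up):

       void f (void)
       {
         int buf[4];             <- lives in the stack frame
         buf[0] = expensive ();  <- dead store: nothing reads buf before
         return;                    f returns, so the slot dies here and
       }                            the store insn can be removed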
   The global problem is formulated as a backwards set union
   dataflow problem where the stores are the gens and reads are the
   kills.  Set union problems are rare and require some special
   handling given our representation of bitmaps.  A straightforward
   implementation requires a lot of bitmaps filled with 1s.
   These are expensive and cumbersome in our bitmap formulation so
   care has been taken to avoid large vectors filled with 1s.  See
   the comments in bb_info and in the dataflow confluence functions
   for details.
   There are two places for further enhancements to this algorithm:

   1) The original dse which was embedded in a pass called flow also
   did local address forwarding.  For example in

        A <- r100
        ... <- A

   flow would replace the right hand side of the second insn with a
   reference to r100.  Most of the information needed to add this to
   this pass is available, but it has not been done because it is a lot
   of work in the case that either r100 is assigned to between the
   first and second insn and/or the second insn is a load of part of
   the value stored by the first insn.

   insn 5 in gcc.c-torture/compile/990203-1.c simple case.
   insn 15 in gcc.c-torture/execute/20001017-2.c simple case.
   insn 25 in gcc.c-torture/execute/20001026-1.c simple case.
   insn 44 in gcc.c-torture/execute/20010910-1.c simple case.
   2) The cleaning up of spill code is quite profitable.  It currently
   depends on reading tea leaves and chicken entrails left by reload.
   This pass depends on reload creating a singleton alias set for each
   spill slot and telling the next dse pass which of these alias sets
   are the singletons.  Rather than analyze the addresses of the
   spills, dse's spill processing just does analysis of the loads and
   stores that use those alias sets.  There are three cases where this
   falls short:

   a) Reload sometimes creates the slot for one mode of access, and
   then inserts loads and/or stores for a smaller mode.  In this
   case, the current code just punts on the slot.  The proper thing
   to do is to back out and use one bit vector position for each
   byte of the entity associated with the slot.  This depends on
   KNOWING that reload always generates the accesses for each of the
   bytes in some canonical (read that easy to understand several
   passes after reload happens) way.

   b) Reload sometimes decides that the spill slot it allocated was not
   large enough for the mode and goes back and allocates more slots
   with the same mode and alias set.  The backout in this case is a
   little more graceful than (a).  In this case the slot is unmarked
   as being a spill slot and if the final address comes out to be based
   off the frame pointer, the global algorithm handles this slot.

   c) For any pass that may prespill, there is currently no
   mechanism to tell the dse pass that the slot being used has the
   special properties that reload uses.  It may be that all that is
   required is to have those passes make the same calls that reload
   does, assuming that the alias sets can be manipulated in the same
   way.  */
/* There are limits to the size of constant offsets we model for the
   global problem.  There are certainly test cases that exceed this
   limit; however, it is unlikely that there are important programs
   that really have constant offsets this size.  */
#define MAX_OFFSET (64 * 1024)
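
/* For example (an illustrative note, not part of the original comments):
   with MAX_OFFSET at 64K, a store such as a[100000] = 0 into an int
   array at a constant base lies outside the modeled range, so only the
   local, cselib-based technique can consider it; the global problem
   simply does not track that offset.  */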
/* Obstack for the DSE dataflow bitmaps.  We don't want to put these
   on the default obstack because these bitmaps can grow quite large
   (~2GB for the small (!) test case of PR54146) and we'll hold on to
   all that memory until the end of the compiler run.
   As a bonus, delete_tree_live_info can destroy all the bitmaps by just
   releasing the whole obstack.  */
static bitmap_obstack dse_bitmap_obstack;

/* Obstack for other data.  As for above: Kinda nice to be able to
   throw it all away at the end in one big sweep.  */
static struct obstack dse_obstack;

/* Scratch bitmap for cselib's cselib_expand_value_rtx.  */
static bitmap scratch = NULL;
struct insn_info_type;
/* This structure holds information about a candidate store.  */

  /* False means this is a clobber.  */

  /* False if a single HOST_WIDE_INT bitmap is used for positions_needed.  */

  /* The id of the mem group of the base address.  If rtx_varies_p is
     true, this is -1.  Otherwise, it is the index into the group
     table.  */

  /* This is the cselib value.  */
  cselib_val *cse_base;

  /* The canonized mem.  */

  /* Canonized MEM address for use by canon_true_dependence.  */

  /* The offset of the first byte associated with the operation.  */

  /* The number of bytes covered by the operation.  This is always exact
     and known (rather than -1).  */

  /* A bitmask as wide as the number of bytes in the word that
     contains a 1 if the byte may be needed.  The store is unused if
     all of the bits are 0.  This is used if IS_LARGE is false.  */
  unsigned HOST_WIDE_INT small_bitmask;
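
  /* Illustrative example (not from the original sources): for a 4-byte
     store, SMALL_BITMASK starts out as 0xf, i.e. all four bytes may
     still be needed.  If a later store overwrites byte 2, bit 2 is
     cleared, leaving 0xb; once the mask reaches 0 the whole store is
     dead and the insn becomes a candidate for deletion.  */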
  /* A bitmap with one bit per byte, or null if the number of
     bytes isn't known at compile time.  A cleared bit means
     the position is needed.  Used if IS_LARGE is true.  */

  /* When BITMAP is nonnull, this counts the number of set bits
     (i.e. unneeded bytes) in the bitmap.  If it is equal to
     WIDTH, the whole store is unused.

     When BITMAP is null:
     - the store is definitely not needed when COUNT == 1
     - all the store is needed when COUNT == 0 and RHS is nonnull
     - otherwise we don't know which parts of the store are needed.  */

  /* The next store info for this insn.  */
  class store_info *next;
  /* The right hand side of the store.  This is used if there is a
     subsequent reload of the mem's address somewhere later in the
     basic block.  */

  /* If rhs is or holds a constant, this contains that constant,
     otherwise NULL.  */

  /* Set if this store stores the same constant value as the
     REDUNDANT_REASON insn stored.  These aren't eliminated early,
     because doing that might prevent the earlier larger store from
     being eliminated.  */
  struct insn_info_type *redundant_reason;
/* Return a bitmask with the first N low bits set.  */

static unsigned HOST_WIDE_INT
lowpart_bitmask (int n)
{
  unsigned HOST_WIDE_INT mask = HOST_WIDE_INT_M1U;
  return mask >> (HOST_BITS_PER_WIDE_INT - n);
}
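
/* For example, lowpart_bitmask (3) yields 0x7 and lowpart_bitmask (8)
   yields 0xff; record_store uses this to seed
   positions_needed.small_bitmask for a small constant-width store.  */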
static object_allocator<store_info> cse_store_info_pool ("cse_store_info_pool");

static object_allocator<store_info> rtx_store_info_pool ("rtx_store_info_pool");
/* This structure holds information about a load.  These are only
   built for rtx bases.  */

  /* The id of the mem group of the base address.  */

  /* The offset of the first byte associated with the operation.  */

  /* The number of bytes covered by the operation, or -1 if not known.  */

  /* The mem being read.  */

  /* The next read_info for this insn.  */
  class read_info_type *next;

typedef class read_info_type *read_info_t;

static object_allocator<read_info_type> read_info_type_pool ("read_info_pool");
/* One of these records is created for each insn.  */

struct insn_info_type
{
  /* Set true if the insn contains a store but the insn itself cannot
     be deleted.  This is set if the insn is a parallel and there is
     more than one non-dead output or if the insn is in some way
     volatile.  */

  /* This field is only used by the global algorithm.  It is set true
     if the insn contains any read of mem except for a (1).  This is
     also set if the insn is a call or has a clobber mem.  If the insn
     contains a wild read, the use_rec will be null.  */

  /* This is true only for CALL instructions which could potentially read
     any non-frame memory location.  This field is used by the global
     algorithm.  */
  bool non_frame_wild_read;

  /* This field is only used for the processing of const functions.
     These functions cannot read memory, but they can read the stack
     because that is where they may get their parms.  We need to be
     this conservative because, like the store motion pass, we don't
     consider CALL_INSN_FUNCTION_USAGE when processing call insns.
     Moreover, we need to distinguish two cases:
     1. Before reload (register elimination), the stores related to
        outgoing arguments are stack pointer based and thus deemed
        of non-constant base in this pass.  This requires special
        handling but also means that the frame pointer based stores
        need not be killed upon encountering a const function call.
     2. After reload, the stores related to outgoing arguments can be
        either stack pointer or hard frame pointer based.  This means
        that we have no other choice than also killing all the frame
        pointer based stores upon encountering a const function call.
     This field is set after reload for const function calls and before
     reload for const tail function calls on targets where the arg
     pointer is the frame pointer.  Having this set is less severe than
     a wild read: it just means that all the frame related stores are
     killed rather than all the stores.  */

  /* This field is only used for the processing of const functions.
     It is set if the insn may contain a stack pointer based store.  */
  bool stack_pointer_based;

  /* This is true if any of the sets within the store contains a
     cselib base.  Such stores can only be deleted by the local
     algorithm.  */
  bool contains_cselib_groups;
  /* The list of mem sets or mem clobbers that are contained in this
     insn.  If the insn is deletable, it contains only one mem set.
     But it could also contain clobbers.  Insns that contain more than
     one mem set are not deletable, but each of those mems is here in
     order to provide info to delete other insns.  */
  store_info *store_rec;

  /* The linked list of mem uses in this insn.  Only the reads from
     rtx bases are listed here.  The reads to cselib bases are
     completely processed during the first scan and so are never
     created.  */
  read_info_t read_rec;

  /* The live fixed registers.  We assume only fixed registers can
     cause trouble by being clobbered from an expanded pattern;
     storing only the live fixed registers (rather than all registers)
     means less memory needs to be allocated / copied for the individual
     stores.  */
  regset fixed_regs_live;

  /* The prev insn in the basic block.  */
  struct insn_info_type * prev_insn;

  /* The linked list of insns that are in consideration for removal in
     the forwards pass through the basic block.  This pointer may be
     trash as it is not cleared when a wild read occurs.  The only
     time it is guaranteed to be correct is when the traversal starts
     at active_local_stores.  */
  struct insn_info_type * next_local_store;
};

typedef struct insn_info_type *insn_info_t;

static object_allocator<insn_info_type> insn_info_type_pool ("insn_info_pool");
/* The linked list of stores that are under consideration in this
   basic block.  */
static insn_info_t active_local_stores;
static int active_local_stores_len;
struct dse_bb_info_type
{
  /* Pointer to the insn info for the last insn in the block.  These
     are linked so this is how all of the insns are reached.  During
     scanning this is the current insn being scanned.  */
  insn_info_t last_insn;

  /* The info for the global dataflow problem.  */

  /* This is set if the transfer function should AND in the wild_read
     bitmap before applying the kill and gen sets.  That vector knocks
     out most of the bits in the bitmap and thus speeds up the
     operations.  */
  bool apply_wild_read;

  /* The following 4 bitvectors hold information about which positions
     of which stores are live or dead.  They are indexed by
     get_bitmap_index.  */

  /* The set of store positions that exist in this block before a wild read.  */

  /* The set of load positions that exist in this block above the
     same position of a store.  */

  /* The set of stores that reach the top of the block without being
     killed by a read.

     Do not represent the in if it is all ones.  Note that this is
     what the bitvector should logically be initialized to for a set
     intersection problem.  However, like the kill set, this is too
     expensive.  So initially, the in set will only be created for the
     exit block and any block that contains a wild read.  */

  /* The set of stores that reach the bottom of the block from its
     successors.

     Do not represent the in if it is all ones.  Note that this is
     what the bitvector should logically be initialized to for a set
     intersection problem.  However, like the kill and in set, this is
     too expensive.  So what is done is that the confluence operator
     just initializes the vector from one of the out sets of the
     successors of the block.  */

  /* The following bitvector is indexed by the reg number.  It
     contains the set of regs that are live at the current instruction
     being processed.  While it contains info for all of the
     registers, only the hard registers are actually examined.  It is used
     to assure that shift and/or add sequences that are inserted do not
     accidentally clobber live hard regs.  */
};

typedef struct dse_bb_info_type *bb_info_t;
static object_allocator<dse_bb_info_type> dse_bb_info_type_pool
  ("bb_info_pool");

/* Table to hold all bb_infos.  */
static bb_info_t *bb_table;
/* There is a group_info for each rtx base that is used to reference
   memory.  There are also not many of the rtx bases because they are
   very limited in scope.  */

  /* The actual base of the address.  */

  /* The sequential id of the base.  This allows us to have a
     canonical ordering of these that is not based on addresses.  */

  /* True if there are any positions that are to be processed
     globally.  */
  bool process_globally;

  /* True if the base of this group is either the frame_pointer or
     hard_frame_pointer.  */

  /* A mem wrapped around the base pointer for the group in order to do
     read dependency.  It must be given BLKmode in order to encompass all
     the possible offsets from the base.  */

  /* Canonized version of base_mem's address.  */

  /* These two sets of two bitmaps are used to keep track of how many
     stores are actually referencing that position from this base.  We
     only do this for rtx bases as this will be used to assign
     positions in the bitmaps for the global problem.  Bit N is set in
     store1 on the first store for offset N.  Bit N is set in store2
     for the second store to offset N.  This is all we need since we
     only care about offsets that have two or more stores for them.

     The "_n" suffix is for offsets less than 0 and the "_p" suffix is
     for 0 and greater offsets.

     There is one special case here, for stores into the stack frame:
     we will OR store1 into store2 before deciding which stores to look
     at globally.  This is because stores to the stack frame that have
     no other reads before the end of the function can also be
     deleted.  */
  bitmap store1_n, store1_p, store2_n, store2_p;

  /* These bitmaps keep track of which offsets in this group escape
     this function.  An offset escapes if it corresponds to a named
     variable whose addressable flag is set.  */
  bitmap escaped_n, escaped_p;

  /* The positions in this bitmap have the same assignments as the in,
     out, gen and kill bitmaps.  This bitmap is all zeros except for
     the positions that are occupied by stores for this group.  */

  /* The offset_map is used to map the offsets from this base into
     positions in the global bitmaps.  It is only created after all of
     the stores have been scanned and we know which ones we care
     about.  */
  int *offset_map_n, *offset_map_p;
  int offset_map_size_n, offset_map_size_p;
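
  /* Illustrative example (not from the original sources): if offsets
     -8, 0 and 4 are the only ones chosen for the global problem from
     this base, then just those three offsets are assigned bit
     positions in the global bitmaps; the offset_map_n array records
     the positions for the negative offsets and offset_map_p those for
     the non-negative ones.  */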

static object_allocator<group_info> group_info_pool ("rtx_group_info_pool");
/* Index into the rtx_group_vec.  */
static int rtx_group_next_id;

static vec<group_info *> rtx_group_vec;
/* This structure holds the set of changes that are being deferred
   when removing a read operation.  See replace_read.  */
struct deferred_change
{
  /* The mem that is being replaced.  */

  /* The reg it is being replaced with.  */

  struct deferred_change *next;
};

static object_allocator<deferred_change> deferred_change_pool
  ("deferred_change_pool");

static deferred_change *deferred_change_list = NULL;
/* This is true except if cfun->stdarg -- i.e. we cannot do
   this for vararg functions because they play games with the frame.  */
static bool stores_off_frame_dead_at_return;

/* Counter for stats.  */
static int globally_deleted;
static int locally_deleted;

static bitmap all_blocks;

/* Locations that are killed by calls in the global phase.  */
static bitmap kill_on_calls;

/* The number of bits used in the global bitmaps.  */
static unsigned int current_position;
/* Print offset range [OFFSET, OFFSET + WIDTH) to FILE.  */

static void
print_range (FILE *file, poly_int64 offset, poly_int64 width)
{
  print_dec (offset, file, SIGNED);
  fprintf (file, "..");
  print_dec (offset + width, file, SIGNED);
}
616 /*----------------------------------------------------------------------------
620 ----------------------------------------------------------------------------*/
623 /* Hashtable callbacks for maintaining the "bases" field of
624 store_group_info, given that the addresses are function invariants. */
626 struct invariant_group_base_hasher
: nofree_ptr_hash
<group_info
>
628 static inline hashval_t
hash (const group_info
*);
629 static inline bool equal (const group_info
*, const group_info
*);
633 invariant_group_base_hasher::equal (const group_info
*gi1
,
634 const group_info
*gi2
)
636 return rtx_equal_p (gi1
->rtx_base
, gi2
->rtx_base
);
640 invariant_group_base_hasher::hash (const group_info
*gi
)
643 return hash_rtx (gi
->rtx_base
, Pmode
, &do_not_record
, NULL
, false);
646 /* Tables of group_info structures, hashed by base value. */
647 static hash_table
<invariant_group_base_hasher
> *rtx_group_table
;
650 /* Get the GROUP for BASE. Add a new group if it is not there. */
653 get_group_info (rtx base
)
655 struct group_info tmp_gi
;
659 gcc_assert (base
!= NULL_RTX
);
  /* Find the store_base_info structure for BASE, creating a new one
     if necessary.  */
663 tmp_gi
.rtx_base
= base
;
664 slot
= rtx_group_table
->find_slot (&tmp_gi
, INSERT
);
669 *slot
= gi
= group_info_pool
.allocate ();
671 gi
->id
= rtx_group_next_id
++;
672 gi
->base_mem
= gen_rtx_MEM (BLKmode
, base
);
673 gi
->canon_base_addr
= canon_rtx (base
);
674 gi
->store1_n
= BITMAP_ALLOC (&dse_bitmap_obstack
);
675 gi
->store1_p
= BITMAP_ALLOC (&dse_bitmap_obstack
);
676 gi
->store2_n
= BITMAP_ALLOC (&dse_bitmap_obstack
);
677 gi
->store2_p
= BITMAP_ALLOC (&dse_bitmap_obstack
);
678 gi
->escaped_p
= BITMAP_ALLOC (&dse_bitmap_obstack
);
679 gi
->escaped_n
= BITMAP_ALLOC (&dse_bitmap_obstack
);
680 gi
->group_kill
= BITMAP_ALLOC (&dse_bitmap_obstack
);
681 gi
->process_globally
= false;
683 (base
== frame_pointer_rtx
) || (base
== hard_frame_pointer_rtx
);
684 gi
->offset_map_size_n
= 0;
685 gi
->offset_map_size_p
= 0;
686 gi
->offset_map_n
= NULL
;
687 gi
->offset_map_p
= NULL
;
688 rtx_group_vec
.safe_push (gi
);
695 /* Initialization of data structures. */
701 globally_deleted
= 0;
703 bitmap_obstack_initialize (&dse_bitmap_obstack
);
704 gcc_obstack_init (&dse_obstack
);
706 scratch
= BITMAP_ALLOC (®_obstack
);
707 kill_on_calls
= BITMAP_ALLOC (&dse_bitmap_obstack
);
710 rtx_group_table
= new hash_table
<invariant_group_base_hasher
> (11);
712 bb_table
= XNEWVEC (bb_info_t
, last_basic_block_for_fn (cfun
));
713 rtx_group_next_id
= 0;
715 stores_off_frame_dead_at_return
= !cfun
->stdarg
;
717 init_alias_analysis ();
722 /*----------------------------------------------------------------------------
725 Scan all of the insns. Any random ordering of the blocks is fine.
726 Each block is scanned in forward order to accommodate cselib which
727 is used to remove stores with non-constant bases.
728 ----------------------------------------------------------------------------*/
730 /* Delete all of the store_info recs from INSN_INFO. */
733 free_store_info (insn_info_t insn_info
)
735 store_info
*cur
= insn_info
->store_rec
;
738 store_info
*next
= cur
->next
;
740 BITMAP_FREE (cur
->positions_needed
.large
.bmap
);
742 cse_store_info_pool
.remove (cur
);
744 rtx_store_info_pool
.remove (cur
);
748 insn_info
->cannot_delete
= true;
749 insn_info
->contains_cselib_groups
= false;
750 insn_info
->store_rec
= NULL
;
753 struct note_add_store_info
755 rtx_insn
*first
, *current
;
756 regset fixed_regs_live
;
760 /* Callback for emit_inc_dec_insn_before via note_stores.
761 Check if a register is clobbered which is live afterwards. */
764 note_add_store (rtx loc
, const_rtx expr ATTRIBUTE_UNUSED
, void *data
)
767 note_add_store_info
*info
= (note_add_store_info
*) data
;
772 /* If this register is referenced by the current or an earlier insn,
773 that's OK. E.g. this applies to the register that is being incremented
774 with this addition. */
775 for (insn
= info
->first
;
776 insn
!= NEXT_INSN (info
->current
);
777 insn
= NEXT_INSN (insn
))
778 if (reg_referenced_p (loc
, PATTERN (insn
)))
781 /* If we come here, we have a clobber of a register that's only OK
782 if that register is not live. If we don't have liveness information
783 available, fail now. */
784 if (!info
->fixed_regs_live
)
786 info
->failure
= true;
789 /* Now check if this is a live fixed register. */
790 unsigned int end_regno
= END_REGNO (loc
);
791 for (unsigned int regno
= REGNO (loc
); regno
< end_regno
; ++regno
)
792 if (REGNO_REG_SET_P (info
->fixed_regs_live
, regno
))
793 info
->failure
= true;
796 /* Callback for for_each_inc_dec that emits an INSN that sets DEST to
797 SRC + SRCOFF before insn ARG. */
800 emit_inc_dec_insn_before (rtx mem ATTRIBUTE_UNUSED
,
801 rtx op ATTRIBUTE_UNUSED
,
802 rtx dest
, rtx src
, rtx srcoff
, void *arg
)
804 insn_info_t insn_info
= (insn_info_t
) arg
;
805 rtx_insn
*insn
= insn_info
->insn
, *new_insn
, *cur
;
806 note_add_store_info info
;
808 /* We can reuse all operands without copying, because we are about
809 to delete the insn that contained it. */
813 emit_insn (gen_add3_insn (dest
, src
, srcoff
));
814 new_insn
= get_insns ();
818 new_insn
= gen_move_insn (dest
, src
);
819 info
.first
= new_insn
;
820 info
.fixed_regs_live
= insn_info
->fixed_regs_live
;
821 info
.failure
= false;
822 for (cur
= new_insn
; cur
; cur
= NEXT_INSN (cur
))
825 note_stores (cur
, note_add_store
, &info
);
828 /* If a failure was flagged above, return 1 so that for_each_inc_dec will
829 return it immediately, communicating the failure to its caller. */
833 emit_insn_before (new_insn
, insn
);
838 /* Before we delete INSN_INFO->INSN, make sure that the auto inc/dec, if it
839 is there, is split into a separate insn.
840 Return true on success (or if there was nothing to do), false on failure. */
843 check_for_inc_dec_1 (insn_info_t insn_info
)
845 rtx_insn
*insn
= insn_info
->insn
;
846 rtx note
= find_reg_note (insn
, REG_INC
, NULL_RTX
);
848 return for_each_inc_dec (PATTERN (insn
), emit_inc_dec_insn_before
,
854 /* Entry point for postreload. If you work on reload_cse, or you need this
855 anywhere else, consider if you can provide register liveness information
856 and add a parameter to this function so that it can be passed down in
857 insn_info.fixed_regs_live. */
859 check_for_inc_dec (rtx_insn
*insn
)
861 insn_info_type insn_info
;
864 insn_info
.insn
= insn
;
865 insn_info
.fixed_regs_live
= NULL
;
866 note
= find_reg_note (insn
, REG_INC
, NULL_RTX
);
868 return for_each_inc_dec (PATTERN (insn
), emit_inc_dec_insn_before
,
873 /* Delete the insn and free all of the fields inside INSN_INFO. */
876 delete_dead_store_insn (insn_info_t insn_info
)
878 read_info_t read_info
;
883 if (!check_for_inc_dec_1 (insn_info
))
885 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
886 fprintf (dump_file
, "Locally deleting insn %d\n",
887 INSN_UID (insn_info
->insn
));
889 free_store_info (insn_info
);
890 read_info
= insn_info
->read_rec
;
894 read_info_t next
= read_info
->next
;
895 read_info_type_pool
.remove (read_info
);
898 insn_info
->read_rec
= NULL
;
900 delete_insn (insn_info
->insn
);
902 insn_info
->insn
= NULL
;
904 insn_info
->wild_read
= false;
/* Return whether DECL, a local variable, can possibly escape the current
   function scope.  */
911 local_variable_can_escape (tree decl
)
913 if (TREE_ADDRESSABLE (decl
))
916 /* If this is a partitioned variable, we need to consider all the variables
917 in the partition. This is necessary because a store into one of them can
918 be replaced with a store into another and this may not change the outcome
919 of the escape analysis. */
920 if (cfun
->gimple_df
->decls_to_pointers
!= NULL
)
922 tree
*namep
= cfun
->gimple_df
->decls_to_pointers
->get (decl
);
924 return TREE_ADDRESSABLE (*namep
);
930 /* Return whether EXPR can possibly escape the current function scope. */
933 can_escape (tree expr
)
938 base
= get_base_address (expr
);
940 && !may_be_aliased (base
)
942 && !DECL_EXTERNAL (base
)
943 && !TREE_STATIC (base
)
944 && local_variable_can_escape (base
)))
949 /* Set the store* bitmaps offset_map_size* fields in GROUP based on
953 set_usage_bits (group_info
*group
, poly_int64 offset
, poly_int64 width
,
956 /* Non-constant offsets and widths act as global kills, so there's no point
957 trying to use them to derive global DSE candidates. */
958 HOST_WIDE_INT i
, const_offset
, const_width
;
959 bool expr_escapes
= can_escape (expr
);
960 if (offset
.is_constant (&const_offset
)
961 && width
.is_constant (&const_width
)
962 && const_offset
> -MAX_OFFSET
963 && const_offset
+ const_width
< MAX_OFFSET
)
964 for (i
= const_offset
; i
< const_offset
+ const_width
; ++i
)
972 store1
= group
->store1_n
;
973 store2
= group
->store2_n
;
974 escaped
= group
->escaped_n
;
979 store1
= group
->store1_p
;
980 store2
= group
->store2_p
;
981 escaped
= group
->escaped_p
;
985 if (!bitmap_set_bit (store1
, ai
))
986 bitmap_set_bit (store2
, ai
);
991 if (group
->offset_map_size_n
< ai
)
992 group
->offset_map_size_n
= ai
;
996 if (group
->offset_map_size_p
< ai
)
997 group
->offset_map_size_p
= ai
;
1001 bitmap_set_bit (escaped
, ai
);
1006 reset_active_stores (void)
1008 active_local_stores
= NULL
;
1009 active_local_stores_len
= 0;
1012 /* Free all READ_REC of the LAST_INSN of BB_INFO. */
1015 free_read_records (bb_info_t bb_info
)
1017 insn_info_t insn_info
= bb_info
->last_insn
;
1018 read_info_t
*ptr
= &insn_info
->read_rec
;
1021 read_info_t next
= (*ptr
)->next
;
1022 read_info_type_pool
.remove (*ptr
);
1027 /* Set the BB_INFO so that the last insn is marked as a wild read. */
1030 add_wild_read (bb_info_t bb_info
)
1032 insn_info_t insn_info
= bb_info
->last_insn
;
1033 insn_info
->wild_read
= true;
1034 free_read_records (bb_info
);
1035 reset_active_stores ();
1038 /* Set the BB_INFO so that the last insn is marked as a wild read of
1039 non-frame locations. */
1042 add_non_frame_wild_read (bb_info_t bb_info
)
1044 insn_info_t insn_info
= bb_info
->last_insn
;
1045 insn_info
->non_frame_wild_read
= true;
1046 free_read_records (bb_info
);
1047 reset_active_stores ();
1050 /* Return true if X is a constant or one of the registers that behave
1051 as a constant over the life of a function. This is equivalent to
1052 !rtx_varies_p for memory addresses. */
1055 const_or_frame_p (rtx x
)
1060 if (GET_CODE (x
) == REG
)
      /* Note that we have to test for the actual rtx used for the frame
	 and arg pointers and not just the register number in case we have
	 eliminated the frame and/or arg pointer and are using it
	 for pseudos.  */
1066 if (x
== frame_pointer_rtx
|| x
== hard_frame_pointer_rtx
1067 /* The arg pointer varies if it is not a fixed register. */
1068 || (x
== arg_pointer_rtx
&& fixed_regs
[ARG_POINTER_REGNUM
])
1069 || x
== pic_offset_table_rtx
)
/* Take all reasonable action to put the address of MEM into the form
   that we can do analysis on.

   The gold standard is to get the address into the form: address +
   OFFSET where address is something that rtx_varies_p considers a
   constant.  When we can get the address in this form, we can do
   global analysis on it.  Note that for constant bases, the address is
   not actually returned, only the group_id; the address can be
   obtained from that.

   If that fails, we try cselib to get a value we can at least use
   locally.  If that fails we return false.

   The GROUP_ID is set to -1 for cselib bases and the index of the
   group for non_varying bases.

   FOR_READ is true if this is a mem read and false if not.  */
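
/* For example (illustrative only, not part of the original comments):
   a store to (mem:SI (plus (reg/f fp) (const_int 8))) canonicalizes to
   the group of the frame pointer with *OFFSET == 8 and a non-negative
   *GROUP_ID, whereas a store through a pointer loaded from memory only
   gets a cselib value and *GROUP_ID == -1.  */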
1096 canon_address (rtx mem
,
1101 machine_mode address_mode
= get_address_mode (mem
);
1102 rtx mem_address
= XEXP (mem
, 0);
1103 rtx expanded_address
, address
;
1106 cselib_lookup (mem_address
, address_mode
, 1, GET_MODE (mem
));
1108 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1110 fprintf (dump_file
, " mem: ");
1111 print_inline_rtx (dump_file
, mem_address
, 0);
1112 fprintf (dump_file
, "\n");
1115 /* First see if just canon_rtx (mem_address) is const or frame,
1116 if not, try cselib_expand_value_rtx and call canon_rtx on that. */
1118 for (expanded
= 0; expanded
< 2; expanded
++)
	  /* Use cselib to replace all of the reg references with the full
	     expression.  This will take care of the case where we have

	     r_x = base + offset;
	     val = *r_x;

	     by making it into

	     val = *(base + offset);  */
1132 expanded_address
= cselib_expand_value_rtx (mem_address
,
	  /* If this fails, just go with the address from the first
	     iteration.  */
1137 if (!expanded_address
)
1141 expanded_address
= mem_address
;
1143 /* Split the address into canonical BASE + OFFSET terms. */
1144 address
= canon_rtx (expanded_address
);
1148 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1152 fprintf (dump_file
, "\n after cselib_expand address: ");
1153 print_inline_rtx (dump_file
, expanded_address
, 0);
1154 fprintf (dump_file
, "\n");
1157 fprintf (dump_file
, "\n after canon_rtx address: ");
1158 print_inline_rtx (dump_file
, address
, 0);
1159 fprintf (dump_file
, "\n");
1162 if (GET_CODE (address
) == CONST
)
1163 address
= XEXP (address
, 0);
1165 address
= strip_offset_and_add (address
, offset
);
1167 if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (mem
))
1168 && const_or_frame_p (address
))
1170 group_info
*group
= get_group_info (address
);
1172 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1174 fprintf (dump_file
, " gid=%d offset=", group
->id
);
1175 print_dec (*offset
, dump_file
);
1176 fprintf (dump_file
, "\n");
1179 *group_id
= group
->id
;
1184 *base
= cselib_lookup (address
, address_mode
, true, GET_MODE (mem
));
1189 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1190 fprintf (dump_file
, " no cselib val - should be a wild read.\n");
1193 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1195 fprintf (dump_file
, " varying cselib base=%u:%u offset = ",
1196 (*base
)->uid
, (*base
)->hash
);
1197 print_dec (*offset
, dump_file
);
1198 fprintf (dump_file
, "\n");
1204 /* Clear the rhs field from the active_local_stores array. */
1207 clear_rhs_from_active_local_stores (void)
1209 insn_info_t ptr
= active_local_stores
;
1213 store_info
*store_info
= ptr
->store_rec
;
1214 /* Skip the clobbers. */
1215 while (!store_info
->is_set
)
1216 store_info
= store_info
->next
;
1218 store_info
->rhs
= NULL
;
1219 store_info
->const_rhs
= NULL
;
1221 ptr
= ptr
->next_local_store
;
1226 /* Mark byte POS bytes from the beginning of store S_INFO as unneeded. */
1229 set_position_unneeded (store_info
*s_info
, int pos
)
1231 if (__builtin_expect (s_info
->is_large
, false))
1233 if (bitmap_set_bit (s_info
->positions_needed
.large
.bmap
, pos
))
1234 s_info
->positions_needed
.large
.count
++;
1237 s_info
->positions_needed
.small_bitmask
1238 &= ~(HOST_WIDE_INT_1U
<< pos
);
1241 /* Mark the whole store S_INFO as unneeded. */
1244 set_all_positions_unneeded (store_info
*s_info
)
1246 if (__builtin_expect (s_info
->is_large
, false))
1248 HOST_WIDE_INT width
;
1249 if (s_info
->width
.is_constant (&width
))
1251 bitmap_set_range (s_info
->positions_needed
.large
.bmap
, 0, width
);
1252 s_info
->positions_needed
.large
.count
= width
;
1256 gcc_checking_assert (!s_info
->positions_needed
.large
.bmap
);
1257 s_info
->positions_needed
.large
.count
= 1;
1261 s_info
->positions_needed
.small_bitmask
= HOST_WIDE_INT_0U
;
1264 /* Return TRUE if any bytes from S_INFO store are needed. */
1267 any_positions_needed_p (store_info
*s_info
)
1269 if (__builtin_expect (s_info
->is_large
, false))
1271 HOST_WIDE_INT width
;
1272 if (s_info
->width
.is_constant (&width
))
1274 gcc_checking_assert (s_info
->positions_needed
.large
.bmap
);
1275 return s_info
->positions_needed
.large
.count
< width
;
1279 gcc_checking_assert (!s_info
->positions_needed
.large
.bmap
);
1280 return s_info
->positions_needed
.large
.count
== 0;
1284 return (s_info
->positions_needed
.small_bitmask
!= HOST_WIDE_INT_0U
);
1287 /* Return TRUE if all bytes START through START+WIDTH-1 from S_INFO
1288 store are known to be needed. */
1291 all_positions_needed_p (store_info
*s_info
, poly_int64 start
,
1294 gcc_assert (s_info
->rhs
);
1295 if (!s_info
->width
.is_constant ())
1297 gcc_assert (s_info
->is_large
1298 && !s_info
->positions_needed
.large
.bmap
);
1299 return s_info
->positions_needed
.large
.count
== 0;
1302 /* Otherwise, if START and WIDTH are non-constant, we're asking about
1303 a non-constant region of a constant-sized store. We can't say for
1304 sure that all positions are needed. */
1305 HOST_WIDE_INT const_start
, const_width
;
1306 if (!start
.is_constant (&const_start
)
1307 || !width
.is_constant (&const_width
))
1310 if (__builtin_expect (s_info
->is_large
, false))
1312 for (HOST_WIDE_INT i
= const_start
; i
< const_start
+ const_width
; ++i
)
1313 if (bitmap_bit_p (s_info
->positions_needed
.large
.bmap
, i
))
  unsigned HOST_WIDE_INT mask
    = lowpart_bitmask (const_width) << const_start;
  return (s_info->positions_needed.small_bitmask & mask) == mask;
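
  /* Illustrative example (not from the original sources): for
     CONST_START == 2 and CONST_WIDTH == 3 the mask is 0b11100, so the
     answer is true only if bytes 2, 3 and 4 of the store are all still
     marked as needed in SMALL_BITMASK.  */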
1326 static rtx
get_stored_val (store_info
*, machine_mode
, poly_int64
,
1327 poly_int64
, basic_block
, bool);
1330 /* BODY is an instruction pattern that belongs to INSN. Return 1 if
1331 there is a candidate store, after adding it to the appropriate
1332 local store group if so. */
1335 record_store (rtx body
, bb_info_t bb_info
)
1337 rtx mem
, rhs
, const_rhs
, mem_addr
;
1338 poly_int64 offset
= 0;
1339 poly_int64 width
= 0;
1340 insn_info_t insn_info
= bb_info
->last_insn
;
1341 store_info
*store_info
= NULL
;
1343 cselib_val
*base
= NULL
;
1344 insn_info_t ptr
, last
, redundant_reason
;
1345 bool store_is_unused
;
1347 if (GET_CODE (body
) != SET
&& GET_CODE (body
) != CLOBBER
)
1350 mem
= SET_DEST (body
);
1352 /* If this is not used, then this cannot be used to keep the insn
1353 from being deleted. On the other hand, it does provide something
1354 that can be used to prove that another store is dead. */
1356 = (find_reg_note (insn_info
->insn
, REG_UNUSED
, mem
) != NULL
);
1358 /* Check whether that value is a suitable memory location. */
      /* If the set or clobber is unused, then it does not affect our
	 ability to get rid of the entire insn.  */
1363 if (!store_is_unused
)
1364 insn_info
->cannot_delete
= true;
1368 /* At this point we know mem is a mem. */
1369 if (GET_MODE (mem
) == BLKmode
)
1371 HOST_WIDE_INT const_size
;
1372 if (GET_CODE (XEXP (mem
, 0)) == SCRATCH
)
1374 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1375 fprintf (dump_file
, " adding wild read for (clobber (mem:BLK (scratch))\n");
1376 add_wild_read (bb_info
);
1377 insn_info
->cannot_delete
= true;
1380 /* Handle (set (mem:BLK (addr) [... S36 ...]) (const_int 0))
1381 as memset (addr, 0, 36); */
1382 else if (!MEM_SIZE_KNOWN_P (mem
)
1383 || maybe_le (MEM_SIZE (mem
), 0)
1384 /* This is a limit on the bitmap size, which is only relevant
1385 for constant-sized MEMs. */
1386 || (MEM_SIZE (mem
).is_constant (&const_size
)
1387 && const_size
> MAX_OFFSET
)
1388 || GET_CODE (body
) != SET
1389 || !CONST_INT_P (SET_SRC (body
)))
1391 if (!store_is_unused
)
	  /* If the set or clobber is unused, then it does not affect our
	     ability to get rid of the entire insn.  */
1395 insn_info
->cannot_delete
= true;
1396 clear_rhs_from_active_local_stores ();
1402 /* We can still process a volatile mem, we just cannot delete it. */
1403 if (MEM_VOLATILE_P (mem
))
1404 insn_info
->cannot_delete
= true;
1406 if (!canon_address (mem
, &group_id
, &offset
, &base
))
1408 clear_rhs_from_active_local_stores ();
1412 if (GET_MODE (mem
) == BLKmode
)
1413 width
= MEM_SIZE (mem
);
1415 width
= GET_MODE_SIZE (GET_MODE (mem
));
1417 if (!endpoint_representable_p (offset
, width
))
1419 clear_rhs_from_active_local_stores ();
1423 if (known_eq (width
, 0))
1428 /* In the restrictive case where the base is a constant or the
1429 frame pointer we can do global analysis. */
1432 = rtx_group_vec
[group_id
];
1433 tree expr
= MEM_EXPR (mem
);
1435 store_info
= rtx_store_info_pool
.allocate ();
1436 set_usage_bits (group
, offset
, width
, expr
);
1438 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1440 fprintf (dump_file
, " processing const base store gid=%d",
1442 print_range (dump_file
, offset
, width
);
1443 fprintf (dump_file
, "\n");
1448 if (may_be_sp_based_p (XEXP (mem
, 0)))
1449 insn_info
->stack_pointer_based
= true;
1450 insn_info
->contains_cselib_groups
= true;
1452 store_info
= cse_store_info_pool
.allocate ();
1455 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1457 fprintf (dump_file
, " processing cselib store ");
1458 print_range (dump_file
, offset
, width
);
1459 fprintf (dump_file
, "\n");
1463 const_rhs
= rhs
= NULL_RTX
;
1464 if (GET_CODE (body
) == SET
1465 /* No place to keep the value after ra. */
1466 && !reload_completed
1467 && (REG_P (SET_SRC (body
))
1468 || GET_CODE (SET_SRC (body
)) == SUBREG
1469 || CONSTANT_P (SET_SRC (body
)))
1470 && !MEM_VOLATILE_P (mem
)
      /* Sometimes the store and reload is used for truncation and
	 rounding.  */
1473 && !(FLOAT_MODE_P (GET_MODE (mem
)) && (flag_float_store
)))
1475 rhs
= SET_SRC (body
);
1476 if (CONSTANT_P (rhs
))
1478 else if (body
== PATTERN (insn_info
->insn
))
1480 rtx tem
= find_reg_note (insn_info
->insn
, REG_EQUAL
, NULL_RTX
);
1481 if (tem
&& CONSTANT_P (XEXP (tem
, 0)))
1482 const_rhs
= XEXP (tem
, 0);
1484 if (const_rhs
== NULL_RTX
&& REG_P (rhs
))
1486 rtx tem
= cselib_expand_value_rtx (rhs
, scratch
, 5);
1488 if (tem
&& CONSTANT_P (tem
))
  /* Check to see if this store causes some other stores to be
     dead.  */
1495 ptr
= active_local_stores
;
1497 redundant_reason
= NULL
;
1498 mem
= canon_rtx (mem
);
1501 mem_addr
= base
->val_rtx
;
1504 group_info
*group
= rtx_group_vec
[group_id
];
1505 mem_addr
= group
->canon_base_addr
;
1507 if (maybe_ne (offset
, 0))
1508 mem_addr
= plus_constant (get_address_mode (mem
), mem_addr
, offset
);
1512 insn_info_t next
= ptr
->next_local_store
;
1513 class store_info
*s_info
= ptr
->store_rec
;
      /* Skip the clobbers.  We delete the active insn if this insn
	 shadows the set.  To have been put on the active list, it
	 has exactly one set.  */
1519 while (!s_info
->is_set
)
1520 s_info
= s_info
->next
;
1522 if (s_info
->group_id
== group_id
&& s_info
->cse_base
== base
)
1525 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1527 fprintf (dump_file
, " trying store in insn=%d gid=%d",
1528 INSN_UID (ptr
->insn
), s_info
->group_id
);
1529 print_range (dump_file
, s_info
->offset
, s_info
->width
);
1530 fprintf (dump_file
, "\n");
1533 /* Even if PTR won't be eliminated as unneeded, if both
1534 PTR and this insn store the same constant value, we might
1535 eliminate this insn instead. */
1536 if (s_info
->const_rhs
1538 && known_subrange_p (offset
, width
,
1539 s_info
->offset
, s_info
->width
)
1540 && all_positions_needed_p (s_info
, offset
- s_info
->offset
,
	      /* We can only remove the later store if the earlier store
		 aliases at least all accesses of the later one.  */
1544 && (MEM_ALIAS_SET (mem
) == MEM_ALIAS_SET (s_info
->mem
)
1545 || alias_set_subset_of (MEM_ALIAS_SET (mem
),
1546 MEM_ALIAS_SET (s_info
->mem
))))
1548 if (GET_MODE (mem
) == BLKmode
)
1550 if (GET_MODE (s_info
->mem
) == BLKmode
1551 && s_info
->const_rhs
== const_rhs
)
1552 redundant_reason
= ptr
;
1554 else if (s_info
->const_rhs
== const0_rtx
1555 && const_rhs
== const0_rtx
)
1556 redundant_reason
= ptr
;
1561 val
= get_stored_val (s_info
, GET_MODE (mem
), offset
, width
,
1562 BLOCK_FOR_INSN (insn_info
->insn
),
1564 if (get_insns () != NULL
)
1567 if (val
&& rtx_equal_p (val
, const_rhs
))
1568 redundant_reason
= ptr
;
1572 HOST_WIDE_INT begin_unneeded
, const_s_width
, const_width
;
1573 if (known_subrange_p (s_info
->offset
, s_info
->width
, offset
, width
))
1574 /* The new store touches every byte that S_INFO does. */
1575 set_all_positions_unneeded (s_info
);
1576 else if ((offset
- s_info
->offset
).is_constant (&begin_unneeded
)
1577 && s_info
->width
.is_constant (&const_s_width
)
1578 && width
.is_constant (&const_width
))
1580 HOST_WIDE_INT end_unneeded
= begin_unneeded
+ const_width
;
1581 begin_unneeded
= MAX (begin_unneeded
, 0);
1582 end_unneeded
= MIN (end_unneeded
, const_s_width
);
1583 for (i
= begin_unneeded
; i
< end_unneeded
; ++i
)
1584 set_position_unneeded (s_info
, i
);
1588 /* We don't know which parts of S_INFO are needed and
1589 which aren't, so invalidate the RHS. */
1591 s_info
->const_rhs
= NULL
;
1594 else if (s_info
->rhs
)
1595 /* Need to see if it is possible for this store to overwrite
1596 the value of store_info. If it is, set the rhs to NULL to
1597 keep it from being used to remove a load. */
1599 if (canon_output_dependence (s_info
->mem
, true,
1600 mem
, GET_MODE (mem
),
1604 s_info
->const_rhs
= NULL
;
1608 /* An insn can be deleted if every position of every one of
1609 its s_infos is zero. */
1610 if (any_positions_needed_p (s_info
))
1615 insn_info_t insn_to_delete
= ptr
;
1617 active_local_stores_len
--;
1619 last
->next_local_store
= ptr
->next_local_store
;
1621 active_local_stores
= ptr
->next_local_store
;
1623 if (!insn_to_delete
->cannot_delete
)
1624 delete_dead_store_insn (insn_to_delete
);
1632 /* Finish filling in the store_info. */
1633 store_info
->next
= insn_info
->store_rec
;
1634 insn_info
->store_rec
= store_info
;
1635 store_info
->mem
= mem
;
1636 store_info
->mem_addr
= mem_addr
;
1637 store_info
->cse_base
= base
;
1638 HOST_WIDE_INT const_width
;
1639 if (!width
.is_constant (&const_width
))
1641 store_info
->is_large
= true;
1642 store_info
->positions_needed
.large
.count
= 0;
1643 store_info
->positions_needed
.large
.bmap
= NULL
;
1645 else if (const_width
> HOST_BITS_PER_WIDE_INT
)
1647 store_info
->is_large
= true;
1648 store_info
->positions_needed
.large
.count
= 0;
1649 store_info
->positions_needed
.large
.bmap
= BITMAP_ALLOC (&dse_bitmap_obstack
);
1653 store_info
->is_large
= false;
1654 store_info
->positions_needed
.small_bitmask
1655 = lowpart_bitmask (const_width
);
1657 store_info
->group_id
= group_id
;
1658 store_info
->offset
= offset
;
1659 store_info
->width
= width
;
1660 store_info
->is_set
= GET_CODE (body
) == SET
;
1661 store_info
->rhs
= rhs
;
1662 store_info
->const_rhs
= const_rhs
;
1663 store_info
->redundant_reason
= redundant_reason
;
  /* If this is a clobber, we return 0.  We will only be able to
     delete this insn if there is only one USED store, but we
     can use the clobber to delete other stores earlier.  */
1668 return store_info
->is_set
? 1 : 0;
1673 dump_insn_info (const char * start
, insn_info_t insn_info
)
1675 fprintf (dump_file
, "%s insn=%d %s\n", start
,
1676 INSN_UID (insn_info
->insn
),
1677 insn_info
->store_rec
? "has store" : "naked");
/* If the modes are different and the value's source and target do not
   line up, we need to extract the value from the lower part of the rhs
   of the store, shift it, and then put it into a form that can be shoved
   into the read_insn.  This function generates a right SHIFT of a
   value that is at least ACCESS_SIZE bytes wide of READ_MODE.  The
   shift sequence is returned or NULL if we failed to find a
   shift.  */
1690 find_shift_sequence (poly_int64 access_size
,
1691 store_info
*store_info
,
1692 machine_mode read_mode
,
1693 poly_int64 shift
, bool speed
, bool require_cst
)
1695 machine_mode store_mode
= GET_MODE (store_info
->mem
);
1696 scalar_int_mode new_mode
;
1697 rtx read_reg
= NULL
;
  /* Some machines like the x86 have shift insns for each size of
     operand.  Other machines like the ppc or the ia-64 may only have
     shift insns that shift values within 32 or 64 bit registers.
     This loop tries to find the smallest shift insn that will right
     justify the value we want to read but is available in one insn on
     the machine.  */
1706 opt_scalar_int_mode new_mode_iter
;
1707 FOR_EACH_MODE_FROM (new_mode_iter
,
1708 smallest_int_mode_for_size (access_size
* BITS_PER_UNIT
))
1710 rtx target
, new_reg
, new_lhs
;
1711 rtx_insn
*shift_seq
, *insn
;
1714 new_mode
= new_mode_iter
.require ();
1715 if (GET_MODE_BITSIZE (new_mode
) > BITS_PER_WORD
)
1718 /* If a constant was stored into memory, try to simplify it here,
1719 otherwise the cost of the shift might preclude this optimization
1720 e.g. at -Os, even when no actual shift will be needed. */
1721 if (store_info
->const_rhs
)
1723 poly_uint64 byte
= subreg_lowpart_offset (new_mode
, store_mode
);
1724 rtx ret
= simplify_subreg (new_mode
, store_info
->const_rhs
,
1726 if (ret
&& CONSTANT_P (ret
))
1728 rtx shift_rtx
= gen_int_shift_amount (new_mode
, shift
);
1729 ret
= simplify_const_binary_operation (LSHIFTRT
, new_mode
,
1731 if (ret
&& CONSTANT_P (ret
))
1733 byte
= subreg_lowpart_offset (read_mode
, new_mode
);
1734 ret
= simplify_subreg (read_mode
, ret
, new_mode
, byte
);
1735 if (ret
&& CONSTANT_P (ret
)
1736 && (set_src_cost (ret
, read_mode
, speed
)
1737 <= COSTS_N_INSNS (1)))
1746 /* Try a wider mode if truncating the store mode to NEW_MODE
1747 requires a real instruction. */
1748 if (maybe_lt (GET_MODE_SIZE (new_mode
), GET_MODE_SIZE (store_mode
))
1749 && !TRULY_NOOP_TRUNCATION_MODES_P (new_mode
, store_mode
))
1752 /* Also try a wider mode if the necessary punning is either not
1753 desirable or not possible. */
1754 if (!CONSTANT_P (store_info
->rhs
)
1755 && !targetm
.modes_tieable_p (new_mode
, store_mode
))
1758 new_reg
= gen_reg_rtx (new_mode
);
1762 /* In theory we could also check for an ashr. Ian Taylor knows
1763 of one dsp where the cost of these two was not the same. But
1764 this really is a rare case anyway. */
1765 target
= expand_binop (new_mode
, lshr_optab
, new_reg
,
1766 gen_int_shift_amount (new_mode
, shift
),
1767 new_reg
, 1, OPTAB_DIRECT
);
1769 shift_seq
= get_insns ();
1772 if (target
!= new_reg
|| shift_seq
== NULL
)
1776 for (insn
= shift_seq
; insn
!= NULL_RTX
; insn
= NEXT_INSN (insn
))
1778 cost
+= insn_cost (insn
, speed
);
1780 /* The computation up to here is essentially independent
1781 of the arguments and could be precomputed. It may
1782 not be worth doing so. We could precompute if
1783 worthwhile or at least cache the results. The result
1784 technically depends on both SHIFT and ACCESS_SIZE,
1785 but in practice the answer will depend only on ACCESS_SIZE. */
1787 if (cost
> COSTS_N_INSNS (1))
1790 new_lhs
= extract_low_bits (new_mode
, store_mode
,
1791 copy_rtx (store_info
->rhs
));
1792 if (new_lhs
== NULL_RTX
)
      /* We found an acceptable shift.  Generate a move to
	 take the value from the store and put it into the
	 shift pseudo, then shift it, then generate another
	 move to put it into the target of the read.  */
1799 emit_move_insn (new_reg
, new_lhs
);
1800 emit_insn (shift_seq
);
1801 read_reg
= extract_low_bits (read_mode
, new_mode
, new_reg
);
/* Callback for note_stores to find the hard regs set or clobbered by
   insn.  Data is a bitmap of the hardregs set so far.  */
1813 look_for_hardregs (rtx x
, const_rtx pat ATTRIBUTE_UNUSED
, void *data
)
1815 bitmap regs_set
= (bitmap
) data
;
1818 && HARD_REGISTER_P (x
))
1819 bitmap_set_range (regs_set
, REGNO (x
), REG_NREGS (x
));
1822 /* Helper function for replace_read and record_store.
1823 Attempt to return a value of mode READ_MODE stored in STORE_INFO,
1824 consisting of READ_WIDTH bytes starting from READ_OFFSET. Return NULL
   if not successful.  If REQUIRE_CST is true, always return a constant.  */
1828 get_stored_val (store_info
*store_info
, machine_mode read_mode
,
1829 poly_int64 read_offset
, poly_int64 read_width
,
1830 basic_block bb
, bool require_cst
)
1832 machine_mode store_mode
= GET_MODE (store_info
->mem
);
  /* To get here the read is within the boundaries of the write so
     shift will never be negative.  Start out with the shift being in
     bytes.  */
1839 if (store_mode
== BLKmode
)
1841 else if (BYTES_BIG_ENDIAN
)
1842 gap
= ((store_info
->offset
+ store_info
->width
)
1843 - (read_offset
+ read_width
));
1845 gap
= read_offset
- store_info
->offset
;
1847 if (gap
.is_constant () && maybe_ne (gap
, 0))
1849 poly_int64 shift
= gap
* BITS_PER_UNIT
;
1850 poly_int64 access_size
= GET_MODE_SIZE (read_mode
) + gap
;
1851 read_reg
= find_shift_sequence (access_size
, store_info
, read_mode
,
1852 shift
, optimize_bb_for_speed_p (bb
),
1855 else if (store_mode
== BLKmode
)
1857 /* The store is a memset (addr, const_val, const_size). */
1858 gcc_assert (CONST_INT_P (store_info
->rhs
));
1859 scalar_int_mode int_store_mode
;
1860 if (!int_mode_for_mode (read_mode
).exists (&int_store_mode
))
1861 read_reg
= NULL_RTX
;
1862 else if (store_info
->rhs
== const0_rtx
)
1863 read_reg
= extract_low_bits (read_mode
, int_store_mode
, const0_rtx
);
1864 else if (GET_MODE_BITSIZE (int_store_mode
) > HOST_BITS_PER_WIDE_INT
1865 || BITS_PER_UNIT
>= HOST_BITS_PER_WIDE_INT
)
1866 read_reg
= NULL_RTX
;
1869 unsigned HOST_WIDE_INT c
1870 = INTVAL (store_info
->rhs
)
1871 & ((HOST_WIDE_INT_1
<< BITS_PER_UNIT
) - 1);
1872 int shift
= BITS_PER_UNIT
;
1873 while (shift
< HOST_BITS_PER_WIDE_INT
)
1878 read_reg
= gen_int_mode (c
, int_store_mode
);
1879 read_reg
= extract_low_bits (read_mode
, int_store_mode
, read_reg
);
1882 else if (store_info
->const_rhs
1884 || GET_MODE_CLASS (read_mode
) != GET_MODE_CLASS (store_mode
)))
1885 read_reg
= extract_low_bits (read_mode
, store_mode
,
1886 copy_rtx (store_info
->const_rhs
));
1888 read_reg
= extract_low_bits (read_mode
, store_mode
,
1889 copy_rtx (store_info
->rhs
));
1890 if (require_cst
&& read_reg
&& !CONSTANT_P (read_reg
))
1891 read_reg
= NULL_RTX
;
1895 /* Take a sequence of:
1918 Depending on the alignment and the mode of the store and
1922 The STORE_INFO and STORE_INSN are for the store and READ_INFO
1923 and READ_INSN are for the read. Return true if the replacement
1927 replace_read (store_info
*store_info
, insn_info_t store_insn
,
1928 read_info_t read_info
, insn_info_t read_insn
, rtx
*loc
,
1931 machine_mode store_mode
= GET_MODE (store_info
->mem
);
1932 machine_mode read_mode
= GET_MODE (read_info
->mem
);
1933 rtx_insn
*insns
, *this_insn
;
1940 /* Create a sequence of instructions to set up the read register.
1941 This sequence goes immediately before the store and its result
1942 is read by the load.
1944 We need to keep this in perspective. We are replacing a read
1945 with a sequence of insns, but the read will almost certainly be
1946 in cache, so it is not going to be an expensive one. Thus, we
1947 are not willing to do a multi insn shift or worse a subroutine
1948 call to get rid of the read. */
1949 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1950 fprintf (dump_file
, "trying to replace %smode load in insn %d"
1951 " from %smode store in insn %d\n",
1952 GET_MODE_NAME (read_mode
), INSN_UID (read_insn
->insn
),
1953 GET_MODE_NAME (store_mode
), INSN_UID (store_insn
->insn
));
1955 bb
= BLOCK_FOR_INSN (read_insn
->insn
);
1956 read_reg
= get_stored_val (store_info
,
1957 read_mode
, read_info
->offset
, read_info
->width
,
1959 if (read_reg
== NULL_RTX
)
1962 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1963 fprintf (dump_file
, " -- could not extract bits of stored value\n");
1966 /* Force the value into a new register so that it won't be clobbered
1967 between the store and the load. */
1968 read_reg
= copy_to_mode_reg (read_mode
, read_reg
);
1969 insns
= get_insns ();
1972 if (insns
!= NULL_RTX
)
      /* Now we have to scan the set of new instructions to see if the
	 sequence contains any sets of hardregs that happened to be
	 live at this point.  For instance, this can happen if one of
	 the insns sets the CC and the CC happened to be live at that
	 point.  This does occasionally happen, see PR 37922.  */
1979 bitmap regs_set
= BITMAP_ALLOC (®_obstack
);
1981 for (this_insn
= insns
; this_insn
!= NULL_RTX
; this_insn
= NEXT_INSN (this_insn
))
1982 note_stores (this_insn
, look_for_hardregs
, regs_set
);
1984 bitmap_and_into (regs_set
, regs_live
);
1985 if (!bitmap_empty_p (regs_set
))
1987 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
1990 "abandoning replacement because sequence clobbers live hardregs:");
1991 df_print_regset (dump_file
, regs_set
);
1994 BITMAP_FREE (regs_set
);
1997 BITMAP_FREE (regs_set
);
2000 if (validate_change (read_insn
->insn
, loc
, read_reg
, 0))
2002 deferred_change
*change
= deferred_change_pool
.allocate ();
2004 /* Insert this right before the store insn where it will be safe
2005 from later insns that might change it before the read. */
2006 emit_insn_before (insns
, store_insn
->insn
);
2008 /* And now for the kludge part: cselib croaks if you just
2009 return at this point. There are two reasons for this:
2011 1) Cselib has an idea of how many pseudos there are and
2012 that does not include the new ones we just added.
2014 2) Cselib does not know about the move insn we added
2015 above the store_info, and there is no way to tell it
2016 about it, because it has "moved on".
2018 Problem (1) is fixable with a certain amount of engineering.
2019 Problem (2) is requires starting the bb from scratch. This
2022 So we are just going to have to lie. The move/extraction
2023 insns are not really an issue, cselib did not see them. But
2024 the use of the new pseudo read_insn is a real problem because
2025 cselib has not scanned this insn. The way that we solve this
2026 problem is that we are just going to put the mem back for now
2027 and when we are finished with the block, we undo this. We
2028 keep a table of mems to get rid of. At the end of the basic
2029 block we can put them back. */
2031 *loc
= read_info
->mem
;
2032 change
->next
= deferred_change_list
;
2033 deferred_change_list
= change
;
2035 change
->reg
= read_reg
;
2037 /* Get rid of the read_info, from the point of view of the
2038 rest of dse, play like this read never happened. */
2039 read_insn
->read_rec
= read_info
->next
;
2040 read_info_type_pool
.remove (read_info
);
2041 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2043 fprintf (dump_file
, " -- replaced the loaded MEM with ");
2044 print_simple_rtl (dump_file
, read_reg
);
2045 fprintf (dump_file
, "\n");
2051 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
2053 fprintf (dump_file
, " -- replacing the loaded MEM with ");
2054 print_simple_rtl (dump_file
, read_reg
);
2055 fprintf (dump_file
, " led to an invalid instruction\n");
/* Check the address of MEM *LOC and kill any appropriate stores that may
   be active.  */

static void
check_mem_read_rtx (rtx *loc, bb_info_t bb_info)
{
  rtx mem = *loc, mem_addr;
  insn_info_t insn_info;
  poly_int64 offset = 0;
  poly_int64 width = 0;
  cselib_val *base = NULL;
  int group_id;
  read_info_t read_info;

  insn_info = bb_info->last_insn;

  if ((MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
      || MEM_VOLATILE_P (mem))
    {
      if (crtl->stack_protect_guard
          && (MEM_EXPR (mem) == crtl->stack_protect_guard
              || (crtl->stack_protect_guard_decl
                  && MEM_EXPR (mem) == crtl->stack_protect_guard_decl))
          && MEM_VOLATILE_P (mem))
        {
          /* This is either the stack protector canary on the stack,
             which ought to be written by a MEM_VOLATILE_P store and
             thus shouldn't be deleted and is read at the very end of
             function, but shouldn't conflict with any other store.
             Or it is __stack_chk_guard variable or TLS or whatever else
             MEM holding the canary value, which really shouldn't be
             ever modified in -fstack-protector* protected functions,
             otherwise the prologue store wouldn't match the epilogue
             check.  */
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, " stack protector canary read ignored.\n");
          insn_info->cannot_delete = true;
          return;
        }

      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " adding wild read, volatile or barrier.\n");
      add_wild_read (bb_info);
      insn_info->cannot_delete = true;
      return;
    }

  /* If it is reading readonly mem, then there can be no conflict with
     another write.  */
  if (MEM_READONLY_P (mem))
    return;

  if (!canon_address (mem, &group_id, &offset, &base))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " adding wild read, canon_address failure.\n");
      add_wild_read (bb_info);
      return;
    }

  if (GET_MODE (mem) == BLKmode)
    width = -1;
  else
    width = GET_MODE_SIZE (GET_MODE (mem));

  if (!endpoint_representable_p (offset, known_eq (width, -1) ? 1 : width))
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, " adding wild read, due to overflow.\n");
      add_wild_read (bb_info);
      return;
    }

  read_info = read_info_type_pool.allocate ();
  read_info->group_id = group_id;
  read_info->mem = mem;
  read_info->offset = offset;
  read_info->width = width;
  read_info->next = insn_info->read_rec;
  insn_info->read_rec = read_info;
  if (group_id < 0)
    mem_addr = base->val_rtx;
  else
    {
      group_info *group = rtx_group_vec[group_id];
      mem_addr = group->canon_base_addr;
    }
  if (maybe_ne (offset, 0))
    mem_addr = plus_constant (get_address_mode (mem), mem_addr, offset);

  if (group_id >= 0)
    {
      /* This is the restricted case where the base is a constant or
         the frame pointer and offset is a constant.  */
      insn_info_t i_ptr = active_local_stores;
      insn_info_t last = NULL;

      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          if (!known_size_p (width))
            fprintf (dump_file, " processing const load gid=%d[BLK]\n",
                     group_id);
          else
            {
              fprintf (dump_file, " processing const load gid=%d", group_id);
              print_range (dump_file, offset, width);
              fprintf (dump_file, "\n");
            }
        }

      while (i_ptr)
        {
          bool remove = false;
          store_info *store_info = i_ptr->store_rec;

          /* Skip the clobbers.  */
          while (!store_info->is_set)
            store_info = store_info->next;

          /* There are three cases here.  */
          if (store_info->group_id < 0)
            /* We have a cselib store followed by a read from a
               const base.  */
            remove
              = canon_true_dependence (store_info->mem,
                                       GET_MODE (store_info->mem),
                                       store_info->mem_addr,
                                       mem, mem_addr);

          else if (group_id == store_info->group_id)
            {
              /* This is a block mode load.  We may get lucky and
                 canon_true_dependence may save the day.  */
              if (!known_size_p (width))
                remove
                  = canon_true_dependence (store_info->mem,
                                           GET_MODE (store_info->mem),
                                           store_info->mem_addr,
                                           mem, mem_addr);

              /* If this read is just reading back something that we just
                 stored, rewrite the read.  */
              else
                {
                  if (store_info->rhs
                      && known_subrange_p (offset, width, store_info->offset,
                                           store_info->width)
                      && all_positions_needed_p (store_info,
                                                 offset - store_info->offset,
                                                 width)
                      && replace_read (store_info, i_ptr, read_info,
                                       insn_info, loc, bb_info->regs_live))
                    return;

                  /* The bases are the same, just see if the offsets
                     overlap.  */
                  if (ranges_maybe_overlap_p (offset, width,
                                              store_info->offset,
                                              store_info->width))
                    remove = true;
                }
            }

          /* else
             The else case that is missing here is that the
             bases are constant but different.  There is nothing
             to do here because there is no overlap.  */

          if (remove)
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                dump_insn_info ("removing from active", i_ptr);

              active_local_stores_len--;
              if (last)
                last->next_local_store = i_ptr->next_local_store;
              else
                active_local_stores = i_ptr->next_local_store;
            }
          else
            last = i_ptr;
          i_ptr = i_ptr->next_local_store;
        }
    }
  else
    {
      insn_info_t i_ptr = active_local_stores;
      insn_info_t last = NULL;
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, " processing cselib load mem:");
          print_inline_rtx (dump_file, mem, 0);
          fprintf (dump_file, "\n");
        }

      while (i_ptr)
        {
          bool remove = false;
          store_info *store_info = i_ptr->store_rec;

          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, " processing cselib load against insn %d\n",
                     INSN_UID (i_ptr->insn));

          /* Skip the clobbers.  */
          while (!store_info->is_set)
            store_info = store_info->next;

          /* If this read is just reading back something that we just
             stored, rewrite the read.  */
          if (store_info->rhs
              && store_info->group_id == -1
              && store_info->cse_base == base
              && known_subrange_p (offset, width, store_info->offset,
                                   store_info->width)
              && all_positions_needed_p (store_info,
                                         offset - store_info->offset, width)
              && replace_read (store_info, i_ptr, read_info, insn_info, loc,
                               bb_info->regs_live))
            return;

          remove = canon_true_dependence (store_info->mem,
                                          GET_MODE (store_info->mem),
                                          store_info->mem_addr,
                                          mem, mem_addr);

          if (remove)
            {
              if (dump_file && (dump_flags & TDF_DETAILS))
                dump_insn_info ("removing from active", i_ptr);

              active_local_stores_len--;
              if (last)
                last->next_local_store = i_ptr->next_local_store;
              else
                active_local_stores = i_ptr->next_local_store;
            }
          else
            last = i_ptr;
          i_ptr = i_ptr->next_local_store;
        }
    }
}
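
/* Example (for exposition only): with an 8-byte store to frame offset
   16 still on active_local_stores, a 4-byte load of bytes [20, 24)
   satisfies known_subrange_p and, provided all four positions are
   still needed, is rewritten by replace_read; a load of bytes [12, 20)
   merely overlaps, so the store is only dropped from the active
   list.  */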
/* A note_uses callback in which DATA points to the bb_info.  Run
   check_mem_read_rtx on every MEM found in any part of *LOC.  */

static void
check_mem_read_use (rtx *loc, void *data)
{
  subrtx_ptr_iterator::array_type array;
  FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
    {
      rtx *loc = *iter;
      if (MEM_P (*loc))
        check_mem_read_rtx (loc, (bb_info_t) data);
    }
}
/* Get arguments passed to CALL_INSN.  Return TRUE if successful.
   So far it only handles arguments passed in registers.  */

static bool
get_call_args (rtx call_insn, tree fn, rtx *args, int nargs)
{
  CUMULATIVE_ARGS args_so_far_v;
  cumulative_args_t args_so_far;
  tree arg;
  int idx;

  INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
  args_so_far = pack_cumulative_args (&args_so_far_v);

  for (arg = TYPE_ARG_TYPES (TREE_TYPE (fn)), idx = 0;
       arg != void_list_node && idx < nargs;
       arg = TREE_CHAIN (arg), idx++)
    {
      scalar_int_mode mode;
      rtx reg, link, tmp;

      if (!is_int_mode (TYPE_MODE (TREE_VALUE (arg)), &mode))
        return false;

      function_arg_info arg (mode, /*named=*/true);
      reg = targetm.calls.function_arg (args_so_far, arg);
      if (!reg || !REG_P (reg) || GET_MODE (reg) != mode)
        return false;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn);
           link;
           link = XEXP (link, 1))
        if (GET_CODE (XEXP (link, 0)) == USE)
          {
            scalar_int_mode arg_mode;
            args[idx] = XEXP (XEXP (link, 0), 0);
            if (REG_P (args[idx])
                && REGNO (args[idx]) == REGNO (reg)
                && (GET_MODE (args[idx]) == mode
                    || (is_int_mode (GET_MODE (args[idx]), &arg_mode)
                        && (GET_MODE_SIZE (arg_mode) <= UNITS_PER_WORD)
                        && (GET_MODE_SIZE (arg_mode) > GET_MODE_SIZE (mode)))))
              break;
          }
      if (!link)
        return false;

      tmp = cselib_expand_value_rtx (args[idx], scratch, 5);
      if (GET_MODE (args[idx]) != mode)
        {
          if (!tmp || !CONST_INT_P (tmp))
            return false;
          tmp = gen_int_mode (INTVAL (tmp), mode);
        }
      if (tmp)
        args[idx] = tmp;

      targetm.calls.function_arg_advance (args_so_far, arg);
    }
  if (arg != void_list_node || idx != nargs)
    return false;
  return true;
}
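
/* Example (for exposition only): for a call to memset (p, 0, 32) whose
   three arguments are passed in registers, get_call_args fills ARGS[0],
   ARGS[1] and ARGS[2] with the address, the value and the length;
   scan_insn below only uses the result when ARGS[1] and ARGS[2] are
   CONST_INTs and the length is positive.  */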
/* Return a bitmap of the fixed registers contained in IN.  */

static bitmap
copy_fixed_regs (const_bitmap in)
{
  bitmap ret;

  ret = ALLOC_REG_SET (NULL);
  bitmap_and (ret, in, bitmap_view<HARD_REG_SET> (fixed_reg_set));
  return ret;
}
/* Apply record_store to all candidate stores in INSN.  Mark INSN
   if some part of it is not a candidate store and assigns to a
   non-register target.  */

static void
scan_insn (bb_info_t bb_info, rtx_insn *insn)
{
  rtx body;
  insn_info_type *insn_info = insn_info_type_pool.allocate ();
  int mems_found = 0;
  memset (insn_info, 0, sizeof (struct insn_info_type));

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "\n**scanning insn=%d\n",
             INSN_UID (insn));

  insn_info->prev_insn = bb_info->last_insn;
  insn_info->insn = insn;
  bb_info->last_insn = insn_info;

  if (DEBUG_INSN_P (insn))
    {
      insn_info->cannot_delete = true;
      return;
    }

  /* Look at all of the uses in the insn.  */
  note_uses (&PATTERN (insn), check_mem_read_use, bb_info);

  if (CALL_P (insn))
    {
      bool const_call;
      rtx call, sym;
      tree memset_call = NULL_TREE;

      insn_info->cannot_delete = true;

      /* Const functions cannot do anything bad i.e. read memory,
         however, they can read their parameters which may have
         been pushed onto the stack.
         memset and bzero don't read memory either.  */
      const_call = RTL_CONST_CALL_P (insn);
      if (!const_call
          && (call = get_call_rtx_from (insn))
          && (sym = XEXP (XEXP (call, 0), 0))
          && GET_CODE (sym) == SYMBOL_REF
          && SYMBOL_REF_DECL (sym)
          && TREE_CODE (SYMBOL_REF_DECL (sym)) == FUNCTION_DECL
          && fndecl_built_in_p (SYMBOL_REF_DECL (sym), BUILT_IN_MEMSET))
        memset_call = SYMBOL_REF_DECL (sym);

      if (const_call || memset_call)
        {
          insn_info_t i_ptr = active_local_stores;
          insn_info_t last = NULL;

          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "%s call %d\n",
                     const_call ? "const" : "memset", INSN_UID (insn));

          /* See the head comment of the frame_read field.  */
          if (reload_completed
              /* Tail calls are storing their arguments using
                 arg pointer.  If it is a frame pointer on the target,
                 even before reload we need to kill frame pointer based
                 stores.  */
              || (SIBLING_CALL_P (insn)
                  && HARD_FRAME_POINTER_IS_ARG_POINTER))
            insn_info->frame_read = true;

          /* Loop over the active stores and remove those which are
             killed by the const function call.  */
          while (i_ptr)
            {
              bool remove_store = false;

              /* The stack pointer based stores are always killed.  */
              if (i_ptr->stack_pointer_based)
                remove_store = true;

              /* If the frame is read, the frame related stores are killed.  */
              else if (insn_info->frame_read)
                {
                  store_info *store_info = i_ptr->store_rec;

                  /* Skip the clobbers.  */
                  while (!store_info->is_set)
                    store_info = store_info->next;

                  if (store_info->group_id >= 0
                      && rtx_group_vec[store_info->group_id]->frame_related)
                    remove_store = true;
                }

              if (remove_store)
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    dump_insn_info ("removing from active", i_ptr);

                  active_local_stores_len--;
                  if (last)
                    last->next_local_store = i_ptr->next_local_store;
                  else
                    active_local_stores = i_ptr->next_local_store;
                }
              else
                last = i_ptr;

              i_ptr = i_ptr->next_local_store;
            }

          if (memset_call)
            {
              rtx args[3];
              if (get_call_args (insn, memset_call, args, 3)
                  && CONST_INT_P (args[1])
                  && CONST_INT_P (args[2])
                  && INTVAL (args[2]) > 0)
                {
                  rtx mem = gen_rtx_MEM (BLKmode, args[0]);
                  set_mem_size (mem, INTVAL (args[2]));
                  body = gen_rtx_SET (mem, args[1]);
                  mems_found += record_store (body, bb_info);
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "handling memset as BLKmode store\n");
                  if (mems_found == 1)
                    {
                      if (active_local_stores_len++
                          >= PARAM_VALUE (PARAM_MAX_DSE_ACTIVE_LOCAL_STORES))
                        {
                          active_local_stores_len = 1;
                          active_local_stores = NULL;
                        }
                      insn_info->fixed_regs_live
                        = copy_fixed_regs (bb_info->regs_live);
                      insn_info->next_local_store = active_local_stores;
                      active_local_stores = insn_info;
                    }
                }
              else
                clear_rhs_from_active_local_stores ();
            }
        }
      else if (SIBLING_CALL_P (insn)
               && (reload_completed || HARD_FRAME_POINTER_IS_ARG_POINTER))
        /* Arguments for a sibling call that are pushed to memory are passed
           using the incoming argument pointer of the current function.  After
           reload that might be (and likely is) frame pointer based.  And, if
           it is a frame pointer on the target, even before reload we need to
           kill frame pointer based stores.  */
        add_wild_read (bb_info);
      else
        /* Every other call, including pure functions, may read any memory
           that is not relative to the frame.  */
        add_non_frame_wild_read (bb_info);

      return;
    }

  /* Assuming that there are sets in these insns, we cannot delete
     them.  */
  if ((GET_CODE (PATTERN (insn)) == CLOBBER)
      || volatile_refs_p (PATTERN (insn))
      || (!cfun->can_delete_dead_exceptions && !insn_nothrow_p (insn))
      || (RTX_FRAME_RELATED_P (insn))
      || find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX))
    insn_info->cannot_delete = true;

  body = PATTERN (insn);
  if (GET_CODE (body) == PARALLEL)
    {
      int i;
      for (i = 0; i < XVECLEN (body, 0); i++)
        mems_found += record_store (XVECEXP (body, 0, i), bb_info);
    }
  else
    mems_found += record_store (body, bb_info);

  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "mems_found = %d, cannot_delete = %s\n",
             mems_found, insn_info->cannot_delete ? "true" : "false");

  /* If we found some sets of mems, add it into the active_local_stores so
     that it can be locally deleted if found dead or used for
     replace_read and redundant constant store elimination.  Otherwise mark
     it as cannot delete.  This simplifies the processing later.  */
  if (mems_found == 1)
    {
      if (active_local_stores_len++
          >= PARAM_VALUE (PARAM_MAX_DSE_ACTIVE_LOCAL_STORES))
        {
          active_local_stores_len = 1;
          active_local_stores = NULL;
        }
      insn_info->fixed_regs_live = copy_fixed_regs (bb_info->regs_live);
      insn_info->next_local_store = active_local_stores;
      active_local_stores = insn_info;
    }
  else
    insn_info->cannot_delete = true;
}
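
/* Example (for exposition only): a call to memset (p, 0, 32) recognized
   in scan_insn is modeled as the artificial pattern
     (set (mem:BLK (reg p)) (const_int 0))
   with its MEM_SIZE set to 32, so record_store can treat it like any
   other store to a known 32-byte region.  */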
/* Remove BASE from the set of active_local_stores.  This is a
   callback from cselib that is used to get rid of the stores in
   active_local_stores.  */

static void
remove_useless_values (cselib_val *base)
{
  insn_info_t insn_info = active_local_stores;
  insn_info_t last = NULL;

  while (insn_info)
    {
      store_info *store_info = insn_info->store_rec;
      bool del = false;

      /* If ANY of the store_infos match the cselib group that is
         being deleted, then the insn cannot be deleted.  */
      while (store_info)
        {
          if ((store_info->group_id == -1)
              && (store_info->cse_base == base))
            {
              del = true;
              break;
            }
          store_info = store_info->next;
        }

      if (del)
        {
          active_local_stores_len--;
          if (last)
            last->next_local_store = insn_info->next_local_store;
          else
            active_local_stores = insn_info->next_local_store;
          free_store_info (insn_info);
        }
      else
        last = insn_info;

      insn_info = insn_info->next_local_store;
    }
}
/* Do all of step 1.  */

static void
dse_step1 (void)
{
  basic_block bb;
  bitmap regs_live = BITMAP_ALLOC (&reg_obstack);

  cselib_init (0);
  all_blocks = BITMAP_ALLOC (NULL);
  bitmap_set_bit (all_blocks, ENTRY_BLOCK);
  bitmap_set_bit (all_blocks, EXIT_BLOCK);

  FOR_ALL_BB_FN (bb, cfun)
    {
      insn_info_t ptr;
      bb_info_t bb_info = dse_bb_info_type_pool.allocate ();

      memset (bb_info, 0, sizeof (dse_bb_info_type));
      bitmap_set_bit (all_blocks, bb->index);
      bb_info->regs_live = regs_live;

      bitmap_copy (regs_live, DF_LR_IN (bb));
      df_simulate_initialize_forwards (bb, regs_live);

      bb_table[bb->index] = bb_info;
      cselib_discard_hook = remove_useless_values;

      if (bb->index >= NUM_FIXED_BLOCKS)
        {
          rtx_insn *insn;

          active_local_stores = NULL;
          active_local_stores_len = 0;
          cselib_clear_table ();

          /* Scan the insns.  */
          FOR_BB_INSNS (bb, insn)
            {
              if (INSN_P (insn))
                scan_insn (bb_info, insn);
              cselib_process_insn (insn);
              if (INSN_P (insn))
                df_simulate_one_insn_forwards (bb, insn, regs_live);
            }

          /* This is something of a hack, because the global algorithm
             is supposed to take care of the case where stores go dead
             at the end of the function.  However, the global
             algorithm must take a more conservative view of block
             mode reads than the local alg does.  So to get the case
             where you have a store to the frame followed by a non
             overlapping block mode read, we look at the active local
             stores at the end of the function and delete all of the
             frame and spill based ones.  */
          if (stores_off_frame_dead_at_return
              && (EDGE_COUNT (bb->succs) == 0
                  || (single_succ_p (bb)
                      && single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun)
                      && ! crtl->calls_eh_return)))
            {
              insn_info_t i_ptr = active_local_stores;
              while (i_ptr)
                {
                  store_info *store_info = i_ptr->store_rec;

                  /* Skip the clobbers.  */
                  while (!store_info->is_set)
                    store_info = store_info->next;
                  if (store_info->group_id >= 0)
                    {
                      group_info *group = rtx_group_vec[store_info->group_id];
                      if (group->frame_related && !i_ptr->cannot_delete)
                        delete_dead_store_insn (i_ptr);
                    }
                  i_ptr = i_ptr->next_local_store;
                }
            }

          /* Get rid of the loads that were discovered in
             replace_read.  Cselib is finished with this block.  */
          while (deferred_change_list)
            {
              deferred_change *next = deferred_change_list->next;

              /* There is no reason to validate this change.  That was
                 done earlier.  */
              *deferred_change_list->loc = deferred_change_list->reg;
              deferred_change_pool.remove (deferred_change_list);
              deferred_change_list = next;
            }

          /* Get rid of all of the cselib based store_infos in this
             block and mark the containing insns as not being
             deletable.  */
          ptr = bb_info->last_insn;
          while (ptr)
            {
              if (ptr->contains_cselib_groups)
                {
                  store_info *s_info = ptr->store_rec;
                  while (s_info && !s_info->is_set)
                    s_info = s_info->next;
                  if (s_info
                      && s_info->redundant_reason
                      && s_info->redundant_reason->insn
                      && !ptr->cannot_delete)
                    {
                      if (dump_file && (dump_flags & TDF_DETAILS))
                        fprintf (dump_file, "Locally deleting insn %d "
                                 "because insn %d stores the "
                                 "same value and couldn't be "
                                 "eliminated\n",
                                 INSN_UID (ptr->insn),
                                 INSN_UID (s_info->redundant_reason->insn));
                      delete_dead_store_insn (ptr);
                    }
                  else
                    free_store_info (ptr);
                }
              else
                {
                  store_info *s_info;

                  /* Free at least positions_needed bitmaps.  */
                  for (s_info = ptr->store_rec; s_info; s_info = s_info->next)
                    if (s_info->is_large)
                      {
                        BITMAP_FREE (s_info->positions_needed.large.bmap);
                        s_info->is_large = false;
                      }
                }
              ptr = ptr->prev_insn;
            }

          cse_store_info_pool.release ();
        }
      bb_info->regs_live = NULL;
    }

  BITMAP_FREE (regs_live);
  cselib_finish ();
  rtx_group_table->empty ();
}
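
/* Note (for exposition only): dse_step1 is the only step that runs
   cselib, so everything learned about non-constant bases is consumed
   here; the deferred replace_read changes are undone and the
   cselib-based store_infos are freed before each block is left.  */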
/*----------------------------------------------------------------------------
   Second step.

   Assign each byte position in the stores that we are going to
   analyze globally to a position in the bitmaps.  Returns true if
   there are any bit positions assigned.
----------------------------------------------------------------------------*/

static void
dse_step2_init (void)
{
  unsigned int i;
  group_info *group;

  FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
    {
      /* For all non stack related bases, we only consider a store to
         be deletable if there are two or more stores for that
         position.  This is because it takes one store to make the
         other store redundant.  However, for the stores that are
         stack related, we consider them if there is only one store
         for the position.  We do this because the stack related
         stores can be deleted if there is no read between them and
         the end of the function.

         To make this work in the current framework, we take the stack
         related bases and add all of the bits from store1 into store2.
         This has the effect of making them eligible even if there is
         only one store.  */

      if (stores_off_frame_dead_at_return && group->frame_related)
        {
          bitmap_ior_into (group->store2_n, group->store1_n);
          bitmap_ior_into (group->store2_p, group->store1_p);
          if (dump_file && (dump_flags & TDF_DETAILS))
            fprintf (dump_file, "group %d is frame related ", i);
        }

      group->offset_map_size_n++;
      group->offset_map_n = XOBNEWVEC (&dse_obstack, int,
                                       group->offset_map_size_n);
      group->offset_map_size_p++;
      group->offset_map_p = XOBNEWVEC (&dse_obstack, int,
                                       group->offset_map_size_p);
      group->process_globally = false;
      if (dump_file && (dump_flags & TDF_DETAILS))
        {
          fprintf (dump_file, "group %d(%d+%d): ", i,
                   (int)bitmap_count_bits (group->store2_n),
                   (int)bitmap_count_bits (group->store2_p));
          bitmap_print (dump_file, group->store2_n, "n ", " ");
          bitmap_print (dump_file, group->store2_p, "p ", "\n");
        }
    }
}


/* Init the offset tables.  */

static bool
dse_step2 (void)
{
  unsigned int i;
  group_info *group;

  /* Position 0 is unused because 0 is used in the maps to mean
     unused.  */
  current_position = 1;
  FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
    {
      bitmap_iterator bi;
      unsigned int j;

      memset (group->offset_map_n, 0, sizeof (int) * group->offset_map_size_n);
      memset (group->offset_map_p, 0, sizeof (int) * group->offset_map_size_p);
      bitmap_clear (group->group_kill);

      EXECUTE_IF_SET_IN_BITMAP (group->store2_n, 0, j, bi)
        {
          bitmap_set_bit (group->group_kill, current_position);
          if (bitmap_bit_p (group->escaped_n, j))
            bitmap_set_bit (kill_on_calls, current_position);
          group->offset_map_n[j] = current_position++;
          group->process_globally = true;
        }
      EXECUTE_IF_SET_IN_BITMAP (group->store2_p, 0, j, bi)
        {
          bitmap_set_bit (group->group_kill, current_position);
          if (bitmap_bit_p (group->escaped_p, j))
            bitmap_set_bit (kill_on_calls, current_position);
          group->offset_map_p[j] = current_position++;
          group->process_globally = true;
        }
    }
  return current_position != 1;
}
/*----------------------------------------------------------------------------
   Third step.

   Build the bit vectors for the transfer functions.
----------------------------------------------------------------------------*/


/* Look up the bitmap index for OFFSET in GROUP_INFO.  If it is not
   there, return 0.  */

static int
get_bitmap_index (group_info *group_info, HOST_WIDE_INT offset)
{
  if (offset < 0)
    {
      HOST_WIDE_INT offset_p = -offset;
      if (offset_p >= group_info->offset_map_size_n)
        return 0;
      return group_info->offset_map_n[offset_p];
    }
  else
    {
      if (offset >= group_info->offset_map_size_p)
        return 0;
      return group_info->offset_map_p[offset];
    }
}
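
/* Example (for exposition only): if a group has globally interesting
   stores covering bytes -4..-1 and 0..3, dse_step2 hands out eight
   distinct bit positions, recording them in offset_map_n[] for the
   negative offsets and offset_map_p[] for the non-negative ones;
   get_bitmap_index returns 0 for any byte that never got a position,
   and callers treat 0 as "not tracked".  */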
/* Process the STORE_INFOs into the bitmaps GEN and KILL.  KILL may
   be NULL.  */

static void
scan_stores (store_info *store_info, bitmap gen, bitmap kill)
{
  while (store_info)
    {
      HOST_WIDE_INT i, offset, width;
      group_info *group_info
        = rtx_group_vec[store_info->group_id];
      /* We can (conservatively) ignore stores whose bounds aren't known;
         they simply don't generate new global dse opportunities.  */
      if (group_info->process_globally
          && store_info->offset.is_constant (&offset)
          && store_info->width.is_constant (&width))
        {
          HOST_WIDE_INT end = offset + width;
          for (i = offset; i < end; i++)
            {
              int index = get_bitmap_index (group_info, i);
              if (index != 0)
                {
                  bitmap_set_bit (gen, index);
                  if (kill)
                    bitmap_clear_bit (kill, index);
                }
            }
        }
      store_info = store_info->next;
    }
}
/* Process the READ_INFOs into the bitmaps GEN and KILL.  KILL may
   be NULL.  */

static void
scan_reads (insn_info_t insn_info, bitmap gen, bitmap kill)
{
  read_info_t read_info = insn_info->read_rec;
  int i;
  group_info *group;

  /* If this insn reads the frame, kill all the frame related stores.  */
  if (insn_info->frame_read)
    {
      FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
        if (group->process_globally && group->frame_related)
          {
            if (kill)
              bitmap_ior_into (kill, group->group_kill);
            bitmap_and_compl_into (gen, group->group_kill);
          }
    }
  if (insn_info->non_frame_wild_read)
    {
      /* Kill all non-frame related stores.  Kill all stores of variables that
         escape.  */
      if (kill)
        bitmap_ior_into (kill, kill_on_calls);
      bitmap_and_compl_into (gen, kill_on_calls);
      FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
        if (group->process_globally && !group->frame_related)
          {
            if (kill)
              bitmap_ior_into (kill, group->group_kill);
            bitmap_and_compl_into (gen, group->group_kill);
          }
    }
  while (read_info)
    {
      FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
        {
          if (group->process_globally)
            {
              if (i == read_info->group_id)
                {
                  HOST_WIDE_INT offset, width;
                  /* Reads with non-constant size kill all DSE opportunities
                     in the group.  */
                  if (!read_info->offset.is_constant (&offset)
                      || !read_info->width.is_constant (&width)
                      || !known_size_p (width))
                    {
                      /* Handle block mode reads.  */
                      if (kill)
                        bitmap_ior_into (kill, group->group_kill);
                      bitmap_and_compl_into (gen, group->group_kill);
                    }
                  else
                    {
                      /* The groups are the same, just process the
                         offsets.  */
                      HOST_WIDE_INT j;
                      HOST_WIDE_INT end = offset + width;
                      for (j = offset; j < end; j++)
                        {
                          int index = get_bitmap_index (group, j);
                          if (index != 0)
                            {
                              if (kill)
                                bitmap_set_bit (kill, index);
                              bitmap_clear_bit (gen, index);
                            }
                        }
                    }
                }
              else
                {
                  /* The groups are different, if the alias sets
                     conflict, clear the entire group.  We only need
                     to apply this test if the read_info is a cselib
                     read.  Anything with a constant base cannot alias
                     something else with a different constant
                     base.  */
                  if ((read_info->group_id < 0)
                      && canon_true_dependence (group->base_mem,
                                                GET_MODE (group->base_mem),
                                                group->canon_base_addr,
                                                read_info->mem, NULL_RTX))
                    {
                      if (kill)
                        bitmap_ior_into (kill, group->group_kill);
                      bitmap_and_compl_into (gen, group->group_kill);
                    }
                }
            }
        }

      read_info = read_info->next;
    }
}
/* Return the insn in BB_INFO before the first wild read or if there
   are no wild reads in the block, return the last insn.  */

static insn_info_t
find_insn_before_first_wild_read (bb_info_t bb_info)
{
  insn_info_t insn_info = bb_info->last_insn;
  insn_info_t last_wild_read = NULL;

  while (insn_info)
    {
      if (insn_info->wild_read)
        {
          last_wild_read = insn_info->prev_insn;
          /* Block starts with wild read.  */
          if (!last_wild_read)
            return NULL;
        }

      insn_info = insn_info->prev_insn;
    }

  if (last_wild_read)
    return last_wild_read;
  else
    return bb_info->last_insn;
}
/* Scan the insns in BB_INFO starting at PTR and going to the top of
   the block in order to build the gen and kill sets for the block.
   We start at ptr which may be the last insn in the block or may be
   the first insn with a wild read.  In the latter case we are able to
   skip the rest of the block because it just does not matter:
   anything that happens is hidden by the wild read.  */

static void
dse_step3_scan (basic_block bb)
{
  bb_info_t bb_info = bb_table[bb->index];
  insn_info_t insn_info;

  insn_info = find_insn_before_first_wild_read (bb_info);

  /* In the spill case or in the no_spill case if there is no wild
     read in the block, we will need a kill set.  */
  if (insn_info == bb_info->last_insn)
    {
      if (bb_info->kill)
        bitmap_clear (bb_info->kill);
      else
        bb_info->kill = BITMAP_ALLOC (&dse_bitmap_obstack);
    }
  else
    if (bb_info->kill)
      BITMAP_FREE (bb_info->kill);

  while (insn_info)
    {
      /* There may have been code deleted by the dce pass run before
         this phase.  */
      if (insn_info->insn && INSN_P (insn_info->insn))
        {
          scan_stores (insn_info->store_rec, bb_info->gen, bb_info->kill);
          scan_reads (insn_info, bb_info->gen, bb_info->kill);
        }

      insn_info = insn_info->prev_insn;
    }
}
/* Set the gen set of the exit block, and also any block with no
   successors that does not have a wild read.  */

static void
dse_step3_exit_block_scan (bb_info_t bb_info)
{
  /* The gen set is all 0's for the exit block except for the
     frame_pointer_group.  */

  if (stores_off_frame_dead_at_return)
    {
      unsigned int i;
      group_info *group;

      FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
        {
          if (group->process_globally && group->frame_related)
            bitmap_ior_into (bb_info->gen, group->group_kill);
        }
    }
}
/* Find all of the blocks that are not backwards reachable from the
   exit block or any block with no successors (BB).  These are the
   infinite loops or infinite self loops.  These blocks will still
   have their bits set in UNREACHABLE_BLOCKS.  */

static void
mark_reachable_blocks (sbitmap unreachable_blocks, basic_block bb)
{
  edge e;
  edge_iterator ei;

  if (bitmap_bit_p (unreachable_blocks, bb->index))
    {
      bitmap_clear_bit (unreachable_blocks, bb->index);
      FOR_EACH_EDGE (e, ei, bb->preds)
        {
          mark_reachable_blocks (unreachable_blocks, e->src);
        }
    }
}
/* Build the transfer functions for the function.  */

static void
dse_step3 ()
{
  basic_block bb;
  sbitmap_iterator sbi;
  bitmap all_ones = NULL;
  unsigned int i;

  auto_sbitmap unreachable_blocks (last_basic_block_for_fn (cfun));
  bitmap_ones (unreachable_blocks);

  FOR_ALL_BB_FN (bb, cfun)
    {
      bb_info_t bb_info = bb_table[bb->index];
      if (bb_info->gen)
        bitmap_clear (bb_info->gen);
      else
        bb_info->gen = BITMAP_ALLOC (&dse_bitmap_obstack);

      if (bb->index == ENTRY_BLOCK)
        ;
      else if (bb->index == EXIT_BLOCK)
        dse_step3_exit_block_scan (bb_info);
      else
        dse_step3_scan (bb);
      if (EDGE_COUNT (bb->succs) == 0)
        mark_reachable_blocks (unreachable_blocks, bb);

      /* If this is the second time dataflow is run, delete the old
         sets.  */
      if (bb_info->in)
        BITMAP_FREE (bb_info->in);
      if (bb_info->out)
        BITMAP_FREE (bb_info->out);
    }

  /* For any block in an infinite loop, we must initialize the out set
     to all ones.  This could be expensive, but almost never occurs in
     practice.  However, it is common in regression tests.  */
  EXECUTE_IF_SET_IN_BITMAP (unreachable_blocks, 0, i, sbi)
    {
      if (bitmap_bit_p (all_blocks, i))
        {
          bb_info_t bb_info = bb_table[i];
          if (!all_ones)
            {
              unsigned int j;
              group_info *group;

              all_ones = BITMAP_ALLOC (&dse_bitmap_obstack);
              FOR_EACH_VEC_ELT (rtx_group_vec, j, group)
                bitmap_ior_into (all_ones, group->group_kill);
            }
          if (!bb_info->out)
            {
              bb_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
              bitmap_copy (bb_info->out, all_ones);
            }
        }
    }

  if (all_ones)
    BITMAP_FREE (all_ones);
}
/*----------------------------------------------------------------------------
   Fourth step.

   Solve the bitvector equations.
----------------------------------------------------------------------------*/


/* Confluence function for blocks with no successors.  Create an out
   set from the gen set of the exit block.  This block logically has
   the exit block as a successor.  */

static void
dse_confluence_0 (basic_block bb)
{
  bb_info_t bb_info = bb_table[bb->index];

  if (bb->index == EXIT_BLOCK)
    return;

  if (!bb_info->out)
    {
      bb_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
      bitmap_copy (bb_info->out, bb_table[EXIT_BLOCK]->gen);
    }
}

/* Propagate the information from the in set of the dest of E to the
   out set of the src of E.  If the various in or out sets are not
   there, that means they are all ones.  */

static void
dse_confluence_n (edge e)
{
  bb_info_t src_info = bb_table[e->src->index];
  bb_info_t dest_info = bb_table[e->dest->index];

  if (dest_info->in)
    {
      if (src_info->out)
        bitmap_and_into (src_info->out, dest_info->in);
      else
        {
          src_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
          bitmap_copy (src_info->out, dest_info->in);
        }
    }
}
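
/* Note (for exposition only): taken together, the two confluence
   functions make out(B) the intersection of in(S) over B's successors,
   with a block that has no successors using the exit block's gen set
   instead; a set that is not there yet stands for "all ones".  */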
/* Propagate the info from the out to the in set of BB_INDEX's basic
   block.  There are three cases:

   1) The block has no kill set.  In this case the kill set is all
   ones.  It does not matter what the out set of the block is, none of
   the info can reach the top.  The only thing that reaches the top is
   the gen set and we just copy the set.

   2) There is a kill set but no out set and bb has successors.  In
   this case we just return.  Eventually an out set will be created and
   it is better to wait than to create a set of ones.

   3) There is both a kill and out set.  We apply the obvious transfer
   function.  */

static bool
dse_transfer_function (int bb_index)
{
  bb_info_t bb_info = bb_table[bb_index];

  if (bb_info->kill)
    {
      if (bb_info->out)
        {
          /* Case 3 above.  */
          if (bb_info->in)
            return bitmap_ior_and_compl (bb_info->in, bb_info->gen,
                                         bb_info->out, bb_info->kill);
          else
            {
              bb_info->in = BITMAP_ALLOC (&dse_bitmap_obstack);
              bitmap_ior_and_compl (bb_info->in, bb_info->gen,
                                    bb_info->out, bb_info->kill);
              return true;
            }
        }
      else
        /* Case 2 above.  */
        return false;
    }
  else
    {
      /* Case 1 above.  If there is already an in set, nothing
         happens.  */
      if (bb_info->in)
        return false;
      else
        {
          bb_info->in = BITMAP_ALLOC (&dse_bitmap_obstack);
          bitmap_copy (bb_info->in, bb_info->gen);
          return true;
        }
    }
}
/* Solve the dataflow equations.  */

static void
dse_step4 (void)
{
  df_simple_dataflow (DF_BACKWARD, NULL, dse_confluence_0,
                      dse_confluence_n, dse_transfer_function,
                      all_blocks, df_get_postorder (DF_BACKWARD),
                      df_get_n_blocks (DF_BACKWARD));
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      basic_block bb;

      fprintf (dump_file, "\n\n*** Global dataflow info after analysis.\n");
      FOR_ALL_BB_FN (bb, cfun)
        {
          bb_info_t bb_info = bb_table[bb->index];

          df_print_bb_index (bb, dump_file);
          if (bb_info->in)
            bitmap_print (dump_file, bb_info->in, " in: ", "\n");
          else
            fprintf (dump_file, " in: *MISSING*\n");
          if (bb_info->gen)
            bitmap_print (dump_file, bb_info->gen, " gen: ", "\n");
          else
            fprintf (dump_file, " gen: *MISSING*\n");
          if (bb_info->kill)
            bitmap_print (dump_file, bb_info->kill, " kill: ", "\n");
          else
            fprintf (dump_file, " kill: *MISSING*\n");
          if (bb_info->out)
            bitmap_print (dump_file, bb_info->out, " out: ", "\n");
          else
            fprintf (dump_file, " out: *MISSING*\n\n");
        }
    }
}
/*----------------------------------------------------------------------------
   Fifth step.

   Delete the stores that can only be deleted using the global information.
----------------------------------------------------------------------------*/


static void
dse_step5 (void)
{
  basic_block bb;
  FOR_EACH_BB_FN (bb, cfun)
    {
      bb_info_t bb_info = bb_table[bb->index];
      insn_info_t insn_info = bb_info->last_insn;
      bitmap v = bb_info->out;

      while (insn_info)
        {
          bool deleted = false;
          if (dump_file && insn_info->insn)
            {
              fprintf (dump_file, "starting to process insn %d\n",
                       INSN_UID (insn_info->insn));
              bitmap_print (dump_file, v, " v: ", "\n");
            }

          /* There may have been code deleted by the dce pass run before
             this phase.  */
          if (insn_info->insn
              && INSN_P (insn_info->insn)
              && (!insn_info->cannot_delete)
              && (!bitmap_empty_p (v)))
            {
              store_info *store_info = insn_info->store_rec;

              /* Try to delete the current insn.  */
              deleted = true;

              /* Skip the clobbers.  */
              while (!store_info->is_set)
                store_info = store_info->next;

              HOST_WIDE_INT i, offset, width;
              group_info *group_info = rtx_group_vec[store_info->group_id];

              if (!store_info->offset.is_constant (&offset)
                  || !store_info->width.is_constant (&width))
                deleted = false;
              else
                {
                  HOST_WIDE_INT end = offset + width;
                  for (i = offset; i < end; i++)
                    {
                      int index = get_bitmap_index (group_info, i);

                      if (dump_file && (dump_flags & TDF_DETAILS))
                        fprintf (dump_file, "i = %d, index = %d\n",
                                 (int) i, index);
                      if (index == 0 || !bitmap_bit_p (v, index))
                        {
                          if (dump_file && (dump_flags & TDF_DETAILS))
                            fprintf (dump_file, "failing at i = %d\n",
                                     (int) i);
                          deleted = false;
                          break;
                        }
                    }
                }
              if (deleted)
                {
                  if (dbg_cnt (dse)
                      && check_for_inc_dec_1 (insn_info))
                    {
                      delete_insn (insn_info->insn);
                      insn_info->insn = NULL;
                      globally_deleted++;
                    }
                }
            }
          /* We do want to process the local info if the insn was
             deleted.  For instance, if the insn did a wild read, we
             no longer need to trash the info.  */
          if (insn_info->insn
              && INSN_P (insn_info->insn)
              && (!deleted))
            {
              scan_stores (insn_info->store_rec, v, NULL);
              if (insn_info->wild_read)
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "wild read\n");
                  bitmap_clear (v);
                }
              else if (insn_info->read_rec
                       || insn_info->non_frame_wild_read
                       || insn_info->frame_read)
                {
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    {
                      if (!insn_info->non_frame_wild_read
                          && !insn_info->frame_read)
                        fprintf (dump_file, "regular read\n");
                      if (insn_info->non_frame_wild_read)
                        fprintf (dump_file, "non-frame wild read\n");
                      if (insn_info->frame_read)
                        fprintf (dump_file, "frame read\n");
                    }
                  scan_reads (insn_info, v, NULL);
                }
            }

          insn_info = insn_info->prev_insn;
        }
    }
}
/*----------------------------------------------------------------------------
   Sixth step.

   Delete stores made redundant by earlier stores (which store the same
   value) that couldn't be eliminated.
----------------------------------------------------------------------------*/

static void
dse_step6 (void)
{
  basic_block bb;

  FOR_ALL_BB_FN (bb, cfun)
    {
      bb_info_t bb_info = bb_table[bb->index];
      insn_info_t insn_info = bb_info->last_insn;

      while (insn_info)
        {
          /* There may have been code deleted by the dce pass run before
             this phase.  */
          if (insn_info->insn
              && INSN_P (insn_info->insn)
              && !insn_info->cannot_delete)
            {
              store_info *s_info = insn_info->store_rec;

              while (s_info && !s_info->is_set)
                s_info = s_info->next;
              if (s_info
                  && s_info->redundant_reason
                  && s_info->redundant_reason->insn
                  && INSN_P (s_info->redundant_reason->insn))
                {
                  rtx_insn *rinsn = s_info->redundant_reason->insn;
                  if (dump_file && (dump_flags & TDF_DETAILS))
                    fprintf (dump_file, "Locally deleting insn %d "
                             "because insn %d stores the "
                             "same value and couldn't be "
                             "eliminated\n",
                             INSN_UID (insn_info->insn),
                             INSN_UID (rinsn));
                  delete_dead_store_insn (insn_info);
                }
            }
          insn_info = insn_info->prev_insn;
        }
    }
}
/*----------------------------------------------------------------------------
   Seventh step.

   Destroy everything left standing.
----------------------------------------------------------------------------*/

static void
dse_step7 (void)
{
  bitmap_obstack_release (&dse_bitmap_obstack);
  obstack_free (&dse_obstack, NULL);

  end_alias_analysis ();
  free (bb_table);
  delete rtx_group_table;
  rtx_group_table = NULL;
  rtx_group_vec.release ();
  BITMAP_FREE (all_blocks);
  BITMAP_FREE (scratch);

  rtx_store_info_pool.release ();
  read_info_type_pool.release ();
  insn_info_type_pool.release ();
  dse_bb_info_type_pool.release ();
  group_info_pool.release ();
  deferred_change_pool.release ();
}
/* -------------------------------------------------------------------------
   DSE
   ------------------------------------------------------------------------- */

/* Callback for running pass_rtl_dse.  */

static unsigned int
rest_of_handle_dse (void)
{
  df_set_flags (DF_DEFER_INSN_RESCAN);

  /* Need the notes since we must track live hardregs in the forwards
     direction.  */
  df_note_add_problem ();
  df_analyze ();

  dse_step0 ();
  dse_step1 ();
  dse_step2_init ();
  if (dse_step2 ())
    {
      df_set_flags (DF_LR_RUN_DCE);
      df_analyze ();
      if (dump_file && (dump_flags & TDF_DETAILS))
        fprintf (dump_file, "doing global processing\n");
      dse_step3 ();
      dse_step4 ();
      dse_step5 ();
    }

  dse_step6 ();
  dse_step7 ();

  if (dump_file)
    fprintf (dump_file, "dse: local deletions = %d, global deletions = %d\n",
             locally_deleted, globally_deleted);

  /* DSE can eliminate potentially-trapping MEMs.
     Remove any EH edges associated with them.  */
  if ((locally_deleted || globally_deleted)
      && cfun->can_throw_non_call_exceptions
      && purge_all_dead_edges ())
    {
      free_dominance_info (CDI_DOMINATORS);
      delete_unreachable_blocks ();
    }

  return 0;
}

namespace {

const pass_data pass_data_rtl_dse1 =
{
  RTL_PASS, /* type */
  "dse1", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_DSE1, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_rtl_dse1 : public rtl_opt_pass
{
public:
  pass_rtl_dse1 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_rtl_dse1, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return optimize > 0 && flag_dse && dbg_cnt (dse1);
    }

  virtual unsigned int execute (function *) { return rest_of_handle_dse (); }

}; // class pass_rtl_dse1

} // anon namespace

rtl_opt_pass *
make_pass_rtl_dse1 (gcc::context *ctxt)
{
  return new pass_rtl_dse1 (ctxt);
}

namespace {

const pass_data pass_data_rtl_dse2 =
{
  RTL_PASS, /* type */
  "dse2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_DSE2, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_rtl_dse2 : public rtl_opt_pass
{
public:
  pass_rtl_dse2 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_rtl_dse2, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return optimize > 0 && flag_dse && dbg_cnt (dse2);
    }

  virtual unsigned int execute (function *) { return rest_of_handle_dse (); }

}; // class pass_rtl_dse2

} // anon namespace

rtl_opt_pass *
make_pass_rtl_dse2 (gcc::context *ctxt)
{
  return new pass_rtl_dse2 (ctxt);
}