Require target lra in gcc.c-torture/compile/asmgoto-6.c
[official-gcc.git] / gcc / dse.cc
blob8b07be17674cd2c070ead9c4d2dd78da9c81f599
1 /* RTL dead store elimination.
2 Copyright (C) 2005-2023 Free Software Foundation, Inc.
4 Contributed by Richard Sandiford <rsandifor@codesourcery.com>
5 and Kenneth Zadeck <zadeck@naturalbridge.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #undef BASELINE
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "backend.h"
29 #include "target.h"
30 #include "rtl.h"
31 #include "tree.h"
32 #include "gimple.h"
33 #include "predict.h"
34 #include "df.h"
35 #include "memmodel.h"
36 #include "tm_p.h"
37 #include "gimple-ssa.h"
38 #include "expmed.h"
39 #include "optabs.h"
40 #include "emit-rtl.h"
41 #include "recog.h"
42 #include "alias.h"
43 #include "stor-layout.h"
44 #include "cfgrtl.h"
45 #include "cselib.h"
46 #include "tree-pass.h"
47 #include "explow.h"
48 #include "expr.h"
49 #include "dbgcnt.h"
50 #include "rtl-iter.h"
51 #include "cfgcleanup.h"
52 #include "calls.h"
54 /* This file contains three techniques for performing Dead Store
55 Elimination (dse).
57 * The first technique performs dse locally on any base address. It
58 is based on the cselib which is a local value numbering technique.
59 This technique is local to a basic block but deals with a fairly
60 general addresses.
62 * The second technique performs dse globally but is restricted to
63 base addresses that are either constant or are relative to the
64 frame_pointer.
66 * The third technique, (which is only done after register allocation)
67 processes the spill slots. This differs from the second
68 technique because it takes advantage of the fact that spilling is
69 completely free from the effects of aliasing.
71 Logically, dse is a backwards dataflow problem. A store can be
72 deleted if it if cannot be reached in the backward direction by any
73 use of the value being stored. However, the local technique uses a
74 forwards scan of the basic block because cselib requires that the
75 block be processed in that order.
77 The pass is logically broken into 7 steps:
79 0) Initialization.
81 1) The local algorithm, as well as scanning the insns for the two
82 global algorithms.
84 2) Analysis to see if the global algs are necessary. In the case
85 of stores base on a constant address, there must be at least two
86 stores to that address, to make it possible to delete some of the
87 stores. In the case of stores off of the frame or spill related
88 stores, only one store to an address is necessary because those
89 stores die at the end of the function.
91 3) Set up the global dataflow equations based on processing the
92 info parsed in the first step.
94 4) Solve the dataflow equations.
96 5) Delete the insns that the global analysis has indicated are
97 unnecessary.
99 6) Delete insns that store the same value as preceding store
100 where the earlier store couldn't be eliminated.
102 7) Cleanup.
104 This step uses cselib and canon_rtx to build the largest expression
105 possible for each address. This pass is a forwards pass through
106 each basic block. From the point of view of the global technique,
107 the first pass could examine a block in either direction. The
108 forwards ordering is to accommodate cselib.
110 We make a simplifying assumption: addresses fall into four broad
111 categories:
113 1) base has rtx_varies_p == false, offset is constant.
114 2) base has rtx_varies_p == false, offset variable.
115 3) base has rtx_varies_p == true, offset constant.
116 4) base has rtx_varies_p == true, offset variable.
118 The local passes are able to process all 4 kinds of addresses. The
119 global pass only handles 1).
121 The global problem is formulated as follows:
123 A store, S1, to address A, where A is not relative to the stack
124 frame, can be eliminated if all paths from S1 to the end of the
125 function contain another store to A before a read to A.
127 If the address A is relative to the stack frame, a store S2 to A
128 can be eliminated if there are no paths from S2 that reach the
129 end of the function that read A before another store to A. In
130 this case S2 can be deleted if there are paths from S2 to the
131 end of the function that have no reads or writes to A. This
132 second case allows stores to the stack frame to be deleted that
133 would otherwise die when the function returns. This cannot be
134 done if stores_off_frame_dead_at_return is not true. See the doc
135 for that variable for when this variable is false.
137 The global problem is formulated as a backwards set union
138 dataflow problem where the stores are the gens and reads are the
139 kills. Set union problems are rare and require some special
140 handling given our representation of bitmaps. A straightforward
141 implementation requires a lot of bitmaps filled with 1s.
142 These are expensive and cumbersome in our bitmap formulation so
143 care has been taken to avoid large vectors filled with 1s. See
144 the comments in bb_info and in the dataflow confluence functions
145 for details.
147 There are two places for further enhancements to this algorithm:
149 1) The original dse which was embedded in a pass called flow also
150 did local address forwarding. For example in
152 A <- r100
153 ... <- A
155 flow would replace the right hand side of the second insn with a
156 reference to r100. Most of the information is available to add this
157 to this pass. It has not done it because it is a lot of work in
158 the case that either r100 is assigned to between the first and
159 second insn and/or the second insn is a load of part of the value
160 stored by the first insn.
162 insn 5 in gcc.c-torture/compile/990203-1.c simple case.
163 insn 15 in gcc.c-torture/execute/20001017-2.c simple case.
164 insn 25 in gcc.c-torture/execute/20001026-1.c simple case.
165 insn 44 in gcc.c-torture/execute/20010910-1.c simple case.
167 2) The cleaning up of spill code is quite profitable. It currently
168 depends on reading tea leaves and chicken entrails left by reload.
169 This pass depends on reload creating a singleton alias set for each
170 spill slot and telling the next dse pass which of these alias sets
171 are the singletons. Rather than analyze the addresses of the
172 spills, dse's spill processing just does analysis of the loads and
173 stores that use those alias sets. There are three cases where this
174 falls short:
176 a) Reload sometimes creates the slot for one mode of access, and
177 then inserts loads and/or stores for a smaller mode. In this
178 case, the current code just punts on the slot. The proper thing
179 to do is to back out and use one bit vector position for each
180 byte of the entity associated with the slot. This depends on
181 KNOWING that reload always generates the accesses for each of the
182 bytes in some canonical (read that easy to understand several
183 passes after reload happens) way.
185 b) Reload sometimes decides that spill slot it allocated was not
186 large enough for the mode and goes back and allocates more slots
187 with the same mode and alias set. The backout in this case is a
188 little more graceful than (a). In this case the slot is unmarked
189 as being a spill slot and if final address comes out to be based
190 off the frame pointer, the global algorithm handles this slot.
192 c) For any pass that may prespill, there is currently no
193 mechanism to tell the dse pass that the slot being used has the
194 special properties that reload uses. It may be that all that is
195 required is to have those passes make the same calls that reload
196 does, assuming that the alias sets can be manipulated in the same
197 way. */
199 /* There are limits to the size of constant offsets we model for the
200 global problem. There are certainly test cases, that exceed this
201 limit, however, it is unlikely that there are important programs
202 that really have constant offsets this size. */
203 #define MAX_OFFSET (64 * 1024)
205 /* Obstack for the DSE dataflow bitmaps. We don't want to put these
206 on the default obstack because these bitmaps can grow quite large
207 (~2GB for the small (!) test case of PR54146) and we'll hold on to
208 all that memory until the end of the compiler run.
209 As a bonus, delete_tree_live_info can destroy all the bitmaps by just
210 releasing the whole obstack. */
211 static bitmap_obstack dse_bitmap_obstack;
213 /* Obstack for other data. As for above: Kinda nice to be able to
214 throw it all away at the end in one big sweep. */
215 static struct obstack dse_obstack;
217 /* Scratch bitmap for cselib's cselib_expand_value_rtx. */
218 static bitmap scratch = NULL;
220 struct insn_info_type;
222 /* This structure holds information about a candidate store. */
223 class store_info
225 public:
227 /* False means this is a clobber. */
228 bool is_set;
230 /* False if a single HOST_WIDE_INT bitmap is used for positions_needed. */
231 bool is_large;
233 /* The id of the mem group of the base address. If rtx_varies_p is
234 true, this is -1. Otherwise, it is the index into the group
235 table. */
236 int group_id;
238 /* This is the cselib value. */
239 cselib_val *cse_base;
241 /* This canonized mem. */
242 rtx mem;
244 /* Canonized MEM address for use by canon_true_dependence. */
245 rtx mem_addr;
247 /* The offset of the first byte associated with the operation. */
248 poly_int64 offset;
250 /* The number of bytes covered by the operation. This is always exact
251 and known (rather than -1). */
252 poly_int64 width;
254 /* The address space that the memory reference uses. */
255 unsigned char addrspace;
257 union
259 /* A bitmask as wide as the number of bytes in the word that
260 contains a 1 if the byte may be needed. The store is unused if
261 all of the bits are 0. This is used if IS_LARGE is false. */
262 unsigned HOST_WIDE_INT small_bitmask;
264 struct
266 /* A bitmap with one bit per byte, or null if the number of
267 bytes isn't known at compile time. A cleared bit means
268 the position is needed. Used if IS_LARGE is true. */
269 bitmap bmap;
271 /* When BITMAP is nonnull, this counts the number of set bits
272 (i.e. unneeded bytes) in the bitmap. If it is equal to
273 WIDTH, the whole store is unused.
275 When BITMAP is null:
276 - the store is definitely not needed when COUNT == 1
277 - all the store is needed when COUNT == 0 and RHS is nonnull
278 - otherwise we don't know which parts of the store are needed. */
279 int count;
280 } large;
281 } positions_needed;
283 /* The next store info for this insn. */
284 class store_info *next;
286 /* The right hand side of the store. This is used if there is a
287 subsequent reload of the mems address somewhere later in the
288 basic block. */
289 rtx rhs;
291 /* If rhs is or holds a constant, this contains that constant,
292 otherwise NULL. */
293 rtx const_rhs;
295 /* Set if this store stores the same constant value as REDUNDANT_REASON
296 insn stored. These aren't eliminated early, because doing that
297 might prevent the earlier larger store to be eliminated. */
298 struct insn_info_type *redundant_reason;
301 /* Return a bitmask with the first N low bits set. */
303 static unsigned HOST_WIDE_INT
304 lowpart_bitmask (int n)
306 unsigned HOST_WIDE_INT mask = HOST_WIDE_INT_M1U;
307 return mask >> (HOST_BITS_PER_WIDE_INT - n);
310 static object_allocator<store_info> cse_store_info_pool ("cse_store_info_pool");
312 static object_allocator<store_info> rtx_store_info_pool ("rtx_store_info_pool");
314 /* This structure holds information about a load. These are only
315 built for rtx bases. */
316 class read_info_type
318 public:
319 /* The id of the mem group of the base address. */
320 int group_id;
322 /* The offset of the first byte associated with the operation. */
323 poly_int64 offset;
325 /* The number of bytes covered by the operation, or -1 if not known. */
326 poly_int64 width;
328 /* The mem being read. */
329 rtx mem;
331 /* The next read_info for this insn. */
332 class read_info_type *next;
334 typedef class read_info_type *read_info_t;
336 static object_allocator<read_info_type> read_info_type_pool ("read_info_pool");
338 /* One of these records is created for each insn. */
340 struct insn_info_type
342 /* Set true if the insn contains a store but the insn itself cannot
343 be deleted. This is set if the insn is a parallel and there is
344 more than one non dead output or if the insn is in some way
345 volatile. */
346 bool cannot_delete;
348 /* This field is only used by the global algorithm. It is set true
349 if the insn contains any read of mem except for a (1). This is
350 also set if the insn is a call or has a clobber mem. If the insn
351 contains a wild read, the use_rec will be null. */
352 bool wild_read;
354 /* This is true only for CALL instructions which could potentially read
355 any non-frame memory location. This field is used by the global
356 algorithm. */
357 bool non_frame_wild_read;
359 /* This field is only used for the processing of const functions.
360 These functions cannot read memory, but they can read the stack
361 because that is where they may get their parms. We need to be
362 this conservative because, like the store motion pass, we don't
363 consider CALL_INSN_FUNCTION_USAGE when processing call insns.
364 Moreover, we need to distinguish two cases:
365 1. Before reload (register elimination), the stores related to
366 outgoing arguments are stack pointer based and thus deemed
367 of non-constant base in this pass. This requires special
368 handling but also means that the frame pointer based stores
369 need not be killed upon encountering a const function call.
370 2. After reload, the stores related to outgoing arguments can be
371 either stack pointer or hard frame pointer based. This means
372 that we have no other choice than also killing all the frame
373 pointer based stores upon encountering a const function call.
374 This field is set after reload for const function calls and before
375 reload for const tail function calls on targets where arg pointer
376 is the frame pointer. Having this set is less severe than a wild
377 read, it just means that all the frame related stores are killed
378 rather than all the stores. */
379 bool frame_read;
381 /* This field is only used for the processing of const functions.
382 It is set if the insn may contain a stack pointer based store. */
383 bool stack_pointer_based;
385 /* This is true if any of the sets within the store contains a
386 cselib base. Such stores can only be deleted by the local
387 algorithm. */
388 bool contains_cselib_groups;
390 /* The insn. */
391 rtx_insn *insn;
393 /* The list of mem sets or mem clobbers that are contained in this
394 insn. If the insn is deletable, it contains only one mem set.
395 But it could also contain clobbers. Insns that contain more than
396 one mem set are not deletable, but each of those mems are here in
397 order to provide info to delete other insns. */
398 store_info *store_rec;
400 /* The linked list of mem uses in this insn. Only the reads from
401 rtx bases are listed here. The reads to cselib bases are
402 completely processed during the first scan and so are never
403 created. */
404 read_info_t read_rec;
406 /* The live fixed registers. We assume only fixed registers can
407 cause trouble by being clobbered from an expanded pattern;
408 storing only the live fixed registers (rather than all registers)
409 means less memory needs to be allocated / copied for the individual
410 stores. */
411 regset fixed_regs_live;
413 /* The prev insn in the basic block. */
414 struct insn_info_type * prev_insn;
416 /* The linked list of insns that are in consideration for removal in
417 the forwards pass through the basic block. This pointer may be
418 trash as it is not cleared when a wild read occurs. The only
419 time it is guaranteed to be correct is when the traversal starts
420 at active_local_stores. */
421 struct insn_info_type * next_local_store;
423 typedef struct insn_info_type *insn_info_t;
425 static object_allocator<insn_info_type> insn_info_type_pool ("insn_info_pool");
427 /* The linked list of stores that are under consideration in this
428 basic block. */
429 static insn_info_t active_local_stores;
430 static int active_local_stores_len;
432 struct dse_bb_info_type
434 /* Pointer to the insn info for the last insn in the block. These
435 are linked so this is how all of the insns are reached. During
436 scanning this is the current insn being scanned. */
437 insn_info_t last_insn;
439 /* The info for the global dataflow problem. */
442 /* This is set if the transfer function should and in the wild_read
443 bitmap before applying the kill and gen sets. That vector knocks
444 out most of the bits in the bitmap and thus speeds up the
445 operations. */
446 bool apply_wild_read;
448 /* The following 4 bitvectors hold information about which positions
449 of which stores are live or dead. They are indexed by
450 get_bitmap_index. */
452 /* The set of store positions that exist in this block before a wild read. */
453 bitmap gen;
455 /* The set of load positions that exist in this block above the
456 same position of a store. */
457 bitmap kill;
459 /* The set of stores that reach the top of the block without being
460 killed by a read.
462 Do not represent the in if it is all ones. Note that this is
463 what the bitvector should logically be initialized to for a set
464 intersection problem. However, like the kill set, this is too
465 expensive. So initially, the in set will only be created for the
466 exit block and any block that contains a wild read. */
467 bitmap in;
469 /* The set of stores that reach the bottom of the block from it's
470 successors.
472 Do not represent the in if it is all ones. Note that this is
473 what the bitvector should logically be initialized to for a set
474 intersection problem. However, like the kill and in set, this is
475 too expensive. So what is done is that the confluence operator
476 just initializes the vector from one of the out sets of the
477 successors of the block. */
478 bitmap out;
480 /* The following bitvector is indexed by the reg number. It
481 contains the set of regs that are live at the current instruction
482 being processed. While it contains info for all of the
483 registers, only the hard registers are actually examined. It is used
484 to assure that shift and/or add sequences that are inserted do not
485 accidentally clobber live hard regs. */
486 bitmap regs_live;
489 typedef struct dse_bb_info_type *bb_info_t;
491 static object_allocator<dse_bb_info_type> dse_bb_info_type_pool
492 ("bb_info_pool");
494 /* Table to hold all bb_infos. */
495 static bb_info_t *bb_table;
497 /* There is a group_info for each rtx base that is used to reference
498 memory. There are also not many of the rtx bases because they are
499 very limited in scope. */
501 struct group_info
503 /* The actual base of the address. */
504 rtx rtx_base;
506 /* The sequential id of the base. This allows us to have a
507 canonical ordering of these that is not based on addresses. */
508 int id;
510 /* True if there are any positions that are to be processed
511 globally. */
512 bool process_globally;
514 /* True if the base of this group is either the frame_pointer or
515 hard_frame_pointer. */
516 bool frame_related;
518 /* A mem wrapped around the base pointer for the group in order to do
519 read dependency. It must be given BLKmode in order to encompass all
520 the possible offsets from the base. */
521 rtx base_mem;
523 /* Canonized version of base_mem's address. */
524 rtx canon_base_addr;
526 /* These two sets of two bitmaps are used to keep track of how many
527 stores are actually referencing that position from this base. We
528 only do this for rtx bases as this will be used to assign
529 positions in the bitmaps for the global problem. Bit N is set in
530 store1 on the first store for offset N. Bit N is set in store2
531 for the second store to offset N. This is all we need since we
532 only care about offsets that have two or more stores for them.
534 The "_n" suffix is for offsets less than 0 and the "_p" suffix is
535 for 0 and greater offsets.
537 There is one special case here, for stores into the stack frame,
538 we will or store1 into store2 before deciding which stores look
539 at globally. This is because stores to the stack frame that have
540 no other reads before the end of the function can also be
541 deleted. */
542 bitmap store1_n, store1_p, store2_n, store2_p;
544 /* These bitmaps keep track of offsets in this group escape this function.
545 An offset escapes if it corresponds to a named variable whose
546 addressable flag is set. */
547 bitmap escaped_n, escaped_p;
549 /* The positions in this bitmap have the same assignments as the in,
550 out, gen and kill bitmaps. This bitmap is all zeros except for
551 the positions that are occupied by stores for this group. */
552 bitmap group_kill;
554 /* The offset_map is used to map the offsets from this base into
555 positions in the global bitmaps. It is only created after all of
556 the all of stores have been scanned and we know which ones we
557 care about. */
558 int *offset_map_n, *offset_map_p;
559 int offset_map_size_n, offset_map_size_p;
562 static object_allocator<group_info> group_info_pool ("rtx_group_info_pool");
564 /* Index into the rtx_group_vec. */
565 static int rtx_group_next_id;
568 static vec<group_info *> rtx_group_vec;
571 /* This structure holds the set of changes that are being deferred
572 when removing read operation. See replace_read. */
573 struct deferred_change
576 /* The mem that is being replaced. */
577 rtx *loc;
579 /* The reg it is being replaced with. */
580 rtx reg;
582 struct deferred_change *next;
585 static object_allocator<deferred_change> deferred_change_pool
586 ("deferred_change_pool");
588 static deferred_change *deferred_change_list = NULL;
590 /* This is true except if cfun->stdarg -- i.e. we cannot do
591 this for vararg functions because they play games with the frame. */
592 static bool stores_off_frame_dead_at_return;
594 /* Counter for stats. */
595 static int globally_deleted;
596 static int locally_deleted;
598 static bitmap all_blocks;
600 /* Locations that are killed by calls in the global phase. */
601 static bitmap kill_on_calls;
603 /* The number of bits used in the global bitmaps. */
604 static unsigned int current_position;
606 /* Print offset range [OFFSET, OFFSET + WIDTH) to FILE. */
608 static void
609 print_range (FILE *file, poly_int64 offset, poly_int64 width)
611 fprintf (file, "[");
612 print_dec (offset, file, SIGNED);
613 fprintf (file, "..");
614 print_dec (offset + width, file, SIGNED);
615 fprintf (file, ")");
618 /*----------------------------------------------------------------------------
619 Zeroth step.
621 Initialization.
622 ----------------------------------------------------------------------------*/
625 /* Hashtable callbacks for maintaining the "bases" field of
626 store_group_info, given that the addresses are function invariants. */
628 struct invariant_group_base_hasher : nofree_ptr_hash <group_info>
630 static inline hashval_t hash (const group_info *);
631 static inline bool equal (const group_info *, const group_info *);
634 inline bool
635 invariant_group_base_hasher::equal (const group_info *gi1,
636 const group_info *gi2)
638 return rtx_equal_p (gi1->rtx_base, gi2->rtx_base);
641 inline hashval_t
642 invariant_group_base_hasher::hash (const group_info *gi)
644 int do_not_record;
645 return hash_rtx (gi->rtx_base, Pmode, &do_not_record, NULL, false);
648 /* Tables of group_info structures, hashed by base value. */
649 static hash_table<invariant_group_base_hasher> *rtx_group_table;
652 /* Get the GROUP for BASE. Add a new group if it is not there. */
654 static group_info *
655 get_group_info (rtx base)
657 struct group_info tmp_gi;
658 group_info *gi;
659 group_info **slot;
661 gcc_assert (base != NULL_RTX);
663 /* Find the store_base_info structure for BASE, creating a new one
664 if necessary. */
665 tmp_gi.rtx_base = base;
666 slot = rtx_group_table->find_slot (&tmp_gi, INSERT);
667 gi = *slot;
669 if (gi == NULL)
671 *slot = gi = group_info_pool.allocate ();
672 gi->rtx_base = base;
673 gi->id = rtx_group_next_id++;
674 gi->base_mem = gen_rtx_MEM (BLKmode, base);
675 gi->canon_base_addr = canon_rtx (base);
676 gi->store1_n = BITMAP_ALLOC (&dse_bitmap_obstack);
677 gi->store1_p = BITMAP_ALLOC (&dse_bitmap_obstack);
678 gi->store2_n = BITMAP_ALLOC (&dse_bitmap_obstack);
679 gi->store2_p = BITMAP_ALLOC (&dse_bitmap_obstack);
680 gi->escaped_p = BITMAP_ALLOC (&dse_bitmap_obstack);
681 gi->escaped_n = BITMAP_ALLOC (&dse_bitmap_obstack);
682 gi->group_kill = BITMAP_ALLOC (&dse_bitmap_obstack);
683 gi->process_globally = false;
684 gi->frame_related =
685 (base == frame_pointer_rtx) || (base == hard_frame_pointer_rtx);
686 gi->offset_map_size_n = 0;
687 gi->offset_map_size_p = 0;
688 gi->offset_map_n = NULL;
689 gi->offset_map_p = NULL;
690 rtx_group_vec.safe_push (gi);
693 return gi;
697 /* Initialization of data structures. */
699 static void
700 dse_step0 (void)
702 locally_deleted = 0;
703 globally_deleted = 0;
705 bitmap_obstack_initialize (&dse_bitmap_obstack);
706 gcc_obstack_init (&dse_obstack);
708 scratch = BITMAP_ALLOC (&reg_obstack);
709 kill_on_calls = BITMAP_ALLOC (&dse_bitmap_obstack);
712 rtx_group_table = new hash_table<invariant_group_base_hasher> (11);
714 bb_table = XNEWVEC (bb_info_t, last_basic_block_for_fn (cfun));
715 rtx_group_next_id = 0;
717 stores_off_frame_dead_at_return = !cfun->stdarg;
719 init_alias_analysis ();
724 /*----------------------------------------------------------------------------
725 First step.
727 Scan all of the insns. Any random ordering of the blocks is fine.
728 Each block is scanned in forward order to accommodate cselib which
729 is used to remove stores with non-constant bases.
730 ----------------------------------------------------------------------------*/
732 /* Delete all of the store_info recs from INSN_INFO. */
734 static void
735 free_store_info (insn_info_t insn_info)
737 store_info *cur = insn_info->store_rec;
738 while (cur)
740 store_info *next = cur->next;
741 if (cur->is_large)
742 BITMAP_FREE (cur->positions_needed.large.bmap);
743 if (cur->cse_base)
744 cse_store_info_pool.remove (cur);
745 else
746 rtx_store_info_pool.remove (cur);
747 cur = next;
750 insn_info->cannot_delete = true;
751 insn_info->contains_cselib_groups = false;
752 insn_info->store_rec = NULL;
755 struct note_add_store_info
757 rtx_insn *first, *current;
758 regset fixed_regs_live;
759 bool failure;
762 /* Callback for emit_inc_dec_insn_before via note_stores.
763 Check if a register is clobbered which is live afterwards. */
765 static void
766 note_add_store (rtx loc, const_rtx expr ATTRIBUTE_UNUSED, void *data)
768 rtx_insn *insn;
769 note_add_store_info *info = (note_add_store_info *) data;
771 if (!REG_P (loc))
772 return;
774 /* If this register is referenced by the current or an earlier insn,
775 that's OK. E.g. this applies to the register that is being incremented
776 with this addition. */
777 for (insn = info->first;
778 insn != NEXT_INSN (info->current);
779 insn = NEXT_INSN (insn))
780 if (reg_referenced_p (loc, PATTERN (insn)))
781 return;
783 /* If we come here, we have a clobber of a register that's only OK
784 if that register is not live. If we don't have liveness information
785 available, fail now. */
786 if (!info->fixed_regs_live)
788 info->failure = true;
789 return;
791 /* Now check if this is a live fixed register. */
792 unsigned int end_regno = END_REGNO (loc);
793 for (unsigned int regno = REGNO (loc); regno < end_regno; ++regno)
794 if (REGNO_REG_SET_P (info->fixed_regs_live, regno))
795 info->failure = true;
798 /* Callback for for_each_inc_dec that emits an INSN that sets DEST to
799 SRC + SRCOFF before insn ARG. */
801 static int
802 emit_inc_dec_insn_before (rtx mem ATTRIBUTE_UNUSED,
803 rtx op ATTRIBUTE_UNUSED,
804 rtx dest, rtx src, rtx srcoff, void *arg)
806 insn_info_t insn_info = (insn_info_t) arg;
807 rtx_insn *insn = insn_info->insn, *new_insn, *cur;
808 note_add_store_info info;
810 /* We can reuse all operands without copying, because we are about
811 to delete the insn that contained it. */
812 if (srcoff)
814 start_sequence ();
815 emit_insn (gen_add3_insn (dest, src, srcoff));
816 new_insn = get_insns ();
817 end_sequence ();
819 else
820 new_insn = gen_move_insn (dest, src);
821 info.first = new_insn;
822 info.fixed_regs_live = insn_info->fixed_regs_live;
823 info.failure = false;
824 for (cur = new_insn; cur; cur = NEXT_INSN (cur))
826 info.current = cur;
827 note_stores (cur, note_add_store, &info);
830 /* If a failure was flagged above, return 1 so that for_each_inc_dec will
831 return it immediately, communicating the failure to its caller. */
832 if (info.failure)
833 return 1;
835 emit_insn_before (new_insn, insn);
837 return 0;
840 /* Before we delete INSN_INFO->INSN, make sure that the auto inc/dec, if it
841 is there, is split into a separate insn.
842 Return true on success (or if there was nothing to do), false on failure. */
844 static bool
845 check_for_inc_dec_1 (insn_info_t insn_info)
847 rtx_insn *insn = insn_info->insn;
848 rtx note = find_reg_note (insn, REG_INC, NULL_RTX);
849 if (note)
850 return for_each_inc_dec (PATTERN (insn), emit_inc_dec_insn_before,
851 insn_info) == 0;
853 /* Punt on stack pushes, those don't have REG_INC notes and we are
854 unprepared to deal with distribution of REG_ARGS_SIZE notes etc. */
855 subrtx_iterator::array_type array;
856 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
858 const_rtx x = *iter;
859 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
860 return false;
863 return true;
867 /* Entry point for postreload. If you work on reload_cse, or you need this
868 anywhere else, consider if you can provide register liveness information
869 and add a parameter to this function so that it can be passed down in
870 insn_info.fixed_regs_live. */
871 bool
872 check_for_inc_dec (rtx_insn *insn)
874 insn_info_type insn_info;
875 rtx note;
877 insn_info.insn = insn;
878 insn_info.fixed_regs_live = NULL;
879 note = find_reg_note (insn, REG_INC, NULL_RTX);
880 if (note)
881 return for_each_inc_dec (PATTERN (insn), emit_inc_dec_insn_before,
882 &insn_info) == 0;
884 /* Punt on stack pushes, those don't have REG_INC notes and we are
885 unprepared to deal with distribution of REG_ARGS_SIZE notes etc. */
886 subrtx_iterator::array_type array;
887 FOR_EACH_SUBRTX (iter, array, PATTERN (insn), NONCONST)
889 const_rtx x = *iter;
890 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
891 return false;
894 return true;
897 /* Delete the insn and free all of the fields inside INSN_INFO. */
899 static void
900 delete_dead_store_insn (insn_info_t insn_info)
902 read_info_t read_info;
904 if (!dbg_cnt (dse))
905 return;
907 if (!check_for_inc_dec_1 (insn_info))
908 return;
909 if (dump_file && (dump_flags & TDF_DETAILS))
910 fprintf (dump_file, "Locally deleting insn %d\n",
911 INSN_UID (insn_info->insn));
913 free_store_info (insn_info);
914 read_info = insn_info->read_rec;
916 while (read_info)
918 read_info_t next = read_info->next;
919 read_info_type_pool.remove (read_info);
920 read_info = next;
922 insn_info->read_rec = NULL;
924 delete_insn (insn_info->insn);
925 locally_deleted++;
926 insn_info->insn = NULL;
928 insn_info->wild_read = false;
931 /* Return whether DECL, a local variable, can possibly escape the current
932 function scope. */
934 static bool
935 local_variable_can_escape (tree decl)
937 if (TREE_ADDRESSABLE (decl))
938 return true;
940 /* If this is a partitioned variable, we need to consider all the variables
941 in the partition. This is necessary because a store into one of them can
942 be replaced with a store into another and this may not change the outcome
943 of the escape analysis. */
944 if (cfun->gimple_df->decls_to_pointers != NULL)
946 tree *namep = cfun->gimple_df->decls_to_pointers->get (decl);
947 if (namep)
948 return TREE_ADDRESSABLE (*namep);
951 return false;
954 /* Return whether EXPR can possibly escape the current function scope. */
956 static bool
957 can_escape (tree expr)
959 tree base;
960 if (!expr)
961 return true;
962 base = get_base_address (expr);
963 if (DECL_P (base)
964 && !may_be_aliased (base)
965 && !(VAR_P (base)
966 && !DECL_EXTERNAL (base)
967 && !TREE_STATIC (base)
968 && local_variable_can_escape (base)))
969 return false;
970 return true;
973 /* Set the store* bitmaps offset_map_size* fields in GROUP based on
974 OFFSET and WIDTH. */
976 static void
977 set_usage_bits (group_info *group, poly_int64 offset, poly_int64 width,
978 tree expr)
980 /* Non-constant offsets and widths act as global kills, so there's no point
981 trying to use them to derive global DSE candidates. */
982 HOST_WIDE_INT i, const_offset, const_width;
983 bool expr_escapes = can_escape (expr);
984 if (offset.is_constant (&const_offset)
985 && width.is_constant (&const_width)
986 && const_offset > -MAX_OFFSET
987 && const_offset + const_width < MAX_OFFSET)
988 for (i = const_offset; i < const_offset + const_width; ++i)
990 bitmap store1;
991 bitmap store2;
992 bitmap escaped;
993 int ai;
994 if (i < 0)
996 store1 = group->store1_n;
997 store2 = group->store2_n;
998 escaped = group->escaped_n;
999 ai = -i;
1001 else
1003 store1 = group->store1_p;
1004 store2 = group->store2_p;
1005 escaped = group->escaped_p;
1006 ai = i;
1009 if (!bitmap_set_bit (store1, ai))
1010 bitmap_set_bit (store2, ai);
1011 else
1013 if (i < 0)
1015 if (group->offset_map_size_n < ai)
1016 group->offset_map_size_n = ai;
1018 else
1020 if (group->offset_map_size_p < ai)
1021 group->offset_map_size_p = ai;
1024 if (expr_escapes)
1025 bitmap_set_bit (escaped, ai);
1029 static void
1030 reset_active_stores (void)
1032 active_local_stores = NULL;
1033 active_local_stores_len = 0;
1036 /* Free all READ_REC of the LAST_INSN of BB_INFO. */
1038 static void
1039 free_read_records (bb_info_t bb_info)
1041 insn_info_t insn_info = bb_info->last_insn;
1042 read_info_t *ptr = &insn_info->read_rec;
1043 while (*ptr)
1045 read_info_t next = (*ptr)->next;
1046 read_info_type_pool.remove (*ptr);
1047 *ptr = next;
1051 /* Set the BB_INFO so that the last insn is marked as a wild read. */
1053 static void
1054 add_wild_read (bb_info_t bb_info)
1056 insn_info_t insn_info = bb_info->last_insn;
1057 insn_info->wild_read = true;
1058 free_read_records (bb_info);
1059 reset_active_stores ();
1062 /* Set the BB_INFO so that the last insn is marked as a wild read of
1063 non-frame locations. */
1065 static void
1066 add_non_frame_wild_read (bb_info_t bb_info)
1068 insn_info_t insn_info = bb_info->last_insn;
1069 insn_info->non_frame_wild_read = true;
1070 free_read_records (bb_info);
1071 reset_active_stores ();
1074 /* Return true if X is a constant or one of the registers that behave
1075 as a constant over the life of a function. This is equivalent to
1076 !rtx_varies_p for memory addresses. */
1078 static bool
1079 const_or_frame_p (rtx x)
1081 if (CONSTANT_P (x))
1082 return true;
1084 if (GET_CODE (x) == REG)
1086 /* Note that we have to test for the actual rtx used for the frame
1087 and arg pointers and not just the register number in case we have
1088 eliminated the frame and/or arg pointer and are using it
1089 for pseudos. */
1090 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
1091 /* The arg pointer varies if it is not a fixed register. */
1092 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
1093 || x == pic_offset_table_rtx)
1094 return true;
1095 return false;
1098 return false;
1101 /* Take all reasonable action to put the address of MEM into the form
1102 that we can do analysis on.
1104 The gold standard is to get the address into the form: address +
1105 OFFSET where address is something that rtx_varies_p considers a
1106 constant. When we can get the address in this form, we can do
1107 global analysis on it. Note that for constant bases, address is
1108 not actually returned, only the group_id. The address can be
1109 obtained from that.
1111 If that fails, we try cselib to get a value we can at least use
1112 locally. If that fails we return false.
1114 The GROUP_ID is set to -1 for cselib bases and the index of the
1115 group for non_varying bases.
1117 FOR_READ is true if this is a mem read and false if not. */
1119 static bool
1120 canon_address (rtx mem,
1121 int *group_id,
1122 poly_int64 *offset,
1123 cselib_val **base)
1125 machine_mode address_mode = get_address_mode (mem);
1126 rtx mem_address = XEXP (mem, 0);
1127 rtx expanded_address, address;
1128 int expanded;
1130 cselib_lookup (mem_address, address_mode, 1, GET_MODE (mem));
1132 if (dump_file && (dump_flags & TDF_DETAILS))
1134 fprintf (dump_file, " mem: ");
1135 print_inline_rtx (dump_file, mem_address, 0);
1136 fprintf (dump_file, "\n");
1139 /* First see if just canon_rtx (mem_address) is const or frame,
1140 if not, try cselib_expand_value_rtx and call canon_rtx on that. */
1141 address = NULL_RTX;
1142 for (expanded = 0; expanded < 2; expanded++)
1144 if (expanded)
1146 /* Use cselib to replace all of the reg references with the full
1147 expression. This will take care of the case where we have
1149 r_x = base + offset;
1150 val = *r_x;
1152 by making it into
1154 val = *(base + offset); */
1156 expanded_address = cselib_expand_value_rtx (mem_address,
1157 scratch, 5);
1159 /* If this fails, just go with the address from first
1160 iteration. */
1161 if (!expanded_address)
1162 break;
1164 else
1165 expanded_address = mem_address;
1167 /* Split the address into canonical BASE + OFFSET terms. */
1168 address = canon_rtx (expanded_address);
1170 *offset = 0;
1172 if (dump_file && (dump_flags & TDF_DETAILS))
1174 if (expanded)
1176 fprintf (dump_file, "\n after cselib_expand address: ");
1177 print_inline_rtx (dump_file, expanded_address, 0);
1178 fprintf (dump_file, "\n");
1181 fprintf (dump_file, "\n after canon_rtx address: ");
1182 print_inline_rtx (dump_file, address, 0);
1183 fprintf (dump_file, "\n");
1186 if (GET_CODE (address) == CONST)
1187 address = XEXP (address, 0);
1189 address = strip_offset_and_add (address, offset);
1191 if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (mem))
1192 && const_or_frame_p (address))
1194 group_info *group = get_group_info (address);
1196 if (dump_file && (dump_flags & TDF_DETAILS))
1198 fprintf (dump_file, " gid=%d offset=", group->id);
1199 print_dec (*offset, dump_file);
1200 fprintf (dump_file, "\n");
1202 *base = NULL;
1203 *group_id = group->id;
1204 return true;
1208 *base = cselib_lookup (address, address_mode, true, GET_MODE (mem));
1209 *group_id = -1;
1211 if (*base == NULL)
1213 if (dump_file && (dump_flags & TDF_DETAILS))
1214 fprintf (dump_file, " no cselib val - should be a wild read.\n");
1215 return false;
1217 if (dump_file && (dump_flags & TDF_DETAILS))
1219 fprintf (dump_file, " varying cselib base=%u:%u offset = ",
1220 (*base)->uid, (*base)->hash);
1221 print_dec (*offset, dump_file);
1222 fprintf (dump_file, "\n");
1224 return true;
1228 /* Clear the rhs field from the active_local_stores array. */
1230 static void
1231 clear_rhs_from_active_local_stores (void)
1233 insn_info_t ptr = active_local_stores;
1235 while (ptr)
1237 store_info *store_info = ptr->store_rec;
1238 /* Skip the clobbers. */
1239 while (!store_info->is_set)
1240 store_info = store_info->next;
1242 store_info->rhs = NULL;
1243 store_info->const_rhs = NULL;
1245 ptr = ptr->next_local_store;
1250 /* Mark byte POS bytes from the beginning of store S_INFO as unneeded. */
1252 static inline void
1253 set_position_unneeded (store_info *s_info, int pos)
1255 if (UNLIKELY (s_info->is_large))
1257 if (bitmap_set_bit (s_info->positions_needed.large.bmap, pos))
1258 s_info->positions_needed.large.count++;
1260 else
1261 s_info->positions_needed.small_bitmask
1262 &= ~(HOST_WIDE_INT_1U << pos);
1265 /* Mark the whole store S_INFO as unneeded. */
1267 static inline void
1268 set_all_positions_unneeded (store_info *s_info)
1270 if (UNLIKELY (s_info->is_large))
1272 HOST_WIDE_INT width;
1273 if (s_info->width.is_constant (&width))
1275 bitmap_set_range (s_info->positions_needed.large.bmap, 0, width);
1276 s_info->positions_needed.large.count = width;
1278 else
1280 gcc_checking_assert (!s_info->positions_needed.large.bmap);
1281 s_info->positions_needed.large.count = 1;
1284 else
1285 s_info->positions_needed.small_bitmask = HOST_WIDE_INT_0U;
1288 /* Return TRUE if any bytes from S_INFO store are needed. */
1290 static inline bool
1291 any_positions_needed_p (store_info *s_info)
1293 if (UNLIKELY (s_info->is_large))
1295 HOST_WIDE_INT width;
1296 if (s_info->width.is_constant (&width))
1298 gcc_checking_assert (s_info->positions_needed.large.bmap);
1299 return s_info->positions_needed.large.count < width;
1301 else
1303 gcc_checking_assert (!s_info->positions_needed.large.bmap);
1304 return s_info->positions_needed.large.count == 0;
1307 else
1308 return (s_info->positions_needed.small_bitmask != HOST_WIDE_INT_0U);
1311 /* Return TRUE if all bytes START through START+WIDTH-1 from S_INFO
1312 store are known to be needed. */
1314 static inline bool
1315 all_positions_needed_p (store_info *s_info, poly_int64 start,
1316 poly_int64 width)
1318 gcc_assert (s_info->rhs);
1319 if (!s_info->width.is_constant ())
1321 gcc_assert (s_info->is_large
1322 && !s_info->positions_needed.large.bmap);
1323 return s_info->positions_needed.large.count == 0;
1326 /* Otherwise, if START and WIDTH are non-constant, we're asking about
1327 a non-constant region of a constant-sized store. We can't say for
1328 sure that all positions are needed. */
1329 HOST_WIDE_INT const_start, const_width;
1330 if (!start.is_constant (&const_start)
1331 || !width.is_constant (&const_width))
1332 return false;
1334 if (UNLIKELY (s_info->is_large))
1336 for (HOST_WIDE_INT i = const_start; i < const_start + const_width; ++i)
1337 if (bitmap_bit_p (s_info->positions_needed.large.bmap, i))
1338 return false;
1339 return true;
1341 else
1343 unsigned HOST_WIDE_INT mask
1344 = lowpart_bitmask (const_width) << const_start;
1345 return (s_info->positions_needed.small_bitmask & mask) == mask;
1350 static rtx get_stored_val (store_info *, machine_mode, poly_int64,
1351 poly_int64, basic_block, bool);
1354 /* BODY is an instruction pattern that belongs to INSN. Return 1 if
1355 there is a candidate store, after adding it to the appropriate
1356 local store group if so. */
1358 static int
1359 record_store (rtx body, bb_info_t bb_info)
1361 rtx mem, rhs, const_rhs, mem_addr;
1362 poly_int64 offset = 0;
1363 poly_int64 width = 0;
1364 insn_info_t insn_info = bb_info->last_insn;
1365 store_info *store_info = NULL;
1366 int group_id;
1367 cselib_val *base = NULL;
1368 insn_info_t ptr, last, redundant_reason;
1369 bool store_is_unused;
1371 if (GET_CODE (body) != SET && GET_CODE (body) != CLOBBER)
1372 return 0;
1374 mem = SET_DEST (body);
1376 /* If this is not used, then this cannot be used to keep the insn
1377 from being deleted. On the other hand, it does provide something
1378 that can be used to prove that another store is dead. */
1379 store_is_unused
1380 = (find_reg_note (insn_info->insn, REG_UNUSED, mem) != NULL);
1382 /* Check whether that value is a suitable memory location. */
1383 if (!MEM_P (mem))
1385 /* If the set or clobber is unused, then it does not effect our
1386 ability to get rid of the entire insn. */
1387 if (!store_is_unused)
1388 insn_info->cannot_delete = true;
1389 return 0;
1392 /* At this point we know mem is a mem. */
1393 if (GET_MODE (mem) == BLKmode)
1395 HOST_WIDE_INT const_size;
1396 if (GET_CODE (XEXP (mem, 0)) == SCRATCH)
1398 if (dump_file && (dump_flags & TDF_DETAILS))
1399 fprintf (dump_file, " adding wild read for (clobber (mem:BLK (scratch))\n");
1400 add_wild_read (bb_info);
1401 insn_info->cannot_delete = true;
1402 return 0;
1404 /* Handle (set (mem:BLK (addr) [... S36 ...]) (const_int 0))
1405 as memset (addr, 0, 36); */
1406 else if (!MEM_SIZE_KNOWN_P (mem)
1407 || maybe_le (MEM_SIZE (mem), 0)
1408 /* This is a limit on the bitmap size, which is only relevant
1409 for constant-sized MEMs. */
1410 || (MEM_SIZE (mem).is_constant (&const_size)
1411 && const_size > MAX_OFFSET)
1412 || GET_CODE (body) != SET
1413 || !CONST_INT_P (SET_SRC (body)))
1415 if (!store_is_unused)
1417 /* If the set or clobber is unused, then it does not effect our
1418 ability to get rid of the entire insn. */
1419 insn_info->cannot_delete = true;
1420 clear_rhs_from_active_local_stores ();
1422 return 0;
1426 /* We can still process a volatile mem, we just cannot delete it. */
1427 if (MEM_VOLATILE_P (mem))
1428 insn_info->cannot_delete = true;
1430 if (!canon_address (mem, &group_id, &offset, &base))
1432 clear_rhs_from_active_local_stores ();
1433 return 0;
1436 if (GET_MODE (mem) == BLKmode)
1437 width = MEM_SIZE (mem);
1438 else
1439 width = GET_MODE_SIZE (GET_MODE (mem));
1441 if (!endpoint_representable_p (offset, width))
1443 clear_rhs_from_active_local_stores ();
1444 return 0;
1447 if (known_eq (width, 0))
1448 return 0;
1450 if (group_id >= 0)
1452 /* In the restrictive case where the base is a constant or the
1453 frame pointer we can do global analysis. */
1455 group_info *group
1456 = rtx_group_vec[group_id];
1457 tree expr = MEM_EXPR (mem);
1459 store_info = rtx_store_info_pool.allocate ();
1460 set_usage_bits (group, offset, width, expr);
1462 if (dump_file && (dump_flags & TDF_DETAILS))
1464 fprintf (dump_file, " processing const base store gid=%d",
1465 group_id);
1466 print_range (dump_file, offset, width);
1467 fprintf (dump_file, "\n");
1470 else
1472 if (may_be_sp_based_p (XEXP (mem, 0)))
1473 insn_info->stack_pointer_based = true;
1474 insn_info->contains_cselib_groups = true;
1476 store_info = cse_store_info_pool.allocate ();
1477 group_id = -1;
1479 if (dump_file && (dump_flags & TDF_DETAILS))
1481 fprintf (dump_file, " processing cselib store ");
1482 print_range (dump_file, offset, width);
1483 fprintf (dump_file, "\n");
1487 const_rhs = rhs = NULL_RTX;
1488 if (GET_CODE (body) == SET
1489 /* No place to keep the value after ra. */
1490 && !reload_completed
1491 && (REG_P (SET_SRC (body))
1492 || GET_CODE (SET_SRC (body)) == SUBREG
1493 || CONSTANT_P (SET_SRC (body)))
1494 && !MEM_VOLATILE_P (mem)
1495 /* Sometimes the store and reload is used for truncation and
1496 rounding. */
1497 && !(FLOAT_MODE_P (GET_MODE (mem)) && (flag_float_store)))
1499 rhs = SET_SRC (body);
1500 if (CONSTANT_P (rhs))
1501 const_rhs = rhs;
1502 else if (body == PATTERN (insn_info->insn))
1504 rtx tem = find_reg_note (insn_info->insn, REG_EQUAL, NULL_RTX);
1505 if (tem && CONSTANT_P (XEXP (tem, 0)))
1506 const_rhs = XEXP (tem, 0);
1508 if (const_rhs == NULL_RTX && REG_P (rhs))
1510 rtx tem = cselib_expand_value_rtx (rhs, scratch, 5);
1512 if (tem && CONSTANT_P (tem))
1513 const_rhs = tem;
1514 else
1516 /* If RHS is set only once to a constant, set CONST_RHS
1517 to the constant. */
1518 rtx def_src = df_find_single_def_src (rhs);
1519 if (def_src != nullptr && CONSTANT_P (def_src))
1520 const_rhs = def_src;
1525 /* Check to see if this stores causes some other stores to be
1526 dead. */
1527 ptr = active_local_stores;
1528 last = NULL;
1529 redundant_reason = NULL;
1530 unsigned char addrspace = MEM_ADDR_SPACE (mem);
1531 mem = canon_rtx (mem);
1533 if (group_id < 0)
1534 mem_addr = base->val_rtx;
1535 else
1537 group_info *group = rtx_group_vec[group_id];
1538 mem_addr = group->canon_base_addr;
1540 if (maybe_ne (offset, 0))
1541 mem_addr = plus_constant (get_address_mode (mem), mem_addr, offset);
1543 while (ptr)
1545 insn_info_t next = ptr->next_local_store;
1546 class store_info *s_info = ptr->store_rec;
1547 bool del = true;
1549 /* Skip the clobbers. We delete the active insn if this insn
1550 shadows the set. To have been put on the active list, it
1551 has exactly on set. */
1552 while (!s_info->is_set)
1553 s_info = s_info->next;
1555 if (s_info->group_id == group_id
1556 && s_info->cse_base == base
1557 && s_info->addrspace == addrspace)
1559 HOST_WIDE_INT i;
1560 if (dump_file && (dump_flags & TDF_DETAILS))
1562 fprintf (dump_file, " trying store in insn=%d gid=%d",
1563 INSN_UID (ptr->insn), s_info->group_id);
1564 print_range (dump_file, s_info->offset, s_info->width);
1565 fprintf (dump_file, "\n");
1568 /* Even if PTR won't be eliminated as unneeded, if both
1569 PTR and this insn store the same constant value, we might
1570 eliminate this insn instead. */
1571 if (s_info->const_rhs
1572 && const_rhs
1573 && known_subrange_p (offset, width,
1574 s_info->offset, s_info->width)
1575 && all_positions_needed_p (s_info, offset - s_info->offset,
1576 width)
1577 /* We can only remove the later store if the earlier aliases
1578 at least all accesses the later one. */
1579 && mems_same_for_tbaa_p (s_info->mem, mem))
1581 if (GET_MODE (mem) == BLKmode)
1583 if (GET_MODE (s_info->mem) == BLKmode
1584 && s_info->const_rhs == const_rhs)
1585 redundant_reason = ptr;
1587 else if (s_info->const_rhs == const0_rtx
1588 && const_rhs == const0_rtx)
1589 redundant_reason = ptr;
1590 else
1592 rtx val;
1593 start_sequence ();
1594 val = get_stored_val (s_info, GET_MODE (mem), offset, width,
1595 BLOCK_FOR_INSN (insn_info->insn),
1596 true);
1597 if (get_insns () != NULL)
1598 val = NULL_RTX;
1599 end_sequence ();
1600 if (val && rtx_equal_p (val, const_rhs))
1601 redundant_reason = ptr;
1605 HOST_WIDE_INT begin_unneeded, const_s_width, const_width;
1606 if (known_subrange_p (s_info->offset, s_info->width, offset, width))
1607 /* The new store touches every byte that S_INFO does. */
1608 set_all_positions_unneeded (s_info);
1609 else if ((offset - s_info->offset).is_constant (&begin_unneeded)
1610 && s_info->width.is_constant (&const_s_width)
1611 && width.is_constant (&const_width))
1613 HOST_WIDE_INT end_unneeded = begin_unneeded + const_width;
1614 begin_unneeded = MAX (begin_unneeded, 0);
1615 end_unneeded = MIN (end_unneeded, const_s_width);
1616 for (i = begin_unneeded; i < end_unneeded; ++i)
1617 set_position_unneeded (s_info, i);
1619 else
1621 /* We don't know which parts of S_INFO are needed and
1622 which aren't, so invalidate the RHS. */
1623 s_info->rhs = NULL;
1624 s_info->const_rhs = NULL;
1627 else if (s_info->rhs)
1628 /* Need to see if it is possible for this store to overwrite
1629 the value of store_info. If it is, set the rhs to NULL to
1630 keep it from being used to remove a load. */
1632 if (canon_output_dependence (s_info->mem, true,
1633 mem, GET_MODE (mem),
1634 mem_addr))
1636 s_info->rhs = NULL;
1637 s_info->const_rhs = NULL;
1641 /* An insn can be deleted if every position of every one of
1642 its s_infos is zero. */
1643 if (any_positions_needed_p (s_info))
1644 del = false;
1646 if (del)
1648 insn_info_t insn_to_delete = ptr;
1650 active_local_stores_len--;
1651 if (last)
1652 last->next_local_store = ptr->next_local_store;
1653 else
1654 active_local_stores = ptr->next_local_store;
1656 if (!insn_to_delete->cannot_delete)
1657 delete_dead_store_insn (insn_to_delete);
1659 else
1660 last = ptr;
1662 ptr = next;
1665 /* Finish filling in the store_info. */
1666 store_info->next = insn_info->store_rec;
1667 insn_info->store_rec = store_info;
1668 store_info->mem = mem;
1669 store_info->mem_addr = mem_addr;
1670 store_info->cse_base = base;
1671 HOST_WIDE_INT const_width;
1672 if (!width.is_constant (&const_width))
1674 store_info->is_large = true;
1675 store_info->positions_needed.large.count = 0;
1676 store_info->positions_needed.large.bmap = NULL;
1678 else if (const_width > HOST_BITS_PER_WIDE_INT)
1680 store_info->is_large = true;
1681 store_info->positions_needed.large.count = 0;
1682 store_info->positions_needed.large.bmap = BITMAP_ALLOC (&dse_bitmap_obstack);
1684 else
1686 store_info->is_large = false;
1687 store_info->positions_needed.small_bitmask
1688 = lowpart_bitmask (const_width);
1690 store_info->group_id = group_id;
1691 store_info->offset = offset;
1692 store_info->width = width;
1693 store_info->is_set = GET_CODE (body) == SET;
1694 store_info->rhs = rhs;
1695 store_info->const_rhs = const_rhs;
1696 store_info->redundant_reason = redundant_reason;
1697 store_info->addrspace = addrspace;
1699 /* If this is a clobber, we return 0. We will only be able to
1700 delete this insn if there is only one store USED store, but we
1701 can use the clobber to delete other stores earlier. */
1702 return store_info->is_set ? 1 : 0;
1706 static void
1707 dump_insn_info (const char * start, insn_info_t insn_info)
1709 fprintf (dump_file, "%s insn=%d %s\n", start,
1710 INSN_UID (insn_info->insn),
1711 insn_info->store_rec ? "has store" : "naked");
1715 /* If the modes are different and the value's source and target do not
1716 line up, we need to extract the value from lower part of the rhs of
1717 the store, shift it, and then put it into a form that can be shoved
1718 into the read_insn. This function generates a right SHIFT of a
1719 value that is at least ACCESS_SIZE bytes wide of READ_MODE. The
1720 shift sequence is returned or NULL if we failed to find a
1721 shift. */
1723 static rtx
1724 find_shift_sequence (poly_int64 access_size,
1725 store_info *store_info,
1726 machine_mode read_mode,
1727 poly_int64 shift, bool speed, bool require_cst)
1729 machine_mode store_mode = GET_MODE (store_info->mem);
1730 scalar_int_mode new_mode;
1731 rtx read_reg = NULL;
1733 /* If a constant was stored into memory, try to simplify it here,
1734 otherwise the cost of the shift might preclude this optimization
1735 e.g. at -Os, even when no actual shift will be needed. */
1736 if (store_info->const_rhs)
1738 auto new_mode = smallest_int_mode_for_size (access_size * BITS_PER_UNIT);
1739 auto byte = subreg_lowpart_offset (new_mode, store_mode);
1740 rtx ret
1741 = simplify_subreg (new_mode, store_info->const_rhs, store_mode, byte);
1742 if (ret && CONSTANT_P (ret))
1744 rtx shift_rtx = gen_int_shift_amount (new_mode, shift);
1745 ret = simplify_const_binary_operation (LSHIFTRT, new_mode, ret,
1746 shift_rtx);
1747 if (ret && CONSTANT_P (ret))
1749 byte = subreg_lowpart_offset (read_mode, new_mode);
1750 ret = simplify_subreg (read_mode, ret, new_mode, byte);
1751 if (ret && CONSTANT_P (ret)
1752 && (set_src_cost (ret, read_mode, speed)
1753 <= COSTS_N_INSNS (1)))
1754 return ret;
1759 if (require_cst)
1760 return NULL_RTX;
1762 /* Some machines like the x86 have shift insns for each size of
1763 operand. Other machines like the ppc or the ia-64 may only have
1764 shift insns that shift values within 32 or 64 bit registers.
1765 This loop tries to find the smallest shift insn that will right
1766 justify the value we want to read but is available in one insn on
1767 the machine. */
1769 opt_scalar_int_mode new_mode_iter;
1770 FOR_EACH_MODE_IN_CLASS (new_mode_iter, MODE_INT)
1772 rtx target, new_reg, new_lhs;
1773 rtx_insn *shift_seq, *insn;
1774 int cost;
1776 new_mode = new_mode_iter.require ();
1777 if (GET_MODE_BITSIZE (new_mode) > BITS_PER_WORD)
1778 break;
1779 if (maybe_lt (GET_MODE_SIZE (new_mode), GET_MODE_SIZE (read_mode)))
1780 continue;
1782 /* Try a wider mode if truncating the store mode to NEW_MODE
1783 requires a real instruction. */
1784 if (maybe_lt (GET_MODE_SIZE (new_mode), GET_MODE_SIZE (store_mode))
1785 && !TRULY_NOOP_TRUNCATION_MODES_P (new_mode, store_mode))
1786 continue;
1788 /* Also try a wider mode if the necessary punning is either not
1789 desirable or not possible. */
1790 if (!CONSTANT_P (store_info->rhs)
1791 && !targetm.modes_tieable_p (new_mode, store_mode))
1792 continue;
1794 if (multiple_p (shift, GET_MODE_BITSIZE (new_mode))
1795 && known_le (GET_MODE_SIZE (new_mode), GET_MODE_SIZE (store_mode)))
1797 /* Try to implement the shift using a subreg. */
1798 poly_int64 offset
1799 = subreg_offset_from_lsb (new_mode, store_mode, shift);
1800 rtx rhs_subreg = simplify_gen_subreg (new_mode, store_info->rhs,
1801 store_mode, offset);
1802 if (rhs_subreg)
1804 read_reg
1805 = extract_low_bits (read_mode, new_mode, copy_rtx (rhs_subreg));
1806 break;
1810 if (maybe_lt (GET_MODE_SIZE (new_mode), access_size))
1811 continue;
1813 new_reg = gen_reg_rtx (new_mode);
1815 start_sequence ();
1817 /* In theory we could also check for an ashr. Ian Taylor knows
1818 of one dsp where the cost of these two was not the same. But
1819 this really is a rare case anyway. */
1820 target = expand_binop (new_mode, lshr_optab, new_reg,
1821 gen_int_shift_amount (new_mode, shift),
1822 new_reg, 1, OPTAB_DIRECT);
1824 shift_seq = get_insns ();
1825 end_sequence ();
1827 if (target != new_reg || shift_seq == NULL)
1828 continue;
1830 cost = 0;
1831 for (insn = shift_seq; insn != NULL_RTX; insn = NEXT_INSN (insn))
1832 if (INSN_P (insn))
1833 cost += insn_cost (insn, speed);
1835 /* The computation up to here is essentially independent
1836 of the arguments and could be precomputed. It may
1837 not be worth doing so. We could precompute if
1838 worthwhile or at least cache the results. The result
1839 technically depends on both SHIFT and ACCESS_SIZE,
1840 but in practice the answer will depend only on ACCESS_SIZE. */
1842 if (cost > COSTS_N_INSNS (1))
1843 continue;
1845 new_lhs = extract_low_bits (new_mode, store_mode,
1846 copy_rtx (store_info->rhs));
1847 if (new_lhs == NULL_RTX)
1848 continue;
1850 /* We found an acceptable shift. Generate a move to
1851 take the value from the store and put it into the
1852 shift pseudo, then shift it, then generate another
1853 move to put in into the target of the read. */
1854 emit_move_insn (new_reg, new_lhs);
1855 emit_insn (shift_seq);
1856 read_reg = extract_low_bits (read_mode, new_mode, new_reg);
1857 break;
1860 return read_reg;
1864 /* Call back for note_stores to find the hard regs set or clobbered by
1865 insn. Data is a bitmap of the hardregs set so far. */
1867 static void
1868 look_for_hardregs (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
1870 bitmap regs_set = (bitmap) data;
1872 if (REG_P (x)
1873 && HARD_REGISTER_P (x))
1874 bitmap_set_range (regs_set, REGNO (x), REG_NREGS (x));
1877 /* Helper function for replace_read and record_store.
1878 Attempt to return a value of mode READ_MODE stored in STORE_INFO,
1879 consisting of READ_WIDTH bytes starting from READ_OFFSET. Return NULL
1880 if not successful. If REQUIRE_CST is true, return always constant. */
1882 static rtx
1883 get_stored_val (store_info *store_info, machine_mode read_mode,
1884 poly_int64 read_offset, poly_int64 read_width,
1885 basic_block bb, bool require_cst)
1887 machine_mode store_mode = GET_MODE (store_info->mem);
1888 poly_int64 gap;
1889 rtx read_reg;
1891 /* To get here the read is within the boundaries of the write so
1892 shift will never be negative. Start out with the shift being in
1893 bytes. */
1894 if (store_mode == BLKmode)
1895 gap = 0;
1896 else if (BYTES_BIG_ENDIAN)
1897 gap = ((store_info->offset + store_info->width)
1898 - (read_offset + read_width));
1899 else
1900 gap = read_offset - store_info->offset;
1902 if (gap.is_constant () && maybe_ne (gap, 0))
1904 poly_int64 shift = gap * BITS_PER_UNIT;
1905 poly_int64 access_size = GET_MODE_SIZE (read_mode) + gap;
1906 read_reg = find_shift_sequence (access_size, store_info, read_mode,
1907 shift, optimize_bb_for_speed_p (bb),
1908 require_cst);
1910 else if (store_mode == BLKmode)
1912 /* The store is a memset (addr, const_val, const_size). */
1913 gcc_assert (CONST_INT_P (store_info->rhs));
1914 scalar_int_mode int_store_mode;
1915 if (!int_mode_for_mode (read_mode).exists (&int_store_mode))
1916 read_reg = NULL_RTX;
1917 else if (store_info->rhs == const0_rtx)
1918 read_reg = extract_low_bits (read_mode, int_store_mode, const0_rtx);
1919 else if (GET_MODE_BITSIZE (int_store_mode) > HOST_BITS_PER_WIDE_INT
1920 || BITS_PER_UNIT >= HOST_BITS_PER_WIDE_INT)
1921 read_reg = NULL_RTX;
1922 else
1924 unsigned HOST_WIDE_INT c
1925 = INTVAL (store_info->rhs)
1926 & ((HOST_WIDE_INT_1 << BITS_PER_UNIT) - 1);
1927 int shift = BITS_PER_UNIT;
1928 while (shift < HOST_BITS_PER_WIDE_INT)
1930 c |= (c << shift);
1931 shift <<= 1;
1933 read_reg = gen_int_mode (c, int_store_mode);
1934 read_reg = extract_low_bits (read_mode, int_store_mode, read_reg);
1937 else if (store_info->const_rhs
1938 && (require_cst
1939 || GET_MODE_CLASS (read_mode) != GET_MODE_CLASS (store_mode)))
1940 read_reg = extract_low_bits (read_mode, store_mode,
1941 copy_rtx (store_info->const_rhs));
1942 else
1943 read_reg = extract_low_bits (read_mode, store_mode,
1944 copy_rtx (store_info->rhs));
1945 if (require_cst && read_reg && !CONSTANT_P (read_reg))
1946 read_reg = NULL_RTX;
1947 return read_reg;
1950 /* Take a sequence of:
1951 A <- r1
1953 ... <- A
1955 and change it into
1956 r2 <- r1
1957 A <- r1
1959 ... <- r2
1963 r3 <- extract (r1)
1964 r3 <- r3 >> shift
1965 r2 <- extract (r3)
1966 ... <- r2
1970 r2 <- extract (r1)
1971 ... <- r2
1973 Depending on the alignment and the mode of the store and
1974 subsequent load.
1977 The STORE_INFO and STORE_INSN are for the store and READ_INFO
1978 and READ_INSN are for the read. Return true if the replacement
1979 went ok. */
1981 static bool
1982 replace_read (store_info *store_info, insn_info_t store_insn,
1983 read_info_t read_info, insn_info_t read_insn, rtx *loc)
1985 machine_mode store_mode = GET_MODE (store_info->mem);
1986 machine_mode read_mode = GET_MODE (read_info->mem);
1987 rtx_insn *insns, *this_insn;
1988 rtx read_reg;
1989 basic_block bb;
1991 if (!dbg_cnt (dse))
1992 return false;
1994 /* Create a sequence of instructions to set up the read register.
1995 This sequence goes immediately before the store and its result
1996 is read by the load.
1998 We need to keep this in perspective. We are replacing a read
1999 with a sequence of insns, but the read will almost certainly be
2000 in cache, so it is not going to be an expensive one. Thus, we
2001 are not willing to do a multi insn shift or worse a subroutine
2002 call to get rid of the read. */
2003 if (dump_file && (dump_flags & TDF_DETAILS))
2004 fprintf (dump_file, "trying to replace %smode load in insn %d"
2005 " from %smode store in insn %d\n",
2006 GET_MODE_NAME (read_mode), INSN_UID (read_insn->insn),
2007 GET_MODE_NAME (store_mode), INSN_UID (store_insn->insn));
2008 start_sequence ();
2009 bb = BLOCK_FOR_INSN (read_insn->insn);
2010 read_reg = get_stored_val (store_info,
2011 read_mode, read_info->offset, read_info->width,
2012 bb, false);
2013 if (read_reg == NULL_RTX)
2015 end_sequence ();
2016 if (dump_file && (dump_flags & TDF_DETAILS))
2017 fprintf (dump_file, " -- could not extract bits of stored value\n");
2018 return false;
2020 /* Force the value into a new register so that it won't be clobbered
2021 between the store and the load. */
2022 if (WORD_REGISTER_OPERATIONS
2023 && GET_CODE (read_reg) == SUBREG
2024 && REG_P (SUBREG_REG (read_reg))
2025 && GET_MODE (SUBREG_REG (read_reg)) == word_mode)
2027 /* For WORD_REGISTER_OPERATIONS with subreg of word_mode register
2028 force SUBREG_REG into a new register rather than the SUBREG. */
2029 rtx r = copy_to_mode_reg (word_mode, SUBREG_REG (read_reg));
2030 read_reg = shallow_copy_rtx (read_reg);
2031 SUBREG_REG (read_reg) = r;
2033 else
2034 read_reg = copy_to_mode_reg (read_mode, read_reg);
2035 insns = get_insns ();
2036 end_sequence ();
2038 if (insns != NULL_RTX)
2040 /* Now we have to scan the set of new instructions to see if the
2041 sequence contains and sets of hardregs that happened to be
2042 live at this point. For instance, this can happen if one of
2043 the insns sets the CC and the CC happened to be live at that
2044 point. This does occasionally happen, see PR 37922. */
2045 bitmap regs_set = BITMAP_ALLOC (&reg_obstack);
2047 for (this_insn = insns;
2048 this_insn != NULL_RTX; this_insn = NEXT_INSN (this_insn))
2050 if (insn_invalid_p (this_insn, false))
2052 if (dump_file && (dump_flags & TDF_DETAILS))
2054 fprintf (dump_file, " -- replacing the loaded MEM with ");
2055 print_simple_rtl (dump_file, read_reg);
2056 fprintf (dump_file, " led to an invalid instruction\n");
2058 BITMAP_FREE (regs_set);
2059 return false;
2061 note_stores (this_insn, look_for_hardregs, regs_set);
2064 if (store_insn->fixed_regs_live)
2065 bitmap_and_into (regs_set, store_insn->fixed_regs_live);
2066 if (!bitmap_empty_p (regs_set))
2068 if (dump_file && (dump_flags & TDF_DETAILS))
2070 fprintf (dump_file, "abandoning replacement because sequence "
2071 "clobbers live hardregs:");
2072 df_print_regset (dump_file, regs_set);
2075 BITMAP_FREE (regs_set);
2076 return false;
2078 BITMAP_FREE (regs_set);
2081 subrtx_iterator::array_type array;
2082 FOR_EACH_SUBRTX (iter, array, *loc, NONCONST)
2084 const_rtx x = *iter;
2085 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
2087 if (dump_file && (dump_flags & TDF_DETAILS))
2088 fprintf (dump_file, " -- replacing the MEM failed due to address "
2089 "side-effects\n");
2090 return false;
2094 if (validate_change (read_insn->insn, loc, read_reg, 0))
2096 deferred_change *change = deferred_change_pool.allocate ();
2098 /* Insert this right before the store insn where it will be safe
2099 from later insns that might change it before the read. */
2100 emit_insn_before (insns, store_insn->insn);
2102 /* And now for the kludge part: cselib croaks if you just
2103 return at this point. There are two reasons for this:
2105 1) Cselib has an idea of how many pseudos there are and
2106 that does not include the new ones we just added.
2108 2) Cselib does not know about the move insn we added
2109 above the store_info, and there is no way to tell it
2110 about it, because it has "moved on".
2112 Problem (1) is fixable with a certain amount of engineering.
2113 Problem (2) is requires starting the bb from scratch. This
2114 could be expensive.
2116 So we are just going to have to lie. The move/extraction
2117 insns are not really an issue, cselib did not see them. But
2118 the use of the new pseudo read_insn is a real problem because
2119 cselib has not scanned this insn. The way that we solve this
2120 problem is that we are just going to put the mem back for now
2121 and when we are finished with the block, we undo this. We
2122 keep a table of mems to get rid of. At the end of the basic
2123 block we can put them back. */
2125 *loc = read_info->mem;
2126 change->next = deferred_change_list;
2127 deferred_change_list = change;
2128 change->loc = loc;
2129 change->reg = read_reg;
2131 /* Get rid of the read_info, from the point of view of the
2132 rest of dse, play like this read never happened. */
2133 read_insn->read_rec = read_info->next;
2134 read_info_type_pool.remove (read_info);
2135 if (dump_file && (dump_flags & TDF_DETAILS))
2137 fprintf (dump_file, " -- replaced the loaded MEM with ");
2138 print_simple_rtl (dump_file, read_reg);
2139 fprintf (dump_file, "\n");
2141 return true;
2143 else
2145 if (dump_file && (dump_flags & TDF_DETAILS))
2147 fprintf (dump_file, " -- replacing the loaded MEM with ");
2148 print_simple_rtl (dump_file, read_reg);
2149 fprintf (dump_file, " led to an invalid instruction\n");
2151 return false;
2155 /* Check the address of MEM *LOC and kill any appropriate stores that may
2156 be active. */
2158 static void
2159 check_mem_read_rtx (rtx *loc, bb_info_t bb_info)
2161 rtx mem = *loc, mem_addr;
2162 insn_info_t insn_info;
2163 poly_int64 offset = 0;
2164 poly_int64 width = 0;
2165 cselib_val *base = NULL;
2166 int group_id;
2167 read_info_t read_info;
2169 insn_info = bb_info->last_insn;
2171 if ((MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
2172 || MEM_VOLATILE_P (mem))
2174 if (crtl->stack_protect_guard
2175 && (MEM_EXPR (mem) == crtl->stack_protect_guard
2176 || (crtl->stack_protect_guard_decl
2177 && MEM_EXPR (mem) == crtl->stack_protect_guard_decl))
2178 && MEM_VOLATILE_P (mem))
2180 /* This is either the stack protector canary on the stack,
2181 which ought to be written by a MEM_VOLATILE_P store and
2182 thus shouldn't be deleted and is read at the very end of
2183 function, but shouldn't conflict with any other store.
2184 Or it is __stack_chk_guard variable or TLS or whatever else
2185 MEM holding the canary value, which really shouldn't be
2186 ever modified in -fstack-protector* protected functions,
2187 otherwise the prologue store wouldn't match the epilogue
2188 check. */
2189 if (dump_file && (dump_flags & TDF_DETAILS))
2190 fprintf (dump_file, " stack protector canary read ignored.\n");
2191 insn_info->cannot_delete = true;
2192 return;
2195 if (dump_file && (dump_flags & TDF_DETAILS))
2196 fprintf (dump_file, " adding wild read, volatile or barrier.\n");
2197 add_wild_read (bb_info);
2198 insn_info->cannot_delete = true;
2199 return;
2202 /* If it is reading readonly mem, then there can be no conflict with
2203 another write. */
2204 if (MEM_READONLY_P (mem))
2205 return;
2207 if (!canon_address (mem, &group_id, &offset, &base))
2209 if (dump_file && (dump_flags & TDF_DETAILS))
2210 fprintf (dump_file, " adding wild read, canon_address failure.\n");
2211 add_wild_read (bb_info);
2212 return;
2215 if (GET_MODE (mem) == BLKmode)
2216 width = -1;
2217 else
2218 width = GET_MODE_SIZE (GET_MODE (mem));
2220 if (!endpoint_representable_p (offset, known_eq (width, -1) ? 1 : width))
2222 if (dump_file && (dump_flags & TDF_DETAILS))
2223 fprintf (dump_file, " adding wild read, due to overflow.\n");
2224 add_wild_read (bb_info);
2225 return;
2228 read_info = read_info_type_pool.allocate ();
2229 read_info->group_id = group_id;
2230 read_info->mem = mem;
2231 read_info->offset = offset;
2232 read_info->width = width;
2233 read_info->next = insn_info->read_rec;
2234 insn_info->read_rec = read_info;
2235 if (group_id < 0)
2236 mem_addr = base->val_rtx;
2237 else
2239 group_info *group = rtx_group_vec[group_id];
2240 mem_addr = group->canon_base_addr;
2242 if (maybe_ne (offset, 0))
2243 mem_addr = plus_constant (get_address_mode (mem), mem_addr, offset);
2244 /* Avoid passing VALUE RTXen as mem_addr to canon_true_dependence
2245 which will over and over re-create proper RTL and re-apply the
2246 offset above. See PR80960 where we almost allocate 1.6GB of PLUS
2247 RTXen that way. */
2248 mem_addr = get_addr (mem_addr);
2250 if (group_id >= 0)
2252 /* This is the restricted case where the base is a constant or
2253 the frame pointer and offset is a constant. */
2254 insn_info_t i_ptr = active_local_stores;
2255 insn_info_t last = NULL;
2257 if (dump_file && (dump_flags & TDF_DETAILS))
2259 if (!known_size_p (width))
2260 fprintf (dump_file, " processing const load gid=%d[BLK]\n",
2261 group_id);
2262 else
2264 fprintf (dump_file, " processing const load gid=%d", group_id);
2265 print_range (dump_file, offset, width);
2266 fprintf (dump_file, "\n");
2270 while (i_ptr)
2272 bool remove = false;
2273 store_info *store_info = i_ptr->store_rec;
2275 /* Skip the clobbers. */
2276 while (!store_info->is_set)
2277 store_info = store_info->next;
2279 /* There are three cases here. */
2280 if (store_info->group_id < 0)
2281 /* We have a cselib store followed by a read from a
2282 const base. */
2283 remove
2284 = canon_true_dependence (store_info->mem,
2285 GET_MODE (store_info->mem),
2286 store_info->mem_addr,
2287 mem, mem_addr);
2289 else if (group_id == store_info->group_id)
2291 /* This is a block mode load. We may get lucky and
2292 canon_true_dependence may save the day. */
2293 if (!known_size_p (width))
2294 remove
2295 = canon_true_dependence (store_info->mem,
2296 GET_MODE (store_info->mem),
2297 store_info->mem_addr,
2298 mem, mem_addr);
2300 /* If this read is just reading back something that we just
2301 stored, rewrite the read. */
2302 else
2304 if (store_info->rhs
2305 && known_subrange_p (offset, width, store_info->offset,
2306 store_info->width)
2307 && all_positions_needed_p (store_info,
2308 offset - store_info->offset,
2309 width)
2310 && replace_read (store_info, i_ptr, read_info,
2311 insn_info, loc))
2312 return;
2314 /* The bases are the same, just see if the offsets
2315 could overlap. */
2316 if (ranges_maybe_overlap_p (offset, width,
2317 store_info->offset,
2318 store_info->width))
2319 remove = true;
2323 /* else
2324 The else case that is missing here is that the
2325 bases are constant but different. There is nothing
2326 to do here because there is no overlap. */
2328 if (remove)
2330 if (dump_file && (dump_flags & TDF_DETAILS))
2331 dump_insn_info ("removing from active", i_ptr);
2333 active_local_stores_len--;
2334 if (last)
2335 last->next_local_store = i_ptr->next_local_store;
2336 else
2337 active_local_stores = i_ptr->next_local_store;
2339 else
2340 last = i_ptr;
2341 i_ptr = i_ptr->next_local_store;
2344 else
2346 insn_info_t i_ptr = active_local_stores;
2347 insn_info_t last = NULL;
2348 if (dump_file && (dump_flags & TDF_DETAILS))
2350 fprintf (dump_file, " processing cselib load mem:");
2351 print_inline_rtx (dump_file, mem, 0);
2352 fprintf (dump_file, "\n");
2355 while (i_ptr)
2357 bool remove = false;
2358 store_info *store_info = i_ptr->store_rec;
2360 if (dump_file && (dump_flags & TDF_DETAILS))
2361 fprintf (dump_file, " processing cselib load against insn %d\n",
2362 INSN_UID (i_ptr->insn));
2364 /* Skip the clobbers. */
2365 while (!store_info->is_set)
2366 store_info = store_info->next;
2368 /* If this read is just reading back something that we just
2369 stored, rewrite the read. */
2370 if (store_info->rhs
2371 && store_info->group_id == -1
2372 && store_info->cse_base == base
2373 && known_subrange_p (offset, width, store_info->offset,
2374 store_info->width)
2375 && all_positions_needed_p (store_info,
2376 offset - store_info->offset, width)
2377 && replace_read (store_info, i_ptr, read_info, insn_info, loc))
2378 return;
2380 remove = canon_true_dependence (store_info->mem,
2381 GET_MODE (store_info->mem),
2382 store_info->mem_addr,
2383 mem, mem_addr);
2385 if (remove)
2387 if (dump_file && (dump_flags & TDF_DETAILS))
2388 dump_insn_info ("removing from active", i_ptr);
2390 active_local_stores_len--;
2391 if (last)
2392 last->next_local_store = i_ptr->next_local_store;
2393 else
2394 active_local_stores = i_ptr->next_local_store;
2396 else
2397 last = i_ptr;
2398 i_ptr = i_ptr->next_local_store;
2403 /* A note_uses callback in which DATA points the INSN_INFO for
2404 as check_mem_read_rtx. Nullify the pointer if i_m_r_m_r returns
2405 true for any part of *LOC. */
2407 static void
2408 check_mem_read_use (rtx *loc, void *data)
2410 subrtx_ptr_iterator::array_type array;
2411 FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
2413 rtx *loc = *iter;
2414 if (MEM_P (*loc))
2415 check_mem_read_rtx (loc, (bb_info_t) data);
2420 /* Get arguments passed to CALL_INSN. Return TRUE if successful.
2421 So far it only handles arguments passed in registers. */
2423 static bool
2424 get_call_args (rtx call_insn, tree fn, rtx *args, int nargs)
2426 CUMULATIVE_ARGS args_so_far_v;
2427 cumulative_args_t args_so_far;
2428 tree arg;
2429 int idx;
2431 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
2432 args_so_far = pack_cumulative_args (&args_so_far_v);
2434 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
2435 for (idx = 0;
2436 arg != void_list_node && idx < nargs;
2437 arg = TREE_CHAIN (arg), idx++)
2439 scalar_int_mode mode;
2440 rtx reg, link, tmp;
2442 if (!is_int_mode (TYPE_MODE (TREE_VALUE (arg)), &mode))
2443 return false;
2445 function_arg_info arg (mode, /*named=*/true);
2446 reg = targetm.calls.function_arg (args_so_far, arg);
2447 if (!reg || !REG_P (reg) || GET_MODE (reg) != mode)
2448 return false;
2450 for (link = CALL_INSN_FUNCTION_USAGE (call_insn);
2451 link;
2452 link = XEXP (link, 1))
2453 if (GET_CODE (XEXP (link, 0)) == USE)
2455 scalar_int_mode arg_mode;
2456 args[idx] = XEXP (XEXP (link, 0), 0);
2457 if (REG_P (args[idx])
2458 && REGNO (args[idx]) == REGNO (reg)
2459 && (GET_MODE (args[idx]) == mode
2460 || (is_int_mode (GET_MODE (args[idx]), &arg_mode)
2461 && (GET_MODE_SIZE (arg_mode) <= UNITS_PER_WORD)
2462 && (GET_MODE_SIZE (arg_mode) > GET_MODE_SIZE (mode)))))
2463 break;
2465 if (!link)
2466 return false;
2468 tmp = cselib_expand_value_rtx (args[idx], scratch, 5);
2469 if (GET_MODE (args[idx]) != mode)
2471 if (!tmp || !CONST_INT_P (tmp))
2472 return false;
2473 tmp = gen_int_mode (INTVAL (tmp), mode);
2475 if (tmp)
2476 args[idx] = tmp;
2478 targetm.calls.function_arg_advance (args_so_far, arg);
2480 if (arg != void_list_node || idx != nargs)
2481 return false;
2482 return true;
2485 /* Return a bitmap of the fixed registers contained in IN. */
2487 static bitmap
2488 copy_fixed_regs (const_bitmap in)
2490 bitmap ret;
2492 ret = ALLOC_REG_SET (NULL);
2493 bitmap_and (ret, in, bitmap_view<HARD_REG_SET> (fixed_reg_set));
2494 return ret;
2497 /* Apply record_store to all candidate stores in INSN. Mark INSN
2498 if some part of it is not a candidate store and assigns to a
2499 non-register target. */
2501 static void
2502 scan_insn (bb_info_t bb_info, rtx_insn *insn, int max_active_local_stores)
2504 rtx body;
2505 insn_info_type *insn_info = insn_info_type_pool.allocate ();
2506 int mems_found = 0;
2507 memset (insn_info, 0, sizeof (struct insn_info_type));
2509 if (dump_file && (dump_flags & TDF_DETAILS))
2510 fprintf (dump_file, "\n**scanning insn=%d\n",
2511 INSN_UID (insn));
2513 insn_info->prev_insn = bb_info->last_insn;
2514 insn_info->insn = insn;
2515 bb_info->last_insn = insn_info;
2517 if (DEBUG_INSN_P (insn))
2519 insn_info->cannot_delete = true;
2520 return;
2523 /* Look at all of the uses in the insn. */
2524 note_uses (&PATTERN (insn), check_mem_read_use, bb_info);
2526 if (CALL_P (insn))
2528 bool const_call;
2529 rtx call, sym;
2530 tree memset_call = NULL_TREE;
2532 insn_info->cannot_delete = true;
2534 /* Const functions cannot do anything bad i.e. read memory,
2535 however, they can read their parameters which may have
2536 been pushed onto the stack.
2537 memset and bzero don't read memory either. */
2538 const_call = RTL_CONST_CALL_P (insn);
2539 if (!const_call
2540 && (call = get_call_rtx_from (insn))
2541 && (sym = XEXP (XEXP (call, 0), 0))
2542 && GET_CODE (sym) == SYMBOL_REF
2543 && SYMBOL_REF_DECL (sym)
2544 && TREE_CODE (SYMBOL_REF_DECL (sym)) == FUNCTION_DECL
2545 && fndecl_built_in_p (SYMBOL_REF_DECL (sym), BUILT_IN_MEMSET))
2546 memset_call = SYMBOL_REF_DECL (sym);
2548 if (const_call || memset_call)
2550 insn_info_t i_ptr = active_local_stores;
2551 insn_info_t last = NULL;
2553 if (dump_file && (dump_flags & TDF_DETAILS))
2554 fprintf (dump_file, "%s call %d\n",
2555 const_call ? "const" : "memset", INSN_UID (insn));
2557 /* See the head comment of the frame_read field. */
2558 if (reload_completed
2559 /* Tail calls are storing their arguments using
2560 arg pointer. If it is a frame pointer on the target,
2561 even before reload we need to kill frame pointer based
2562 stores. */
2563 || (SIBLING_CALL_P (insn)
2564 && HARD_FRAME_POINTER_IS_ARG_POINTER))
2565 insn_info->frame_read = true;
2567 /* Loop over the active stores and remove those which are
2568 killed by the const function call. */
2569 while (i_ptr)
2571 bool remove_store = false;
2573 /* The stack pointer based stores are always killed. */
2574 if (i_ptr->stack_pointer_based)
2575 remove_store = true;
2577 /* If the frame is read, the frame related stores are killed. */
2578 else if (insn_info->frame_read)
2580 store_info *store_info = i_ptr->store_rec;
2582 /* Skip the clobbers. */
2583 while (!store_info->is_set)
2584 store_info = store_info->next;
2586 if (store_info->group_id >= 0
2587 && rtx_group_vec[store_info->group_id]->frame_related)
2588 remove_store = true;
2591 if (remove_store)
2593 if (dump_file && (dump_flags & TDF_DETAILS))
2594 dump_insn_info ("removing from active", i_ptr);
2596 active_local_stores_len--;
2597 if (last)
2598 last->next_local_store = i_ptr->next_local_store;
2599 else
2600 active_local_stores = i_ptr->next_local_store;
2602 else
2603 last = i_ptr;
2605 i_ptr = i_ptr->next_local_store;
2608 if (memset_call)
2610 rtx args[3];
2611 if (get_call_args (insn, memset_call, args, 3)
2612 && CONST_INT_P (args[1])
2613 && CONST_INT_P (args[2])
2614 && INTVAL (args[2]) > 0)
2616 rtx mem = gen_rtx_MEM (BLKmode, args[0]);
2617 set_mem_size (mem, INTVAL (args[2]));
2618 body = gen_rtx_SET (mem, args[1]);
2619 mems_found += record_store (body, bb_info);
2620 if (dump_file && (dump_flags & TDF_DETAILS))
2621 fprintf (dump_file, "handling memset as BLKmode store\n");
2622 if (mems_found == 1)
2624 if (active_local_stores_len++ >= max_active_local_stores)
2626 active_local_stores_len = 1;
2627 active_local_stores = NULL;
2629 insn_info->fixed_regs_live
2630 = copy_fixed_regs (bb_info->regs_live);
2631 insn_info->next_local_store = active_local_stores;
2632 active_local_stores = insn_info;
2635 else
2636 clear_rhs_from_active_local_stores ();
2639 else if (SIBLING_CALL_P (insn)
2640 && (reload_completed || HARD_FRAME_POINTER_IS_ARG_POINTER))
2641 /* Arguments for a sibling call that are pushed to memory are passed
2642 using the incoming argument pointer of the current function. After
2643 reload that might be (and likely is) frame pointer based. And, if
2644 it is a frame pointer on the target, even before reload we need to
2645 kill frame pointer based stores. */
2646 add_wild_read (bb_info);
2647 else
2648 /* Every other call, including pure functions, may read any memory
2649 that is not relative to the frame. */
2650 add_non_frame_wild_read (bb_info);
2652 return;
2655 /* Assuming that there are sets in these insns, we cannot delete
2656 them. */
2657 if ((GET_CODE (PATTERN (insn)) == CLOBBER)
2658 || volatile_refs_p (PATTERN (insn))
2659 || (!cfun->can_delete_dead_exceptions && !insn_nothrow_p (insn))
2660 || (RTX_FRAME_RELATED_P (insn))
2661 || find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX))
2662 insn_info->cannot_delete = true;
2664 body = PATTERN (insn);
2665 if (GET_CODE (body) == PARALLEL)
2667 int i;
2668 for (i = 0; i < XVECLEN (body, 0); i++)
2669 mems_found += record_store (XVECEXP (body, 0, i), bb_info);
2671 else
2672 mems_found += record_store (body, bb_info);
2674 if (dump_file && (dump_flags & TDF_DETAILS))
2675 fprintf (dump_file, "mems_found = %d, cannot_delete = %s\n",
2676 mems_found, insn_info->cannot_delete ? "true" : "false");
2678 /* If we found some sets of mems, add it into the active_local_stores so
2679 that it can be locally deleted if found dead or used for
2680 replace_read and redundant constant store elimination. Otherwise mark
2681 it as cannot delete. This simplifies the processing later. */
2682 if (mems_found == 1)
2684 if (active_local_stores_len++ >= max_active_local_stores)
2686 active_local_stores_len = 1;
2687 active_local_stores = NULL;
2689 insn_info->fixed_regs_live = copy_fixed_regs (bb_info->regs_live);
2690 insn_info->next_local_store = active_local_stores;
2691 active_local_stores = insn_info;
2693 else
2694 insn_info->cannot_delete = true;
2698 /* Remove BASE from the set of active_local_stores. This is a
2699 callback from cselib that is used to get rid of the stores in
2700 active_local_stores. */
2702 static void
2703 remove_useless_values (cselib_val *base)
2705 insn_info_t insn_info = active_local_stores;
2706 insn_info_t last = NULL;
2708 while (insn_info)
2710 store_info *store_info = insn_info->store_rec;
2711 bool del = false;
2713 /* If ANY of the store_infos match the cselib group that is
2714 being deleted, then the insn cannot be deleted. */
2715 while (store_info)
2717 if ((store_info->group_id == -1)
2718 && (store_info->cse_base == base))
2720 del = true;
2721 break;
2723 store_info = store_info->next;
2726 if (del)
2728 active_local_stores_len--;
2729 if (last)
2730 last->next_local_store = insn_info->next_local_store;
2731 else
2732 active_local_stores = insn_info->next_local_store;
2733 free_store_info (insn_info);
2735 else
2736 last = insn_info;
2738 insn_info = insn_info->next_local_store;
2743 /* Do all of step 1. */
2745 static void
2746 dse_step1 (void)
2748 basic_block bb;
2749 bitmap regs_live = BITMAP_ALLOC (&reg_obstack);
2751 cselib_init (0);
2752 all_blocks = BITMAP_ALLOC (NULL);
2753 bitmap_set_bit (all_blocks, ENTRY_BLOCK);
2754 bitmap_set_bit (all_blocks, EXIT_BLOCK);
2756 /* For -O1 reduce the maximum number of active local stores for RTL DSE
2757 since this can consume huge amounts of memory (PR89115). */
2758 int max_active_local_stores = param_max_dse_active_local_stores;
2759 if (optimize < 2)
2760 max_active_local_stores /= 10;
2762 FOR_ALL_BB_FN (bb, cfun)
2764 insn_info_t ptr;
2765 bb_info_t bb_info = dse_bb_info_type_pool.allocate ();
2767 memset (bb_info, 0, sizeof (dse_bb_info_type));
2768 bitmap_set_bit (all_blocks, bb->index);
2769 bb_info->regs_live = regs_live;
2771 bitmap_copy (regs_live, DF_LR_IN (bb));
2772 df_simulate_initialize_forwards (bb, regs_live);
2774 bb_table[bb->index] = bb_info;
2775 cselib_discard_hook = remove_useless_values;
2777 if (bb->index >= NUM_FIXED_BLOCKS)
2779 rtx_insn *insn;
2781 active_local_stores = NULL;
2782 active_local_stores_len = 0;
2783 cselib_clear_table ();
2785 /* Scan the insns. */
2786 FOR_BB_INSNS (bb, insn)
2788 if (INSN_P (insn))
2789 scan_insn (bb_info, insn, max_active_local_stores);
2790 cselib_process_insn (insn);
2791 if (INSN_P (insn))
2792 df_simulate_one_insn_forwards (bb, insn, regs_live);
2795 /* This is something of a hack, because the global algorithm
2796 is supposed to take care of the case where stores go dead
2797 at the end of the function. However, the global
2798 algorithm must take a more conservative view of block
2799 mode reads than the local alg does. So to get the case
2800 where you have a store to the frame followed by a non
2801 overlapping block more read, we look at the active local
2802 stores at the end of the function and delete all of the
2803 frame and spill based ones. */
2804 if (stores_off_frame_dead_at_return
2805 && (EDGE_COUNT (bb->succs) == 0
2806 || (single_succ_p (bb)
2807 && single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun)
2808 && ! crtl->calls_eh_return)))
2810 insn_info_t i_ptr = active_local_stores;
2811 while (i_ptr)
2813 store_info *store_info = i_ptr->store_rec;
2815 /* Skip the clobbers. */
2816 while (!store_info->is_set)
2817 store_info = store_info->next;
2818 if (store_info->group_id >= 0)
2820 group_info *group = rtx_group_vec[store_info->group_id];
2821 if (group->frame_related && !i_ptr->cannot_delete)
2822 delete_dead_store_insn (i_ptr);
2825 i_ptr = i_ptr->next_local_store;
2829 /* Get rid of the loads that were discovered in
2830 replace_read. Cselib is finished with this block. */
2831 while (deferred_change_list)
2833 deferred_change *next = deferred_change_list->next;
2835 /* There is no reason to validate this change. That was
2836 done earlier. */
2837 *deferred_change_list->loc = deferred_change_list->reg;
2838 deferred_change_pool.remove (deferred_change_list);
2839 deferred_change_list = next;
2842 /* Get rid of all of the cselib based store_infos in this
2843 block and mark the containing insns as not being
2844 deletable. */
2845 ptr = bb_info->last_insn;
2846 while (ptr)
2848 if (ptr->contains_cselib_groups)
2850 store_info *s_info = ptr->store_rec;
2851 while (s_info && !s_info->is_set)
2852 s_info = s_info->next;
2853 if (s_info
2854 && s_info->redundant_reason
2855 && s_info->redundant_reason->insn
2856 && !ptr->cannot_delete)
2858 if (dump_file && (dump_flags & TDF_DETAILS))
2859 fprintf (dump_file, "Locally deleting insn %d "
2860 "because insn %d stores the "
2861 "same value and couldn't be "
2862 "eliminated\n",
2863 INSN_UID (ptr->insn),
2864 INSN_UID (s_info->redundant_reason->insn));
2865 delete_dead_store_insn (ptr);
2867 free_store_info (ptr);
2869 else
2871 store_info *s_info;
2873 /* Free at least positions_needed bitmaps. */
2874 for (s_info = ptr->store_rec; s_info; s_info = s_info->next)
2875 if (s_info->is_large)
2877 BITMAP_FREE (s_info->positions_needed.large.bmap);
2878 s_info->is_large = false;
2881 ptr = ptr->prev_insn;
2884 cse_store_info_pool.release ();
2886 bb_info->regs_live = NULL;
2889 BITMAP_FREE (regs_live);
2890 cselib_finish ();
2891 rtx_group_table->empty ();
2895 /*----------------------------------------------------------------------------
2896 Second step.
2898 Assign each byte position in the stores that we are going to
2899 analyze globally to a position in the bitmaps. Returns true if
2900 there are any bit positions assigned.
2901 ----------------------------------------------------------------------------*/
2903 static void
2904 dse_step2_init (void)
2906 unsigned int i;
2907 group_info *group;
2909 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
2911 /* For all non stack related bases, we only consider a store to
2912 be deletable if there are two or more stores for that
2913 position. This is because it takes one store to make the
2914 other store redundant. However, for the stores that are
2915 stack related, we consider them if there is only one store
2916 for the position. We do this because the stack related
2917 stores can be deleted if their is no read between them and
2918 the end of the function.
2920 To make this work in the current framework, we take the stack
2921 related bases add all of the bits from store1 into store2.
2922 This has the effect of making the eligible even if there is
2923 only one store. */
2925 if (stores_off_frame_dead_at_return && group->frame_related)
2927 bitmap_ior_into (group->store2_n, group->store1_n);
2928 bitmap_ior_into (group->store2_p, group->store1_p);
2929 if (dump_file && (dump_flags & TDF_DETAILS))
2930 fprintf (dump_file, "group %d is frame related ", i);
2933 group->offset_map_size_n++;
2934 group->offset_map_n = XOBNEWVEC (&dse_obstack, int,
2935 group->offset_map_size_n);
2936 group->offset_map_size_p++;
2937 group->offset_map_p = XOBNEWVEC (&dse_obstack, int,
2938 group->offset_map_size_p);
2939 group->process_globally = false;
2940 if (dump_file && (dump_flags & TDF_DETAILS))
2942 fprintf (dump_file, "group %d(%d+%d): ", i,
2943 (int)bitmap_count_bits (group->store2_n),
2944 (int)bitmap_count_bits (group->store2_p));
2945 bitmap_print (dump_file, group->store2_n, "n ", " ");
2946 bitmap_print (dump_file, group->store2_p, "p ", "\n");
2952 /* Init the offset tables. */
2954 static bool
2955 dse_step2 (void)
2957 unsigned int i;
2958 group_info *group;
2959 /* Position 0 is unused because 0 is used in the maps to mean
2960 unused. */
2961 current_position = 1;
2962 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
2964 bitmap_iterator bi;
2965 unsigned int j;
2967 memset (group->offset_map_n, 0, sizeof (int) * group->offset_map_size_n);
2968 memset (group->offset_map_p, 0, sizeof (int) * group->offset_map_size_p);
2969 bitmap_clear (group->group_kill);
2971 EXECUTE_IF_SET_IN_BITMAP (group->store2_n, 0, j, bi)
2973 bitmap_set_bit (group->group_kill, current_position);
2974 if (bitmap_bit_p (group->escaped_n, j))
2975 bitmap_set_bit (kill_on_calls, current_position);
2976 group->offset_map_n[j] = current_position++;
2977 group->process_globally = true;
2979 EXECUTE_IF_SET_IN_BITMAP (group->store2_p, 0, j, bi)
2981 bitmap_set_bit (group->group_kill, current_position);
2982 if (bitmap_bit_p (group->escaped_p, j))
2983 bitmap_set_bit (kill_on_calls, current_position);
2984 group->offset_map_p[j] = current_position++;
2985 group->process_globally = true;
2988 return current_position != 1;
2993 /*----------------------------------------------------------------------------
2994 Third step.
2996 Build the bit vectors for the transfer functions.
2997 ----------------------------------------------------------------------------*/
3000 /* Look up the bitmap index for OFFSET in GROUP_INFO. If it is not
3001 there, return 0. */
3003 static int
3004 get_bitmap_index (group_info *group_info, HOST_WIDE_INT offset)
3006 if (offset < 0)
3008 HOST_WIDE_INT offset_p = -offset;
3009 if (offset_p >= group_info->offset_map_size_n)
3010 return 0;
3011 return group_info->offset_map_n[offset_p];
3013 else
3015 if (offset >= group_info->offset_map_size_p)
3016 return 0;
3017 return group_info->offset_map_p[offset];
3022 /* Process the STORE_INFOs into the bitmaps into GEN and KILL. KILL
3023 may be NULL. */
3025 static void
3026 scan_stores (store_info *store_info, bitmap gen, bitmap kill)
3028 while (store_info)
3030 HOST_WIDE_INT i, offset, width;
3031 group_info *group_info
3032 = rtx_group_vec[store_info->group_id];
3033 /* We can (conservatively) ignore stores whose bounds aren't known;
3034 they simply don't generate new global dse opportunities. */
3035 if (group_info->process_globally
3036 && store_info->offset.is_constant (&offset)
3037 && store_info->width.is_constant (&width))
3039 HOST_WIDE_INT end = offset + width;
3040 for (i = offset; i < end; i++)
3042 int index = get_bitmap_index (group_info, i);
3043 if (index != 0)
3045 bitmap_set_bit (gen, index);
3046 if (kill)
3047 bitmap_clear_bit (kill, index);
3051 store_info = store_info->next;
3056 /* Process the READ_INFOs into the bitmaps into GEN and KILL. KILL
3057 may be NULL. */
3059 static void
3060 scan_reads (insn_info_t insn_info, bitmap gen, bitmap kill)
3062 read_info_t read_info = insn_info->read_rec;
3063 int i;
3064 group_info *group;
3066 /* If this insn reads the frame, kill all the frame related stores. */
3067 if (insn_info->frame_read)
3069 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
3070 if (group->process_globally && group->frame_related)
3072 if (kill)
3073 bitmap_ior_into (kill, group->group_kill);
3074 bitmap_and_compl_into (gen, group->group_kill);
3077 if (insn_info->non_frame_wild_read)
3079 /* Kill all non-frame related stores. Kill all stores of variables that
3080 escape. */
3081 if (kill)
3082 bitmap_ior_into (kill, kill_on_calls);
3083 bitmap_and_compl_into (gen, kill_on_calls);
3084 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
3085 if (group->process_globally && !group->frame_related)
3087 if (kill)
3088 bitmap_ior_into (kill, group->group_kill);
3089 bitmap_and_compl_into (gen, group->group_kill);
3092 while (read_info)
3094 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
3096 if (group->process_globally)
3098 if (i == read_info->group_id)
3100 HOST_WIDE_INT offset, width;
3101 /* Reads with non-constant size kill all DSE opportunities
3102 in the group. */
3103 if (!read_info->offset.is_constant (&offset)
3104 || !read_info->width.is_constant (&width)
3105 || !known_size_p (width))
3107 /* Handle block mode reads. */
3108 if (kill)
3109 bitmap_ior_into (kill, group->group_kill);
3110 bitmap_and_compl_into (gen, group->group_kill);
3112 else
3114 /* The groups are the same, just process the
3115 offsets. */
3116 HOST_WIDE_INT j;
3117 HOST_WIDE_INT end = offset + width;
3118 for (j = offset; j < end; j++)
3120 int index = get_bitmap_index (group, j);
3121 if (index != 0)
3123 if (kill)
3124 bitmap_set_bit (kill, index);
3125 bitmap_clear_bit (gen, index);
3130 else
3132 /* The groups are different, if the alias sets
3133 conflict, clear the entire group. We only need
3134 to apply this test if the read_info is a cselib
3135 read. Anything with a constant base cannot alias
3136 something else with a different constant
3137 base. */
3138 if ((read_info->group_id < 0)
3139 && canon_true_dependence (group->base_mem,
3140 GET_MODE (group->base_mem),
3141 group->canon_base_addr,
3142 read_info->mem, NULL_RTX))
3144 if (kill)
3145 bitmap_ior_into (kill, group->group_kill);
3146 bitmap_and_compl_into (gen, group->group_kill);
3152 read_info = read_info->next;
3157 /* Return the insn in BB_INFO before the first wild read or if there
3158 are no wild reads in the block, return the last insn. */
3160 static insn_info_t
3161 find_insn_before_first_wild_read (bb_info_t bb_info)
3163 insn_info_t insn_info = bb_info->last_insn;
3164 insn_info_t last_wild_read = NULL;
3166 while (insn_info)
3168 if (insn_info->wild_read)
3170 last_wild_read = insn_info->prev_insn;
3171 /* Block starts with wild read. */
3172 if (!last_wild_read)
3173 return NULL;
3176 insn_info = insn_info->prev_insn;
3179 if (last_wild_read)
3180 return last_wild_read;
3181 else
3182 return bb_info->last_insn;
3186 /* Scan the insns in BB_INFO starting at PTR and going to the top of
3187 the block in order to build the gen and kill sets for the block.
3188 We start at ptr which may be the last insn in the block or may be
3189 the first insn with a wild read. In the latter case we are able to
3190 skip the rest of the block because it just does not matter:
3191 anything that happens is hidden by the wild read. */
3193 static void
3194 dse_step3_scan (basic_block bb)
3196 bb_info_t bb_info = bb_table[bb->index];
3197 insn_info_t insn_info;
3199 insn_info = find_insn_before_first_wild_read (bb_info);
3201 /* In the spill case or in the no_spill case if there is no wild
3202 read in the block, we will need a kill set. */
3203 if (insn_info == bb_info->last_insn)
3205 if (bb_info->kill)
3206 bitmap_clear (bb_info->kill);
3207 else
3208 bb_info->kill = BITMAP_ALLOC (&dse_bitmap_obstack);
3210 else
3211 if (bb_info->kill)
3212 BITMAP_FREE (bb_info->kill);
3214 while (insn_info)
3216 /* There may have been code deleted by the dce pass run before
3217 this phase. */
3218 if (insn_info->insn && INSN_P (insn_info->insn))
3220 scan_stores (insn_info->store_rec, bb_info->gen, bb_info->kill);
3221 scan_reads (insn_info, bb_info->gen, bb_info->kill);
3224 insn_info = insn_info->prev_insn;
3229 /* Set the gen set of the exit block, and also any block with no
3230 successors that does not have a wild read. */
3232 static void
3233 dse_step3_exit_block_scan (bb_info_t bb_info)
3235 /* The gen set is all 0's for the exit block except for the
3236 frame_pointer_group. */
3238 if (stores_off_frame_dead_at_return)
3240 unsigned int i;
3241 group_info *group;
3243 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
3245 if (group->process_globally && group->frame_related)
3246 bitmap_ior_into (bb_info->gen, group->group_kill);
3252 /* Find all of the blocks that are not backwards reachable from the
3253 exit block or any block with no successors (BB). These are the
3254 infinite loops or infinite self loops. These blocks will still
3255 have their bits set in UNREACHABLE_BLOCKS. */
3257 static void
3258 mark_reachable_blocks (sbitmap unreachable_blocks, basic_block bb)
3260 edge e;
3261 edge_iterator ei;
3263 if (bitmap_bit_p (unreachable_blocks, bb->index))
3265 bitmap_clear_bit (unreachable_blocks, bb->index);
3266 FOR_EACH_EDGE (e, ei, bb->preds)
3268 mark_reachable_blocks (unreachable_blocks, e->src);
3273 /* Build the transfer functions for the function. */
3275 static void
3276 dse_step3 ()
3278 basic_block bb;
3279 sbitmap_iterator sbi;
3280 bitmap all_ones = NULL;
3281 unsigned int i;
3283 auto_sbitmap unreachable_blocks (last_basic_block_for_fn (cfun));
3284 bitmap_ones (unreachable_blocks);
3286 FOR_ALL_BB_FN (bb, cfun)
3288 bb_info_t bb_info = bb_table[bb->index];
3289 if (bb_info->gen)
3290 bitmap_clear (bb_info->gen);
3291 else
3292 bb_info->gen = BITMAP_ALLOC (&dse_bitmap_obstack);
3294 if (bb->index == ENTRY_BLOCK)
3296 else if (bb->index == EXIT_BLOCK)
3297 dse_step3_exit_block_scan (bb_info);
3298 else
3299 dse_step3_scan (bb);
3300 if (EDGE_COUNT (bb->succs) == 0)
3301 mark_reachable_blocks (unreachable_blocks, bb);
3303 /* If this is the second time dataflow is run, delete the old
3304 sets. */
3305 if (bb_info->in)
3306 BITMAP_FREE (bb_info->in);
3307 if (bb_info->out)
3308 BITMAP_FREE (bb_info->out);
3311 /* For any block in an infinite loop, we must initialize the out set
3312 to all ones. This could be expensive, but almost never occurs in
3313 practice. However, it is common in regression tests. */
3314 EXECUTE_IF_SET_IN_BITMAP (unreachable_blocks, 0, i, sbi)
3316 if (bitmap_bit_p (all_blocks, i))
3318 bb_info_t bb_info = bb_table[i];
3319 if (!all_ones)
3321 unsigned int j;
3322 group_info *group;
3324 all_ones = BITMAP_ALLOC (&dse_bitmap_obstack);
3325 FOR_EACH_VEC_ELT (rtx_group_vec, j, group)
3326 bitmap_ior_into (all_ones, group->group_kill);
3328 if (!bb_info->out)
3330 bb_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
3331 bitmap_copy (bb_info->out, all_ones);
3336 if (all_ones)
3337 BITMAP_FREE (all_ones);
3342 /*----------------------------------------------------------------------------
3343 Fourth step.
3345 Solve the bitvector equations.
3346 ----------------------------------------------------------------------------*/
3349 /* Confluence function for blocks with no successors. Create an out
3350 set from the gen set of the exit block. This block logically has
3351 the exit block as a successor. */
3355 static void
3356 dse_confluence_0 (basic_block bb)
3358 bb_info_t bb_info = bb_table[bb->index];
3360 if (bb->index == EXIT_BLOCK)
3361 return;
3363 if (!bb_info->out)
3365 bb_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
3366 bitmap_copy (bb_info->out, bb_table[EXIT_BLOCK]->gen);
3370 /* Propagate the information from the in set of the dest of E to the
3371 out set of the src of E. If the various in or out sets are not
3372 there, that means they are all ones. */
3374 static bool
3375 dse_confluence_n (edge e)
3377 bb_info_t src_info = bb_table[e->src->index];
3378 bb_info_t dest_info = bb_table[e->dest->index];
3380 if (dest_info->in)
3382 if (src_info->out)
3383 bitmap_and_into (src_info->out, dest_info->in);
3384 else
3386 src_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
3387 bitmap_copy (src_info->out, dest_info->in);
3390 return true;
3394 /* Propagate the info from the out to the in set of BB_INDEX's basic
3395 block. There are three cases:
3397 1) The block has no kill set. In this case the kill set is all
3398 ones. It does not matter what the out set of the block is, none of
3399 the info can reach the top. The only thing that reaches the top is
3400 the gen set and we just copy the set.
3402 2) There is a kill set but no out set and bb has successors. In
3403 this case we just return. Eventually an out set will be created and
3404 it is better to wait than to create a set of ones.
3406 3) There is both a kill and out set. We apply the obvious transfer
3407 function.
3410 static bool
3411 dse_transfer_function (int bb_index)
3413 bb_info_t bb_info = bb_table[bb_index];
3415 if (bb_info->kill)
3417 if (bb_info->out)
3419 /* Case 3 above. */
3420 if (bb_info->in)
3421 return bitmap_ior_and_compl (bb_info->in, bb_info->gen,
3422 bb_info->out, bb_info->kill);
3423 else
3425 bb_info->in = BITMAP_ALLOC (&dse_bitmap_obstack);
3426 bitmap_ior_and_compl (bb_info->in, bb_info->gen,
3427 bb_info->out, bb_info->kill);
3428 return true;
3431 else
3432 /* Case 2 above. */
3433 return false;
3435 else
3437 /* Case 1 above. If there is already an in set, nothing
3438 happens. */
3439 if (bb_info->in)
3440 return false;
3441 else
3443 bb_info->in = BITMAP_ALLOC (&dse_bitmap_obstack);
3444 bitmap_copy (bb_info->in, bb_info->gen);
3445 return true;
3450 /* Solve the dataflow equations. */
3452 static void
3453 dse_step4 (void)
3455 df_simple_dataflow (DF_BACKWARD, NULL, dse_confluence_0,
3456 dse_confluence_n, dse_transfer_function,
3457 all_blocks, df_get_postorder (DF_BACKWARD),
3458 df_get_n_blocks (DF_BACKWARD));
3459 if (dump_file && (dump_flags & TDF_DETAILS))
3461 basic_block bb;
3463 fprintf (dump_file, "\n\n*** Global dataflow info after analysis.\n");
3464 FOR_ALL_BB_FN (bb, cfun)
3466 bb_info_t bb_info = bb_table[bb->index];
3468 df_print_bb_index (bb, dump_file);
3469 if (bb_info->in)
3470 bitmap_print (dump_file, bb_info->in, " in: ", "\n");
3471 else
3472 fprintf (dump_file, " in: *MISSING*\n");
3473 if (bb_info->gen)
3474 bitmap_print (dump_file, bb_info->gen, " gen: ", "\n");
3475 else
3476 fprintf (dump_file, " gen: *MISSING*\n");
3477 if (bb_info->kill)
3478 bitmap_print (dump_file, bb_info->kill, " kill: ", "\n");
3479 else
3480 fprintf (dump_file, " kill: *MISSING*\n");
3481 if (bb_info->out)
3482 bitmap_print (dump_file, bb_info->out, " out: ", "\n");
3483 else
3484 fprintf (dump_file, " out: *MISSING*\n\n");
3491 /*----------------------------------------------------------------------------
3492 Fifth step.
3494 Delete the stores that can only be deleted using the global information.
3495 ----------------------------------------------------------------------------*/
3498 static void
3499 dse_step5 (void)
3501 basic_block bb;
3502 FOR_EACH_BB_FN (bb, cfun)
3504 bb_info_t bb_info = bb_table[bb->index];
3505 insn_info_t insn_info = bb_info->last_insn;
3506 bitmap v = bb_info->out;
3508 while (insn_info)
3510 bool deleted = false;
3511 if (dump_file && insn_info->insn)
3513 fprintf (dump_file, "starting to process insn %d\n",
3514 INSN_UID (insn_info->insn));
3515 bitmap_print (dump_file, v, " v: ", "\n");
3518 /* There may have been code deleted by the dce pass run before
3519 this phase. */
3520 if (insn_info->insn
3521 && INSN_P (insn_info->insn)
3522 && (!insn_info->cannot_delete)
3523 && (!bitmap_empty_p (v)))
3525 store_info *store_info = insn_info->store_rec;
3527 /* Try to delete the current insn. */
3528 deleted = true;
3530 /* Skip the clobbers. */
3531 while (!store_info->is_set)
3532 store_info = store_info->next;
3534 HOST_WIDE_INT i, offset, width;
3535 group_info *group_info = rtx_group_vec[store_info->group_id];
3537 if (!store_info->offset.is_constant (&offset)
3538 || !store_info->width.is_constant (&width))
3539 deleted = false;
3540 else
3542 HOST_WIDE_INT end = offset + width;
3543 for (i = offset; i < end; i++)
3545 int index = get_bitmap_index (group_info, i);
3547 if (dump_file && (dump_flags & TDF_DETAILS))
3548 fprintf (dump_file, "i = %d, index = %d\n",
3549 (int) i, index);
3550 if (index == 0 || !bitmap_bit_p (v, index))
3552 if (dump_file && (dump_flags & TDF_DETAILS))
3553 fprintf (dump_file, "failing at i = %d\n",
3554 (int) i);
3555 deleted = false;
3556 break;
3560 if (deleted)
3562 if (dbg_cnt (dse)
3563 && check_for_inc_dec_1 (insn_info))
3565 delete_insn (insn_info->insn);
3566 insn_info->insn = NULL;
3567 globally_deleted++;
3571 /* We do want to process the local info if the insn was
3572 deleted. For instance, if the insn did a wild read, we
3573 no longer need to trash the info. */
3574 if (insn_info->insn
3575 && INSN_P (insn_info->insn)
3576 && (!deleted))
3578 scan_stores (insn_info->store_rec, v, NULL);
3579 if (insn_info->wild_read)
3581 if (dump_file && (dump_flags & TDF_DETAILS))
3582 fprintf (dump_file, "wild read\n");
3583 bitmap_clear (v);
3585 else if (insn_info->read_rec
3586 || insn_info->non_frame_wild_read
3587 || insn_info->frame_read)
3589 if (dump_file && (dump_flags & TDF_DETAILS))
3591 if (!insn_info->non_frame_wild_read
3592 && !insn_info->frame_read)
3593 fprintf (dump_file, "regular read\n");
3594 if (insn_info->non_frame_wild_read)
3595 fprintf (dump_file, "non-frame wild read\n");
3596 if (insn_info->frame_read)
3597 fprintf (dump_file, "frame read\n");
3599 scan_reads (insn_info, v, NULL);
3603 insn_info = insn_info->prev_insn;
3610 /*----------------------------------------------------------------------------
3611 Sixth step.
3613 Delete stores made redundant by earlier stores (which store the same
3614 value) that couldn't be eliminated.
3615 ----------------------------------------------------------------------------*/
3617 static void
3618 dse_step6 (void)
3620 basic_block bb;
3622 FOR_ALL_BB_FN (bb, cfun)
3624 bb_info_t bb_info = bb_table[bb->index];
3625 insn_info_t insn_info = bb_info->last_insn;
3627 while (insn_info)
3629 /* There may have been code deleted by the dce pass run before
3630 this phase. */
3631 if (insn_info->insn
3632 && INSN_P (insn_info->insn)
3633 && !insn_info->cannot_delete)
3635 store_info *s_info = insn_info->store_rec;
3637 while (s_info && !s_info->is_set)
3638 s_info = s_info->next;
3639 if (s_info
3640 && s_info->redundant_reason
3641 && s_info->redundant_reason->insn
3642 && INSN_P (s_info->redundant_reason->insn))
3644 rtx_insn *rinsn = s_info->redundant_reason->insn;
3645 if (dump_file && (dump_flags & TDF_DETAILS))
3646 fprintf (dump_file, "Locally deleting insn %d "
3647 "because insn %d stores the "
3648 "same value and couldn't be "
3649 "eliminated\n",
3650 INSN_UID (insn_info->insn),
3651 INSN_UID (rinsn));
3652 delete_dead_store_insn (insn_info);
3655 insn_info = insn_info->prev_insn;
3660 /*----------------------------------------------------------------------------
3661 Seventh step.
3663 Destroy everything left standing.
3664 ----------------------------------------------------------------------------*/
3666 static void
3667 dse_step7 (void)
3669 bitmap_obstack_release (&dse_bitmap_obstack);
3670 obstack_free (&dse_obstack, NULL);
3672 end_alias_analysis ();
3673 free (bb_table);
3674 delete rtx_group_table;
3675 rtx_group_table = NULL;
3676 rtx_group_vec.release ();
3677 BITMAP_FREE (all_blocks);
3678 BITMAP_FREE (scratch);
3680 rtx_store_info_pool.release ();
3681 read_info_type_pool.release ();
3682 insn_info_type_pool.release ();
3683 dse_bb_info_type_pool.release ();
3684 group_info_pool.release ();
3685 deferred_change_pool.release ();
3689 /* -------------------------------------------------------------------------
3691 ------------------------------------------------------------------------- */
3693 /* Callback for running pass_rtl_dse. */
3695 static unsigned int
3696 rest_of_handle_dse (void)
3698 df_set_flags (DF_DEFER_INSN_RESCAN);
3700 /* Need the notes since we must track live hardregs in the forwards
3701 direction. */
3702 df_note_add_problem ();
3703 df_analyze ();
3705 dse_step0 ();
3706 dse_step1 ();
3707 /* DSE can eliminate potentially-trapping MEMs.
3708 Remove any EH edges associated with them, since otherwise
3709 DF_LR_RUN_DCE will complain later. */
3710 if ((locally_deleted || globally_deleted)
3711 && cfun->can_throw_non_call_exceptions
3712 && purge_all_dead_edges ())
3714 free_dominance_info (CDI_DOMINATORS);
3715 delete_unreachable_blocks ();
3717 dse_step2_init ();
3718 if (dse_step2 ())
3720 df_set_flags (DF_LR_RUN_DCE);
3721 df_analyze ();
3722 if (dump_file && (dump_flags & TDF_DETAILS))
3723 fprintf (dump_file, "doing global processing\n");
3724 dse_step3 ();
3725 dse_step4 ();
3726 dse_step5 ();
3729 dse_step6 ();
3730 dse_step7 ();
3732 if (dump_file)
3733 fprintf (dump_file, "dse: local deletions = %d, global deletions = %d\n",
3734 locally_deleted, globally_deleted);
3736 /* DSE can eliminate potentially-trapping MEMs.
3737 Remove any EH edges associated with them. */
3738 if ((locally_deleted || globally_deleted)
3739 && cfun->can_throw_non_call_exceptions
3740 && purge_all_dead_edges ())
3742 free_dominance_info (CDI_DOMINATORS);
3743 cleanup_cfg (0);
3746 return 0;
3749 namespace {
3751 const pass_data pass_data_rtl_dse1 =
3753 RTL_PASS, /* type */
3754 "dse1", /* name */
3755 OPTGROUP_NONE, /* optinfo_flags */
3756 TV_DSE1, /* tv_id */
3757 0, /* properties_required */
3758 0, /* properties_provided */
3759 0, /* properties_destroyed */
3760 0, /* todo_flags_start */
3761 TODO_df_finish, /* todo_flags_finish */
3764 class pass_rtl_dse1 : public rtl_opt_pass
3766 public:
3767 pass_rtl_dse1 (gcc::context *ctxt)
3768 : rtl_opt_pass (pass_data_rtl_dse1, ctxt)
3771 /* opt_pass methods: */
3772 bool gate (function *) final override
3774 return optimize > 0 && flag_dse && dbg_cnt (dse1);
3777 unsigned int execute (function *) final override
3779 return rest_of_handle_dse ();
3782 }; // class pass_rtl_dse1
3784 } // anon namespace
3786 rtl_opt_pass *
3787 make_pass_rtl_dse1 (gcc::context *ctxt)
3789 return new pass_rtl_dse1 (ctxt);
3792 namespace {
3794 const pass_data pass_data_rtl_dse2 =
3796 RTL_PASS, /* type */
3797 "dse2", /* name */
3798 OPTGROUP_NONE, /* optinfo_flags */
3799 TV_DSE2, /* tv_id */
3800 0, /* properties_required */
3801 0, /* properties_provided */
3802 0, /* properties_destroyed */
3803 0, /* todo_flags_start */
3804 TODO_df_finish, /* todo_flags_finish */
3807 class pass_rtl_dse2 : public rtl_opt_pass
3809 public:
3810 pass_rtl_dse2 (gcc::context *ctxt)
3811 : rtl_opt_pass (pass_data_rtl_dse2, ctxt)
3814 /* opt_pass methods: */
3815 bool gate (function *) final override
3817 return optimize > 0 && flag_dse && dbg_cnt (dse2);
3820 unsigned int execute (function *) final override
3822 return rest_of_handle_dse ();
3825 }; // class pass_rtl_dse2
3827 } // anon namespace
3829 rtl_opt_pass *
3830 make_pass_rtl_dse2 (gcc::context *ctxt)
3832 return new pass_rtl_dse2 (ctxt);