/* RTL dead store elimination.
   Copyright (C) 2005-2015 Free Software Foundation, Inc.

   Contributed by Richard Sandiford <rsandifor@codesourcery.com>
   and Kenneth Zadeck <zadeck@naturalbridge.com>

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */

#undef BASELINE

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "tree.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "regset.h"
#include "flags.h"
#include "dominance.h"
#include "cfg.h"
#include "cfgrtl.h"
#include "predict.h"
#include "basic-block.h"
#include "df.h"
#include "cselib.h"
#include "tree-pass.h"
#include "alloc-pool.h"
#include "insn-config.h"
#include "function.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "recog.h"
#include "insn-codes.h"
#include "optabs.h"
#include "dbgcnt.h"
#include "target.h"
#include "params.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-ssa.h"
#include "rtl-iter.h"
#include "cfgcleanup.h"

/* This file contains three techniques for performing Dead Store
   Elimination (dse).

   * The first technique performs dse locally on any base address.  It
   is based on cselib, which is a local value numbering technique.
   This technique is local to a basic block but deals with fairly
   general addresses.

   * The second technique performs dse globally but is restricted to
   base addresses that are either constant or are relative to the
   frame_pointer.

   * The third technique (which is only done after register allocation)
   processes the spill slots.  This differs from the second
   technique because it takes advantage of the fact that spilling is
   completely free from the effects of aliasing.

   Logically, dse is a backwards dataflow problem.  A store can be
   deleted if it cannot be reached in the backward direction by any
   use of the value being stored.  However, the local technique uses a
   forwards scan of the basic block because cselib requires that the
   block be processed in that order.

   The pass is logically broken into 7 steps:

   0) Initialization.

   1) The local algorithm, as well as scanning the insns for the two
   global algorithms.

   2) Analysis to see if the global algs are necessary.  In the case
   of stores based on a constant address, there must be at least two
   stores to that address, to make it possible to delete some of the
   stores.  In the case of stores off of the frame or spill related
   stores, only one store to an address is necessary because those
   stores die at the end of the function.

   3) Set up the global dataflow equations based on processing the
   info parsed in the first step.

   4) Solve the dataflow equations.

   5) Delete the insns that the global analysis has indicated are
   unnecessary.

   6) Delete insns that store the same value as a preceding store
   where the earlier store couldn't be eliminated.

   7) Cleanup.

   Step 1 uses cselib and canon_rtx to build the largest expression
   possible for each address.  It is a forwards pass through each
   basic block.  From the point of view of the global technique, this
   step could examine a block in either direction; the forwards
   ordering is to accommodate cselib.

   We make a simplifying assumption: addresses fall into four broad
   categories:

   1) base has rtx_varies_p == false, offset is constant.
   2) base has rtx_varies_p == false, offset is variable.
   3) base has rtx_varies_p == true, offset is constant.
   4) base has rtx_varies_p == true, offset is variable.

   The local passes are able to process all 4 kinds of addresses.  The
   global pass only handles 1).
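
   For instance (an illustrative sketch, not from any particular
   target): (mem (plus (reg fp) (const_int 8))) is category 1),
   (mem (plus (reg fp) (reg r100))) is 2),
   (mem (plus (reg r200) (const_int 8))) is 3), and
   (mem (plus (reg r200) (reg r100))) is 4), where fp is the frame
   pointer and r100/r200 are hypothetical pseudos whose values vary.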

   The global problem is formulated as follows:

   A store, S1, to address A, where A is not relative to the stack
   frame, can be eliminated if all paths from S1 to the end of the
   function contain another store to A before a read of A.
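
   As an illustrative sketch (hypothetical pseudo RTL, not from a
   real testcase):

     *(base + 4) = r100;    <- S1, dead
     ...                       (no read of *(base + 4) on any path)
     *(base + 4) = r200;    <- another store to A on every path

   S1 can be deleted because every path from S1 to the exit rewrites
   A before anything reads the value S1 stored.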

   If the address A is relative to the stack frame, a store S2 to A
   can be eliminated if there are no paths from S2 that reach the
   end of the function that read A before another store to A.  In
   this case S2 can be deleted if there are paths from S2 to the
   end of the function that have no reads or writes to A.  This
   second case allows stores to the stack frame to be deleted that
   would otherwise die when the function returns.  This cannot be
   done if stores_off_frame_dead_at_return is not true.  See the
   documentation for that variable for when it is false.

   The global problem is formulated as a backwards set union
   dataflow problem where the stores are the gens and reads are the
   kills.  Set union problems are rare and require some special
   handling given our representation of bitmaps.  A straightforward
   implementation requires a lot of bitmaps filled with 1s.
   These are expensive and cumbersome in our bitmap formulation so
   care has been taken to avoid large vectors filled with 1s.  See
   the comments in bb_info and in the dataflow confluence functions
   for details.

   There are two places for further enhancements to this algorithm:

   1) The original dse which was embedded in a pass called flow also
   did local address forwarding.  For example in

   A <- r100
   ... <- A

   flow would replace the right hand side of the second insn with a
   reference to r100.  Most of the information is available to add this
   to this pass.  It has not been done because it is a lot of work in
   the case that either r100 is assigned to between the first and
   second insn and/or the second insn is a load of part of the value
   stored by the first insn.

   insn 5 in gcc.c-torture/compile/990203-1.c simple case.
   insn 15 in gcc.c-torture/execute/20001017-2.c simple case.
   insn 25 in gcc.c-torture/execute/20001026-1.c simple case.
   insn 44 in gcc.c-torture/execute/20010910-1.c simple case.

   2) The cleaning up of spill code is quite profitable.  It currently
   depends on reading tea leaves and chicken entrails left by reload.
   This pass depends on reload creating a singleton alias set for each
   spill slot and telling the next dse pass which of these alias sets
   are the singletons.  Rather than analyze the addresses of the
   spills, dse's spill processing just does analysis of the loads and
   stores that use those alias sets.  There are three cases where this
   falls short:

   a) Reload sometimes creates the slot for one mode of access, and
   then inserts loads and/or stores for a smaller mode.  In this
   case, the current code just punts on the slot.  The proper thing
   to do is to back out and use one bit vector position for each
   byte of the entity associated with the slot.  This depends on
   KNOWING that reload always generates the accesses for each of the
   bytes in some canonical (read that easy to understand several
   passes after reload happens) way.

   b) Reload sometimes decides that the spill slot it allocated was not
   large enough for the mode and goes back and allocates more slots
   with the same mode and alias set.  The backout in this case is a
   little more graceful than (a).  In this case the slot is unmarked
   as being a spill slot and if the final address comes out to be based
   off the frame pointer, the global algorithm handles this slot.

   c) For any pass that may prespill, there is currently no
   mechanism to tell the dse pass that the slot being used has the
   special properties that reload uses.  It may be that all that is
   required is to have those passes make the same calls that reload
   does, assuming that the alias sets can be manipulated in the same
   way.  */

/* There are limits to the size of constant offsets we model for the
   global problem.  There are certainly test cases that exceed this
   limit; however, it is unlikely that there are important programs
   that really have constant offsets this size.  */
#define MAX_OFFSET (64 * 1024)

/* Obstack for the DSE dataflow bitmaps.  We don't want to put these
   on the default obstack because these bitmaps can grow quite large
   (~2GB for the small (!) test case of PR54146) and we'll hold on to
   all that memory until the end of the compiler run.
   As a bonus, delete_tree_live_info can destroy all the bitmaps by just
   releasing the whole obstack.  */
static bitmap_obstack dse_bitmap_obstack;

/* Obstack for other data.  As for above: Kinda nice to be able to
   throw it all away at the end in one big sweep.  */
static struct obstack dse_obstack;

/* Scratch bitmap for cselib's cselib_expand_value_rtx.  */
static bitmap scratch = NULL;

struct insn_info_type;

/* This structure holds information about a candidate store.  */
struct store_info
{
  /* False means this is a clobber.  */
  bool is_set;

  /* False if a single HOST_WIDE_INT bitmap is used for positions_needed.  */
  bool is_large;

  /* The id of the mem group of the base address.  If rtx_varies_p is
     true, this is -1.  Otherwise, it is the index into the group
     table.  */
  int group_id;

  /* This is the cselib value.  */
  cselib_val *cse_base;

  /* The canonized mem.  */
  rtx mem;

  /* Canonized MEM address for use by canon_true_dependence.  */
  rtx mem_addr;

  /* If this is non-zero, it is the alias set of a spill location.  */
  alias_set_type alias_set;

  /* The offsets of the first byte and the byte after the last byte
     associated with the operation.  */
  HOST_WIDE_INT begin, end;

  union
  {
    /* A bitmask as wide as the number of bytes in the word that
       contains a 1 if the byte may be needed.  The store is unused if
       all of the bits are 0.  This is used if IS_LARGE is false.  */
    unsigned HOST_WIDE_INT small_bitmask;

    struct
    {
      /* A bitmap with one bit per byte.  A cleared bit means the
	 position is still needed.  Used if IS_LARGE is true.  */
      bitmap bmap;

      /* Number of set bits (i.e. unneeded bytes) in BITMAP.  If it is
	 equal to END - BEGIN, the whole store is unused.  */
      int count;
    } large;
  } positions_needed;
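
  /* Illustrative sketch (not from the sources): for a 4-byte store
     tracked with IS_LARGE false, small_bitmask starts as 0xf (all
     four bytes possibly needed).  If a later 2-byte store overwrites
     bytes 0..1, set_position_unneeded clears the low two bits,
     leaving 0xc; once the mask reaches 0 the store is dead.  */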

  /* The next store info for this insn.  */
  struct store_info *next;

  /* The right hand side of the store.  This is used if there is a
     subsequent reload of the mem's address somewhere later in the
     basic block.  */
  rtx rhs;

  /* If rhs is or holds a constant, this contains that constant,
     otherwise NULL.  */
  rtx const_rhs;

  /* Set if this store stores the same constant value as the insn in
     REDUNDANT_REASON stored.  These aren't eliminated early, because
     doing that might prevent the earlier larger store from being
     eliminated.  */
  struct insn_info_type *redundant_reason;
};

/* Return a bitmask with the first N low bits set.  */

static unsigned HOST_WIDE_INT
lowpart_bitmask (int n)
{
  unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT) 0;
  return mask >> (HOST_BITS_PER_WIDE_INT - n);
}
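
/* For example, lowpart_bitmask (3) yields a mask with the low three
   bits set (0x7), the small_bitmask form of a 3-byte store whose
   bytes are all still needed.  Note the shift assumes
   0 < N <= HOST_BITS_PER_WIDE_INT.  */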

typedef struct store_info *store_info_t;
static pool_allocator<store_info> cse_store_info_pool ("cse_store_info_pool",
						       100);

static pool_allocator<store_info> rtx_store_info_pool ("rtx_store_info_pool",
						       100);

/* This structure holds information about a load.  These are only
   built for rtx bases.  */
struct read_info_type
{
  /* The id of the mem group of the base address.  */
  int group_id;

  /* If this is non-zero, it is the alias set of a spill location.  */
  alias_set_type alias_set;

  /* The offsets of the first byte and the byte after the last byte
     associated with the operation.  If begin == end == 0, the read
     did not have a constant offset.  */
  int begin, end;

  /* The mem being read.  */
  rtx mem;

  /* The next read_info for this insn.  */
  struct read_info_type *next;

  /* Pool allocation new operator.  */
  inline void *operator new (size_t)
  {
    return pool.allocate ();
  }

  /* Delete operator utilizing pool allocation.  */
  inline void operator delete (void *ptr)
  {
    pool.remove ((read_info_type *) ptr);
  }

  /* Memory allocation pool.  */
  static pool_allocator<read_info_type> pool;
};
typedef struct read_info_type *read_info_t;

pool_allocator<read_info_type> read_info_type::pool ("read_info_pool", 100);

/* One of these records is created for each insn.  */

struct insn_info_type
{
  /* Set true if the insn contains a store but the insn itself cannot
     be deleted.  This is set if the insn is a parallel and there is
     more than one non-dead output or if the insn is in some way
     volatile.  */
  bool cannot_delete;

  /* This field is only used by the global algorithm.  It is set true
     if the insn contains any read of mem except for a category (1)
     address.  This is also set if the insn is a call or has a clobber
     mem.  If the insn contains a wild read, the read_rec will be
     null.  */
  bool wild_read;

  /* This is true only for CALL instructions which could potentially read
     any non-frame memory location.  This field is used by the global
     algorithm.  */
  bool non_frame_wild_read;

  /* This field is only used for the processing of const functions.
     These functions cannot read memory, but they can read the stack
     because that is where they may get their parms.  We need to be
     this conservative because, like the store motion pass, we don't
     consider CALL_INSN_FUNCTION_USAGE when processing call insns.
     Moreover, we need to distinguish two cases:
     1. Before reload (register elimination), the stores related to
	outgoing arguments are stack pointer based and thus deemed
	of non-constant base in this pass.  This requires special
	handling but also means that the frame pointer based stores
	need not be killed upon encountering a const function call.
     2. After reload, the stores related to outgoing arguments can be
	either stack pointer or hard frame pointer based.  This means
	that we have no other choice than also killing all the frame
	pointer based stores upon encountering a const function call.
     This field is set after reload for const function calls and before
     reload for const tail function calls on targets where the arg pointer
     is the frame pointer.  Having this set is less severe than a wild
     read, it just means that all the frame related stores are killed
     rather than all the stores.  */
  bool frame_read;

  /* This field is only used for the processing of const functions.
     It is set if the insn may contain a stack pointer based store.  */
  bool stack_pointer_based;

  /* This is true if any of the sets within the store contains a
     cselib base.  Such stores can only be deleted by the local
     algorithm.  */
  bool contains_cselib_groups;

  /* The insn.  */
  rtx_insn *insn;

  /* The list of mem sets or mem clobbers that are contained in this
     insn.  If the insn is deletable, it contains only one mem set.
     But it could also contain clobbers.  Insns that contain more than
     one mem set are not deletable, but each of those mems is here in
     order to provide info to delete other insns.  */
  store_info_t store_rec;

  /* The linked list of mem uses in this insn.  Only the reads from
     rtx bases are listed here.  The reads from cselib bases are
     completely processed during the first scan and so are never
     created.  */
  read_info_t read_rec;

  /* The live fixed registers.  We assume only fixed registers can
     cause trouble by being clobbered from an expanded pattern;
     storing only the live fixed registers (rather than all registers)
     means less memory needs to be allocated / copied for the individual
     stores.  */
  regset fixed_regs_live;

  /* The prev insn in the basic block.  */
  struct insn_info_type *prev_insn;

  /* The linked list of insns that are in consideration for removal in
     the forwards pass through the basic block.  This pointer may be
     trash as it is not cleared when a wild read occurs.  The only
     time it is guaranteed to be correct is when the traversal starts
     at active_local_stores.  */
  struct insn_info_type *next_local_store;

  /* Pool allocation new operator.  */
  inline void *operator new (size_t)
  {
    return pool.allocate ();
  }

  /* Delete operator utilizing pool allocation.  */
  inline void operator delete (void *ptr)
  {
    pool.remove ((insn_info_type *) ptr);
  }

  /* Memory allocation pool.  */
  static pool_allocator<insn_info_type> pool;
};
typedef struct insn_info_type *insn_info_t;

pool_allocator<insn_info_type> insn_info_type::pool ("insn_info_pool", 100);

/* The linked list of stores that are under consideration in this
   basic block.  */
static insn_info_t active_local_stores;
static int active_local_stores_len;

struct dse_bb_info_type
{
  /* Pointer to the insn info for the last insn in the block.  These
     are linked so this is how all of the insns are reached.  During
     scanning this is the current insn being scanned.  */
  insn_info_t last_insn;

  /* The info for the global dataflow problem.  */

  /* This is set if the transfer function should AND in the wild_read
     bitmap before applying the kill and gen sets.  That vector knocks
     out most of the bits in the bitmap and thus speeds up the
     operations.  */
  bool apply_wild_read;

  /* The following 4 bitvectors hold information about which positions
     of which stores are live or dead.  They are indexed by
     get_bitmap_index.  */

  /* The set of store positions that exist in this block before a wild read.  */
  bitmap gen;

  /* The set of load positions that exist in this block above the
     same position of a store.  */
  bitmap kill;

  /* The set of stores that reach the top of the block without being
     killed by a read.

     Do not represent the in if it is all ones.  Note that this is
     what the bitvector should logically be initialized to for a set
     intersection problem.  However, like the kill set, this is too
     expensive.  So initially, the in set will only be created for the
     exit block and any block that contains a wild read.  */
  bitmap in;

  /* The set of stores that reach the bottom of the block from its
     successors.

     Do not represent the out if it is all ones.  Note that this is
     what the bitvector should logically be initialized to for a set
     intersection problem.  However, like the kill and in sets, this is
     too expensive.  So what is done is that the confluence operator
     just initializes the vector from one of the out sets of the
     successors of the block.  */
  bitmap out;

  /* The following bitvector is indexed by the reg number.  It
     contains the set of regs that are live at the current instruction
     being processed.  While it contains info for all of the
     registers, only the hard registers are actually examined.  It is used
     to assure that shift and/or add sequences that are inserted do not
     accidentally clobber live hard regs.  */
  bitmap regs_live;

  /* Pool allocation new operator.  */
  inline void *operator new (size_t)
  {
    return pool.allocate ();
  }

  /* Delete operator utilizing pool allocation.  */
  inline void operator delete (void *ptr)
  {
    pool.remove ((dse_bb_info_type *) ptr);
  }

  /* Memory allocation pool.  */
  static pool_allocator<dse_bb_info_type> pool;
};

typedef struct dse_bb_info_type *bb_info_t;
pool_allocator<dse_bb_info_type> dse_bb_info_type::pool ("bb_info_pool", 100);

/* Table to hold all bb_infos.  */
static bb_info_t *bb_table;

/* There is a group_info for each rtx base that is used to reference
   memory.  There are not many of these rtx bases because they are
   very limited in scope.  */

struct group_info
{
  /* The actual base of the address.  */
  rtx rtx_base;

  /* The sequential id of the base.  This allows us to have a
     canonical ordering of these that is not based on addresses.  */
  int id;

  /* True if there are any positions that are to be processed
     globally.  */
  bool process_globally;

  /* True if the base of this group is either the frame_pointer or
     hard_frame_pointer.  */
  bool frame_related;

  /* A mem wrapped around the base pointer for the group in order to do
     read dependency.  It must be given BLKmode in order to encompass all
     the possible offsets from the base.  */
  rtx base_mem;

  /* Canonized version of base_mem's address.  */
  rtx canon_base_addr;

  /* These two sets of two bitmaps are used to keep track of how many
     stores are actually referencing that position from this base.  We
     only do this for rtx bases as this will be used to assign
     positions in the bitmaps for the global problem.  Bit N is set in
     store1 on the first store for offset N.  Bit N is set in store2
     for the second store to offset N.  This is all we need since we
     only care about offsets that have two or more stores for them.

     The "_n" suffix is for offsets less than 0 and the "_p" suffix is
     for 0 and greater offsets.

     There is one special case here, for stores into the stack frame:
     we will OR store1 into store2 before deciding which stores to look
     at globally.  This is because stores to the stack frame that have
     no other reads before the end of the function can also be
     deleted.  */
  bitmap store1_n, store1_p, store2_n, store2_p;
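
  /* Illustrative sketch (hypothetical offsets): two stores to offset 8
     from this base set bit 8 first in store1_p and then in store2_p,
     marking the position as a global candidate; a single store to
     offset -4 leaves bit 4 set only in store1_n, which is enough only
     for frame-related groups, per the special case above.  */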

  /* These bitmaps keep track of which offsets in this group escape this
     function.  An offset escapes if it corresponds to a named variable
     whose addressable flag is set.  */
  bitmap escaped_n, escaped_p;

  /* The positions in this bitmap have the same assignments as the in,
     out, gen and kill bitmaps.  This bitmap is all zeros except for
     the positions that are occupied by stores for this group.  */
  bitmap group_kill;

  /* The offset_map is used to map the offsets from this base into
     positions in the global bitmaps.  It is only created after all of
     the stores have been scanned and we know which ones we
     care about.  */
  int *offset_map_n, *offset_map_p;
  int offset_map_size_n, offset_map_size_p;

  /* Pool allocation new operator.  */
  inline void *operator new (size_t)
  {
    return pool.allocate ();
  }

  /* Delete operator utilizing pool allocation.  */
  inline void operator delete (void *ptr)
  {
    pool.remove ((group_info *) ptr);
  }

  /* Memory allocation pool.  */
  static pool_allocator<group_info> pool;
};
typedef struct group_info *group_info_t;
typedef const struct group_info *const_group_info_t;

pool_allocator<group_info> group_info::pool ("rtx_group_info_pool", 100);

/* Index into the rtx_group_vec.  */
static int rtx_group_next_id;

static vec<group_info_t> rtx_group_vec;

/* This structure holds the set of changes that are being deferred
   when removing read operations.  See replace_read.  */
struct deferred_change
{
  /* The mem that is being replaced.  */
  rtx *loc;

  /* The reg it is being replaced with.  */
  rtx reg;

  struct deferred_change *next;

  /* Pool allocation new operator.  */
  inline void *operator new (size_t)
  {
    return pool.allocate ();
  }

  /* Delete operator utilizing pool allocation.  */
  inline void operator delete (void *ptr)
  {
    pool.remove ((deferred_change *) ptr);
  }

  /* Memory allocation pool.  */
  static pool_allocator<deferred_change> pool;
};

typedef struct deferred_change *deferred_change_t;

pool_allocator<deferred_change> deferred_change::pool
  ("deferred_change_pool", 10);

static deferred_change_t deferred_change_list = NULL;

/* The group that holds all of the clear_alias_sets.  */
static group_info_t clear_alias_group;

/* The modes of the clear_alias_sets.  */
static htab_t clear_alias_mode_table;

/* Hash table element to look up the mode for an alias set.  */
struct clear_alias_mode_holder
{
  alias_set_type alias_set;
  machine_mode mode;
};

/* This is true except if cfun->stdarg -- i.e. we cannot do
   this for vararg functions because they play games with the frame.  */
static bool stores_off_frame_dead_at_return;

/* Counters for stats.  */
static int globally_deleted;
static int locally_deleted;
static int spill_deleted;

static bitmap all_blocks;

/* Locations that are killed by calls in the global phase.  */
static bitmap kill_on_calls;

/* The number of bits used in the global bitmaps.  */
static unsigned int current_position;

/*----------------------------------------------------------------------------
   Zeroth step.

   Initialization.
----------------------------------------------------------------------------*/

/* Find the entry associated with ALIAS_SET.  */

static struct clear_alias_mode_holder *
clear_alias_set_lookup (alias_set_type alias_set)
{
  struct clear_alias_mode_holder tmp_holder;
  void **slot;

  tmp_holder.alias_set = alias_set;
  slot = htab_find_slot (clear_alias_mode_table, &tmp_holder, NO_INSERT);
  gcc_assert (*slot);

  return (struct clear_alias_mode_holder *) *slot;
}

/* Hashtable callbacks for maintaining the "bases" field of
   store_group_info, given that the addresses are function invariants.  */

struct invariant_group_base_hasher : typed_noop_remove <group_info>
{
  typedef group_info *value_type;
  typedef group_info *compare_type;
  static inline hashval_t hash (const group_info *);
  static inline bool equal (const group_info *, const group_info *);
};

inline bool
invariant_group_base_hasher::equal (const group_info *gi1,
				    const group_info *gi2)
{
  return rtx_equal_p (gi1->rtx_base, gi2->rtx_base);
}

inline hashval_t
invariant_group_base_hasher::hash (const group_info *gi)
{
  int do_not_record;
  return hash_rtx (gi->rtx_base, Pmode, &do_not_record, NULL, false);
}

/* Tables of group_info structures, hashed by base value.  */
static hash_table<invariant_group_base_hasher> *rtx_group_table;

/* Get the GROUP for BASE.  Add a new group if it is not there.  */

static group_info_t
get_group_info (rtx base)
{
  struct group_info tmp_gi;
  group_info_t gi;
  group_info **slot;

  if (base)
    {
      /* Find the store_base_info structure for BASE, creating a new one
	 if necessary.  */
      tmp_gi.rtx_base = base;
      slot = rtx_group_table->find_slot (&tmp_gi, INSERT);
      gi = (group_info_t) *slot;
    }
  else
    {
      if (!clear_alias_group)
	{
	  clear_alias_group = gi = new group_info;
	  memset (gi, 0, sizeof (struct group_info));
	  gi->id = rtx_group_next_id++;
	  gi->store1_n = BITMAP_ALLOC (&dse_bitmap_obstack);
	  gi->store1_p = BITMAP_ALLOC (&dse_bitmap_obstack);
	  gi->store2_n = BITMAP_ALLOC (&dse_bitmap_obstack);
	  gi->store2_p = BITMAP_ALLOC (&dse_bitmap_obstack);
	  gi->escaped_p = BITMAP_ALLOC (&dse_bitmap_obstack);
	  gi->escaped_n = BITMAP_ALLOC (&dse_bitmap_obstack);
	  gi->group_kill = BITMAP_ALLOC (&dse_bitmap_obstack);
	  gi->process_globally = false;
	  gi->offset_map_size_n = 0;
	  gi->offset_map_size_p = 0;
	  gi->offset_map_n = NULL;
	  gi->offset_map_p = NULL;
	  rtx_group_vec.safe_push (gi);
	}
      return clear_alias_group;
    }

  if (gi == NULL)
    {
      *slot = gi = new group_info;
      gi->rtx_base = base;
      gi->id = rtx_group_next_id++;
      gi->base_mem = gen_rtx_MEM (BLKmode, base);
      gi->canon_base_addr = canon_rtx (base);
      gi->store1_n = BITMAP_ALLOC (&dse_bitmap_obstack);
      gi->store1_p = BITMAP_ALLOC (&dse_bitmap_obstack);
      gi->store2_n = BITMAP_ALLOC (&dse_bitmap_obstack);
      gi->store2_p = BITMAP_ALLOC (&dse_bitmap_obstack);
      gi->escaped_p = BITMAP_ALLOC (&dse_bitmap_obstack);
      gi->escaped_n = BITMAP_ALLOC (&dse_bitmap_obstack);
      gi->group_kill = BITMAP_ALLOC (&dse_bitmap_obstack);
      gi->process_globally = false;
      gi->frame_related =
	(base == frame_pointer_rtx) || (base == hard_frame_pointer_rtx);
      gi->offset_map_size_n = 0;
      gi->offset_map_size_p = 0;
      gi->offset_map_n = NULL;
      gi->offset_map_p = NULL;
      rtx_group_vec.safe_push (gi);
    }

  return gi;
}

/* Initialization of data structures.  */

static void
dse_step0 (void)
{
  locally_deleted = 0;
  globally_deleted = 0;
  spill_deleted = 0;

  bitmap_obstack_initialize (&dse_bitmap_obstack);
  gcc_obstack_init (&dse_obstack);

  scratch = BITMAP_ALLOC (&reg_obstack);
  kill_on_calls = BITMAP_ALLOC (&dse_bitmap_obstack);

  rtx_group_table = new hash_table<invariant_group_base_hasher> (11);

  bb_table = XNEWVEC (bb_info_t, last_basic_block_for_fn (cfun));
  rtx_group_next_id = 0;

  stores_off_frame_dead_at_return = !cfun->stdarg;

  init_alias_analysis ();

  clear_alias_group = NULL;
}

/*----------------------------------------------------------------------------
   First step.

   Scan all of the insns.  Any random ordering of the blocks is fine.
   Each block is scanned in forward order to accommodate cselib which
   is used to remove stores with non-constant bases.
----------------------------------------------------------------------------*/

/* Delete all of the store_info recs from INSN_INFO.  */

static void
free_store_info (insn_info_t insn_info)
{
  store_info_t store_info = insn_info->store_rec;
  while (store_info)
    {
      store_info_t next = store_info->next;
      if (store_info->is_large)
	BITMAP_FREE (store_info->positions_needed.large.bmap);
      if (store_info->cse_base)
	cse_store_info_pool.remove (store_info);
      else
	rtx_store_info_pool.remove (store_info);
      store_info = next;
    }

  insn_info->cannot_delete = true;
  insn_info->contains_cselib_groups = false;
  insn_info->store_rec = NULL;
}

typedef struct
{
  rtx_insn *first, *current;
  regset fixed_regs_live;
  bool failure;
} note_add_store_info;

/* Callback for emit_inc_dec_insn_before via note_stores.
   Check if a register is clobbered which is live afterwards.  */

static void
note_add_store (rtx loc, const_rtx expr ATTRIBUTE_UNUSED, void *data)
{
  rtx_insn *insn;
  note_add_store_info *info = (note_add_store_info *) data;

  if (!REG_P (loc))
    return;

  /* If this register is referenced by the current or an earlier insn,
     that's OK.  E.g. this applies to the register that is being incremented
     with this addition.  */
  for (insn = info->first;
       insn != NEXT_INSN (info->current);
       insn = NEXT_INSN (insn))
    if (reg_referenced_p (loc, PATTERN (insn)))
      return;

  /* If we come here, we have a clobber of a register that's only OK
     if that register is not live.  If we don't have liveness information
     available, fail now.  */
  if (!info->fixed_regs_live)
    {
      info->failure = true;
      return;
    }

  /* Now check if this is a live fixed register.  */
  unsigned int end_regno = END_REGNO (loc);
  for (unsigned int regno = REGNO (loc); regno < end_regno; ++regno)
    if (REGNO_REG_SET_P (info->fixed_regs_live, regno))
      info->failure = true;
}

/* Callback for for_each_inc_dec that emits an INSN that sets DEST to
   SRC + SRCOFF before insn ARG.  */

static int
emit_inc_dec_insn_before (rtx mem ATTRIBUTE_UNUSED,
			  rtx op ATTRIBUTE_UNUSED,
			  rtx dest, rtx src, rtx srcoff, void *arg)
{
  insn_info_t insn_info = (insn_info_t) arg;
  rtx_insn *insn = insn_info->insn, *new_insn, *cur;
  note_add_store_info info;

  /* We can reuse all operands without copying, because we are about
     to delete the insn that contained them.  */
  if (srcoff)
    {
      start_sequence ();
      emit_insn (gen_add3_insn (dest, src, srcoff));
      new_insn = get_insns ();
      end_sequence ();
    }
  else
    new_insn = gen_move_insn (dest, src);
  info.first = new_insn;
  info.fixed_regs_live = insn_info->fixed_regs_live;
  info.failure = false;
  for (cur = new_insn; cur; cur = NEXT_INSN (cur))
    {
      info.current = cur;
      note_stores (PATTERN (cur), note_add_store, &info);
    }

  /* If a failure was flagged above, return 1 so that for_each_inc_dec will
     return it immediately, communicating the failure to its caller.  */
  if (info.failure)
    return 1;

  emit_insn_before (new_insn, insn);

  return 0;
}

/* Before we delete INSN_INFO->INSN, make sure that the auto inc/dec, if it
   is there, is split into a separate insn.
   Return true on success (or if there was nothing to do), false on failure.  */

static bool
check_for_inc_dec_1 (insn_info_t insn_info)
{
  rtx_insn *insn = insn_info->insn;
  rtx note = find_reg_note (insn, REG_INC, NULL_RTX);
  if (note)
    return for_each_inc_dec (PATTERN (insn), emit_inc_dec_insn_before,
			     insn_info) == 0;
  return true;
}

/* Entry point for postreload.  If you work on reload_cse, or you need this
   anywhere else, consider if you can provide register liveness information
   and add a parameter to this function so that it can be passed down in
   insn_info.fixed_regs_live.  */
bool
check_for_inc_dec (rtx_insn *insn)
{
  insn_info_type insn_info;
  rtx note;

  insn_info.insn = insn;
  insn_info.fixed_regs_live = NULL;
  note = find_reg_note (insn, REG_INC, NULL_RTX);
  if (note)
    return for_each_inc_dec (PATTERN (insn), emit_inc_dec_insn_before,
			     &insn_info) == 0;
  return true;
}

/* Delete the insn and free all of the fields inside INSN_INFO.  */

static void
delete_dead_store_insn (insn_info_t insn_info)
{
  read_info_t read_info;

  if (!dbg_cnt (dse))
    return;

  if (!check_for_inc_dec_1 (insn_info))
    return;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Locally deleting insn %d ",
	       INSN_UID (insn_info->insn));
      if (insn_info->store_rec->alias_set)
	fprintf (dump_file, "alias set %d\n",
		 (int) insn_info->store_rec->alias_set);
      else
	fprintf (dump_file, "\n");
    }

  free_store_info (insn_info);
  read_info = insn_info->read_rec;

  while (read_info)
    {
      read_info_t next = read_info->next;
      delete read_info;
      read_info = next;
    }
  insn_info->read_rec = NULL;

  delete_insn (insn_info->insn);
  locally_deleted++;
  insn_info->insn = NULL;

  insn_info->wild_read = false;
}

/* Return whether DECL, a local variable, can possibly escape the current
   function scope.  */

static bool
local_variable_can_escape (tree decl)
{
  if (TREE_ADDRESSABLE (decl))
    return true;

  /* If this is a partitioned variable, we need to consider all the variables
     in the partition.  This is necessary because a store into one of them can
     be replaced with a store into another and this may not change the outcome
     of the escape analysis.  */
  if (cfun->gimple_df->decls_to_pointers != NULL)
    {
      tree *namep = cfun->gimple_df->decls_to_pointers->get (decl);
      if (namep)
	return TREE_ADDRESSABLE (*namep);
    }

  return false;
}

/* Return whether EXPR can possibly escape the current function scope.  */

static bool
can_escape (tree expr)
{
  tree base;
  if (!expr)
    return true;
  base = get_base_address (expr);
  if (DECL_P (base)
      && !may_be_aliased (base)
      && !(TREE_CODE (base) == VAR_DECL
	   && !DECL_EXTERNAL (base)
	   && !TREE_STATIC (base)
	   && local_variable_can_escape (base)))
    return false;
  return true;
}

/* Set the store* bitmaps and the offset_map_size* fields in GROUP
   based on OFFSET and WIDTH.  */

static void
set_usage_bits (group_info_t group, HOST_WIDE_INT offset, HOST_WIDE_INT width,
		tree expr)
{
  HOST_WIDE_INT i;
  bool expr_escapes = can_escape (expr);
  if (offset > -MAX_OFFSET && offset + width < MAX_OFFSET)
    for (i = offset; i < offset + width; i++)
      {
	bitmap store1;
	bitmap store2;
	bitmap escaped;
	int ai;
	if (i < 0)
	  {
	    store1 = group->store1_n;
	    store2 = group->store2_n;
	    escaped = group->escaped_n;
	    ai = -i;
	  }
	else
	  {
	    store1 = group->store1_p;
	    store2 = group->store2_p;
	    escaped = group->escaped_p;
	    ai = i;
	  }

	if (!bitmap_set_bit (store1, ai))
	  bitmap_set_bit (store2, ai);
	else
	  {
	    if (i < 0)
	      {
		if (group->offset_map_size_n < ai)
		  group->offset_map_size_n = ai;
	      }
	    else
	      {
		if (group->offset_map_size_p < ai)
		  group->offset_map_size_p = ai;
	      }
	  }
	if (expr_escapes)
	  bitmap_set_bit (escaped, ai);
      }
}
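
/* For example (hypothetical numbers): the first 4-byte store at offset
   -2 covers byte positions -2..1, setting bits 2 and 1 in store1_n and
   bits 0 and 1 in store1_p; a second store over the same bytes would
   transfer those positions into store2_n/store2_p, making them global
   candidates.  */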

static void
reset_active_stores (void)
{
  active_local_stores = NULL;
  active_local_stores_len = 0;
}

/* Free all READ_REC of the LAST_INSN of BB_INFO.  */

static void
free_read_records (bb_info_t bb_info)
{
  insn_info_t insn_info = bb_info->last_insn;
  read_info_t *ptr = &insn_info->read_rec;
  while (*ptr)
    {
      read_info_t next = (*ptr)->next;
      if ((*ptr)->alias_set == 0)
	{
	  delete *ptr;
	  *ptr = next;
	}
      else
	ptr = &(*ptr)->next;
    }
}

/* Set the BB_INFO so that the last insn is marked as a wild read.  */

static void
add_wild_read (bb_info_t bb_info)
{
  insn_info_t insn_info = bb_info->last_insn;
  insn_info->wild_read = true;
  free_read_records (bb_info);
  reset_active_stores ();
}

/* Set the BB_INFO so that the last insn is marked as a wild read of
   non-frame locations.  */

static void
add_non_frame_wild_read (bb_info_t bb_info)
{
  insn_info_t insn_info = bb_info->last_insn;
  insn_info->non_frame_wild_read = true;
  free_read_records (bb_info);
  reset_active_stores ();
}

/* Return true if X is a constant or one of the registers that behave
   as a constant over the life of a function.  This is equivalent to
   !rtx_varies_p for memory addresses.  */

static bool
const_or_frame_p (rtx x)
{
  if (CONSTANT_P (x))
    return true;

  if (GET_CODE (x) == REG)
    {
      /* Note that we have to test for the actual rtx used for the frame
	 and arg pointers and not just the register number in case we have
	 eliminated the frame and/or arg pointer and are using it
	 for pseudos.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
	  /* The arg pointer varies if it is not a fixed register.  */
	  || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
	  || x == pic_offset_table_rtx)
	return true;
      return false;
    }

  return false;
}

/* Take all reasonable action to put the address of MEM into the form
   that we can do analysis on.

   The gold standard is to get the address into the form:
   address + OFFSET where address is something that rtx_varies_p
   considers a constant.  When we can get the address in this form, we
   can do global analysis on it.  Note that for constant bases, address
   is not actually returned, only the group_id.  The address can be
   obtained from that.

   If that fails, we try cselib to get a value we can at least use
   locally.  If that fails we return false.

   The GROUP_ID is set to -1 for cselib bases and the index of the
   group for non_varying bases.  */

static bool
canon_address (rtx mem,
	       alias_set_type *alias_set_out,
	       int *group_id,
	       HOST_WIDE_INT *offset,
	       cselib_val **base)
{
  machine_mode address_mode = get_address_mode (mem);
  rtx mem_address = XEXP (mem, 0);
  rtx expanded_address, address;
  int expanded;

  *alias_set_out = 0;

  cselib_lookup (mem_address, address_mode, 1, GET_MODE (mem));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, " mem: ");
      print_inline_rtx (dump_file, mem_address, 0);
      fprintf (dump_file, "\n");
    }

  /* First see if just canon_rtx (mem_address) is const or frame,
     if not, try cselib_expand_value_rtx and call canon_rtx on that.  */
  address = NULL_RTX;
  for (expanded = 0; expanded < 2; expanded++)
    {
      if (expanded)
	{
	  /* Use cselib to replace all of the reg references with the full
	     expression.  This will take care of the case where we have

	     r_x = base + offset;
	     val = *r_x;

	     by making it into

	     val = *(base + offset);  */

	  expanded_address = cselib_expand_value_rtx (mem_address,
						      scratch, 5);

	  /* If this fails, just go with the address from first
	     iteration.  */
	  if (!expanded_address)
	    break;
	}
      else
	expanded_address = mem_address;

      /* Split the address into canonical BASE + OFFSET terms.  */
      address = canon_rtx (expanded_address);

      *offset = 0;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  if (expanded)
	    {
	      fprintf (dump_file, "\n after cselib_expand address: ");
	      print_inline_rtx (dump_file, expanded_address, 0);
	      fprintf (dump_file, "\n");
	    }

	  fprintf (dump_file, "\n after canon_rtx address: ");
	  print_inline_rtx (dump_file, address, 0);
	  fprintf (dump_file, "\n");
	}

      if (GET_CODE (address) == CONST)
	address = XEXP (address, 0);

      if (GET_CODE (address) == PLUS
	  && CONST_INT_P (XEXP (address, 1)))
	{
	  *offset = INTVAL (XEXP (address, 1));
	  address = XEXP (address, 0);
	}

      if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (mem))
	  && const_or_frame_p (address))
	{
	  group_info_t group = get_group_info (address);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, " gid=%d offset=%d \n",
		     group->id, (int)*offset);
	  *base = NULL;
	  *group_id = group->id;
	  return true;
	}
    }

  *base = cselib_lookup (address, address_mode, true, GET_MODE (mem));
  *group_id = -1;

  if (*base == NULL)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " no cselib val - should be a wild read.\n");
      return false;
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, " varying cselib base=%u:%u offset = %d\n",
	     (*base)->uid, (*base)->hash, (int)*offset);
  return true;
}
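
/* As an illustrative sketch: given r_x = fp + 12 followed by a store
   to (mem (reg r_x)), the expansion above rewrites the address to
   (plus (reg fp) (const_int 12)), so the store lands in the frame
   pointer's group with *OFFSET == 12 instead of needing a cselib
   base.  */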

/* Clear the rhs field from the active_local_stores array.  */

static void
clear_rhs_from_active_local_stores (void)
{
  insn_info_t ptr = active_local_stores;

  while (ptr)
    {
      store_info_t store_info = ptr->store_rec;
      /* Skip the clobbers.  */
      while (!store_info->is_set)
	store_info = store_info->next;

      store_info->rhs = NULL;
      store_info->const_rhs = NULL;

      ptr = ptr->next_local_store;
    }
}

/* Mark byte POS bytes from the beginning of store S_INFO as unneeded.  */

static inline void
set_position_unneeded (store_info_t s_info, int pos)
{
  if (__builtin_expect (s_info->is_large, false))
    {
      if (bitmap_set_bit (s_info->positions_needed.large.bmap, pos))
	s_info->positions_needed.large.count++;
    }
  else
    s_info->positions_needed.small_bitmask
      &= ~(((unsigned HOST_WIDE_INT) 1) << pos);
}

/* Mark the whole store S_INFO as unneeded.  */

static inline void
set_all_positions_unneeded (store_info_t s_info)
{
  if (__builtin_expect (s_info->is_large, false))
    {
      int pos, end = s_info->end - s_info->begin;
      for (pos = 0; pos < end; pos++)
	bitmap_set_bit (s_info->positions_needed.large.bmap, pos);
      s_info->positions_needed.large.count = end;
    }
  else
    s_info->positions_needed.small_bitmask = (unsigned HOST_WIDE_INT) 0;
}

/* Return TRUE if any bytes from S_INFO store are needed.  */

static inline bool
any_positions_needed_p (store_info_t s_info)
{
  if (__builtin_expect (s_info->is_large, false))
    return (s_info->positions_needed.large.count
	    < s_info->end - s_info->begin);
  else
    return (s_info->positions_needed.small_bitmask
	    != (unsigned HOST_WIDE_INT) 0);
}

/* Return TRUE if all bytes START through START+WIDTH-1 from S_INFO
   store are needed.  */

static inline bool
all_positions_needed_p (store_info_t s_info, int start, int width)
{
  if (__builtin_expect (s_info->is_large, false))
    {
      int end = start + width;
      while (start < end)
	if (bitmap_bit_p (s_info->positions_needed.large.bmap, start++))
	  return false;
      return true;
    }
  else
    {
      unsigned HOST_WIDE_INT mask = lowpart_bitmask (width) << start;
      return (s_info->positions_needed.small_bitmask & mask) == mask;
    }
}
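
/* E.g. (illustrative): for a 4-byte store whose small_bitmask is 0xc,
   all_positions_needed_p (s_info, 2, 2) tests mask 0xc and returns
   true, while a query over bytes 0..1 (mask 0x3) returns false.  */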

static rtx get_stored_val (store_info_t, machine_mode, HOST_WIDE_INT,
			   HOST_WIDE_INT, basic_block, bool);

/* BODY is an instruction pattern that belongs to INSN.  Return 1 if
   there is a candidate store, after adding it to the appropriate
   local store group if so.  */

static int
record_store (rtx body, bb_info_t bb_info)
{
  rtx mem, rhs, const_rhs, mem_addr;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT width = 0;
  alias_set_type spill_alias_set;
  insn_info_t insn_info = bb_info->last_insn;
  store_info_t store_info = NULL;
  int group_id;
  cselib_val *base = NULL;
  insn_info_t ptr, last, redundant_reason;
  bool store_is_unused;

  if (GET_CODE (body) != SET && GET_CODE (body) != CLOBBER)
    return 0;

  mem = SET_DEST (body);

  /* If this is not used, then this cannot be used to keep the insn
     from being deleted.  On the other hand, it does provide something
     that can be used to prove that another store is dead.  */
  store_is_unused
    = (find_reg_note (insn_info->insn, REG_UNUSED, mem) != NULL);

  /* Check whether that value is a suitable memory location.  */
  if (!MEM_P (mem))
    {
      /* If the set or clobber is unused, then it does not affect our
	 ability to get rid of the entire insn.  */
      if (!store_is_unused)
	insn_info->cannot_delete = true;
      return 0;
    }

  /* At this point we know mem is a mem.  */
  if (GET_MODE (mem) == BLKmode)
    {
      if (GET_CODE (XEXP (mem, 0)) == SCRATCH)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, " adding wild read for (clobber (mem:BLK (scratch))\n");
	  add_wild_read (bb_info);
	  insn_info->cannot_delete = true;
	  return 0;
	}
      /* Handle (set (mem:BLK (addr) [... S36 ...]) (const_int 0))
	 as memset (addr, 0, 36);  */
      else if (!MEM_SIZE_KNOWN_P (mem)
	       || MEM_SIZE (mem) <= 0
	       || MEM_SIZE (mem) > MAX_OFFSET
	       || GET_CODE (body) != SET
	       || !CONST_INT_P (SET_SRC (body)))
	{
	  if (!store_is_unused)
	    {
	      /* If the set or clobber is unused, then it does not affect our
		 ability to get rid of the entire insn.  */
	      insn_info->cannot_delete = true;
	      clear_rhs_from_active_local_stores ();
	    }
	  return 0;
	}
    }

  /* We can still process a volatile mem, we just cannot delete it.  */
  if (MEM_VOLATILE_P (mem))
    insn_info->cannot_delete = true;

  if (!canon_address (mem, &spill_alias_set, &group_id, &offset, &base))
    {
      clear_rhs_from_active_local_stores ();
      return 0;
    }

  if (GET_MODE (mem) == BLKmode)
    width = MEM_SIZE (mem);
  else
    width = GET_MODE_SIZE (GET_MODE (mem));

  if (spill_alias_set)
    {
      bitmap store1 = clear_alias_group->store1_p;
      bitmap store2 = clear_alias_group->store2_p;

      gcc_assert (GET_MODE (mem) != BLKmode);

      if (!bitmap_set_bit (store1, spill_alias_set))
	bitmap_set_bit (store2, spill_alias_set);

      if (clear_alias_group->offset_map_size_p < spill_alias_set)
	clear_alias_group->offset_map_size_p = spill_alias_set;

      store_info = rtx_store_info_pool.allocate ();

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " processing spill store %d(%s)\n",
		 (int) spill_alias_set, GET_MODE_NAME (GET_MODE (mem)));
    }
  else if (group_id >= 0)
    {
      /* In the restrictive case where the base is a constant or the
	 frame pointer we can do global analysis.  */

      group_info_t group
	= rtx_group_vec[group_id];
      tree expr = MEM_EXPR (mem);

      store_info = rtx_store_info_pool.allocate ();
      set_usage_bits (group, offset, width, expr);

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " processing const base store gid=%d[%d..%d)\n",
		 group_id, (int)offset, (int)(offset+width));
    }
  else
    {
      if (may_be_sp_based_p (XEXP (mem, 0)))
	insn_info->stack_pointer_based = true;
      insn_info->contains_cselib_groups = true;

      store_info = cse_store_info_pool.allocate ();
      group_id = -1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " processing cselib store [%d..%d)\n",
		 (int)offset, (int)(offset+width));
    }

  const_rhs = rhs = NULL_RTX;
  if (GET_CODE (body) == SET
      /* No place to keep the value after ra.  */
      && !reload_completed
      && (REG_P (SET_SRC (body))
	  || GET_CODE (SET_SRC (body)) == SUBREG
	  || CONSTANT_P (SET_SRC (body)))
      && !MEM_VOLATILE_P (mem)
      /* Sometimes the store and reload are used for truncation and
	 rounding.  */
      && !(FLOAT_MODE_P (GET_MODE (mem)) && (flag_float_store)))
    {
      rhs = SET_SRC (body);
      if (CONSTANT_P (rhs))
	const_rhs = rhs;
      else if (body == PATTERN (insn_info->insn))
	{
	  rtx tem = find_reg_note (insn_info->insn, REG_EQUAL, NULL_RTX);
	  if (tem && CONSTANT_P (XEXP (tem, 0)))
	    const_rhs = XEXP (tem, 0);
	}
      if (const_rhs == NULL_RTX && REG_P (rhs))
	{
	  rtx tem = cselib_expand_value_rtx (rhs, scratch, 5);

	  if (tem && CONSTANT_P (tem))
	    const_rhs = tem;
	}
    }

  /* Check to see if this store causes some other stores to be
     dead.  */
  ptr = active_local_stores;
  last = NULL;
  redundant_reason = NULL;
  mem = canon_rtx (mem);
  /* For alias_set != 0 canon_true_dependence should never be called.  */
  if (spill_alias_set)
    mem_addr = NULL_RTX;
  else
    {
      if (group_id < 0)
	mem_addr = base->val_rtx;
      else
	{
	  group_info_t group
	    = rtx_group_vec[group_id];
	  mem_addr = group->canon_base_addr;
	}
      /* get_addr can only handle VALUE but cannot handle expr like:
	 VALUE + OFFSET, so call get_addr to get original addr for
	 mem_addr before plus_constant.  */
      mem_addr = get_addr (mem_addr);
      if (offset)
	mem_addr = plus_constant (get_address_mode (mem), mem_addr, offset);
    }

  while (ptr)
    {
      insn_info_t next = ptr->next_local_store;
      store_info_t s_info = ptr->store_rec;
      bool del = true;

      /* Skip the clobbers.  We delete the active insn if this insn
	 shadows the set.  To have been put on the active list, it
	 has exactly one set.  */
      while (!s_info->is_set)
	s_info = s_info->next;

      if (s_info->alias_set != spill_alias_set)
	del = false;
      else if (s_info->alias_set)
	{
	  struct clear_alias_mode_holder *entry
	    = clear_alias_set_lookup (s_info->alias_set);
	  /* Generally, spills cannot be processed if any of the
	     references to the slot have a different mode.  But if
	     we are in the same block and mode is exactly the same
	     between this store and one before in the same block,
	     we can still delete it.  */
	  if ((GET_MODE (mem) == GET_MODE (s_info->mem))
	      && (GET_MODE (mem) == entry->mode))
	    {
	      del = true;
	      set_all_positions_unneeded (s_info);
	    }
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, " trying spill store in insn=%d alias_set=%d\n",
		     INSN_UID (ptr->insn), (int) s_info->alias_set);
	}
      else if ((s_info->group_id == group_id)
	       && (s_info->cse_base == base))
	{
	  HOST_WIDE_INT i;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, " trying store in insn=%d gid=%d[%d..%d)\n",
		     INSN_UID (ptr->insn), s_info->group_id,
		     (int)s_info->begin, (int)s_info->end);

	  /* Even if PTR won't be eliminated as unneeded, if both
	     PTR and this insn store the same constant value, we might
	     eliminate this insn instead.  */
	  if (s_info->const_rhs
	      && const_rhs
	      && offset >= s_info->begin
	      && offset + width <= s_info->end
	      && all_positions_needed_p (s_info, offset - s_info->begin,
					 width))
	    {
	      if (GET_MODE (mem) == BLKmode)
		{
		  if (GET_MODE (s_info->mem) == BLKmode
		      && s_info->const_rhs == const_rhs)
		    redundant_reason = ptr;
		}
	      else if (s_info->const_rhs == const0_rtx
		       && const_rhs == const0_rtx)
		redundant_reason = ptr;
	      else
		{
		  rtx val;
		  start_sequence ();
		  val = get_stored_val (s_info, GET_MODE (mem),
					offset, offset + width,
					BLOCK_FOR_INSN (insn_info->insn),
					true);
		  if (get_insns () != NULL)
		    val = NULL_RTX;
		  end_sequence ();
		  if (val && rtx_equal_p (val, const_rhs))
		    redundant_reason = ptr;
		}
	    }

	  for (i = MAX (offset, s_info->begin);
	       i < offset + width && i < s_info->end;
	       i++)
	    set_position_unneeded (s_info, i - s_info->begin);
	}
      else if (s_info->rhs)
	/* Need to see if it is possible for this store to overwrite
	   the value of store_info.  If it is, set the rhs to NULL to
	   keep it from being used to remove a load.  */
	{
	  if (canon_true_dependence (s_info->mem,
				     GET_MODE (s_info->mem),
				     s_info->mem_addr,
				     mem, mem_addr))
	    {
	      s_info->rhs = NULL;
	      s_info->const_rhs = NULL;
	    }
	}

      /* An insn can be deleted if every position of every one of
	 its s_infos is zero.  */
      if (any_positions_needed_p (s_info))
	del = false;

      if (del)
	{
	  insn_info_t insn_to_delete = ptr;

	  active_local_stores_len--;
	  if (last)
	    last->next_local_store = ptr->next_local_store;
	  else
	    active_local_stores = ptr->next_local_store;

	  if (!insn_to_delete->cannot_delete)
	    delete_dead_store_insn (insn_to_delete);
	}
      else
	last = ptr;

      ptr = next;
    }

  /* Finish filling in the store_info.  */
  store_info->next = insn_info->store_rec;
  insn_info->store_rec = store_info;
  store_info->mem = mem;
  store_info->alias_set = spill_alias_set;
  store_info->mem_addr = mem_addr;
  store_info->cse_base = base;
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      store_info->is_large = true;
      store_info->positions_needed.large.count = 0;
      store_info->positions_needed.large.bmap
	= BITMAP_ALLOC (&dse_bitmap_obstack);
    }
  else
    {
      store_info->is_large = false;
      store_info->positions_needed.small_bitmask = lowpart_bitmask (width);
    }
  store_info->group_id = group_id;
  store_info->begin = offset;
  store_info->end = offset + width;
  store_info->is_set = GET_CODE (body) == SET;
  store_info->rhs = rhs;
  store_info->const_rhs = const_rhs;
  store_info->redundant_reason = redundant_reason;

  /* If this is a clobber, we return 0.  We will only be able to
     delete this insn if there is only one used store, but we
     can use the clobber to delete other stores earlier.  */
  return store_info->is_set ? 1 : 0;
}

static void
dump_insn_info (const char * start, insn_info_t insn_info)
{
  fprintf (dump_file, "%s insn=%d %s\n", start,
	   INSN_UID (insn_info->insn),
	   insn_info->store_rec ? "has store" : "naked");
}
1797 /* If the modes are different and the value's source and target do not
1798 line up, we need to extract the value from lower part of the rhs of
1799 the store, shift it, and then put it into a form that can be shoved
1800 into the read_insn. This function generates a right SHIFT of a
1801 value that is at least ACCESS_SIZE bytes wide of READ_MODE. The
1802 shift sequence is returned or NULL if we failed to find a
1803 shift. */
1805 static rtx
1806 find_shift_sequence (int access_size,
1807 store_info_t store_info,
1808 machine_mode read_mode,
1809 int shift, bool speed, bool require_cst)
1811 machine_mode store_mode = GET_MODE (store_info->mem);
1812 machine_mode new_mode;
1813 rtx read_reg = NULL;
1815 /* Some machines like the x86 have shift insns for each size of
1816 operand. Other machines like the ppc or the ia-64 may only have
1817 shift insns that shift values within 32 or 64 bit registers.
1818 This loop tries to find the smallest shift insn that will right
1819 justify the value we want to read but is available in one insn on
1820 the machine. */
1822 for (new_mode = smallest_mode_for_size (access_size * BITS_PER_UNIT,
1823 MODE_INT);
1824 GET_MODE_BITSIZE (new_mode) <= BITS_PER_WORD;
1825 new_mode = GET_MODE_WIDER_MODE (new_mode))
1827 rtx target, new_reg, new_lhs;
1828 rtx_insn *shift_seq, *insn;
1829 int cost;
1831 /* If a constant was stored into memory, try to simplify it here,
1832 otherwise the cost of the shift might preclude this optimization
1833 e.g. at -Os, even when no actual shift will be needed. */
1834 if (store_info->const_rhs)
1836 unsigned int byte = subreg_lowpart_offset (new_mode, store_mode);
1837 rtx ret = simplify_subreg (new_mode, store_info->const_rhs,
1838 store_mode, byte);
1839 if (ret && CONSTANT_P (ret))
1841 ret = simplify_const_binary_operation (LSHIFTRT, new_mode,
1842 ret, GEN_INT (shift));
1843 if (ret && CONSTANT_P (ret))
1845 byte = subreg_lowpart_offset (read_mode, new_mode);
1846 ret = simplify_subreg (read_mode, ret, new_mode, byte);
1847 if (ret && CONSTANT_P (ret)
1848 && set_src_cost (ret, speed) <= COSTS_N_INSNS (1))
1849 return ret;
1854 if (require_cst)
1855 return NULL_RTX;
1857 /* Try a wider mode if truncating the store mode to NEW_MODE
1858 requires a real instruction. */
1859 if (GET_MODE_BITSIZE (new_mode) < GET_MODE_BITSIZE (store_mode)
1860 && !TRULY_NOOP_TRUNCATION_MODES_P (new_mode, store_mode))
1861 continue;
1863 /* Also try a wider mode if the necessary punning is either not
1864 desirable or not possible. */
1865 if (!CONSTANT_P (store_info->rhs)
1866 && !MODES_TIEABLE_P (new_mode, store_mode))
1867 continue;
1869 new_reg = gen_reg_rtx (new_mode);
1871 start_sequence ();
1873 /* In theory we could also check for an ashr. Ian Taylor knows
1874 of one dsp where the cost of these two was not the same. But
1875 this really is a rare case anyway. */
1876 target = expand_binop (new_mode, lshr_optab, new_reg,
1877 GEN_INT (shift), new_reg, 1, OPTAB_DIRECT);
1879 shift_seq = get_insns ();
1880 end_sequence ();
1882 if (target != new_reg || shift_seq == NULL)
1883 continue;
1885 cost = 0;
1886 for (insn = shift_seq; insn != NULL_RTX; insn = NEXT_INSN (insn))
1887 if (INSN_P (insn))
1888 cost += insn_rtx_cost (PATTERN (insn), speed);
1890 /* The computation up to here is essentially independent
1891 of the arguments and could be precomputed, or at least
1892 cached, if that ever proved worthwhile. The result
1893 technically depends on both SHIFT and ACCESS_SIZE,
1894 but in practice the answer will depend only on ACCESS_SIZE. */
1897 if (cost > COSTS_N_INSNS (1))
1898 continue;
1900 new_lhs = extract_low_bits (new_mode, store_mode,
1901 copy_rtx (store_info->rhs));
1902 if (new_lhs == NULL_RTX)
1903 continue;
1905 /* We found an acceptable shift. Generate a move to
1906 take the value from the store and put it into the
1907 shift pseudo, then shift it, then generate another
1908 move to put it into the target of the read. */
1909 emit_move_insn (new_reg, new_lhs);
1910 emit_insn (shift_seq);
1911 read_reg = extract_low_bits (read_mode, new_mode, new_reg);
1912 break;
1915 return read_reg;
1919 /* Call back for note_stores to find the hard regs set or clobbered by
1920 insn. Data is a bitmap of the hardregs set so far. */
1922 static void
1923 look_for_hardregs (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
1925 bitmap regs_set = (bitmap) data;
1927 if (REG_P (x)
1928 && HARD_REGISTER_P (x))
1929 bitmap_set_range (regs_set, REGNO (x), REG_NREGS (x));
1932 /* Helper function for replace_read and record_store.
1933 Attempt to return a value stored in STORE_INFO, from READ_BEGIN
1934 to one before READ_END bytes read in READ_MODE. Return NULL
1935 if not successful. If REQUIRE_CST is true, only return a constant. */
1937 static rtx
1938 get_stored_val (store_info_t store_info, machine_mode read_mode,
1939 HOST_WIDE_INT read_begin, HOST_WIDE_INT read_end,
1940 basic_block bb, bool require_cst)
1942 machine_mode store_mode = GET_MODE (store_info->mem);
1943 int shift;
1944 int access_size; /* In bytes. */
1945 rtx read_reg;
1947 /* To get here the read is within the boundaries of the write so
1948 shift will never be negative. Start out with the shift being in
1949 bytes. */
1950 if (store_mode == BLKmode)
1951 shift = 0;
1952 else if (BYTES_BIG_ENDIAN)
1953 shift = store_info->end - read_end;
1954 else
1955 shift = read_begin - store_info->begin;
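/* For example (hypothetical offsets): a store covering bytes [0, 8)
   followed by a read of bytes [4, 8) gives shift = 4 on a
   little-endian target, while on a big-endian target the same read
   gives shift = 8 - 8 = 0, since it already sits in the low part. */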
1957 access_size = shift + GET_MODE_SIZE (read_mode);
1959 /* From now on it is bits. */
1960 shift *= BITS_PER_UNIT;
1962 if (shift)
1963 read_reg = find_shift_sequence (access_size, store_info, read_mode, shift,
1964 optimize_bb_for_speed_p (bb),
1965 require_cst);
1966 else if (store_mode == BLKmode)
1968 /* The store is a memset (addr, const_val, const_size). */
1969 gcc_assert (CONST_INT_P (store_info->rhs));
1970 store_mode = int_mode_for_mode (read_mode);
1971 if (store_mode == BLKmode)
1972 read_reg = NULL_RTX;
1973 else if (store_info->rhs == const0_rtx)
1974 read_reg = extract_low_bits (read_mode, store_mode, const0_rtx);
1975 else if (GET_MODE_BITSIZE (store_mode) > HOST_BITS_PER_WIDE_INT
1976 || BITS_PER_UNIT >= HOST_BITS_PER_WIDE_INT)
1977 read_reg = NULL_RTX;
1978 else
1980 unsigned HOST_WIDE_INT c
1981 = INTVAL (store_info->rhs)
1982 & (((HOST_WIDE_INT) 1 << BITS_PER_UNIT) - 1);
1983 int shift = BITS_PER_UNIT;
1984 while (shift < HOST_BITS_PER_WIDE_INT)
1986 c |= (c << shift);
1987 shift <<= 1;
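/* E.g. (assuming BITS_PER_UNIT == 8) a memset value of 0xab is
   widened by the loop above to 0xabab, then 0xabababab, and so on
   until the pattern fills a HOST_WIDE_INT. */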
1989 read_reg = gen_int_mode (c, store_mode);
1990 read_reg = extract_low_bits (read_mode, store_mode, read_reg);
1993 else if (store_info->const_rhs
1994 && (require_cst
1995 || GET_MODE_CLASS (read_mode) != GET_MODE_CLASS (store_mode)))
1996 read_reg = extract_low_bits (read_mode, store_mode,
1997 copy_rtx (store_info->const_rhs));
1998 else
1999 read_reg = extract_low_bits (read_mode, store_mode,
2000 copy_rtx (store_info->rhs));
2001 if (require_cst && read_reg && !CONSTANT_P (read_reg))
2002 read_reg = NULL_RTX;
2003 return read_reg;
2006 /* Take a sequence of:
2007 A <- r1
2009 ... <- A
2011 and change it into
2012 r2 <- r1
2013 A <- r1
2015 ... <- r2
2017 or
2019 r3 <- extract (r1)
2020 r3 <- r3 >> shift
2021 r2 <- extract (r3)
2022 ... <- r2
2024 or
2026 r2 <- extract (r1)
2027 ... <- r2
2029 depending on the alignment and the mode of the store and
2030 subsequent load.
2033 The STORE_INFO and STORE_INSN are for the store and READ_INFO
2034 and READ_INSN are for the read. Return true if the replacement
2035 went ok. */
2037 static bool
2038 replace_read (store_info_t store_info, insn_info_t store_insn,
2039 read_info_t read_info, insn_info_t read_insn, rtx *loc,
2040 bitmap regs_live)
2042 machine_mode store_mode = GET_MODE (store_info->mem);
2043 machine_mode read_mode = GET_MODE (read_info->mem);
2044 rtx_insn *insns, *this_insn;
2045 rtx read_reg;
2046 basic_block bb;
2048 if (!dbg_cnt (dse))
2049 return false;
2051 /* Create a sequence of instructions to set up the read register.
2052 This sequence goes immediately before the store and its result
2053 is read by the load.
2055 We need to keep this in perspective. We are replacing a read
2056 with a sequence of insns, but the read will almost certainly be
2057 in cache, so it is not going to be an expensive one. Thus, we
2058 are not willing to do a multi-insn shift or, worse, a subroutine
2059 call to get rid of the read. */
2060 if (dump_file && (dump_flags & TDF_DETAILS))
2061 fprintf (dump_file, "trying to replace %smode load in insn %d"
2062 " from %smode store in insn %d\n",
2063 GET_MODE_NAME (read_mode), INSN_UID (read_insn->insn),
2064 GET_MODE_NAME (store_mode), INSN_UID (store_insn->insn));
2065 start_sequence ();
2066 bb = BLOCK_FOR_INSN (read_insn->insn);
2067 read_reg = get_stored_val (store_info,
2068 read_mode, read_info->begin, read_info->end,
2069 bb, false);
2070 if (read_reg == NULL_RTX)
2072 end_sequence ();
2073 if (dump_file && (dump_flags & TDF_DETAILS))
2074 fprintf (dump_file, " -- could not extract bits of stored value\n");
2075 return false;
2077 /* Force the value into a new register so that it won't be clobbered
2078 between the store and the load. */
2079 read_reg = copy_to_mode_reg (read_mode, read_reg);
2080 insns = get_insns ();
2081 end_sequence ();
2083 if (insns != NULL_RTX)
2085 /* Now we have to scan the set of new instructions to see if the
2086 sequence sets any hardregs that happened to be
2087 live at this point. For instance, this can happen if one of
2088 the insns sets the CC and the CC happened to be live at that
2089 point. This does occasionally happen, see PR 37922. */
2090 bitmap regs_set = BITMAP_ALLOC (&reg_obstack);
2092 for (this_insn = insns; this_insn != NULL_RTX; this_insn = NEXT_INSN (this_insn))
2093 note_stores (PATTERN (this_insn), look_for_hardregs, regs_set);
2095 bitmap_and_into (regs_set, regs_live);
2096 if (!bitmap_empty_p (regs_set))
2098 if (dump_file && (dump_flags & TDF_DETAILS))
2100 fprintf (dump_file,
2101 "abandoning replacement because sequence clobbers live hardregs:");
2102 df_print_regset (dump_file, regs_set);
2105 BITMAP_FREE (regs_set);
2106 return false;
2108 BITMAP_FREE (regs_set);
2111 if (validate_change (read_insn->insn, loc, read_reg, 0))
2113 deferred_change_t change = new deferred_change;
2115 /* Insert this right before the store insn where it will be safe
2116 from later insns that might change it before the read. */
2117 emit_insn_before (insns, store_insn->insn);
2119 /* And now for the kludge part: cselib croaks if you just
2120 return at this point. There are two reasons for this:
2122 1) Cselib has an idea of how many pseudos there are and
2123 that does not include the new ones we just added.
2125 2) Cselib does not know about the move insn we added
2126 above the store_info, and there is no way to tell it
2127 about it, because it has "moved on".
2129 Problem (1) is fixable with a certain amount of engineering.
2130 Problem (2) requires starting the bb from scratch. This
2131 could be expensive.
2133 So we are just going to have to lie. The move/extraction
2134 insns are not really an issue, cselib did not see them. But
2135 the use of the new pseudo in read_insn is a real problem because
2136 cselib has not scanned this insn. The way that we solve this
2137 problem is that we are just going to put the mem back for now
2138 and when we are finished with the block, we undo this. We
2139 keep a table of mems to get rid of. At the end of the basic
2140 block we can put them back. */
2142 *loc = read_info->mem;
2143 change->next = deferred_change_list;
2144 deferred_change_list = change;
2145 change->loc = loc;
2146 change->reg = read_reg;
2148 /* Get rid of the read_info, from the point of view of the
2149 rest of dse, pretend this read never happened. */
2150 read_insn->read_rec = read_info->next;
2151 delete read_info;
2152 if (dump_file && (dump_flags & TDF_DETAILS))
2154 fprintf (dump_file, " -- replaced the loaded MEM with ");
2155 print_simple_rtl (dump_file, read_reg);
2156 fprintf (dump_file, "\n");
2158 return true;
2160 else
2162 if (dump_file && (dump_flags & TDF_DETAILS))
2164 fprintf (dump_file, " -- replacing the loaded MEM with ");
2165 print_simple_rtl (dump_file, read_reg);
2166 fprintf (dump_file, " led to an invalid instruction\n");
2168 return false;
2172 /* Check the address of MEM *LOC and kill any appropriate stores that may
2173 be active. */
2175 static void
2176 check_mem_read_rtx (rtx *loc, bb_info_t bb_info)
2178 rtx mem = *loc, mem_addr;
2179 insn_info_t insn_info;
2180 HOST_WIDE_INT offset = 0;
2181 HOST_WIDE_INT width = 0;
2182 alias_set_type spill_alias_set = 0;
2183 cselib_val *base = NULL;
2184 int group_id;
2185 read_info_t read_info;
2187 insn_info = bb_info->last_insn;
2189 if ((MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
2190 || (MEM_VOLATILE_P (mem)))
2192 if (dump_file && (dump_flags & TDF_DETAILS))
2193 fprintf (dump_file, " adding wild read, volatile or barrier.\n");
2194 add_wild_read (bb_info);
2195 insn_info->cannot_delete = true;
2196 return;
2199 /* If it is reading readonly mem, then there can be no conflict with
2200 another write. */
2201 if (MEM_READONLY_P (mem))
2202 return;
2204 if (!canon_address (mem, &spill_alias_set, &group_id, &offset, &base))
2206 if (dump_file && (dump_flags & TDF_DETAILS))
2207 fprintf (dump_file, " adding wild read, canon_address failure.\n");
2208 add_wild_read (bb_info);
2209 return;
2212 if (GET_MODE (mem) == BLKmode)
2213 width = -1;
2214 else
2215 width = GET_MODE_SIZE (GET_MODE (mem));
2217 read_info = new read_info_type;
2218 read_info->group_id = group_id;
2219 read_info->mem = mem;
2220 read_info->alias_set = spill_alias_set;
2221 read_info->begin = offset;
2222 read_info->end = offset + width;
2223 read_info->next = insn_info->read_rec;
2224 insn_info->read_rec = read_info;
2225 /* For alias_set != 0 canon_true_dependence should never be called. */
2226 if (spill_alias_set)
2227 mem_addr = NULL_RTX;
2228 else
2230 if (group_id < 0)
2231 mem_addr = base->val_rtx;
2232 else
2234 group_info_t group
2235 = rtx_group_vec[group_id];
2236 mem_addr = group->canon_base_addr;
2238 /* get_addr can only handle VALUE, not an expression such as
2239 VALUE + OFFSET, so call get_addr on mem_addr to get the
2240 original address before applying plus_constant. */
2241 mem_addr = get_addr (mem_addr);
2242 if (offset)
2243 mem_addr = plus_constant (get_address_mode (mem), mem_addr, offset);
2246 /* We ignore the clobbers in store_info. This is mildly aggressive,
2247 but there really should not be a clobber followed by a read. */
2249 if (spill_alias_set)
2251 insn_info_t i_ptr = active_local_stores;
2252 insn_info_t last = NULL;
2254 if (dump_file && (dump_flags & TDF_DETAILS))
2255 fprintf (dump_file, " processing spill load %d\n",
2256 (int) spill_alias_set);
2258 while (i_ptr)
2260 store_info_t store_info = i_ptr->store_rec;
2262 /* Skip the clobbers. */
2263 while (!store_info->is_set)
2264 store_info = store_info->next;
2266 if (store_info->alias_set == spill_alias_set)
2268 if (dump_file && (dump_flags & TDF_DETAILS))
2269 dump_insn_info ("removing from active", i_ptr);
2271 active_local_stores_len--;
2272 if (last)
2273 last->next_local_store = i_ptr->next_local_store;
2274 else
2275 active_local_stores = i_ptr->next_local_store;
2277 else
2278 last = i_ptr;
2279 i_ptr = i_ptr->next_local_store;
2282 else if (group_id >= 0)
2284 /* This is the restricted case where the base is a constant or
2285 the frame pointer and offset is a constant. */
2286 insn_info_t i_ptr = active_local_stores;
2287 insn_info_t last = NULL;
2289 if (dump_file && (dump_flags & TDF_DETAILS))
2291 if (width == -1)
2292 fprintf (dump_file, " processing const load gid=%d[BLK]\n",
2293 group_id);
2294 else
2295 fprintf (dump_file, " processing const load gid=%d[%d..%d)\n",
2296 group_id, (int)offset, (int)(offset+width));
2299 while (i_ptr)
2301 bool remove = false;
2302 store_info_t store_info = i_ptr->store_rec;
2304 /* Skip the clobbers. */
2305 while (!store_info->is_set)
2306 store_info = store_info->next;
2308 /* There are three cases here. */
2309 if (store_info->group_id < 0)
2310 /* We have a cselib store followed by a read from a
2311 const base. */
2312 remove
2313 = canon_true_dependence (store_info->mem,
2314 GET_MODE (store_info->mem),
2315 store_info->mem_addr,
2316 mem, mem_addr);
2318 else if (group_id == store_info->group_id)
2320 /* This is a block mode load. We may get lucky and
2321 canon_true_dependence may save the day. */
2322 if (width == -1)
2323 remove
2324 = canon_true_dependence (store_info->mem,
2325 GET_MODE (store_info->mem),
2326 store_info->mem_addr,
2327 mem, mem_addr);
2329 /* If this read is just reading back something that we just
2330 stored, rewrite the read. */
2331 else
2333 if (store_info->rhs
2334 && offset >= store_info->begin
2335 && offset + width <= store_info->end
2336 && all_positions_needed_p (store_info,
2337 offset - store_info->begin,
2338 width)
2339 && replace_read (store_info, i_ptr, read_info,
2340 insn_info, loc, bb_info->regs_live))
2341 return;
2343 /* The bases are the same, just see if the offsets
2344 overlap. */
2345 if ((offset < store_info->end)
2346 && (offset + width > store_info->begin))
2347 remove = true;
2351 /* else
2352 The else case that is missing here is that the
2353 bases are constant but different. There is nothing
2354 to do here because there is no overlap. */
2356 if (remove)
2358 if (dump_file && (dump_flags & TDF_DETAILS))
2359 dump_insn_info ("removing from active", i_ptr);
2361 active_local_stores_len--;
2362 if (last)
2363 last->next_local_store = i_ptr->next_local_store;
2364 else
2365 active_local_stores = i_ptr->next_local_store;
2367 else
2368 last = i_ptr;
2369 i_ptr = i_ptr->next_local_store;
2372 else
2374 insn_info_t i_ptr = active_local_stores;
2375 insn_info_t last = NULL;
2376 if (dump_file && (dump_flags & TDF_DETAILS))
2378 fprintf (dump_file, " processing cselib load mem:");
2379 print_inline_rtx (dump_file, mem, 0);
2380 fprintf (dump_file, "\n");
2383 while (i_ptr)
2385 bool remove = false;
2386 store_info_t store_info = i_ptr->store_rec;
2388 if (dump_file && (dump_flags & TDF_DETAILS))
2389 fprintf (dump_file, " processing cselib load against insn %d\n",
2390 INSN_UID (i_ptr->insn));
2392 /* Skip the clobbers. */
2393 while (!store_info->is_set)
2394 store_info = store_info->next;
2396 /* If this read is just reading back something that we just
2397 stored, rewrite the read. */
2398 if (store_info->rhs
2399 && store_info->group_id == -1
2400 && store_info->cse_base == base
2401 && width != -1
2402 && offset >= store_info->begin
2403 && offset + width <= store_info->end
2404 && all_positions_needed_p (store_info,
2405 offset - store_info->begin, width)
2406 && replace_read (store_info, i_ptr, read_info, insn_info, loc,
2407 bb_info->regs_live))
2408 return;
2410 if (!store_info->alias_set)
2411 remove = canon_true_dependence (store_info->mem,
2412 GET_MODE (store_info->mem),
2413 store_info->mem_addr,
2414 mem, mem_addr);
2416 if (remove)
2418 if (dump_file && (dump_flags & TDF_DETAILS))
2419 dump_insn_info ("removing from active", i_ptr);
2421 active_local_stores_len--;
2422 if (last)
2423 last->next_local_store = i_ptr->next_local_store;
2424 else
2425 active_local_stores = i_ptr->next_local_store;
2427 else
2428 last = i_ptr;
2429 i_ptr = i_ptr->next_local_store;
2434 /* A note_uses callback in which DATA points to the BB_INFO for
2435 check_mem_read_rtx. Call check_mem_read_rtx on every MEM found
2436 in any part of *LOC. */
2438 static void
2439 check_mem_read_use (rtx *loc, void *data)
2441 subrtx_ptr_iterator::array_type array;
2442 FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
2444 rtx *loc = *iter;
2445 if (MEM_P (*loc))
2446 check_mem_read_rtx (loc, (bb_info_t) data);
2451 /* Get arguments passed to CALL_INSN. Return TRUE if successful.
2452 So far it only handles arguments passed in registers. */
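/* (In the memset case below, NARGS is 3 and ARGS receives the
   destination pointer, the fill value and the length, in that
   order.) */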
2454 static bool
2455 get_call_args (rtx call_insn, tree fn, rtx *args, int nargs)
2457 CUMULATIVE_ARGS args_so_far_v;
2458 cumulative_args_t args_so_far;
2459 tree arg;
2460 int idx;
2462 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
2463 args_so_far = pack_cumulative_args (&args_so_far_v);
2465 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
2466 for (idx = 0;
2467 arg != void_list_node && idx < nargs;
2468 arg = TREE_CHAIN (arg), idx++)
2470 machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
2471 rtx reg, link, tmp;
2472 reg = targetm.calls.function_arg (args_so_far, mode, NULL_TREE, true);
2473 if (!reg || !REG_P (reg) || GET_MODE (reg) != mode
2474 || GET_MODE_CLASS (mode) != MODE_INT)
2475 return false;
2477 for (link = CALL_INSN_FUNCTION_USAGE (call_insn);
2478 link;
2479 link = XEXP (link, 1))
2480 if (GET_CODE (XEXP (link, 0)) == USE)
2482 args[idx] = XEXP (XEXP (link, 0), 0);
2483 if (REG_P (args[idx])
2484 && REGNO (args[idx]) == REGNO (reg)
2485 && (GET_MODE (args[idx]) == mode
2486 || (GET_MODE_CLASS (GET_MODE (args[idx])) == MODE_INT
2487 && (GET_MODE_SIZE (GET_MODE (args[idx]))
2488 <= UNITS_PER_WORD)
2489 && (GET_MODE_SIZE (GET_MODE (args[idx]))
2490 > GET_MODE_SIZE (mode)))))
2491 break;
2493 if (!link)
2494 return false;
2496 tmp = cselib_expand_value_rtx (args[idx], scratch, 5);
2497 if (GET_MODE (args[idx]) != mode)
2499 if (!tmp || !CONST_INT_P (tmp))
2500 return false;
2501 tmp = gen_int_mode (INTVAL (tmp), mode);
2503 if (tmp)
2504 args[idx] = tmp;
2506 targetm.calls.function_arg_advance (args_so_far, mode, NULL_TREE, true);
2508 if (arg != void_list_node || idx != nargs)
2509 return false;
2510 return true;
2513 /* Return a bitmap of the fixed registers contained in IN. */
2515 static bitmap
2516 copy_fixed_regs (const_bitmap in)
2518 bitmap ret;
2520 ret = ALLOC_REG_SET (NULL);
2521 bitmap_and (ret, in, fixed_reg_set_regset);
2522 return ret;
2525 /* Apply record_store to all candidate stores in INSN. Mark INSN
2526 as not deletable if some part of it is not a candidate store
2527 and assigns to a non-register target. */
2529 static void
2530 scan_insn (bb_info_t bb_info, rtx_insn *insn)
2532 rtx body;
2533 insn_info_type *insn_info = new insn_info_type;
2534 int mems_found = 0;
2535 memset (insn_info, 0, sizeof (struct insn_info_type));
2537 if (dump_file && (dump_flags & TDF_DETAILS))
2538 fprintf (dump_file, "\n**scanning insn=%d\n",
2539 INSN_UID (insn));
2541 insn_info->prev_insn = bb_info->last_insn;
2542 insn_info->insn = insn;
2543 bb_info->last_insn = insn_info;
2545 if (DEBUG_INSN_P (insn))
2547 insn_info->cannot_delete = true;
2548 return;
2551 /* Look at all of the uses in the insn. */
2552 note_uses (&PATTERN (insn), check_mem_read_use, bb_info);
2554 if (CALL_P (insn))
2556 bool const_call;
2557 tree memset_call = NULL_TREE;
2559 insn_info->cannot_delete = true;
2561 /* Const functions cannot do anything bad, i.e. read memory;
2562 however, they can read their parameters, which may have
2563 been pushed onto the stack.
2564 memset and bzero don't read memory either. */
2565 const_call = RTL_CONST_CALL_P (insn);
2566 if (!const_call)
2568 rtx call = get_call_rtx_from (insn);
2569 if (call && GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
2571 rtx symbol = XEXP (XEXP (call, 0), 0);
2572 if (SYMBOL_REF_DECL (symbol)
2573 && TREE_CODE (SYMBOL_REF_DECL (symbol)) == FUNCTION_DECL)
2575 if ((DECL_BUILT_IN_CLASS (SYMBOL_REF_DECL (symbol))
2576 == BUILT_IN_NORMAL
2577 && (DECL_FUNCTION_CODE (SYMBOL_REF_DECL (symbol))
2578 == BUILT_IN_MEMSET))
2579 || SYMBOL_REF_DECL (symbol) == block_clear_fn)
2580 memset_call = SYMBOL_REF_DECL (symbol);
2584 if (const_call || memset_call)
2586 insn_info_t i_ptr = active_local_stores;
2587 insn_info_t last = NULL;
2589 if (dump_file && (dump_flags & TDF_DETAILS))
2590 fprintf (dump_file, "%s call %d\n",
2591 const_call ? "const" : "memset", INSN_UID (insn));
2593 /* See the head comment of the frame_read field. */
2594 if (reload_completed
2595 /* Tail calls store their arguments using
2596 the arg pointer. If that is a frame pointer on the target,
2597 even before reload we need to kill frame pointer based
2598 stores. */
2599 || (SIBLING_CALL_P (insn)
2600 && HARD_FRAME_POINTER_IS_ARG_POINTER))
2601 insn_info->frame_read = true;
2603 /* Loop over the active stores and remove those which are
2604 killed by the const function call. */
2605 while (i_ptr)
2607 bool remove_store = false;
2609 /* The stack pointer based stores are always killed. */
2610 if (i_ptr->stack_pointer_based)
2611 remove_store = true;
2613 /* If the frame is read, the frame related stores are killed. */
2614 else if (insn_info->frame_read)
2616 store_info_t store_info = i_ptr->store_rec;
2618 /* Skip the clobbers. */
2619 while (!store_info->is_set)
2620 store_info = store_info->next;
2622 if (store_info->group_id >= 0
2623 && rtx_group_vec[store_info->group_id]->frame_related)
2624 remove_store = true;
2627 if (remove_store)
2629 if (dump_file && (dump_flags & TDF_DETAILS))
2630 dump_insn_info ("removing from active", i_ptr);
2632 active_local_stores_len--;
2633 if (last)
2634 last->next_local_store = i_ptr->next_local_store;
2635 else
2636 active_local_stores = i_ptr->next_local_store;
2638 else
2639 last = i_ptr;
2641 i_ptr = i_ptr->next_local_store;
2644 if (memset_call)
2646 rtx args[3];
2647 if (get_call_args (insn, memset_call, args, 3)
2648 && CONST_INT_P (args[1])
2649 && CONST_INT_P (args[2])
2650 && INTVAL (args[2]) > 0)
2652 rtx mem = gen_rtx_MEM (BLKmode, args[0]);
2653 set_mem_size (mem, INTVAL (args[2]));
2654 body = gen_rtx_SET (mem, args[1]);
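/* I.e. memset (p, c, n) is modeled here as the BLKmode store
   (set (mem:BLK p [n bytes]) (const_int c)) so that record_store
   can treat it like any other store. */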
2655 mems_found += record_store (body, bb_info);
2656 if (dump_file && (dump_flags & TDF_DETAILS))
2657 fprintf (dump_file, "handling memset as BLKmode store\n");
2658 if (mems_found == 1)
2660 if (active_local_stores_len++
2661 >= PARAM_VALUE (PARAM_MAX_DSE_ACTIVE_LOCAL_STORES))
2663 active_local_stores_len = 1;
2664 active_local_stores = NULL;
2666 insn_info->fixed_regs_live
2667 = copy_fixed_regs (bb_info->regs_live);
2668 insn_info->next_local_store = active_local_stores;
2669 active_local_stores = insn_info;
2674 else if (SIBLING_CALL_P (insn) && reload_completed)
2675 /* Arguments for a sibling call that are pushed to memory are passed
2676 using the incoming argument pointer of the current function. After
2677 reload that might be (and likely is) frame pointer based. */
2678 add_wild_read (bb_info);
2679 else
2680 /* Every other call, including pure functions, may read any memory
2681 that is not relative to the frame. */
2682 add_non_frame_wild_read (bb_info);
2684 return;
2687 /* Assuming that there are sets in these insns, we cannot delete
2688 them. */
2689 if ((GET_CODE (PATTERN (insn)) == CLOBBER)
2690 || volatile_refs_p (PATTERN (insn))
2691 || (!cfun->can_delete_dead_exceptions && !insn_nothrow_p (insn))
2692 || (RTX_FRAME_RELATED_P (insn))
2693 || find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX))
2694 insn_info->cannot_delete = true;
2696 body = PATTERN (insn);
2697 if (GET_CODE (body) == PARALLEL)
2699 int i;
2700 for (i = 0; i < XVECLEN (body, 0); i++)
2701 mems_found += record_store (XVECEXP (body, 0, i), bb_info);
2703 else
2704 mems_found += record_store (body, bb_info);
2706 if (dump_file && (dump_flags & TDF_DETAILS))
2707 fprintf (dump_file, "mems_found = %d, cannot_delete = %s\n",
2708 mems_found, insn_info->cannot_delete ? "true" : "false");
2710 /* If we found exactly one set of a mem, add the insn to active_local_stores
2711 so that it can be locally deleted if found dead, or used for
2712 replace_read and redundant constant store elimination. Otherwise mark
2713 it as cannot delete. This simplifies the processing later. */
2714 if (mems_found == 1)
2716 if (active_local_stores_len++
2717 >= PARAM_VALUE (PARAM_MAX_DSE_ACTIVE_LOCAL_STORES))
2719 active_local_stores_len = 1;
2720 active_local_stores = NULL;
2722 insn_info->fixed_regs_live = copy_fixed_regs (bb_info->regs_live);
2723 insn_info->next_local_store = active_local_stores;
2724 active_local_stores = insn_info;
2726 else
2727 insn_info->cannot_delete = true;
2731 /* Remove BASE from the set of active_local_stores. This is a
2732 callback from cselib that is used to get rid of the stores in
2733 active_local_stores. */
2735 static void
2736 remove_useless_values (cselib_val *base)
2738 insn_info_t insn_info = active_local_stores;
2739 insn_info_t last = NULL;
2741 while (insn_info)
2743 store_info_t store_info = insn_info->store_rec;
2744 bool del = false;
2746 /* If ANY of the store_infos match the cselib group that is
2747 being deleted, then the insn cannot be deleted. */
2748 while (store_info)
2750 if ((store_info->group_id == -1)
2751 && (store_info->cse_base == base))
2753 del = true;
2754 break;
2756 store_info = store_info->next;
2759 if (del)
2761 active_local_stores_len--;
2762 if (last)
2763 last->next_local_store = insn_info->next_local_store;
2764 else
2765 active_local_stores = insn_info->next_local_store;
2766 free_store_info (insn_info);
2768 else
2769 last = insn_info;
2771 insn_info = insn_info->next_local_store;
2776 /* Do all of step 1. */
2778 static void
2779 dse_step1 (void)
2781 basic_block bb;
2782 bitmap regs_live = BITMAP_ALLOC (&reg_obstack);
2784 cselib_init (0);
2785 all_blocks = BITMAP_ALLOC (NULL);
2786 bitmap_set_bit (all_blocks, ENTRY_BLOCK);
2787 bitmap_set_bit (all_blocks, EXIT_BLOCK);
2789 FOR_ALL_BB_FN (bb, cfun)
2791 insn_info_t ptr;
2792 bb_info_t bb_info = new dse_bb_info_type;
2794 memset (bb_info, 0, sizeof (dse_bb_info_type));
2795 bitmap_set_bit (all_blocks, bb->index);
2796 bb_info->regs_live = regs_live;
2798 bitmap_copy (regs_live, DF_LR_IN (bb));
2799 df_simulate_initialize_forwards (bb, regs_live);
2801 bb_table[bb->index] = bb_info;
2802 cselib_discard_hook = remove_useless_values;
2804 if (bb->index >= NUM_FIXED_BLOCKS)
2806 rtx_insn *insn;
2808 active_local_stores = NULL;
2809 active_local_stores_len = 0;
2810 cselib_clear_table ();
2812 /* Scan the insns. */
2813 FOR_BB_INSNS (bb, insn)
2815 if (INSN_P (insn))
2816 scan_insn (bb_info, insn);
2817 cselib_process_insn (insn);
2818 if (INSN_P (insn))
2819 df_simulate_one_insn_forwards (bb, insn, regs_live);
2822 /* This is something of a hack, because the global algorithm
2823 is supposed to take care of the case where stores go dead
2824 at the end of the function. However, the global
2825 algorithm must take a more conservative view of block
2826 mode reads than the local alg does. So to get the case
2827 where you have a store to the frame followed by a
2828 non-overlapping block mode read, we look at the active local
2829 stores at the end of the function and delete all of the
2830 frame and spill based ones. */
2831 if (stores_off_frame_dead_at_return
2832 && (EDGE_COUNT (bb->succs) == 0
2833 || (single_succ_p (bb)
2834 && single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun)
2835 && ! crtl->calls_eh_return)))
2837 insn_info_t i_ptr = active_local_stores;
2838 while (i_ptr)
2840 store_info_t store_info = i_ptr->store_rec;
2842 /* Skip the clobbers. */
2843 while (!store_info->is_set)
2844 store_info = store_info->next;
2845 if (store_info->alias_set && !i_ptr->cannot_delete)
2846 delete_dead_store_insn (i_ptr);
2847 else
2848 if (store_info->group_id >= 0)
2850 group_info_t group
2851 = rtx_group_vec[store_info->group_id];
2852 if (group->frame_related && !i_ptr->cannot_delete)
2853 delete_dead_store_insn (i_ptr);
2856 i_ptr = i_ptr->next_local_store;
2860 /* Get rid of the loads that were discovered in
2861 replace_read. Cselib is finished with this block. */
2862 while (deferred_change_list)
2864 deferred_change_t next = deferred_change_list->next;
2866 /* There is no reason to validate this change. That was
2867 done earlier. */
2868 *deferred_change_list->loc = deferred_change_list->reg;
2869 delete deferred_change_list;
2870 deferred_change_list = next;
2873 /* Get rid of all of the cselib based store_infos in this
2874 block and mark the containing insns as not being
2875 deletable. */
2876 ptr = bb_info->last_insn;
2877 while (ptr)
2879 if (ptr->contains_cselib_groups)
2881 store_info_t s_info = ptr->store_rec;
2882 while (s_info && !s_info->is_set)
2883 s_info = s_info->next;
2884 if (s_info
2885 && s_info->redundant_reason
2886 && s_info->redundant_reason->insn
2887 && !ptr->cannot_delete)
2889 if (dump_file && (dump_flags & TDF_DETAILS))
2890 fprintf (dump_file, "Locally deleting insn %d "
2891 "because insn %d stores the "
2892 "same value and couldn't be "
2893 "eliminated\n",
2894 INSN_UID (ptr->insn),
2895 INSN_UID (s_info->redundant_reason->insn));
2896 delete_dead_store_insn (ptr);
2898 free_store_info (ptr);
2900 else
2902 store_info_t s_info;
2904 /* Free at least the positions_needed bitmaps. */
2905 for (s_info = ptr->store_rec; s_info; s_info = s_info->next)
2906 if (s_info->is_large)
2908 BITMAP_FREE (s_info->positions_needed.large.bmap);
2909 s_info->is_large = false;
2912 ptr = ptr->prev_insn;
2915 cse_store_info_pool.release ();
2917 bb_info->regs_live = NULL;
2920 BITMAP_FREE (regs_live);
2921 cselib_finish ();
2922 rtx_group_table->empty ();
2926 /*----------------------------------------------------------------------------
2927 Second step.
2929 Assign each byte position in the stores that we are going to
2930 analyze globally to a position in the bitmaps. Returns true if
2931 there are any bit positions assigned.
2932 ----------------------------------------------------------------------------*/
2934 static void
2935 dse_step2_init (void)
2937 unsigned int i;
2938 group_info_t group;
2940 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
2942 /* For all non-stack-related bases, we only consider a store to
2943 be deletable if there are two or more stores for that
2944 position. This is because it takes one store to make the
2945 other store redundant. However, for the stores that are
2946 stack related, we consider them if there is only one store
2947 for the position. We do this because the stack related
2948 stores can be deleted if there is no read between them and
2949 the end of the function.
2951 To make this work in the current framework, we take the stack
2952 related bases and add all of the bits from store1 into store2.
2953 This has the effect of making them eligible even if there is
2954 only one store. */
2956 if (stores_off_frame_dead_at_return && group->frame_related)
2958 bitmap_ior_into (group->store2_n, group->store1_n);
2959 bitmap_ior_into (group->store2_p, group->store1_p);
2960 if (dump_file && (dump_flags & TDF_DETAILS))
2961 fprintf (dump_file, "group %d is frame related ", i);
2964 group->offset_map_size_n++;
2965 group->offset_map_n = XOBNEWVEC (&dse_obstack, int,
2966 group->offset_map_size_n);
2967 group->offset_map_size_p++;
2968 group->offset_map_p = XOBNEWVEC (&dse_obstack, int,
2969 group->offset_map_size_p);
2970 group->process_globally = false;
2971 if (dump_file && (dump_flags & TDF_DETAILS))
2973 fprintf (dump_file, "group %d(%d+%d): ", i,
2974 (int)bitmap_count_bits (group->store2_n),
2975 (int)bitmap_count_bits (group->store2_p));
2976 bitmap_print (dump_file, group->store2_n, "n ", " ");
2977 bitmap_print (dump_file, group->store2_p, "p ", "\n");
2983 /* Init the offset tables for the normal case. */
2985 static bool
2986 dse_step2_nospill (void)
2988 unsigned int i;
2989 group_info_t group;
2990 /* Position 0 is never assigned: in the offset maps, 0 means
2991 that no position was assigned. */
2992 current_position = 1;
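/* E.g. (a hypothetical group) with store2_n bit 4 and store2_p bits
   0 and 8 set, the loop below assigns offset_map_n[4] = 1,
   offset_map_p[0] = 2 and offset_map_p[8] = 3, with 0 reserved to
   mean "no position". */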
2993 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
2995 bitmap_iterator bi;
2996 unsigned int j;
2998 if (group == clear_alias_group)
2999 continue;
3001 memset (group->offset_map_n, 0, sizeof (int) * group->offset_map_size_n);
3002 memset (group->offset_map_p, 0, sizeof (int) * group->offset_map_size_p);
3003 bitmap_clear (group->group_kill);
3005 EXECUTE_IF_SET_IN_BITMAP (group->store2_n, 0, j, bi)
3007 bitmap_set_bit (group->group_kill, current_position);
3008 if (bitmap_bit_p (group->escaped_n, j))
3009 bitmap_set_bit (kill_on_calls, current_position);
3010 group->offset_map_n[j] = current_position++;
3011 group->process_globally = true;
3013 EXECUTE_IF_SET_IN_BITMAP (group->store2_p, 0, j, bi)
3015 bitmap_set_bit (group->group_kill, current_position);
3016 if (bitmap_bit_p (group->escaped_p, j))
3017 bitmap_set_bit (kill_on_calls, current_position);
3018 group->offset_map_p[j] = current_position++;
3019 group->process_globally = true;
3022 return current_position != 1;
3027 /*----------------------------------------------------------------------------
3028 Third step.
3030 Build the bit vectors for the transfer functions.
3031 ----------------------------------------------------------------------------*/
3034 /* Look up the bitmap index for OFFSET in GROUP_INFO. If it is not
3035 there, return 0. */
3037 static int
3038 get_bitmap_index (group_info_t group_info, HOST_WIDE_INT offset)
3040 if (offset < 0)
3042 HOST_WIDE_INT offset_p = -offset;
3043 if (offset_p >= group_info->offset_map_size_n)
3044 return 0;
3045 return group_info->offset_map_n[offset_p];
3047 else
3049 if (offset >= group_info->offset_map_size_p)
3050 return 0;
3051 return group_info->offset_map_p[offset];
3056 /* Process the STORE_INFOs into the bitmaps GEN and KILL. KILL
3057 may be NULL. */
3059 static void
3060 scan_stores_nospill (store_info_t store_info, bitmap gen, bitmap kill)
3062 while (store_info)
3064 HOST_WIDE_INT i;
3065 group_info_t group_info
3066 = rtx_group_vec[store_info->group_id];
3067 if (group_info->process_globally)
3068 for (i = store_info->begin; i < store_info->end; i++)
3070 int index = get_bitmap_index (group_info, i);
3071 if (index != 0)
3073 bitmap_set_bit (gen, index);
3074 if (kill)
3075 bitmap_clear_bit (kill, index);
3078 store_info = store_info->next;
3083 /* Process the STORE_INFOs into the bitmaps GEN and KILL. KILL
3084 may be NULL. */
3086 static void
3087 scan_stores_spill (store_info_t store_info, bitmap gen, bitmap kill)
3089 while (store_info)
3091 if (store_info->alias_set)
3093 int index = get_bitmap_index (clear_alias_group,
3094 store_info->alias_set);
3095 if (index != 0)
3097 bitmap_set_bit (gen, index);
3098 if (kill)
3099 bitmap_clear_bit (kill, index);
3102 store_info = store_info->next;
3107 /* Process the READ_INFOs into the bitmaps GEN and KILL. KILL
3108 may be NULL. */
3110 static void
3111 scan_reads_nospill (insn_info_t insn_info, bitmap gen, bitmap kill)
3113 read_info_t read_info = insn_info->read_rec;
3114 int i;
3115 group_info_t group;
3117 /* If this insn reads the frame, kill all the frame related stores. */
3118 if (insn_info->frame_read)
3120 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
3121 if (group->process_globally && group->frame_related)
3123 if (kill)
3124 bitmap_ior_into (kill, group->group_kill);
3125 bitmap_and_compl_into (gen, group->group_kill);
3128 if (insn_info->non_frame_wild_read)
3130 /* Kill all non-frame related stores. Kill all stores of variables that
3131 escape. */
3132 if (kill)
3133 bitmap_ior_into (kill, kill_on_calls);
3134 bitmap_and_compl_into (gen, kill_on_calls);
3135 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
3136 if (group->process_globally && !group->frame_related)
3138 if (kill)
3139 bitmap_ior_into (kill, group->group_kill);
3140 bitmap_and_compl_into (gen, group->group_kill);
3143 while (read_info)
3145 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
3147 if (group->process_globally)
3149 if (i == read_info->group_id)
3151 if (read_info->begin > read_info->end)
3153 /* Begin > end for block mode reads. */
3154 if (kill)
3155 bitmap_ior_into (kill, group->group_kill);
3156 bitmap_and_compl_into (gen, group->group_kill);
3158 else
3160 /* The groups are the same, just process the
3161 offsets. */
3162 HOST_WIDE_INT j;
3163 for (j = read_info->begin; j < read_info->end; j++)
3165 int index = get_bitmap_index (group, j);
3166 if (index != 0)
3168 if (kill)
3169 bitmap_set_bit (kill, index);
3170 bitmap_clear_bit (gen, index);
3175 else
3177 /* The groups are different; if the alias sets
3178 conflict, clear the entire group. We only need
3179 to apply this test if the read_info is a cselib
3180 read. Anything with a constant base cannot alias
3181 something else with a different constant
3182 base. */
3183 if ((read_info->group_id < 0)
3184 && canon_true_dependence (group->base_mem,
3185 GET_MODE (group->base_mem),
3186 group->canon_base_addr,
3187 read_info->mem, NULL_RTX))
3189 if (kill)
3190 bitmap_ior_into (kill, group->group_kill);
3191 bitmap_and_compl_into (gen, group->group_kill);
3197 read_info = read_info->next;
3201 /* Process the READ_INFOs into the bitmaps GEN and KILL. KILL
3202 may be NULL. */
3204 static void
3205 scan_reads_spill (read_info_t read_info, bitmap gen, bitmap kill)
3207 while (read_info)
3209 if (read_info->alias_set)
3211 int index = get_bitmap_index (clear_alias_group,
3212 read_info->alias_set);
3213 if (index != 0)
3215 if (kill)
3216 bitmap_set_bit (kill, index);
3217 bitmap_clear_bit (gen, index);
3221 read_info = read_info->next;
3226 /* Return the insn in BB_INFO before the first wild read or, if there
3227 are no wild reads in the block, the last insn. */
3229 static insn_info_t
3230 find_insn_before_first_wild_read (bb_info_t bb_info)
3232 insn_info_t insn_info = bb_info->last_insn;
3233 insn_info_t last_wild_read = NULL;
3235 while (insn_info)
3237 if (insn_info->wild_read)
3239 last_wild_read = insn_info->prev_insn;
3240 /* Block starts with wild read. */
3241 if (!last_wild_read)
3242 return NULL;
3245 insn_info = insn_info->prev_insn;
3248 if (last_wild_read)
3249 return last_wild_read;
3250 else
3251 return bb_info->last_insn;
3255 /* Scan the insns in BB_INFO starting at PTR and going to the top of
3256 the block in order to build the gen and kill sets for the block.
3257 We start at ptr which may be the last insn in the block or may be
3258 the insn before the first wild read. In the latter case we are able to
3259 skip the rest of the block because it just does not matter:
3260 anything that happens is hidden by the wild read. */
3262 static void
3263 dse_step3_scan (bool for_spills, basic_block bb)
3265 bb_info_t bb_info = bb_table[bb->index];
3266 insn_info_t insn_info;
3268 if (for_spills)
3269 /* There are no wild reads in the spill case. */
3270 insn_info = bb_info->last_insn;
3271 else
3272 insn_info = find_insn_before_first_wild_read (bb_info);
3274 /* In the spill case, or in the no_spill case when there is no wild
3275 read in the block, we will need a kill set. */
3276 if (insn_info == bb_info->last_insn)
3278 if (bb_info->kill)
3279 bitmap_clear (bb_info->kill);
3280 else
3281 bb_info->kill = BITMAP_ALLOC (&dse_bitmap_obstack);
3283 else
3284 if (bb_info->kill)
3285 BITMAP_FREE (bb_info->kill);
3287 while (insn_info)
3289 /* There may have been code deleted by the dce pass run before
3290 this phase. */
3291 if (insn_info->insn && INSN_P (insn_info->insn))
3293 /* Process the read(s) last. */
3294 if (for_spills)
3296 scan_stores_spill (insn_info->store_rec, bb_info->gen, bb_info->kill);
3297 scan_reads_spill (insn_info->read_rec, bb_info->gen, bb_info->kill);
3299 else
3301 scan_stores_nospill (insn_info->store_rec, bb_info->gen, bb_info->kill);
3302 scan_reads_nospill (insn_info, bb_info->gen, bb_info->kill);
3306 insn_info = insn_info->prev_insn;
3311 /* Set the gen set of the exit block, and also any block with no
3312 successors that does not have a wild read. */
3314 static void
3315 dse_step3_exit_block_scan (bb_info_t bb_info)
3317 /* The gen set is all 0's for the exit block except for the
3318 frame_pointer_group. */
3320 if (stores_off_frame_dead_at_return)
3322 unsigned int i;
3323 group_info_t group;
3325 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
3327 if (group->process_globally && group->frame_related)
3328 bitmap_ior_into (bb_info->gen, group->group_kill);
3334 /* Find all of the blocks that are not backwards reachable from the
3335 exit block or any block with no successors (BB). These are the
3336 infinite loops or infinite self loops. These blocks will still
3337 have their bits set in UNREACHABLE_BLOCKS. */
3339 static void
3340 mark_reachable_blocks (sbitmap unreachable_blocks, basic_block bb)
3342 edge e;
3343 edge_iterator ei;
3345 if (bitmap_bit_p (unreachable_blocks, bb->index))
3347 bitmap_clear_bit (unreachable_blocks, bb->index);
3348 FOR_EACH_EDGE (e, ei, bb->preds)
3350 mark_reachable_blocks (unreachable_blocks, e->src);
3355 /* Build the transfer functions for the function. */
3357 static void
3358 dse_step3 (bool for_spills)
3360 basic_block bb;
3361 sbitmap unreachable_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
3362 sbitmap_iterator sbi;
3363 bitmap all_ones = NULL;
3364 unsigned int i;
3366 bitmap_ones (unreachable_blocks);
3368 FOR_ALL_BB_FN (bb, cfun)
3370 bb_info_t bb_info = bb_table[bb->index];
3371 if (bb_info->gen)
3372 bitmap_clear (bb_info->gen);
3373 else
3374 bb_info->gen = BITMAP_ALLOC (&dse_bitmap_obstack);
3376 if (bb->index == ENTRY_BLOCK)
3378 else if (bb->index == EXIT_BLOCK)
3379 dse_step3_exit_block_scan (bb_info);
3380 else
3381 dse_step3_scan (for_spills, bb);
3382 if (EDGE_COUNT (bb->succs) == 0)
3383 mark_reachable_blocks (unreachable_blocks, bb);
3385 /* If this is the second time dataflow is run, delete the old
3386 sets. */
3387 if (bb_info->in)
3388 BITMAP_FREE (bb_info->in);
3389 if (bb_info->out)
3390 BITMAP_FREE (bb_info->out);
3393 /* For any block in an infinite loop, we must initialize the out set
3394 to all ones. This could be expensive, but almost never occurs in
3395 practice. However, it is common in regression tests. */
3396 EXECUTE_IF_SET_IN_BITMAP (unreachable_blocks, 0, i, sbi)
3398 if (bitmap_bit_p (all_blocks, i))
3400 bb_info_t bb_info = bb_table[i];
3401 if (!all_ones)
3403 unsigned int j;
3404 group_info_t group;
3406 all_ones = BITMAP_ALLOC (&dse_bitmap_obstack);
3407 FOR_EACH_VEC_ELT (rtx_group_vec, j, group)
3408 bitmap_ior_into (all_ones, group->group_kill);
3410 if (!bb_info->out)
3412 bb_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
3413 bitmap_copy (bb_info->out, all_ones);
3418 if (all_ones)
3419 BITMAP_FREE (all_ones);
3420 sbitmap_free (unreachable_blocks);
3425 /*----------------------------------------------------------------------------
3426 Fourth step.
3428 Solve the bitvector equations.
3429 ----------------------------------------------------------------------------*/
3432 /* Confluence function for blocks with no successors. Create an out
3433 set from the gen set of the exit block. This block logically has
3434 the exit block as a successor. */
3438 static void
3439 dse_confluence_0 (basic_block bb)
3441 bb_info_t bb_info = bb_table[bb->index];
3443 if (bb->index == EXIT_BLOCK)
3444 return;
3446 if (!bb_info->out)
3448 bb_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
3449 bitmap_copy (bb_info->out, bb_table[EXIT_BLOCK]->gen);
3453 /* Propagate the information from the in set of the dest of E to the
3454 out set of the src of E. If the various in or out sets are not
3455 there, that means they are all ones. */
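/* Concretely, the meet below is set intersection, out(src) &=
   in(dest): a store is only dead at the end of SRC if it is dead
   along every successor path. */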
3457 static bool
3458 dse_confluence_n (edge e)
3460 bb_info_t src_info = bb_table[e->src->index];
3461 bb_info_t dest_info = bb_table[e->dest->index];
3463 if (dest_info->in)
3465 if (src_info->out)
3466 bitmap_and_into (src_info->out, dest_info->in);
3467 else
3469 src_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
3470 bitmap_copy (src_info->out, dest_info->in);
3473 return true;
3477 /* Propagate the info from the out to the in set of BB_INDEX's basic
3478 block. There are three cases:
3480 1) The block has no kill set. In this case the kill set is all
3481 ones. It does not matter what the out set of the block is, none of
3482 the info can reach the top. The only thing that reaches the top is
3483 the gen set and we just copy the set.
3485 2) There is a kill set but no out set and bb has successors. In
3486 this case we just return. Eventually an out set will be created and
3487 it is better to wait than to create a set of ones.
3489 3) There is both a kill and out set. We apply the obvious transfer
3490 function.
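/* In case 3 the update is the standard backward dead-store
   equation, in(bb) = gen(bb) | (out(bb) & ~kill(bb)), which is
   exactly what bitmap_ior_and_compl computes below. */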
3493 static bool
3494 dse_transfer_function (int bb_index)
3496 bb_info_t bb_info = bb_table[bb_index];
3498 if (bb_info->kill)
3500 if (bb_info->out)
3502 /* Case 3 above. */
3503 if (bb_info->in)
3504 return bitmap_ior_and_compl (bb_info->in, bb_info->gen,
3505 bb_info->out, bb_info->kill);
3506 else
3508 bb_info->in = BITMAP_ALLOC (&dse_bitmap_obstack);
3509 bitmap_ior_and_compl (bb_info->in, bb_info->gen,
3510 bb_info->out, bb_info->kill);
3511 return true;
3514 else
3515 /* Case 2 above. */
3516 return false;
3518 else
3520 /* Case 1 above. If there is already an in set, nothing
3521 happens. */
3522 if (bb_info->in)
3523 return false;
3524 else
3526 bb_info->in = BITMAP_ALLOC (&dse_bitmap_obstack);
3527 bitmap_copy (bb_info->in, bb_info->gen);
3528 return true;
3533 /* Solve the dataflow equations. */
3535 static void
3536 dse_step4 (void)
3538 df_simple_dataflow (DF_BACKWARD, NULL, dse_confluence_0,
3539 dse_confluence_n, dse_transfer_function,
3540 all_blocks, df_get_postorder (DF_BACKWARD),
3541 df_get_n_blocks (DF_BACKWARD));
3542 if (dump_file && (dump_flags & TDF_DETAILS))
3544 basic_block bb;
3546 fprintf (dump_file, "\n\n*** Global dataflow info after analysis.\n");
3547 FOR_ALL_BB_FN (bb, cfun)
3549 bb_info_t bb_info = bb_table[bb->index];
3551 df_print_bb_index (bb, dump_file);
3552 if (bb_info->in)
3553 bitmap_print (dump_file, bb_info->in, " in: ", "\n");
3554 else
3555 fprintf (dump_file, " in: *MISSING*\n");
3556 if (bb_info->gen)
3557 bitmap_print (dump_file, bb_info->gen, " gen: ", "\n");
3558 else
3559 fprintf (dump_file, " gen: *MISSING*\n");
3560 if (bb_info->kill)
3561 bitmap_print (dump_file, bb_info->kill, " kill: ", "\n");
3562 else
3563 fprintf (dump_file, " kill: *MISSING*\n");
3564 if (bb_info->out)
3565 bitmap_print (dump_file, bb_info->out, " out: ", "\n");
3566 else
3567 fprintf (dump_file, " out: *MISSING*\n\n");
3574 /*----------------------------------------------------------------------------
3575 Fifth step.
3577 Delete the stores that can only be deleted using the global information.
3578 ----------------------------------------------------------------------------*/
3581 static void
3582 dse_step5_nospill (void)
3584 basic_block bb;
3585 FOR_EACH_BB_FN (bb, cfun)
3587 bb_info_t bb_info = bb_table[bb->index];
3588 insn_info_t insn_info = bb_info->last_insn;
3589 bitmap v = bb_info->out;
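/* V tracks, while walking backwards through the block, which
   positions are still dead at the current point; a store can only
   be deleted if every byte it writes maps to a set bit in V. */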
3591 while (insn_info)
3593 bool deleted = false;
3594 if (dump_file && insn_info->insn)
3596 fprintf (dump_file, "starting to process insn %d\n",
3597 INSN_UID (insn_info->insn));
3598 bitmap_print (dump_file, v, " v: ", "\n");
3601 /* There may have been code deleted by the dce pass run before
3602 this phase. */
3603 if (insn_info->insn
3604 && INSN_P (insn_info->insn)
3605 && (!insn_info->cannot_delete)
3606 && (!bitmap_empty_p (v)))
3608 store_info_t store_info = insn_info->store_rec;
3610 /* Try to delete the current insn. */
3611 deleted = true;
3613 /* Skip the clobbers. */
3614 while (!store_info->is_set)
3615 store_info = store_info->next;
3617 if (store_info->alias_set)
3618 deleted = false;
3619 else
3621 HOST_WIDE_INT i;
3622 group_info_t group_info
3623 = rtx_group_vec[store_info->group_id];
3625 for (i = store_info->begin; i < store_info->end; i++)
3627 int index = get_bitmap_index (group_info, i);
3629 if (dump_file && (dump_flags & TDF_DETAILS))
3630 fprintf (dump_file, "i = %d, index = %d\n", (int)i, index);
3631 if (index == 0 || !bitmap_bit_p (v, index))
3633 if (dump_file && (dump_flags & TDF_DETAILS))
3634 fprintf (dump_file, "failing at i = %d\n", (int)i);
3635 deleted = false;
3636 break;
3640 if (deleted)
3642 if (dbg_cnt (dse)
3643 && check_for_inc_dec_1 (insn_info))
3645 delete_insn (insn_info->insn);
3646 insn_info->insn = NULL;
3647 globally_deleted++;
3651 /* We do not want to process the local info if the insn was
3652 deleted. For instance, if the insn did a wild read, we
3653 no longer need to trash the info. */
3654 if (insn_info->insn
3655 && INSN_P (insn_info->insn)
3656 && (!deleted))
3658 scan_stores_nospill (insn_info->store_rec, v, NULL);
3659 if (insn_info->wild_read)
3661 if (dump_file && (dump_flags & TDF_DETAILS))
3662 fprintf (dump_file, "wild read\n");
3663 bitmap_clear (v);
3665 else if (insn_info->read_rec
3666 || insn_info->non_frame_wild_read)
3668 if (dump_file && !insn_info->non_frame_wild_read)
3669 fprintf (dump_file, "regular read\n");
3670 else if (dump_file && (dump_flags & TDF_DETAILS))
3671 fprintf (dump_file, "non-frame wild read\n");
3672 scan_reads_nospill (insn_info, v, NULL);
3676 insn_info = insn_info->prev_insn;
3683 /*----------------------------------------------------------------------------
3684 Sixth step.
3686 Delete stores made redundant by earlier stores (which store the same
3687 value) that couldn't be eliminated.
3688 ----------------------------------------------------------------------------*/
3690 static void
3691 dse_step6 (void)
3693 basic_block bb;
3695 FOR_ALL_BB_FN (bb, cfun)
3697 bb_info_t bb_info = bb_table[bb->index];
3698 insn_info_t insn_info = bb_info->last_insn;
3700 while (insn_info)
3702 /* There may have been code deleted by the dce pass run before
3703 this phase. */
3704 if (insn_info->insn
3705 && INSN_P (insn_info->insn)
3706 && !insn_info->cannot_delete)
3708 store_info_t s_info = insn_info->store_rec;
3710 while (s_info && !s_info->is_set)
3711 s_info = s_info->next;
3712 if (s_info
3713 && s_info->redundant_reason
3714 && s_info->redundant_reason->insn
3715 && INSN_P (s_info->redundant_reason->insn))
3717 rtx_insn *rinsn = s_info->redundant_reason->insn;
3718 if (dump_file && (dump_flags & TDF_DETAILS))
3719 fprintf (dump_file, "Locally deleting insn %d "
3720 "because insn %d stores the "
3721 "same value and couldn't be "
3722 "eliminated\n",
3723 INSN_UID (insn_info->insn),
3724 INSN_UID (rinsn));
3725 delete_dead_store_insn (insn_info);
3728 insn_info = insn_info->prev_insn;
3733 /*----------------------------------------------------------------------------
3734 Seventh step.
3736 Destroy everything left standing.
3737 ----------------------------------------------------------------------------*/
3739 static void
3740 dse_step7 (void)
3742 bitmap_obstack_release (&dse_bitmap_obstack);
3743 obstack_free (&dse_obstack, NULL);
3745 end_alias_analysis ();
3746 free (bb_table);
3747 delete rtx_group_table;
3748 rtx_group_table = NULL;
3749 rtx_group_vec.release ();
3750 BITMAP_FREE (all_blocks);
3751 BITMAP_FREE (scratch);
3753 rtx_store_info_pool.release ();
3754 read_info_type::pool.release ();
3755 insn_info_type::pool.release ();
3756 dse_bb_info_type::pool.release ();
3757 group_info::pool.release ();
3758 deferred_change::pool.release ();
3762 /* -------------------------------------------------------------------------
3764 ------------------------------------------------------------------------- */
3766 /* Callback for running pass_rtl_dse. */
3768 static unsigned int
3769 rest_of_handle_dse (void)
3771 df_set_flags (DF_DEFER_INSN_RESCAN);
3773 /* Need the notes since we must track live hardregs in the forwards
3774 direction. */
3775 df_note_add_problem ();
3776 df_analyze ();
3778 dse_step0 ();
3779 dse_step1 ();
3780 dse_step2_init ();
3781 if (dse_step2_nospill ())
3783 df_set_flags (DF_LR_RUN_DCE);
3784 df_analyze ();
3785 if (dump_file && (dump_flags & TDF_DETAILS))
3786 fprintf (dump_file, "doing global processing\n");
3787 dse_step3 (false);
3788 dse_step4 ();
3789 dse_step5_nospill ();
3792 dse_step6 ();
3793 dse_step7 ();
3795 if (dump_file)
3796 fprintf (dump_file, "dse: local deletions = %d, global deletions = %d, spill deletions = %d\n",
3797 locally_deleted, globally_deleted, spill_deleted);
3799 /* DSE can eliminate potentially-trapping MEMs.
3800 Remove any EH edges associated with them. */
3801 if ((locally_deleted || globally_deleted)
3802 && cfun->can_throw_non_call_exceptions
3803 && purge_all_dead_edges ())
3804 cleanup_cfg (0);
3806 return 0;
3809 namespace {
3811 const pass_data pass_data_rtl_dse1 =
3813 RTL_PASS, /* type */
3814 "dse1", /* name */
3815 OPTGROUP_NONE, /* optinfo_flags */
3816 TV_DSE1, /* tv_id */
3817 0, /* properties_required */
3818 0, /* properties_provided */
3819 0, /* properties_destroyed */
3820 0, /* todo_flags_start */
3821 TODO_df_finish, /* todo_flags_finish */
3824 class pass_rtl_dse1 : public rtl_opt_pass
3826 public:
3827 pass_rtl_dse1 (gcc::context *ctxt)
3828 : rtl_opt_pass (pass_data_rtl_dse1, ctxt)
3831 /* opt_pass methods: */
3832 virtual bool gate (function *)
3834 return optimize > 0 && flag_dse && dbg_cnt (dse1);
3837 virtual unsigned int execute (function *) { return rest_of_handle_dse (); }
3839 }; // class pass_rtl_dse1
3841 } // anon namespace
3843 rtl_opt_pass *
3844 make_pass_rtl_dse1 (gcc::context *ctxt)
3846 return new pass_rtl_dse1 (ctxt);
3849 namespace {
3851 const pass_data pass_data_rtl_dse2 =
3853 RTL_PASS, /* type */
3854 "dse2", /* name */
3855 OPTGROUP_NONE, /* optinfo_flags */
3856 TV_DSE2, /* tv_id */
3857 0, /* properties_required */
3858 0, /* properties_provided */
3859 0, /* properties_destroyed */
3860 0, /* todo_flags_start */
3861 TODO_df_finish, /* todo_flags_finish */
3864 class pass_rtl_dse2 : public rtl_opt_pass
3866 public:
3867 pass_rtl_dse2 (gcc::context *ctxt)
3868 : rtl_opt_pass (pass_data_rtl_dse2, ctxt)
3871 /* opt_pass methods: */
3872 virtual bool gate (function *)
3874 return optimize > 0 && flag_dse && dbg_cnt (dse2);
3877 virtual unsigned int execute (function *) { return rest_of_handle_dse (); }
3879 }; // class pass_rtl_dse2
3881 } // anon namespace
3883 rtl_opt_pass *
3884 make_pass_rtl_dse2 (gcc::context *ctxt)
3886 return new pass_rtl_dse2 (ctxt);