/* RTL dead store elimination.
   Copyright (C) 2005-2015 Free Software Foundation, Inc.

   Contributed by Richard Sandiford <rsandifor@codesourcery.com>
   and Kenneth Zadeck <zadeck@naturalbridge.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#undef BASELINE

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hash-table.h"
#include "tm.h"
#include "rtl.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "real.h"
#include "tree.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "regset.h"
#include "flags.h"
#include "dominance.h"
#include "cfg.h"
#include "cfgrtl.h"
#include "predict.h"
#include "basic-block.h"
#include "df.h"
#include "cselib.h"
#include "tree-pass.h"
#include "alloc-pool.h"
#include "alias.h"
#include "insn-config.h"
#include "expr.h"
#include "recog.h"
#include "insn-codes.h"
#include "optabs.h"
#include "dbgcnt.h"
#include "target.h"
#include "params.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimple-ssa.h"
#include "rtl-iter.h"
/* This file contains three techniques for performing Dead Store
   Elimination (dse).

   * The first technique performs dse locally on any base address.  It
   is based on cselib, which is a local value numbering technique.
   This technique is local to a basic block but deals with fairly
   general addresses.

   * The second technique performs dse globally but is restricted to
   base addresses that are either constant or are relative to the
   frame_pointer.

   * The third technique (which is only done after register allocation)
   processes the spill slots.  This differs from the second
   technique because it takes advantage of the fact that spilling is
   completely free from the effects of aliasing.

   Logically, dse is a backwards dataflow problem.  A store can be
   deleted if it cannot be reached in the backward direction by any
   use of the value being stored.  However, the local technique uses a
   forwards scan of the basic block because cselib requires that the
   block be processed in that order.

   The pass is logically broken into 7 steps:

   0) Initialization.

   1) The local algorithm, as well as scanning the insns for the two
   global algorithms.

   2) Analysis to see if the global algs are necessary.  In the case
   of stores based on a constant address, there must be at least two
   stores to that address, to make it possible to delete some of the
   stores.  In the case of stores off of the frame or spill related
   stores, only one store to an address is necessary because those
   stores die at the end of the function.

   3) Set up the global dataflow equations based on processing the
   info parsed in the first step.

   4) Solve the dataflow equations.

   5) Delete the insns that the global analysis has indicated are
   unnecessary.

   6) Delete insns that store the same value as a preceding store
   where the earlier store couldn't be eliminated.

   7) Cleanup.

   Step 1 uses cselib and canon_rtx to build the largest expression
   possible for each address.  It is a forwards pass through
   each basic block.  From the point of view of the global technique,
   the first pass could examine a block in either direction.  The
   forwards ordering is to accommodate cselib.

   We make a simplifying assumption: addresses fall into four broad
   categories:

   1) base has rtx_varies_p == false, offset is constant.
   2) base has rtx_varies_p == false, offset variable.
   3) base has rtx_varies_p == true, offset constant.
   4) base has rtx_varies_p == true, offset variable.

   The local passes are able to process all 4 kinds of addresses.  The
   global pass only handles 1).

   The global problem is formulated as follows:

   A store, S1, to address A, where A is not relative to the stack
   frame, can be eliminated if all paths from S1 to the end of the
   function contain another store to A before a read of A.

   If the address A is relative to the stack frame, a store S2 to A
   can be eliminated if there are no paths from S2 that reach the
   end of the function that read A before another store to A.  In
   this case S2 can be deleted if there are paths from S2 to the
   end of the function that have no reads or writes to A.  This
   second case allows stores to the stack frame to be deleted that
   would otherwise die when the function returns.  This cannot be
   done if stores_off_frame_dead_at_return is not true.  See the doc
   for that variable for when this variable is false.

   The global problem is formulated as a backwards set union
   dataflow problem where the stores are the gens and reads are the
   kills.  Set union problems are rare and require some special
   handling given our representation of bitmaps.  A straightforward
   implementation requires a lot of bitmaps filled with 1s.
   These are expensive and cumbersome in our bitmap formulation so
   care has been taken to avoid large vectors filled with 1s.  See
   the comments in bb_info and in the dataflow confluence functions
   for details.

   There are two places for further enhancements to this algorithm:

   1) The original dse which was embedded in a pass called flow also
   did local address forwarding.  For example in

   A <- r100
   ... <- A

   flow would replace the right hand side of the second insn with a
   reference to r100.  Most of the information is available to add this
   to this pass.  It has not been done because it is a lot of work in
   the case that either r100 is assigned to between the first and
   second insn and/or the second insn is a load of part of the value
   stored by the first insn.

   insn 5 in gcc.c-torture/compile/990203-1.c simple case.
   insn 15 in gcc.c-torture/execute/20001017-2.c simple case.
   insn 25 in gcc.c-torture/execute/20001026-1.c simple case.
   insn 44 in gcc.c-torture/execute/20010910-1.c simple case.

   2) The cleaning up of spill code is quite profitable.  It currently
   depends on reading tea leaves and chicken entrails left by reload.
   This pass depends on reload creating a singleton alias set for each
   spill slot and telling the next dse pass which of these alias sets
   are the singletons.  Rather than analyze the addresses of the
   spills, dse's spill processing just does analysis of the loads and
   stores that use those alias sets.  There are three cases where this
   falls short:

   a) Reload sometimes creates the slot for one mode of access, and
   then inserts loads and/or stores for a smaller mode.  In this
   case, the current code just punts on the slot.  The proper thing
   to do is to back out and use one bit vector position for each
   byte of the entity associated with the slot.  This depends on
   KNOWING that reload always generates the accesses for each of the
   bytes in some canonical (read that easy to understand several
   passes after reload happens) way.

   b) Reload sometimes decides that a spill slot it allocated was not
   large enough for the mode and goes back and allocates more slots
   with the same mode and alias set.  The backout in this case is a
   little more graceful than (a).  In this case the slot is unmarked
   as being a spill slot and if the final address comes out to be based
   off the frame pointer, the global algorithm handles this slot.

   c) For any pass that may prespill, there is currently no
   mechanism to tell the dse pass that the slot being used has the
   special properties that reload uses.  It may be that all that is
   required is to have those passes make the same calls that reload
   does, assuming that the alias sets can be manipulated in the same
   way.  */
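
/* An illustrative example of the problem (a sketch, not taken from the
   testsuite): in a block containing

     *p = x;    <- S1
     *p = y;    <- S2, same address, no intervening read of *p

   S1 is locally dead and can be removed once cselib establishes that
   both addresses carry the same value.  The global technique instead
   handles a frame store such as

     fp[-4] = x;   <- no read of fp[-4] on any path to exit

   which is dead even without a shadowing store, because frame stores
   die at function return when stores_off_frame_dead_at_return is
   true.  */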
/* There are limits to the size of constant offsets we model for the
   global problem.  There are certainly test cases that exceed this
   limit; however, it is unlikely that there are important programs
   that really have constant offsets this size.  */
#define MAX_OFFSET (64 * 1024)

/* Obstack for the DSE dataflow bitmaps.  We don't want to put these
   on the default obstack because these bitmaps can grow quite large
   (~2GB for the small (!) test case of PR54146) and we'll hold on to
   all that memory until the end of the compiler run.
   As a bonus, delete_tree_live_info can destroy all the bitmaps by just
   releasing the whole obstack.  */
static bitmap_obstack dse_bitmap_obstack;

/* Obstack for other data.  As for above: Kinda nice to be able to
   throw it all away at the end in one big sweep.  */
static struct obstack dse_obstack;

/* Scratch bitmap for cselib's cselib_expand_value_rtx.  */
static bitmap scratch = NULL;
struct insn_info;

/* This structure holds information about a candidate store.  */
struct store_info
{

  /* False means this is a clobber.  */
  bool is_set;

  /* False if a single HOST_WIDE_INT bitmap is used for positions_needed.  */
  bool is_large;

  /* The id of the mem group of the base address.  If rtx_varies_p is
     true, this is -1.  Otherwise, it is the index into the group
     table.  */
  int group_id;

  /* This is the cselib value.  */
  cselib_val *cse_base;

  /* This is the canonized mem.  */
  rtx mem;

  /* Canonized MEM address for use by canon_true_dependence.  */
  rtx mem_addr;

  /* If this is non-zero, it is the alias set of a spill location.  */
  alias_set_type alias_set;

  /* The offset of the first byte and the offset one past the last
     byte associated with the operation.  */
  HOST_WIDE_INT begin, end;

  union
    {
      /* A bitmask as wide as the number of bytes in the word that
	 contains a 1 if the byte may be needed.  The store is unused if
	 all of the bits are 0.  This is used if IS_LARGE is false.  */
      unsigned HOST_WIDE_INT small_bitmask;

      struct
	{
	  /* A bitmap with one bit per byte.  Cleared bit means the position
	     is needed.  Used if IS_LARGE is true.  */
	  bitmap bmap;

	  /* Number of set bits (i.e. unneeded bytes) in BITMAP.  If it is
	     equal to END - BEGIN, the whole store is unused.  */
	  int count;
	} large;
    } positions_needed;

  /* The next store info for this insn.  */
  struct store_info *next;

  /* The right hand side of the store.  This is used if there is a
     subsequent reload of the mems address somewhere later in the
     basic block.  */
  rtx rhs;

  /* If rhs is or holds a constant, this contains that constant,
     otherwise NULL.  */
  rtx const_rhs;

  /* Set if this store stores the same constant value as REDUNDANT_REASON
     insn stored.  These aren't eliminated early, because doing that
     might prevent the earlier larger store from being eliminated.  */
  struct insn_info *redundant_reason;
};
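
/* A sketch of how positions_needed tracks partial deadness (values
   illustrative, not from a real run): for an 8-byte store, IS_LARGE is
   false and small_bitmask starts as 0xff, one bit per byte, all
   needed.  If a later 4-byte store overwrites bytes 0-3,
   set_position_unneeded clears those bits, leaving 0xf0; once the mask
   reaches 0 the whole store is dead.  Stores wider than
   HOST_BITS_PER_WIDE_INT bytes use the large.bmap/large.count
   representation instead, with inverted polarity: a set bit there
   means the byte is NOT needed.  */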
/* Return a bitmask with the first N low bits set.  */

static unsigned HOST_WIDE_INT
lowpart_bitmask (int n)
{
  unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT) 0;
  return mask >> (HOST_BITS_PER_WIDE_INT - n);
}
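
/* For example (illustrative), with a 64-bit HOST_WIDE_INT:

     lowpart_bitmask (4)  == 0xf
     lowpart_bitmask (64) == ~(unsigned HOST_WIDE_INT) 0

   Note that N must be at least 1 here: N == 0 would shift by
   HOST_BITS_PER_WIDE_INT, which is undefined in C.  */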
typedef struct store_info *store_info_t;
static alloc_pool cse_store_info_pool;
static alloc_pool rtx_store_info_pool;

/* This structure holds information about a load.  These are only
   built for rtx bases.  */
struct read_info
{
  /* The id of the mem group of the base address.  */
  int group_id;

  /* If this is non-zero, it is the alias set of a spill location.  */
  alias_set_type alias_set;

  /* The offset of the first byte and the offset one past the last
     byte associated with the operation.  If begin == end == 0, the
     read did not have a constant offset.  */
  int begin, end;

  /* The mem being read.  */
  rtx mem;

  /* The next read_info for this insn.  */
  struct read_info *next;
};

typedef struct read_info *read_info_t;
static alloc_pool read_info_pool;
/* One of these records is created for each insn.  */

struct insn_info
{
  /* Set true if the insn contains a store but the insn itself cannot
     be deleted.  This is set if the insn is a parallel and there is
     more than one non-dead output or if the insn is in some way
     volatile.  */
  bool cannot_delete;

  /* This field is only used by the global algorithm.  It is set true
     if the insn contains any read of mem except for a (1) (an address
     of the first category above).  This is also set if the insn is a
     call or has a clobber mem.  If the insn contains a wild read, the
     use_rec will be null.  */
  bool wild_read;

  /* This is true only for CALL instructions which could potentially read
     any non-frame memory location.  This field is used by the global
     algorithm.  */
  bool non_frame_wild_read;

  /* This field is only used for the processing of const functions.
     These functions cannot read memory, but they can read the stack
     because that is where they may get their parms.  We need to be
     this conservative because, like the store motion pass, we don't
     consider CALL_INSN_FUNCTION_USAGE when processing call insns.
     Moreover, we need to distinguish two cases:
     1. Before reload (register elimination), the stores related to
	outgoing arguments are stack pointer based and thus deemed
	of non-constant base in this pass.  This requires special
	handling but also means that the frame pointer based stores
	need not be killed upon encountering a const function call.
     2. After reload, the stores related to outgoing arguments can be
	either stack pointer or hard frame pointer based.  This means
	that we have no other choice than also killing all the frame
	pointer based stores upon encountering a const function call.
     This field is set after reload for const function calls and before
     reload for const tail function calls on targets where the arg pointer
     is the frame pointer.  Having this set is less severe than a wild
     read, it just means that all the frame related stores are killed
     rather than all the stores.  */
  bool frame_read;

  /* This field is only used for the processing of const functions.
     It is set if the insn may contain a stack pointer based store.  */
  bool stack_pointer_based;

  /* This is true if any of the sets within the store contains a
     cselib base.  Such stores can only be deleted by the local
     algorithm.  */
  bool contains_cselib_groups;

  /* The insn.  */
  rtx_insn *insn;

  /* The list of mem sets or mem clobbers that are contained in this
     insn.  If the insn is deletable, it contains only one mem set.
     But it could also contain clobbers.  Insns that contain more than
     one mem set are not deletable, but each of those mems are here in
     order to provide info to delete other insns.  */
  store_info_t store_rec;

  /* The linked list of mem uses in this insn.  Only the reads from
     rtx bases are listed here.  The reads to cselib bases are
     completely processed during the first scan and so are never
     created.  */
  read_info_t read_rec;

  /* The live fixed registers.  We assume only fixed registers can
     cause trouble by being clobbered from an expanded pattern;
     storing only the live fixed registers (rather than all registers)
     means less memory needs to be allocated / copied for the individual
     stores.  */
  regset fixed_regs_live;

  /* The prev insn in the basic block.  */
  struct insn_info * prev_insn;

  /* The linked list of insns that are in consideration for removal in
     the forwards pass through the basic block.  This pointer may be
     trash as it is not cleared when a wild read occurs.  The only
     time it is guaranteed to be correct is when the traversal starts
     at active_local_stores.  */
  struct insn_info * next_local_store;
};

typedef struct insn_info *insn_info_t;
static alloc_pool insn_info_pool;
/* The linked list of stores that are under consideration in this
   basic block.  */
static insn_info_t active_local_stores;
static int active_local_stores_len;

struct dse_bb_info
{
  /* Pointer to the insn info for the last insn in the block.  These
     are linked so this is how all of the insns are reached.  During
     scanning this is the current insn being scanned.  */
  insn_info_t last_insn;

  /* The info for the global dataflow problem.  */


  /* This is set if the transfer function should AND in the wild_read
     bitmap before applying the kill and gen sets.  That vector knocks
     out most of the bits in the bitmap and thus speeds up the
     operations.  */
  bool apply_wild_read;

  /* The following 4 bitvectors hold information about which positions
     of which stores are live or dead.  They are indexed by
     get_bitmap_index.  */

  /* The set of store positions that exist in this block before a wild read.  */
  bitmap gen;

  /* The set of load positions that exist in this block above the
     same position of a store.  */
  bitmap kill;

  /* The set of stores that reach the top of the block without being
     killed by a read.

     Do not represent the in if it is all ones.  Note that this is
     what the bitvector should logically be initialized to for a set
     intersection problem.  However, like the kill set, this is too
     expensive.  So initially, the in set will only be created for the
     exit block and any block that contains a wild read.  */
  bitmap in;

  /* The set of stores that reach the bottom of the block from its
     successors.

     Do not represent the out if it is all ones.  Note that this is
     what the bitvector should logically be initialized to for a set
     intersection problem.  However, like the kill and in sets, this is
     too expensive.  So what is done is that the confluence operator
     just initializes the vector from one of the out sets of the
     successors of the block.  */
  bitmap out;

  /* The following bitvector is indexed by the reg number.  It
     contains the set of regs that are live at the current instruction
     being processed.  While it contains info for all of the
     registers, only the hard registers are actually examined.  It is used
     to assure that shift and/or add sequences that are inserted do not
     accidentally clobber live hard regs.  */
  bitmap regs_live;
};

typedef struct dse_bb_info *bb_info_t;
static alloc_pool bb_info_pool;

/* Table to hold all bb_infos.  */
static bb_info_t *bb_table;
/* There is a group_info for each rtx base that is used to reference
   memory.  There are also not many of the rtx bases because they are
   very limited in scope.  */

struct group_info
{
  /* The actual base of the address.  */
  rtx rtx_base;

  /* The sequential id of the base.  This allows us to have a
     canonical ordering of these that is not based on addresses.  */
  int id;

  /* True if there are any positions that are to be processed
     globally.  */
  bool process_globally;

  /* True if the base of this group is either the frame_pointer or
     hard_frame_pointer.  */
  bool frame_related;

  /* A mem wrapped around the base pointer for the group in order to do
     read dependency.  It must be given BLKmode in order to encompass all
     the possible offsets from the base.  */
  rtx base_mem;

  /* Canonized version of base_mem's address.  */
  rtx canon_base_addr;

  /* These two sets of two bitmaps are used to keep track of how many
     stores are actually referencing that position from this base.  We
     only do this for rtx bases as this will be used to assign
     positions in the bitmaps for the global problem.  Bit N is set in
     store1 on the first store for offset N.  Bit N is set in store2
     for the second store to offset N.  This is all we need since we
     only care about offsets that have two or more stores for them.

     The "_n" suffix is for offsets less than 0 and the "_p" suffix is
     for 0 and greater offsets.

     There is one special case here, for stores into the stack frame:
     we will OR store1 into store2 before deciding which stores to look
     at globally.  This is because stores to the stack frame that have
     no other reads before the end of the function can also be
     deleted.  */
  bitmap store1_n, store1_p, store2_n, store2_p;

  /* These bitmaps keep track of which offsets in this group escape this
     function.  An offset escapes if it corresponds to a named variable
     whose addressable flag is set.  */
  bitmap escaped_n, escaped_p;

  /* The positions in this bitmap have the same assignments as the in,
     out, gen and kill bitmaps.  This bitmap is all zeros except for
     the positions that are occupied by stores for this group.  */
  bitmap group_kill;

  /* The offset_map is used to map the offsets from this base into
     positions in the global bitmaps.  It is only created after all of
     the stores have been scanned and we know which ones we
     care about.  */
  int *offset_map_n, *offset_map_p;
  int offset_map_size_n, offset_map_size_p;
};
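
/* A sketch of the offset_map encoding (values illustrative, not from a
   real run): if the only offsets in this group that survive the
   analysis in step 2 are -8 and 4, the maps might end up as

     offset_map_n[8] == 0    (global bit position for offset -8)
     offset_map_p[4] == 1    (global bit position for offset 4)

   with every other entry -1.  Negative offsets index the "_n" map by
   their negation, mirroring store1_n/store2_n above.  The exact
   assignment of positions happens in step 2, outside this excerpt.  */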
typedef struct group_info *group_info_t;
typedef const struct group_info *const_group_info_t;
static alloc_pool rtx_group_info_pool;

/* Index into the rtx_group_vec.  */
static int rtx_group_next_id;


static vec<group_info_t> rtx_group_vec;


/* This structure holds the set of changes that are being deferred
   when removing a read operation.  See replace_read.  */
struct deferred_change
{

  /* The mem that is being replaced.  */
  rtx *loc;

  /* The reg it is being replaced with.  */
  rtx reg;

  struct deferred_change *next;
};

typedef struct deferred_change *deferred_change_t;
static alloc_pool deferred_change_pool;

static deferred_change_t deferred_change_list = NULL;

/* The group that holds all of the clear_alias_sets.  */
static group_info_t clear_alias_group;

/* The modes of the clear_alias_sets.  */
static htab_t clear_alias_mode_table;
/* Hash table element to look up the mode for an alias set.  */
struct clear_alias_mode_holder
{
  alias_set_type alias_set;
  machine_mode mode;
};

/* This is true except if cfun->stdarg -- i.e. we cannot do
   this for vararg functions because they play games with the frame.  */
static bool stores_off_frame_dead_at_return;

/* Counters for stats.  */
static int globally_deleted;
static int locally_deleted;
static int spill_deleted;

static bitmap all_blocks;

/* Locations that are killed by calls in the global phase.  */
static bitmap kill_on_calls;

/* The number of bits used in the global bitmaps.  */
static unsigned int current_position;
/*----------------------------------------------------------------------------
   Zeroth step.

   Initialization.
----------------------------------------------------------------------------*/


/* Find the entry associated with ALIAS_SET.  */

static struct clear_alias_mode_holder *
clear_alias_set_lookup (alias_set_type alias_set)
{
  struct clear_alias_mode_holder tmp_holder;
  void **slot;

  tmp_holder.alias_set = alias_set;
  slot = htab_find_slot (clear_alias_mode_table, &tmp_holder, NO_INSERT);
  gcc_assert (*slot);

  return (struct clear_alias_mode_holder *) *slot;
}
/* Hashtable callbacks for maintaining the "bases" field of
   store_group_info, given that the addresses are function invariants.  */

struct invariant_group_base_hasher : typed_noop_remove <group_info>
{
  typedef group_info value_type;
  typedef group_info compare_type;
  static inline hashval_t hash (const value_type *);
  static inline bool equal (const value_type *, const compare_type *);
};

inline bool
invariant_group_base_hasher::equal (const value_type *gi1,
				    const compare_type *gi2)
{
  return rtx_equal_p (gi1->rtx_base, gi2->rtx_base);
}

inline hashval_t
invariant_group_base_hasher::hash (const value_type *gi)
{
  int do_not_record;
  return hash_rtx (gi->rtx_base, Pmode, &do_not_record, NULL, false);
}

/* Tables of group_info structures, hashed by base value.  */
static hash_table<invariant_group_base_hasher> *rtx_group_table;
/* Get the GROUP for BASE.  Add a new group if it is not there.  */

static group_info_t
get_group_info (rtx base)
{
  struct group_info tmp_gi;
  group_info_t gi;
  group_info **slot;

  if (base)
    {
      /* Find the store_base_info structure for BASE, creating a new one
	 if necessary.  */
      tmp_gi.rtx_base = base;
      slot = rtx_group_table->find_slot (&tmp_gi, INSERT);
      gi = (group_info_t) *slot;
    }
  else
    {
      if (!clear_alias_group)
	{
	  clear_alias_group = gi =
	    (group_info_t) pool_alloc (rtx_group_info_pool);
	  memset (gi, 0, sizeof (struct group_info));
	  gi->id = rtx_group_next_id++;
	  gi->store1_n = BITMAP_ALLOC (&dse_bitmap_obstack);
	  gi->store1_p = BITMAP_ALLOC (&dse_bitmap_obstack);
	  gi->store2_n = BITMAP_ALLOC (&dse_bitmap_obstack);
	  gi->store2_p = BITMAP_ALLOC (&dse_bitmap_obstack);
	  gi->escaped_p = BITMAP_ALLOC (&dse_bitmap_obstack);
	  gi->escaped_n = BITMAP_ALLOC (&dse_bitmap_obstack);
	  gi->group_kill = BITMAP_ALLOC (&dse_bitmap_obstack);
	  gi->process_globally = false;
	  gi->offset_map_size_n = 0;
	  gi->offset_map_size_p = 0;
	  gi->offset_map_n = NULL;
	  gi->offset_map_p = NULL;
	  rtx_group_vec.safe_push (gi);
	}
      return clear_alias_group;
    }

  if (gi == NULL)
    {
      *slot = gi = (group_info_t) pool_alloc (rtx_group_info_pool);
      gi->rtx_base = base;
      gi->id = rtx_group_next_id++;
      gi->base_mem = gen_rtx_MEM (BLKmode, base);
      gi->canon_base_addr = canon_rtx (base);
      gi->store1_n = BITMAP_ALLOC (&dse_bitmap_obstack);
      gi->store1_p = BITMAP_ALLOC (&dse_bitmap_obstack);
      gi->store2_n = BITMAP_ALLOC (&dse_bitmap_obstack);
      gi->store2_p = BITMAP_ALLOC (&dse_bitmap_obstack);
      gi->escaped_p = BITMAP_ALLOC (&dse_bitmap_obstack);
      gi->escaped_n = BITMAP_ALLOC (&dse_bitmap_obstack);
      gi->group_kill = BITMAP_ALLOC (&dse_bitmap_obstack);
      gi->process_globally = false;
      gi->frame_related =
	(base == frame_pointer_rtx) || (base == hard_frame_pointer_rtx);
      gi->offset_map_size_n = 0;
      gi->offset_map_size_p = 0;
      gi->offset_map_n = NULL;
      gi->offset_map_p = NULL;
      rtx_group_vec.safe_push (gi);
    }

  return gi;
}
/* Initialization of data structures.  */

static void
dse_step0 (void)
{
  locally_deleted = 0;
  globally_deleted = 0;
  spill_deleted = 0;

  bitmap_obstack_initialize (&dse_bitmap_obstack);
  gcc_obstack_init (&dse_obstack);

  scratch = BITMAP_ALLOC (&reg_obstack);
  kill_on_calls = BITMAP_ALLOC (&dse_bitmap_obstack);

  rtx_store_info_pool
    = create_alloc_pool ("rtx_store_info_pool",
			 sizeof (struct store_info), 100);
  read_info_pool
    = create_alloc_pool ("read_info_pool",
			 sizeof (struct read_info), 100);
  insn_info_pool
    = create_alloc_pool ("insn_info_pool",
			 sizeof (struct insn_info), 100);
  bb_info_pool
    = create_alloc_pool ("bb_info_pool",
			 sizeof (struct dse_bb_info), 100);
  rtx_group_info_pool
    = create_alloc_pool ("rtx_group_info_pool",
			 sizeof (struct group_info), 100);
  deferred_change_pool
    = create_alloc_pool ("deferred_change_pool",
			 sizeof (struct deferred_change), 10);

  rtx_group_table = new hash_table<invariant_group_base_hasher> (11);

  bb_table = XNEWVEC (bb_info_t, last_basic_block_for_fn (cfun));
  rtx_group_next_id = 0;

  stores_off_frame_dead_at_return = !cfun->stdarg;

  init_alias_analysis ();

  clear_alias_group = NULL;
}
/*----------------------------------------------------------------------------
   First step.

   Scan all of the insns.  Any random ordering of the blocks is fine.
   Each block is scanned in forward order to accommodate cselib which
   is used to remove stores with non-constant bases.
----------------------------------------------------------------------------*/

/* Delete all of the store_info recs from INSN_INFO.  */

static void
free_store_info (insn_info_t insn_info)
{
  store_info_t store_info = insn_info->store_rec;
  while (store_info)
    {
      store_info_t next = store_info->next;
      if (store_info->is_large)
	BITMAP_FREE (store_info->positions_needed.large.bmap);
      if (store_info->cse_base)
	pool_free (cse_store_info_pool, store_info);
      else
	pool_free (rtx_store_info_pool, store_info);
      store_info = next;
    }

  insn_info->cannot_delete = true;
  insn_info->contains_cselib_groups = false;
  insn_info->store_rec = NULL;
}
typedef struct
{
  rtx_insn *first, *current;
  regset fixed_regs_live;
  bool failure;
} note_add_store_info;

/* Callback for emit_inc_dec_insn_before via note_stores.
   Check if a register is clobbered which is live afterwards.  */

static void
note_add_store (rtx loc, const_rtx expr ATTRIBUTE_UNUSED, void *data)
{
  rtx_insn *insn;
  note_add_store_info *info = (note_add_store_info *) data;
  int r, n;

  if (!REG_P (loc))
    return;

  /* If this register is referenced by the current or an earlier insn,
     that's OK.  E.g. this applies to the register that is being incremented
     with this addition.  */
  for (insn = info->first;
       insn != NEXT_INSN (info->current);
       insn = NEXT_INSN (insn))
    if (reg_referenced_p (loc, PATTERN (insn)))
      return;

  /* If we come here, we have a clobber of a register that's only OK
     if that register is not live.  If we don't have liveness information
     available, fail now.  */
  if (!info->fixed_regs_live)
    {
      info->failure = true;
      return;
    }
  /* Now check if this is a live fixed register.  */
  r = REGNO (loc);
  n = hard_regno_nregs[r][GET_MODE (loc)];
  while (--n >= 0)
    if (REGNO_REG_SET_P (info->fixed_regs_live, r+n))
      info->failure = true;
}
/* Callback for for_each_inc_dec that emits an INSN that sets DEST to
   SRC + SRCOFF before insn ARG.  */

static int
emit_inc_dec_insn_before (rtx mem ATTRIBUTE_UNUSED,
			  rtx op ATTRIBUTE_UNUSED,
			  rtx dest, rtx src, rtx srcoff, void *arg)
{
  insn_info_t insn_info = (insn_info_t) arg;
  rtx_insn *insn = insn_info->insn, *new_insn, *cur;
  note_add_store_info info;

  /* We can reuse all operands without copying, because we are about
     to delete the insn that contained it.  */
  if (srcoff)
    {
      start_sequence ();
      emit_insn (gen_add3_insn (dest, src, srcoff));
      new_insn = get_insns ();
      end_sequence ();
    }
  else
    new_insn = as_a <rtx_insn *> (gen_move_insn (dest, src));
  info.first = new_insn;
  info.fixed_regs_live = insn_info->fixed_regs_live;
  info.failure = false;
  for (cur = new_insn; cur; cur = NEXT_INSN (cur))
    {
      info.current = cur;
      note_stores (PATTERN (cur), note_add_store, &info);
    }

  /* If a failure was flagged above, return 1 so that for_each_inc_dec will
     return it immediately, communicating the failure to its caller.  */
  if (info.failure)
    return 1;

  emit_insn_before (new_insn, insn);

  return 0;
}
/* Before we delete INSN_INFO->INSN, make sure that the auto inc/dec, if it
   is there, is split into a separate insn.
   Return true on success (or if there was nothing to do), false on failure.  */

static bool
check_for_inc_dec_1 (insn_info_t insn_info)
{
  rtx_insn *insn = insn_info->insn;
  rtx note = find_reg_note (insn, REG_INC, NULL_RTX);
  if (note)
    return for_each_inc_dec (PATTERN (insn), emit_inc_dec_insn_before,
			     insn_info) == 0;
  return true;
}
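
/* For instance (an illustrative sketch): deleting a dead store such as

     (set (mem (pre_inc (reg r1))) (reg r2))

   must not lose the side effect on r1, so check_for_inc_dec_1 first
   emits the equivalent

     (set (reg r1) (plus (reg r1) (const_int <size>)))

   via emit_inc_dec_insn_before, and only then may the store itself
   be removed.  */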
/* Entry point for postreload.  If you work on reload_cse, or you need this
   anywhere else, consider if you can provide register liveness information
   and add a parameter to this function so that it can be passed down in
   insn_info.fixed_regs_live.  */
bool
check_for_inc_dec (rtx_insn *insn)
{
  struct insn_info insn_info;
  rtx note;

  insn_info.insn = insn;
  insn_info.fixed_regs_live = NULL;
  note = find_reg_note (insn, REG_INC, NULL_RTX);
  if (note)
    return for_each_inc_dec (PATTERN (insn), emit_inc_dec_insn_before,
			     &insn_info) == 0;
  return true;
}
/* Delete the insn and free all of the fields inside INSN_INFO.  */

static void
delete_dead_store_insn (insn_info_t insn_info)
{
  read_info_t read_info;

  if (!dbg_cnt (dse))
    return;

  if (!check_for_inc_dec_1 (insn_info))
    return;
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "Locally deleting insn %d ",
	       INSN_UID (insn_info->insn));
      if (insn_info->store_rec->alias_set)
	fprintf (dump_file, "alias set %d\n",
		 (int) insn_info->store_rec->alias_set);
      else
	fprintf (dump_file, "\n");
    }

  free_store_info (insn_info);
  read_info = insn_info->read_rec;

  while (read_info)
    {
      read_info_t next = read_info->next;
      pool_free (read_info_pool, read_info);
      read_info = next;
    }
  insn_info->read_rec = NULL;

  delete_insn (insn_info->insn);
  locally_deleted++;
  insn_info->insn = NULL;

  insn_info->wild_read = false;
}
/* Return whether DECL, a local variable, can possibly escape the current
   function scope.  */

static bool
local_variable_can_escape (tree decl)
{
  if (TREE_ADDRESSABLE (decl))
    return true;

  /* If this is a partitioned variable, we need to consider all the variables
     in the partition.  This is necessary because a store into one of them can
     be replaced with a store into another and this may not change the outcome
     of the escape analysis.  */
  if (cfun->gimple_df->decls_to_pointers != NULL)
    {
      tree *namep = cfun->gimple_df->decls_to_pointers->get (decl);
      if (namep)
	return TREE_ADDRESSABLE (*namep);
    }

  return false;
}

/* Return whether EXPR can possibly escape the current function scope.  */

static bool
can_escape (tree expr)
{
  tree base;
  if (!expr)
    return true;
  base = get_base_address (expr);
  if (DECL_P (base)
      && !may_be_aliased (base)
      && !(TREE_CODE (base) == VAR_DECL
	   && !DECL_EXTERNAL (base)
	   && !TREE_STATIC (base)
	   && local_variable_can_escape (base)))
    return false;
  return true;
}
/* Set the store* bitmaps and the offset_map_size* fields in GROUP based
   on OFFSET and WIDTH.  */

static void
set_usage_bits (group_info_t group, HOST_WIDE_INT offset, HOST_WIDE_INT width,
		tree expr)
{
  HOST_WIDE_INT i;
  bool expr_escapes = can_escape (expr);
  if (offset > -MAX_OFFSET && offset + width < MAX_OFFSET)
    for (i = offset; i < offset + width; i++)
      {
	bitmap store1;
	bitmap store2;
	bitmap escaped;
	int ai;
	if (i < 0)
	  {
	    store1 = group->store1_n;
	    store2 = group->store2_n;
	    escaped = group->escaped_n;
	    ai = -i;
	  }
	else
	  {
	    store1 = group->store1_p;
	    store2 = group->store2_p;
	    escaped = group->escaped_p;
	    ai = i;
	  }

	if (!bitmap_set_bit (store1, ai))
	  bitmap_set_bit (store2, ai);
	else
	  {
	    if (i < 0)
	      {
		if (group->offset_map_size_n < ai)
		  group->offset_map_size_n = ai;
	      }
	    else
	      {
		if (group->offset_map_size_p < ai)
		  group->offset_map_size_p = ai;
	      }
	  }
	if (expr_escapes)
	  bitmap_set_bit (escaped, ai);
      }
}
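
/* A worked example (values illustrative): a 4-byte store at offset -8
   from a frame group calls set_usage_bits (group, -8, 4, expr) and
   sets bits 8, 7, 6 and 5 of store1_n on the first such store (store2_n
   on a second store to the same bytes), since negative offsets are
   indexed by their negation.  offset_map_size_n grows to 8, which lets
   step 2 size the offset map.  */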
static void
reset_active_stores (void)
{
  active_local_stores = NULL;
  active_local_stores_len = 0;
}

/* Free all READ_REC of the LAST_INSN of BB_INFO.  */

static void
free_read_records (bb_info_t bb_info)
{
  insn_info_t insn_info = bb_info->last_insn;
  read_info_t *ptr = &insn_info->read_rec;
  while (*ptr)
    {
      read_info_t next = (*ptr)->next;
      if ((*ptr)->alias_set == 0)
	{
	  pool_free (read_info_pool, *ptr);
	  *ptr = next;
	}
      else
	ptr = &(*ptr)->next;
    }
}

/* Set the BB_INFO so that the last insn is marked as a wild read.  */

static void
add_wild_read (bb_info_t bb_info)
{
  insn_info_t insn_info = bb_info->last_insn;
  insn_info->wild_read = true;
  free_read_records (bb_info);
  reset_active_stores ();
}

/* Set the BB_INFO so that the last insn is marked as a wild read of
   non-frame locations.  */

static void
add_non_frame_wild_read (bb_info_t bb_info)
{
  insn_info_t insn_info = bb_info->last_insn;
  insn_info->non_frame_wild_read = true;
  free_read_records (bb_info);
  reset_active_stores ();
}
/* Return true if X is a constant or one of the registers that behave
   as a constant over the life of a function.  This is equivalent to
   !rtx_varies_p for memory addresses.  */

static bool
const_or_frame_p (rtx x)
{
  if (CONSTANT_P (x))
    return true;

  if (GET_CODE (x) == REG)
    {
      /* Note that we have to test for the actual rtx used for the frame
	 and arg pointers and not just the register number in case we have
	 eliminated the frame and/or arg pointer and are using it
	 for pseudos.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
	  /* The arg pointer varies if it is not a fixed register.  */
	  || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
	  || x == pic_offset_table_rtx)
	return true;
      return false;
    }

  return false;
}
/* Take all reasonable action to put the address of MEM into the form
   that we can do analysis on.

   The gold standard is to get the address into the form: address +
   OFFSET where address is something that rtx_varies_p considers a
   constant.  When we can get the address in this form, we can do
   global analysis on it.  Note that for constant bases, address is
   not actually returned, only the group_id.  The address can be
   obtained from that.

   If that fails, we try cselib to get a value we can at least use
   locally.  If that fails we return false.

   The GROUP_ID is set to -1 for cselib bases and the index of the
   group for non_varying bases.

   FOR_READ is true if this is a mem read and false if not.  */

static bool
canon_address (rtx mem,
	       alias_set_type *alias_set_out,
	       int *group_id,
	       HOST_WIDE_INT *offset,
	       cselib_val **base)
{
  machine_mode address_mode = get_address_mode (mem);
  rtx mem_address = XEXP (mem, 0);
  rtx expanded_address, address;
  int expanded;

  *alias_set_out = 0;

  cselib_lookup (mem_address, address_mode, 1, GET_MODE (mem));

  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "  mem: ");
      print_inline_rtx (dump_file, mem_address, 0);
      fprintf (dump_file, "\n");
    }

  /* First see if just canon_rtx (mem_address) is const or frame,
     if not, try cselib_expand_value_rtx and call canon_rtx on that.  */
  address = NULL_RTX;
  for (expanded = 0; expanded < 2; expanded++)
    {
      if (expanded)
	{
	  /* Use cselib to replace all of the reg references with the full
	     expression.  This will take care of the case where we have

	     r_x = base + offset;
	     val = *r_x;

	     by making it into

	     val = *(base + offset);  */

	  expanded_address = cselib_expand_value_rtx (mem_address,
						      scratch, 5);

	  /* If this fails, just go with the address from first
	     iteration.  */
	  if (!expanded_address)
	    break;
	}
      else
	expanded_address = mem_address;

      /* Split the address into canonical BASE + OFFSET terms.  */
      address = canon_rtx (expanded_address);

      *offset = 0;

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  if (expanded)
	    {
	      fprintf (dump_file, "\n after cselib_expand address: ");
	      print_inline_rtx (dump_file, expanded_address, 0);
	      fprintf (dump_file, "\n");
	    }

	  fprintf (dump_file, "\n after canon_rtx address: ");
	  print_inline_rtx (dump_file, address, 0);
	  fprintf (dump_file, "\n");
	}

      if (GET_CODE (address) == CONST)
	address = XEXP (address, 0);

      if (GET_CODE (address) == PLUS
	  && CONST_INT_P (XEXP (address, 1)))
	{
	  *offset = INTVAL (XEXP (address, 1));
	  address = XEXP (address, 0);
	}

      if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (mem))
	  && const_or_frame_p (address))
	{
	  group_info_t group = get_group_info (address);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "  gid=%d offset=%d \n",
		     group->id, (int)*offset);
	  *base = NULL;
	  *group_id = group->id;
	  return true;
	}
    }

  *base = cselib_lookup (address, address_mode, true, GET_MODE (mem));
  *group_id = -1;

  if (*base == NULL)
    {
      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " no cselib val - should be a wild read.\n");
      return false;
    }
  if (dump_file && (dump_flags & TDF_DETAILS))
    fprintf (dump_file, "  varying cselib base=%u:%u offset = %d\n",
	     (*base)->uid, (*base)->hash, (int)*offset);
  return true;
}
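
/* An illustrative run (a sketch, not actual dump output): for a mem
   whose address is (plus (reg/f frame_pointer) (const_int -4)),
   canon_address splits off the constant to get *offset == -4, sees
   that the frame pointer satisfies const_or_frame_p, and returns the
   frame group's id with *base == NULL.  For (plus (reg r100)
   (const_int 8)) it instead returns *group_id == -1 and the cselib
   value of the stripped base r100 in *base, which only the local pass
   can use.  */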
/* Clear the rhs field from the active_local_stores array.  */

static void
clear_rhs_from_active_local_stores (void)
{
  insn_info_t ptr = active_local_stores;

  while (ptr)
    {
      store_info_t store_info = ptr->store_rec;
      /* Skip the clobbers.  */
      while (!store_info->is_set)
	store_info = store_info->next;

      store_info->rhs = NULL;
      store_info->const_rhs = NULL;

      ptr = ptr->next_local_store;
    }
}
/* Mark the byte at offset POS from the beginning of store S_INFO as
   unneeded.  */

static inline void
set_position_unneeded (store_info_t s_info, int pos)
{
  if (__builtin_expect (s_info->is_large, false))
    {
      if (bitmap_set_bit (s_info->positions_needed.large.bmap, pos))
	s_info->positions_needed.large.count++;
    }
  else
    s_info->positions_needed.small_bitmask
      &= ~(((unsigned HOST_WIDE_INT) 1) << pos);
}

/* Mark the whole store S_INFO as unneeded.  */

static inline void
set_all_positions_unneeded (store_info_t s_info)
{
  if (__builtin_expect (s_info->is_large, false))
    {
      int pos, end = s_info->end - s_info->begin;
      for (pos = 0; pos < end; pos++)
	bitmap_set_bit (s_info->positions_needed.large.bmap, pos);
      s_info->positions_needed.large.count = end;
    }
  else
    s_info->positions_needed.small_bitmask = (unsigned HOST_WIDE_INT) 0;
}

/* Return TRUE if any bytes from S_INFO store are needed.  */

static inline bool
any_positions_needed_p (store_info_t s_info)
{
  if (__builtin_expect (s_info->is_large, false))
    return (s_info->positions_needed.large.count
	    < s_info->end - s_info->begin);
  else
    return (s_info->positions_needed.small_bitmask
	    != (unsigned HOST_WIDE_INT) 0);
}

/* Return TRUE if all bytes START through START+WIDTH-1 from S_INFO
   store are needed.  */

static inline bool
all_positions_needed_p (store_info_t s_info, int start, int width)
{
  if (__builtin_expect (s_info->is_large, false))
    {
      int end = start + width;
      while (start < end)
	if (bitmap_bit_p (s_info->positions_needed.large.bmap, start++))
	  return false;
      return true;
    }
  else
    {
      unsigned HOST_WIDE_INT mask = lowpart_bitmask (width) << start;
      return (s_info->positions_needed.small_bitmask & mask) == mask;
    }
}
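
/* Example of the small-bitmask case (values illustrative): a 4-byte
   store has small_bitmask 0xf after record_store.  Then

     all_positions_needed_p (s_info, 1, 2)

   tests mask 0x6 (lowpart_bitmask (2) << 1) and returns true, while
   after set_position_unneeded (s_info, 2) the bitmask becomes 0xb
   and the same query returns false.  */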
static rtx get_stored_val (store_info_t, machine_mode, HOST_WIDE_INT,
			   HOST_WIDE_INT, basic_block, bool);


/* BODY is an instruction pattern that belongs to INSN.  Return 1 if
   there is a candidate store, after adding it to the appropriate
   local store group if so.  */

static int
record_store (rtx body, bb_info_t bb_info)
{
  rtx mem, rhs, const_rhs, mem_addr;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT width = 0;
  alias_set_type spill_alias_set;
  insn_info_t insn_info = bb_info->last_insn;
  store_info_t store_info = NULL;
  int group_id;
  cselib_val *base = NULL;
  insn_info_t ptr, last, redundant_reason;
  bool store_is_unused;

  if (GET_CODE (body) != SET && GET_CODE (body) != CLOBBER)
    return 0;

  mem = SET_DEST (body);

  /* If this is not used, then this cannot be used to keep the insn
     from being deleted.  On the other hand, it does provide something
     that can be used to prove that another store is dead.  */
  store_is_unused
    = (find_reg_note (insn_info->insn, REG_UNUSED, mem) != NULL);

  /* Check whether that value is a suitable memory location.  */
  if (!MEM_P (mem))
    {
      /* If the set or clobber is unused, then it does not affect our
	 ability to get rid of the entire insn.  */
      if (!store_is_unused)
	insn_info->cannot_delete = true;
      return 0;
    }

  /* At this point we know mem is a mem.  */
  if (GET_MODE (mem) == BLKmode)
    {
      if (GET_CODE (XEXP (mem, 0)) == SCRATCH)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, " adding wild read for (clobber (mem:BLK (scratch))\n");
	  add_wild_read (bb_info);
	  insn_info->cannot_delete = true;
	  return 0;
	}
      /* Handle (set (mem:BLK (addr) [... S36 ...]) (const_int 0))
	 as memset (addr, 0, 36);  */
      else if (!MEM_SIZE_KNOWN_P (mem)
	       || MEM_SIZE (mem) <= 0
	       || MEM_SIZE (mem) > MAX_OFFSET
	       || GET_CODE (body) != SET
	       || !CONST_INT_P (SET_SRC (body)))
	{
	  if (!store_is_unused)
	    {
	      /* If the set or clobber is unused, then it does not affect our
		 ability to get rid of the entire insn.  */
	      insn_info->cannot_delete = true;
	      clear_rhs_from_active_local_stores ();
	    }
	  return 0;
	}
    }
  /* We can still process a volatile mem, we just cannot delete it.  */
  if (MEM_VOLATILE_P (mem))
    insn_info->cannot_delete = true;

  if (!canon_address (mem, &spill_alias_set, &group_id, &offset, &base))
    {
      clear_rhs_from_active_local_stores ();
      return 0;
    }

  if (GET_MODE (mem) == BLKmode)
    width = MEM_SIZE (mem);
  else
    width = GET_MODE_SIZE (GET_MODE (mem));

  if (spill_alias_set)
    {
      bitmap store1 = clear_alias_group->store1_p;
      bitmap store2 = clear_alias_group->store2_p;

      gcc_assert (GET_MODE (mem) != BLKmode);

      if (!bitmap_set_bit (store1, spill_alias_set))
	bitmap_set_bit (store2, spill_alias_set);

      if (clear_alias_group->offset_map_size_p < spill_alias_set)
	clear_alias_group->offset_map_size_p = spill_alias_set;

      store_info = (store_info_t) pool_alloc (rtx_store_info_pool);

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " processing spill store %d(%s)\n",
		 (int) spill_alias_set, GET_MODE_NAME (GET_MODE (mem)));
    }
  else if (group_id >= 0)
    {
      /* In the restrictive case where the base is a constant or the
	 frame pointer we can do global analysis.  */

      group_info_t group
	= rtx_group_vec[group_id];
      tree expr = MEM_EXPR (mem);

      store_info = (store_info_t) pool_alloc (rtx_store_info_pool);
      set_usage_bits (group, offset, width, expr);

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " processing const base store gid=%d[%d..%d)\n",
		 group_id, (int)offset, (int)(offset+width));
    }
  else
    {
      if (may_be_sp_based_p (XEXP (mem, 0)))
	insn_info->stack_pointer_based = true;
      insn_info->contains_cselib_groups = true;

      store_info = (store_info_t) pool_alloc (cse_store_info_pool);
      group_id = -1;

      if (dump_file && (dump_flags & TDF_DETAILS))
	fprintf (dump_file, " processing cselib store [%d..%d)\n",
		 (int)offset, (int)(offset+width));
    }

  const_rhs = rhs = NULL_RTX;
  if (GET_CODE (body) == SET
      /* No place to keep the value after ra.  */
      && !reload_completed
      && (REG_P (SET_SRC (body))
	  || GET_CODE (SET_SRC (body)) == SUBREG
	  || CONSTANT_P (SET_SRC (body)))
      && !MEM_VOLATILE_P (mem)
      /* Sometimes the store and reload is used for truncation and
	 rounding.  */
      && !(FLOAT_MODE_P (GET_MODE (mem)) && (flag_float_store)))
    {
      rhs = SET_SRC (body);
      if (CONSTANT_P (rhs))
	const_rhs = rhs;
      else if (body == PATTERN (insn_info->insn))
	{
	  rtx tem = find_reg_note (insn_info->insn, REG_EQUAL, NULL_RTX);
	  if (tem && CONSTANT_P (XEXP (tem, 0)))
	    const_rhs = XEXP (tem, 0);
	}
      if (const_rhs == NULL_RTX && REG_P (rhs))
	{
	  rtx tem = cselib_expand_value_rtx (rhs, scratch, 5);

	  if (tem && CONSTANT_P (tem))
	    const_rhs = tem;
	}
    }
  /* Check to see if this store causes some other stores to be
     dead.  */
  ptr = active_local_stores;
  last = NULL;
  redundant_reason = NULL;
  mem = canon_rtx (mem);
  /* For alias_set != 0 canon_true_dependence should be never called.  */
  if (spill_alias_set)
    mem_addr = NULL_RTX;
  else
    {
      if (group_id < 0)
	mem_addr = base->val_rtx;
      else
	{
	  group_info_t group
	    = rtx_group_vec[group_id];
	  mem_addr = group->canon_base_addr;
	}
      if (offset)
	mem_addr = plus_constant (get_address_mode (mem), mem_addr, offset);
    }

  while (ptr)
    {
      insn_info_t next = ptr->next_local_store;
      store_info_t s_info = ptr->store_rec;
      bool del = true;

      /* Skip the clobbers.  We delete the active insn if this insn
	 shadows the set.  To have been put on the active list, it
	 has exactly one set.  */
      while (!s_info->is_set)
	s_info = s_info->next;

      if (s_info->alias_set != spill_alias_set)
	del = false;
      else if (s_info->alias_set)
	{
	  struct clear_alias_mode_holder *entry
	    = clear_alias_set_lookup (s_info->alias_set);
	  /* Generally, spills cannot be processed if any of the
	     references to the slot have a different mode.  But if
	     we are in the same block and mode is exactly the same
	     between this store and one before in the same block,
	     we can still delete it.  */
	  if ((GET_MODE (mem) == GET_MODE (s_info->mem))
	      && (GET_MODE (mem) == entry->mode))
	    {
	      del = true;
	      set_all_positions_unneeded (s_info);
	    }
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "    trying spill store in insn=%d alias_set=%d\n",
		     INSN_UID (ptr->insn), (int) s_info->alias_set);
	}
      else if ((s_info->group_id == group_id)
	       && (s_info->cse_base == base))
	{
	  HOST_WIDE_INT i;
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file, "    trying store in insn=%d gid=%d[%d..%d)\n",
		     INSN_UID (ptr->insn), s_info->group_id,
		     (int)s_info->begin, (int)s_info->end);

	  /* Even if PTR won't be eliminated as unneeded, if both
	     PTR and this insn store the same constant value, we might
	     eliminate this insn instead.  */
	  if (s_info->const_rhs
	      && const_rhs
	      && offset >= s_info->begin
	      && offset + width <= s_info->end
	      && all_positions_needed_p (s_info, offset - s_info->begin,
					 width))
	    {
	      if (GET_MODE (mem) == BLKmode)
		{
		  if (GET_MODE (s_info->mem) == BLKmode
		      && s_info->const_rhs == const_rhs)
		    redundant_reason = ptr;
		}
	      else if (s_info->const_rhs == const0_rtx
		       && const_rhs == const0_rtx)
		redundant_reason = ptr;
	      else
		{
		  rtx val;
		  start_sequence ();
		  val = get_stored_val (s_info, GET_MODE (mem),
					offset, offset + width,
					BLOCK_FOR_INSN (insn_info->insn),
					true);
		  if (get_insns () != NULL)
		    val = NULL_RTX;
		  end_sequence ();
		  if (val && rtx_equal_p (val, const_rhs))
		    redundant_reason = ptr;
		}
	    }

	  for (i = MAX (offset, s_info->begin);
	       i < offset + width && i < s_info->end;
	       i++)
	    set_position_unneeded (s_info, i - s_info->begin);
	}
      else if (s_info->rhs)
	/* Need to see if it is possible for this store to overwrite
	   the value of store_info.  If it is, set the rhs to NULL to
	   keep it from being used to remove a load.  */
	{
	  if (canon_true_dependence (s_info->mem,
				     GET_MODE (s_info->mem),
				     s_info->mem_addr,
				     mem, mem_addr))
	    {
	      s_info->rhs = NULL;
	      s_info->const_rhs = NULL;
	    }
	}

      /* An insn can be deleted if every position of every one of
	 its s_infos is zero.  */
      if (any_positions_needed_p (s_info))
	del = false;

      if (del)
	{
	  insn_info_t insn_to_delete = ptr;

	  active_local_stores_len--;
	  if (last)
	    last->next_local_store = ptr->next_local_store;
	  else
	    active_local_stores = ptr->next_local_store;

	  if (!insn_to_delete->cannot_delete)
	    delete_dead_store_insn (insn_to_delete);
	}
      else
	last = ptr;

      ptr = next;
    }
  /* Finish filling in the store_info.  */
  store_info->next = insn_info->store_rec;
  insn_info->store_rec = store_info;
  store_info->mem = mem;
  store_info->alias_set = spill_alias_set;
  store_info->mem_addr = mem_addr;
  store_info->cse_base = base;
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      store_info->is_large = true;
      store_info->positions_needed.large.count = 0;
      store_info->positions_needed.large.bmap = BITMAP_ALLOC (&dse_bitmap_obstack);
    }
  else
    {
      store_info->is_large = false;
      store_info->positions_needed.small_bitmask = lowpart_bitmask (width);
    }
  store_info->group_id = group_id;
  store_info->begin = offset;
  store_info->end = offset + width;
  store_info->is_set = GET_CODE (body) == SET;
  store_info->rhs = rhs;
  store_info->const_rhs = const_rhs;
  store_info->redundant_reason = redundant_reason;

  /* If this is a clobber, we return 0.  We will only be able to
     delete this insn if there is only one USED store, but we
     can use the clobber to delete other stores earlier.  */
  return store_info->is_set ? 1 : 0;
}
static void
dump_insn_info (const char * start, insn_info_t insn_info)
{
  fprintf (dump_file, "%s insn=%d %s\n", start,
	   INSN_UID (insn_info->insn),
	   insn_info->store_rec ? "has store" : "naked");
}
1734 /* If the modes are different and the value's source and target do not
1735 line up, we need to extract the value from lower part of the rhs of
1736 the store, shift it, and then put it into a form that can be shoved
1737 into the read_insn. This function generates a right SHIFT of a
1738 value that is at least ACCESS_SIZE bytes wide of READ_MODE. The
1739 shift sequence is returned or NULL if we failed to find a
1740 shift. */
1742 static rtx
1743 find_shift_sequence (int access_size,
1744 store_info_t store_info,
1745 machine_mode read_mode,
1746 int shift, bool speed, bool require_cst)
1748 machine_mode store_mode = GET_MODE (store_info->mem);
1749 machine_mode new_mode;
1750 rtx read_reg = NULL;
1752 /* Some machines like the x86 have shift insns for each size of
1753 operand. Other machines like the ppc or the ia-64 may only have
1754 shift insns that shift values within 32 or 64 bit registers.
1755 This loop tries to find the smallest shift insn that will right
1756 justify the value we want to read but is available in one insn on
1757 the machine. */
1759 for (new_mode = smallest_mode_for_size (access_size * BITS_PER_UNIT,
1760 MODE_INT);
1761 GET_MODE_BITSIZE (new_mode) <= BITS_PER_WORD;
1762 new_mode = GET_MODE_WIDER_MODE (new_mode))
1764 rtx target, new_reg, new_lhs;
1765 rtx_insn *shift_seq, *insn;
1766 int cost;
1768 /* If a constant was stored into memory, try to simplify it here,
1769 otherwise the cost of the shift might preclude this optimization
1770 e.g. at -Os, even when no actual shift will be needed. */
1771 if (store_info->const_rhs)
1773 unsigned int byte = subreg_lowpart_offset (new_mode, store_mode);
1774 rtx ret = simplify_subreg (new_mode, store_info->const_rhs,
1775 store_mode, byte);
1776 if (ret && CONSTANT_P (ret))
1778 ret = simplify_const_binary_operation (LSHIFTRT, new_mode,
1779 ret, GEN_INT (shift));
1780 if (ret && CONSTANT_P (ret))
1782 byte = subreg_lowpart_offset (read_mode, new_mode);
1783 ret = simplify_subreg (read_mode, ret, new_mode, byte);
1784 if (ret && CONSTANT_P (ret)
1785 && set_src_cost (ret, speed) <= COSTS_N_INSNS (1))
1786 return ret;
1791 if (require_cst)
1792 return NULL_RTX;
1794 /* Try a wider mode if truncating the store mode to NEW_MODE
1795 requires a real instruction. */
1796 if (GET_MODE_BITSIZE (new_mode) < GET_MODE_BITSIZE (store_mode)
1797 && !TRULY_NOOP_TRUNCATION_MODES_P (new_mode, store_mode))
1798 continue;
1800 /* Also try a wider mode if the necessary punning is either not
1801 desirable or not possible. */
1802 if (!CONSTANT_P (store_info->rhs)
1803 && !MODES_TIEABLE_P (new_mode, store_mode))
1804 continue;
1806 new_reg = gen_reg_rtx (new_mode);
1808 start_sequence ();
1810 /* In theory we could also check for an ashr. Ian Taylor knows
1811 of one dsp where the cost of these two was not the same. But
1812 this really is a rare case anyway. */
1813 target = expand_binop (new_mode, lshr_optab, new_reg,
1814 GEN_INT (shift), new_reg, 1, OPTAB_DIRECT);
1816 shift_seq = get_insns ();
1817 end_sequence ();
1819 if (target != new_reg || shift_seq == NULL)
1820 continue;
1822 cost = 0;
1823 for (insn = shift_seq; insn != NULL_RTX; insn = NEXT_INSN (insn))
1824 if (INSN_P (insn))
1825 cost += insn_rtx_cost (PATTERN (insn), speed);
1827 /* The computation up to here is essentially independent
1828 of the arguments and could be precomputed. It may
1829 not be worth doing so. We could precompute if
1830 worthwhile or at least cache the results. The result
1831 technically depends on both SHIFT and ACCESS_SIZE,
1832 but in practice the answer will depend only on ACCESS_SIZE. */
1834 if (cost > COSTS_N_INSNS (1))
1835 continue;
1837 new_lhs = extract_low_bits (new_mode, store_mode,
1838 copy_rtx (store_info->rhs));
1839 if (new_lhs == NULL_RTX)
1840 continue;
1842 /* We found an acceptable shift. Generate a move to
1843 take the value from the store and put it into the
1844 shift pseudo, then shift it, then generate another
1845 move to put it into the target of the read. */
1846 emit_move_insn (new_reg, new_lhs);
1847 emit_insn (shift_seq);
1848 read_reg = extract_low_bits (read_mode, new_mode, new_reg);
1849 break;
1852 return read_reg;
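/* Worked example for the loop above (an illustrative sketch with
   hypothetical values): on a little-endian 64-bit target, a DImode
   store covering bytes [0, 8) followed by an SImode read of bytes
   [4, 8) gives shift == 32 and access_size == 8, so the search starts
   at DImode and asks expand_binop for a single-insn

     (set (reg:DI tmp) (lshiftrt:DI (reg:DI tmp) (const_int 32)))

   whose low SImode bits are the read value.  If no mode up to
   BITS_PER_WORD yields a shift costing at most COSTS_N_INSNS (1),
   NULL is returned and the caller keeps the load.  */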
1856 /* Callback for note_stores to find the hard regs set or clobbered by
1857 insn. Data is a bitmap of the hardregs set so far. */
1859 static void
1860 look_for_hardregs (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
1862 bitmap regs_set = (bitmap) data;
1864 if (REG_P (x)
1865 && HARD_REGISTER_P (x))
1867 unsigned int regno = REGNO (x);
1868 bitmap_set_range (regs_set, regno,
1869 hard_regno_nregs[regno][GET_MODE (x)]);
1873 /* Helper function for replace_read and record_store.
1874 Attempt to return a value stored in STORE_INFO, from READ_BEGIN
1875 to one before READ_END bytes read in READ_MODE. Return NULL
1876 if not successful. If REQUIRE_CST is true, only return a constant. */
1878 static rtx
1879 get_stored_val (store_info_t store_info, machine_mode read_mode,
1880 HOST_WIDE_INT read_begin, HOST_WIDE_INT read_end,
1881 basic_block bb, bool require_cst)
1883 machine_mode store_mode = GET_MODE (store_info->mem);
1884 int shift;
1885 int access_size; /* In bytes. */
1886 rtx read_reg;
1888 /* To get here the read is within the boundaries of the write so
1889 shift will never be negative. Start out with the shift being in
1890 bytes. */
1891 if (store_mode == BLKmode)
1892 shift = 0;
1893 else if (BYTES_BIG_ENDIAN)
1894 shift = store_info->end - read_end;
1895 else
1896 shift = read_begin - store_info->begin;
1898 access_size = shift + GET_MODE_SIZE (read_mode);
1900 /* From now on the shift is in bits. */
1901 shift *= BITS_PER_UNIT;
1903 if (shift)
1904 read_reg = find_shift_sequence (access_size, store_info, read_mode, shift,
1905 optimize_bb_for_speed_p (bb),
1906 require_cst);
1907 else if (store_mode == BLKmode)
1909 /* The store is a memset (addr, const_val, const_size). */
1910 gcc_assert (CONST_INT_P (store_info->rhs));
1911 store_mode = int_mode_for_mode (read_mode);
1912 if (store_mode == BLKmode)
1913 read_reg = NULL_RTX;
1914 else if (store_info->rhs == const0_rtx)
1915 read_reg = extract_low_bits (read_mode, store_mode, const0_rtx);
1916 else if (GET_MODE_BITSIZE (store_mode) > HOST_BITS_PER_WIDE_INT
1917 || BITS_PER_UNIT >= HOST_BITS_PER_WIDE_INT)
1918 read_reg = NULL_RTX;
1919 else
1921 unsigned HOST_WIDE_INT c
1922 = INTVAL (store_info->rhs)
1923 & (((HOST_WIDE_INT) 1 << BITS_PER_UNIT) - 1);
1924 int shift = BITS_PER_UNIT;
1925 while (shift < HOST_BITS_PER_WIDE_INT)
1927 c |= (c << shift);
1928 shift <<= 1;
1930 read_reg = gen_int_mode (c, store_mode);
1931 read_reg = extract_low_bits (read_mode, store_mode, read_reg);
1934 else if (store_info->const_rhs
1935 && (require_cst
1936 || GET_MODE_CLASS (read_mode) != GET_MODE_CLASS (store_mode)))
1937 read_reg = extract_low_bits (read_mode, store_mode,
1938 copy_rtx (store_info->const_rhs));
1939 else
1940 read_reg = extract_low_bits (read_mode, store_mode,
1941 copy_rtx (store_info->rhs));
1942 if (require_cst && read_reg && !CONSTANT_P (read_reg))
1943 read_reg = NULL_RTX;
1944 return read_reg;
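/* Two illustrative cases for the code above (hypothetical values):

   - Shift selection: with !BYTES_BIG_ENDIAN, a store to [0, 8) and a
     read of [4, 8) give shift = (4 - 0) * BITS_PER_UNIT = 32 bits and
     access_size = 4 + 4 = 8 bytes, which are handed to
     find_shift_sequence.

   - memset replication: for a BLKmode store from memset (p, 0x5A, n)
     read back in SImode, the doubling loop widens c from 0x5A to
     0x5A5A, 0x5A5A5A5A, ... and gen_int_mode then truncates the
     replicated constant to the read's integer mode.  */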
1947 /* Take a sequence of:
1948 A <- r1
1950 ... <- A
1952 and change it into
1953 r2 <- r1
1954 A <- r1
1956 ... <- r2
1958 or
1960 r3 <- extract (r1)
1961 r3 <- r3 >> shift
1962 r2 <- extract (r3)
1963 ... <- r2
1965 or
1967 r2 <- extract (r1)
1968 ... <- r2
1970 Depending on the alignment and the mode of the store and
1971 subsequent load.
1974 The STORE_INFO and STORE_INSN are for the store and READ_INFO
1975 and READ_INSN are for the read. Return true if the replacement
1976 went ok. */
1978 static bool
1979 replace_read (store_info_t store_info, insn_info_t store_insn,
1980 read_info_t read_info, insn_info_t read_insn, rtx *loc,
1981 bitmap regs_live)
1983 machine_mode store_mode = GET_MODE (store_info->mem);
1984 machine_mode read_mode = GET_MODE (read_info->mem);
1985 rtx_insn *insns, *this_insn;
1986 rtx read_reg;
1987 basic_block bb;
1989 if (!dbg_cnt (dse))
1990 return false;
1992 /* Create a sequence of instructions to set up the read register.
1993 This sequence goes immediately before the store and its result
1994 is read by the load.
1996 We need to keep this in perspective. We are replacing a read
1997 with a sequence of insns, but the read will almost certainly be
1998 in cache, so it is not going to be an expensive one. Thus, we
1999 are not willing to do a multi insn shift or worse a subroutine
2000 call to get rid of the read. */
2001 if (dump_file && (dump_flags & TDF_DETAILS))
2002 fprintf (dump_file, "trying to replace %smode load in insn %d"
2003 " from %smode store in insn %d\n",
2004 GET_MODE_NAME (read_mode), INSN_UID (read_insn->insn),
2005 GET_MODE_NAME (store_mode), INSN_UID (store_insn->insn));
2006 start_sequence ();
2007 bb = BLOCK_FOR_INSN (read_insn->insn);
2008 read_reg = get_stored_val (store_info,
2009 read_mode, read_info->begin, read_info->end,
2010 bb, false);
2011 if (read_reg == NULL_RTX)
2013 end_sequence ();
2014 if (dump_file && (dump_flags & TDF_DETAILS))
2015 fprintf (dump_file, " -- could not extract bits of stored value\n");
2016 return false;
2018 /* Force the value into a new register so that it won't be clobbered
2019 between the store and the load. */
2020 read_reg = copy_to_mode_reg (read_mode, read_reg);
2021 insns = get_insns ();
2022 end_sequence ();
2024 if (insns != NULL_RTX)
2026 /* Now we have to scan the set of new instructions to see if the
2027 sequence contains any sets of hardregs that happened to be
2028 live at this point. For instance, this can happen if one of
2029 the insns sets the CC and the CC happened to be live at that
2030 point. This does occasionally happen, see PR 37922. */
2031 bitmap regs_set = BITMAP_ALLOC (&reg_obstack);
2033 for (this_insn = insns; this_insn != NULL_RTX; this_insn = NEXT_INSN (this_insn))
2034 note_stores (PATTERN (this_insn), look_for_hardregs, regs_set);
2036 bitmap_and_into (regs_set, regs_live);
2037 if (!bitmap_empty_p (regs_set))
2039 if (dump_file && (dump_flags & TDF_DETAILS))
2041 fprintf (dump_file,
2042 "abandoning replacement because sequence clobbers live hardregs:");
2043 df_print_regset (dump_file, regs_set);
2046 BITMAP_FREE (regs_set);
2047 return false;
2049 BITMAP_FREE (regs_set);
2052 if (validate_change (read_insn->insn, loc, read_reg, 0))
2054 deferred_change_t deferred_change =
2055 (deferred_change_t) pool_alloc (deferred_change_pool);
2057 /* Insert this right before the store insn where it will be safe
2058 from later insns that might change it before the read. */
2059 emit_insn_before (insns, store_insn->insn);
2061 /* And now for the kludge part: cselib croaks if you just
2062 return at this point. There are two reasons for this:
2064 1) Cselib has an idea of how many pseudos there are and
2065 that does not include the new ones we just added.
2067 2) Cselib does not know about the move insn we added
2068 above the store_info, and there is no way to tell it
2069 about it, because it has "moved on".
2071 Problem (1) is fixable with a certain amount of engineering.
2072 Problem (2) requires starting the bb from scratch. This
2073 could be expensive.
2075 So we are just going to have to lie. The move/extraction
2076 insns are not really an issue, cselib did not see them. But
2077 the use of the new pseudo read_insn is a real problem because
2078 cselib has not scanned this insn. The way that we solve this
2079 problem is that we are just going to put the mem back for now
2080 and when we are finished with the block, we undo this. We
2081 keep a table of mems to get rid of. At the end of the basic
2082 block we can put them back. */
2084 *loc = read_info->mem;
2085 deferred_change->next = deferred_change_list;
2086 deferred_change_list = deferred_change;
2087 deferred_change->loc = loc;
2088 deferred_change->reg = read_reg;
2090 /* Get rid of the read_info, from the point of view of the
2091 rest of dse, pretend this read never happened. */
2092 read_insn->read_rec = read_info->next;
2093 pool_free (read_info_pool, read_info);
2094 if (dump_file && (dump_flags & TDF_DETAILS))
2096 fprintf (dump_file, " -- replaced the loaded MEM with ");
2097 print_simple_rtl (dump_file, read_reg);
2098 fprintf (dump_file, "\n");
2100 return true;
2102 else
2104 if (dump_file && (dump_flags & TDF_DETAILS))
2106 fprintf (dump_file, " -- replacing the loaded MEM with ");
2107 print_simple_rtl (dump_file, read_reg);
2108 fprintf (dump_file, " led to an invalid instruction\n");
2110 return false;
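/* Illustration of the rewrite (a sketch, not from the original
   source): given

     A: (set (mem:SI addr) (reg:SI r1))
     B: (set (reg:SI r4) (mem:SI addr))

   the sequence "r2 <- r1" (plus any shift/extract insns) is emitted
   immediately before A and B is changed to read r2.  Because cselib
   has already scanned B, the MEM is temporarily put back and the r2
   substitution is replayed from deferred_change_list once the block
   is finished.  */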
2114 /* Check the address of MEM *LOC and kill any appropriate stores that may
2115 be active. */
2117 static void
2118 check_mem_read_rtx (rtx *loc, bb_info_t bb_info)
2120 rtx mem = *loc, mem_addr;
2121 insn_info_t insn_info;
2122 HOST_WIDE_INT offset = 0;
2123 HOST_WIDE_INT width = 0;
2124 alias_set_type spill_alias_set = 0;
2125 cselib_val *base = NULL;
2126 int group_id;
2127 read_info_t read_info;
2129 insn_info = bb_info->last_insn;
2131 if ((MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
2132 || (MEM_VOLATILE_P (mem)))
2134 if (dump_file && (dump_flags & TDF_DETAILS))
2135 fprintf (dump_file, " adding wild read, volatile or barrier.\n");
2136 add_wild_read (bb_info);
2137 insn_info->cannot_delete = true;
2138 return;
2141 /* If it is reading readonly mem, then there can be no conflict with
2142 another write. */
2143 if (MEM_READONLY_P (mem))
2144 return;
2146 if (!canon_address (mem, &spill_alias_set, &group_id, &offset, &base))
2148 if (dump_file && (dump_flags & TDF_DETAILS))
2149 fprintf (dump_file, " adding wild read, canon_address failure.\n");
2150 add_wild_read (bb_info);
2151 return;
2154 if (GET_MODE (mem) == BLKmode)
2155 width = -1;
2156 else
2157 width = GET_MODE_SIZE (GET_MODE (mem));
2159 read_info = (read_info_t) pool_alloc (read_info_pool);
2160 read_info->group_id = group_id;
2161 read_info->mem = mem;
2162 read_info->alias_set = spill_alias_set;
2163 read_info->begin = offset;
2164 read_info->end = offset + width;
2165 read_info->next = insn_info->read_rec;
2166 insn_info->read_rec = read_info;
2167 /* For alias_set != 0, canon_true_dependence should never be called. */
2168 if (spill_alias_set)
2169 mem_addr = NULL_RTX;
2170 else
2172 if (group_id < 0)
2173 mem_addr = base->val_rtx;
2174 else
2176 group_info_t group
2177 = rtx_group_vec[group_id];
2178 mem_addr = group->canon_base_addr;
2180 if (offset)
2181 mem_addr = plus_constant (get_address_mode (mem), mem_addr, offset);
2184 /* We ignore the clobbers in store_info. This is mildly aggressive,
2185 but there really should not be a clobber followed by a read. */
2187 if (spill_alias_set)
2189 insn_info_t i_ptr = active_local_stores;
2190 insn_info_t last = NULL;
2192 if (dump_file && (dump_flags & TDF_DETAILS))
2193 fprintf (dump_file, " processing spill load %d\n",
2194 (int) spill_alias_set);
2196 while (i_ptr)
2198 store_info_t store_info = i_ptr->store_rec;
2200 /* Skip the clobbers. */
2201 while (!store_info->is_set)
2202 store_info = store_info->next;
2204 if (store_info->alias_set == spill_alias_set)
2206 if (dump_file && (dump_flags & TDF_DETAILS))
2207 dump_insn_info ("removing from active", i_ptr);
2209 active_local_stores_len--;
2210 if (last)
2211 last->next_local_store = i_ptr->next_local_store;
2212 else
2213 active_local_stores = i_ptr->next_local_store;
2215 else
2216 last = i_ptr;
2217 i_ptr = i_ptr->next_local_store;
2220 else if (group_id >= 0)
2222 /* This is the restricted case where the base is a constant or
2223 the frame pointer and offset is a constant. */
2224 insn_info_t i_ptr = active_local_stores;
2225 insn_info_t last = NULL;
2227 if (dump_file && (dump_flags & TDF_DETAILS))
2229 if (width == -1)
2230 fprintf (dump_file, " processing const load gid=%d[BLK]\n",
2231 group_id);
2232 else
2233 fprintf (dump_file, " processing const load gid=%d[%d..%d)\n",
2234 group_id, (int)offset, (int)(offset+width));
2237 while (i_ptr)
2239 bool remove = false;
2240 store_info_t store_info = i_ptr->store_rec;
2242 /* Skip the clobbers. */
2243 while (!store_info->is_set)
2244 store_info = store_info->next;
2246 /* There are three cases here. */
2247 if (store_info->group_id < 0)
2248 /* We have a cselib store followed by a read from a
2249 const base. */
2250 remove
2251 = canon_true_dependence (store_info->mem,
2252 GET_MODE (store_info->mem),
2253 store_info->mem_addr,
2254 mem, mem_addr);
2256 else if (group_id == store_info->group_id)
2258 /* This is a block mode load. We may get lucky and
2259 canon_true_dependence may save the day. */
2260 if (width == -1)
2261 remove
2262 = canon_true_dependence (store_info->mem,
2263 GET_MODE (store_info->mem),
2264 store_info->mem_addr,
2265 mem, mem_addr);
2267 /* If this read is just reading back something that we just
2268 stored, rewrite the read. */
2269 else
2271 if (store_info->rhs
2272 && offset >= store_info->begin
2273 && offset + width <= store_info->end
2274 && all_positions_needed_p (store_info,
2275 offset - store_info->begin,
2276 width)
2277 && replace_read (store_info, i_ptr, read_info,
2278 insn_info, loc, bb_info->regs_live))
2279 return;
2281 /* The bases are the same, just see if the offsets
2282 overlap. */
2283 if ((offset < store_info->end)
2284 && (offset + width > store_info->begin))
2285 remove = true;
2289 /* else
2290 The else case that is missing here is that the
2291 bases are constant but different. There is nothing
2292 to do here because there is no overlap. */
2294 if (remove)
2296 if (dump_file && (dump_flags & TDF_DETAILS))
2297 dump_insn_info ("removing from active", i_ptr);
2299 active_local_stores_len--;
2300 if (last)
2301 last->next_local_store = i_ptr->next_local_store;
2302 else
2303 active_local_stores = i_ptr->next_local_store;
2305 else
2306 last = i_ptr;
2307 i_ptr = i_ptr->next_local_store;
2310 else
2312 insn_info_t i_ptr = active_local_stores;
2313 insn_info_t last = NULL;
2314 if (dump_file && (dump_flags & TDF_DETAILS))
2316 fprintf (dump_file, " processing cselib load mem:");
2317 print_inline_rtx (dump_file, mem, 0);
2318 fprintf (dump_file, "\n");
2321 while (i_ptr)
2323 bool remove = false;
2324 store_info_t store_info = i_ptr->store_rec;
2326 if (dump_file && (dump_flags & TDF_DETAILS))
2327 fprintf (dump_file, " processing cselib load against insn %d\n",
2328 INSN_UID (i_ptr->insn));
2330 /* Skip the clobbers. */
2331 while (!store_info->is_set)
2332 store_info = store_info->next;
2334 /* If this read is just reading back something that we just
2335 stored, rewrite the read. */
2336 if (store_info->rhs
2337 && store_info->group_id == -1
2338 && store_info->cse_base == base
2339 && width != -1
2340 && offset >= store_info->begin
2341 && offset + width <= store_info->end
2342 && all_positions_needed_p (store_info,
2343 offset - store_info->begin, width)
2344 && replace_read (store_info, i_ptr, read_info, insn_info, loc,
2345 bb_info->regs_live))
2346 return;
2348 if (!store_info->alias_set)
2349 remove = canon_true_dependence (store_info->mem,
2350 GET_MODE (store_info->mem),
2351 store_info->mem_addr,
2352 mem, mem_addr);
2354 if (remove)
2356 if (dump_file && (dump_flags & TDF_DETAILS))
2357 dump_insn_info ("removing from active", i_ptr);
2359 active_local_stores_len--;
2360 if (last)
2361 last->next_local_store = i_ptr->next_local_store;
2362 else
2363 active_local_stores = i_ptr->next_local_store;
2365 else
2366 last = i_ptr;
2367 i_ptr = i_ptr->next_local_store;
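/* Worked example for the constant-base (group) case handled above
   (hypothetical offsets): with an active store to group 0 covering
   [0, 8),

   - a read of group 0 bytes [0, 8), all of whose positions are still
     needed, is rewritten by replace_read and dropped from the insn's
     read records;
   - a read of group 0 bytes [4, 12) merely overlaps, so the store is
     removed from active_local_stores;
   - a read of group 1 uses a different constant base and cannot
     overlap, so nothing is done.  */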
2372 /* A note_uses callback in which DATA points to the bb_info passed
2373 down to check_mem_read_rtx. Apply check_mem_read_rtx to every
2374 MEM found anywhere inside *LOC. */
2376 static void
2377 check_mem_read_use (rtx *loc, void *data)
2379 subrtx_ptr_iterator::array_type array;
2380 FOR_EACH_SUBRTX_PTR (iter, array, loc, NONCONST)
2382 rtx *loc = *iter;
2383 if (MEM_P (*loc))
2384 check_mem_read_rtx (loc, (bb_info_t) data);
2389 /* Get arguments passed to CALL_INSN. Return TRUE if successful.
2390 So far it only handles arguments passed in registers. */
2392 static bool
2393 get_call_args (rtx call_insn, tree fn, rtx *args, int nargs)
2395 CUMULATIVE_ARGS args_so_far_v;
2396 cumulative_args_t args_so_far;
2397 tree arg;
2398 int idx;
2400 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
2401 args_so_far = pack_cumulative_args (&args_so_far_v);
2403 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
2404 for (idx = 0;
2405 arg != void_list_node && idx < nargs;
2406 arg = TREE_CHAIN (arg), idx++)
2408 machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
2409 rtx reg, link, tmp;
2410 reg = targetm.calls.function_arg (args_so_far, mode, NULL_TREE, true);
2411 if (!reg || !REG_P (reg) || GET_MODE (reg) != mode
2412 || GET_MODE_CLASS (mode) != MODE_INT)
2413 return false;
2415 for (link = CALL_INSN_FUNCTION_USAGE (call_insn);
2416 link;
2417 link = XEXP (link, 1))
2418 if (GET_CODE (XEXP (link, 0)) == USE)
2420 args[idx] = XEXP (XEXP (link, 0), 0);
2421 if (REG_P (args[idx])
2422 && REGNO (args[idx]) == REGNO (reg)
2423 && (GET_MODE (args[idx]) == mode
2424 || (GET_MODE_CLASS (GET_MODE (args[idx])) == MODE_INT
2425 && (GET_MODE_SIZE (GET_MODE (args[idx]))
2426 <= UNITS_PER_WORD)
2427 && (GET_MODE_SIZE (GET_MODE (args[idx]))
2428 > GET_MODE_SIZE (mode)))))
2429 break;
2431 if (!link)
2432 return false;
2434 tmp = cselib_expand_value_rtx (args[idx], scratch, 5);
2435 if (GET_MODE (args[idx]) != mode)
2437 if (!tmp || !CONST_INT_P (tmp))
2438 return false;
2439 tmp = gen_int_mode (INTVAL (tmp), mode);
2441 if (tmp)
2442 args[idx] = tmp;
2444 targetm.calls.function_arg_advance (args_so_far, mode, NULL_TREE, true);
2446 if (arg != void_list_node || idx != nargs)
2447 return false;
2448 return true;
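/* Usage sketch (see the memset handling in scan_insn below): for a
   call to memset (p, c, n),

     rtx args[3];
     if (get_call_args (insn, memset_call, args, 3)) ...

   leaves the address in args[0], the fill value in args[1] and the
   length in args[2], each simplified through cselib_expand_value_rtx
   where possible.  */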
2451 /* Return a bitmap of the fixed registers contained in IN. */
2453 static bitmap
2454 copy_fixed_regs (const_bitmap in)
2456 bitmap ret;
2458 ret = ALLOC_REG_SET (NULL);
2459 bitmap_and (ret, in, fixed_reg_set_regset);
2460 return ret;
2463 /* Apply record_store to all candidate stores in INSN. Mark INSN
2464 as not deletable if some part of it is not a candidate store and
2465 assigns to a non-register target. */
2467 static void
2468 scan_insn (bb_info_t bb_info, rtx_insn *insn)
2470 rtx body;
2471 insn_info_t insn_info = (insn_info_t) pool_alloc (insn_info_pool);
2472 int mems_found = 0;
2473 memset (insn_info, 0, sizeof (struct insn_info));
2475 if (dump_file && (dump_flags & TDF_DETAILS))
2476 fprintf (dump_file, "\n**scanning insn=%d\n",
2477 INSN_UID (insn));
2479 insn_info->prev_insn = bb_info->last_insn;
2480 insn_info->insn = insn;
2481 bb_info->last_insn = insn_info;
2483 if (DEBUG_INSN_P (insn))
2485 insn_info->cannot_delete = true;
2486 return;
2489 /* Look at all of the uses in the insn. */
2490 note_uses (&PATTERN (insn), check_mem_read_use, bb_info);
2492 if (CALL_P (insn))
2494 bool const_call;
2495 tree memset_call = NULL_TREE;
2497 insn_info->cannot_delete = true;
2499 /* Const functions cannot do anything bad, i.e. read memory;
2500 however, they can read their parameters which may have
2501 been pushed onto the stack.
2502 memset and bzero don't read memory either. */
2503 const_call = RTL_CONST_CALL_P (insn);
2504 if (!const_call)
2506 rtx call = get_call_rtx_from (insn);
2507 if (call && GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
2509 rtx symbol = XEXP (XEXP (call, 0), 0);
2510 if (SYMBOL_REF_DECL (symbol)
2511 && TREE_CODE (SYMBOL_REF_DECL (symbol)) == FUNCTION_DECL)
2513 if ((DECL_BUILT_IN_CLASS (SYMBOL_REF_DECL (symbol))
2514 == BUILT_IN_NORMAL
2515 && (DECL_FUNCTION_CODE (SYMBOL_REF_DECL (symbol))
2516 == BUILT_IN_MEMSET))
2517 || SYMBOL_REF_DECL (symbol) == block_clear_fn)
2518 memset_call = SYMBOL_REF_DECL (symbol);
2522 if (const_call || memset_call)
2524 insn_info_t i_ptr = active_local_stores;
2525 insn_info_t last = NULL;
2527 if (dump_file && (dump_flags & TDF_DETAILS))
2528 fprintf (dump_file, "%s call %d\n",
2529 const_call ? "const" : "memset", INSN_UID (insn));
2531 /* See the head comment of the frame_read field. */
2532 if (reload_completed
2533 /* Tail calls are storing their arguments using
2534 arg pointer. If it is a frame pointer on the target,
2535 even before reload we need to kill frame pointer based
2536 stores. */
2537 || (SIBLING_CALL_P (insn)
2538 && HARD_FRAME_POINTER_IS_ARG_POINTER))
2539 insn_info->frame_read = true;
2541 /* Loop over the active stores and remove those which are
2542 killed by the const function call. */
2543 while (i_ptr)
2545 bool remove_store = false;
2547 /* The stack pointer based stores are always killed. */
2548 if (i_ptr->stack_pointer_based)
2549 remove_store = true;
2551 /* If the frame is read, the frame related stores are killed. */
2552 else if (insn_info->frame_read)
2554 store_info_t store_info = i_ptr->store_rec;
2556 /* Skip the clobbers. */
2557 while (!store_info->is_set)
2558 store_info = store_info->next;
2560 if (store_info->group_id >= 0
2561 && rtx_group_vec[store_info->group_id]->frame_related)
2562 remove_store = true;
2565 if (remove_store)
2567 if (dump_file && (dump_flags & TDF_DETAILS))
2568 dump_insn_info ("removing from active", i_ptr);
2570 active_local_stores_len--;
2571 if (last)
2572 last->next_local_store = i_ptr->next_local_store;
2573 else
2574 active_local_stores = i_ptr->next_local_store;
2576 else
2577 last = i_ptr;
2579 i_ptr = i_ptr->next_local_store;
2582 if (memset_call)
2584 rtx args[3];
2585 if (get_call_args (insn, memset_call, args, 3)
2586 && CONST_INT_P (args[1])
2587 && CONST_INT_P (args[2])
2588 && INTVAL (args[2]) > 0)
2590 rtx mem = gen_rtx_MEM (BLKmode, args[0]);
2591 set_mem_size (mem, INTVAL (args[2]));
2592 body = gen_rtx_SET (VOIDmode, mem, args[1]);
2593 mems_found += record_store (body, bb_info);
2594 if (dump_file && (dump_flags & TDF_DETAILS))
2595 fprintf (dump_file, "handling memset as BLKmode store\n");
2596 if (mems_found == 1)
2598 if (active_local_stores_len++
2599 >= PARAM_VALUE (PARAM_MAX_DSE_ACTIVE_LOCAL_STORES))
2601 active_local_stores_len = 1;
2602 active_local_stores = NULL;
2604 insn_info->fixed_regs_live
2605 = copy_fixed_regs (bb_info->regs_live);
2606 insn_info->next_local_store = active_local_stores;
2607 active_local_stores = insn_info;
2612 else if (SIBLING_CALL_P (insn) && reload_completed)
2613 /* Arguments for a sibling call that are pushed to memory are passed
2614 using the incoming argument pointer of the current function. After
2615 reload that might be (and likely is) frame pointer based. */
2616 add_wild_read (bb_info);
2617 else
2618 /* Every other call, including pure functions, may read any memory
2619 that is not relative to the frame. */
2620 add_non_frame_wild_read (bb_info);
2622 return;
2625 /* Assuming that there are sets in these insns, we cannot delete
2626 them. */
2627 if ((GET_CODE (PATTERN (insn)) == CLOBBER)
2628 || volatile_refs_p (PATTERN (insn))
2629 || (!cfun->can_delete_dead_exceptions && !insn_nothrow_p (insn))
2630 || (RTX_FRAME_RELATED_P (insn))
2631 || find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX))
2632 insn_info->cannot_delete = true;
2634 body = PATTERN (insn);
2635 if (GET_CODE (body) == PARALLEL)
2637 int i;
2638 for (i = 0; i < XVECLEN (body, 0); i++)
2639 mems_found += record_store (XVECEXP (body, 0, i), bb_info);
2641 else
2642 mems_found += record_store (body, bb_info);
2644 if (dump_file && (dump_flags & TDF_DETAILS))
2645 fprintf (dump_file, "mems_found = %d, cannot_delete = %s\n",
2646 mems_found, insn_info->cannot_delete ? "true" : "false");
2648 /* If we found some sets of mems, add it into the active_local_stores so
2649 that it can be locally deleted if found dead or used for
2650 replace_read and redundant constant store elimination. Otherwise mark
2651 it as cannot delete. This simplifies the processing later. */
2652 if (mems_found == 1)
2654 if (active_local_stores_len++
2655 >= PARAM_VALUE (PARAM_MAX_DSE_ACTIVE_LOCAL_STORES))
2657 active_local_stores_len = 1;
2658 active_local_stores = NULL;
2660 insn_info->fixed_regs_live = copy_fixed_regs (bb_info->regs_live);
2661 insn_info->next_local_store = active_local_stores;
2662 active_local_stores = insn_info;
2664 else
2665 insn_info->cannot_delete = true;
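/* Illustration (hypothetical insn): for

     (parallel [(set (mem:SI a) (reg:SI r1))
                (set (mem:SI b) (reg:SI r2))])

   record_store runs on each element, mems_found becomes 2, and the
   insn is marked cannot_delete: only insns with exactly one recorded
   store enter active_local_stores.  */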
2669 /* Remove BASE from the set of active_local_stores. This is a
2670 callback from cselib that is used to get rid of the stores in
2671 active_local_stores. */
2673 static void
2674 remove_useless_values (cselib_val *base)
2676 insn_info_t insn_info = active_local_stores;
2677 insn_info_t last = NULL;
2679 while (insn_info)
2681 store_info_t store_info = insn_info->store_rec;
2682 bool del = false;
2684 /* If ANY of the store_infos match the cselib group that is
2685 being deleted, then the insn cannot be deleted. */
2686 while (store_info)
2688 if ((store_info->group_id == -1)
2689 && (store_info->cse_base == base))
2691 del = true;
2692 break;
2694 store_info = store_info->next;
2697 if (del)
2699 active_local_stores_len--;
2700 if (last)
2701 last->next_local_store = insn_info->next_local_store;
2702 else
2703 active_local_stores = insn_info->next_local_store;
2704 free_store_info (insn_info);
2706 else
2707 last = insn_info;
2709 insn_info = insn_info->next_local_store;
2714 /* Do all of step 1. */
2716 static void
2717 dse_step1 (void)
2719 basic_block bb;
2720 bitmap regs_live = BITMAP_ALLOC (&reg_obstack);
2722 cselib_init (0);
2723 all_blocks = BITMAP_ALLOC (NULL);
2724 bitmap_set_bit (all_blocks, ENTRY_BLOCK);
2725 bitmap_set_bit (all_blocks, EXIT_BLOCK);
2727 FOR_ALL_BB_FN (bb, cfun)
2729 insn_info_t ptr;
2730 bb_info_t bb_info = (bb_info_t) pool_alloc (bb_info_pool);
2732 memset (bb_info, 0, sizeof (struct dse_bb_info));
2733 bitmap_set_bit (all_blocks, bb->index);
2734 bb_info->regs_live = regs_live;
2736 bitmap_copy (regs_live, DF_LR_IN (bb));
2737 df_simulate_initialize_forwards (bb, regs_live);
2739 bb_table[bb->index] = bb_info;
2740 cselib_discard_hook = remove_useless_values;
2742 if (bb->index >= NUM_FIXED_BLOCKS)
2744 rtx_insn *insn;
2746 cse_store_info_pool
2747 = create_alloc_pool ("cse_store_info_pool",
2748 sizeof (struct store_info), 100);
2749 active_local_stores = NULL;
2750 active_local_stores_len = 0;
2751 cselib_clear_table ();
2753 /* Scan the insns. */
2754 FOR_BB_INSNS (bb, insn)
2756 if (INSN_P (insn))
2757 scan_insn (bb_info, insn);
2758 cselib_process_insn (insn);
2759 if (INSN_P (insn))
2760 df_simulate_one_insn_forwards (bb, insn, regs_live);
2763 /* This is something of a hack, because the global algorithm
2764 is supposed to take care of the case where stores go dead
2765 at the end of the function. However, the global
2766 algorithm must take a more conservative view of block
2767 mode reads than the local alg does. So to get the case
2768 where you have a store to the frame followed by a non
2769 overlapping block mode read, we look at the active local
2770 stores at the end of the function and delete all of the
2771 frame and spill based ones. */
2772 if (stores_off_frame_dead_at_return
2773 && (EDGE_COUNT (bb->succs) == 0
2774 || (single_succ_p (bb)
2775 && single_succ (bb) == EXIT_BLOCK_PTR_FOR_FN (cfun)
2776 && ! crtl->calls_eh_return)))
2778 insn_info_t i_ptr = active_local_stores;
2779 while (i_ptr)
2781 store_info_t store_info = i_ptr->store_rec;
2783 /* Skip the clobbers. */
2784 while (!store_info->is_set)
2785 store_info = store_info->next;
2786 if (store_info->alias_set && !i_ptr->cannot_delete)
2787 delete_dead_store_insn (i_ptr);
2788 else
2789 if (store_info->group_id >= 0)
2791 group_info_t group
2792 = rtx_group_vec[store_info->group_id];
2793 if (group->frame_related && !i_ptr->cannot_delete)
2794 delete_dead_store_insn (i_ptr);
2797 i_ptr = i_ptr->next_local_store;
2801 /* Get rid of the loads that were discovered in
2802 replace_read. Cselib is finished with this block. */
2803 while (deferred_change_list)
2805 deferred_change_t next = deferred_change_list->next;
2807 /* There is no reason to validate this change. That was
2808 done earlier. */
2809 *deferred_change_list->loc = deferred_change_list->reg;
2810 pool_free (deferred_change_pool, deferred_change_list);
2811 deferred_change_list = next;
2814 /* Get rid of all of the cselib based store_infos in this
2815 block and mark the containing insns as not being
2816 deletable. */
2817 ptr = bb_info->last_insn;
2818 while (ptr)
2820 if (ptr->contains_cselib_groups)
2822 store_info_t s_info = ptr->store_rec;
2823 while (s_info && !s_info->is_set)
2824 s_info = s_info->next;
2825 if (s_info
2826 && s_info->redundant_reason
2827 && s_info->redundant_reason->insn
2828 && !ptr->cannot_delete)
2830 if (dump_file && (dump_flags & TDF_DETAILS))
2831 fprintf (dump_file, "Locally deleting insn %d "
2832 "because insn %d stores the "
2833 "same value and couldn't be "
2834 "eliminated\n",
2835 INSN_UID (ptr->insn),
2836 INSN_UID (s_info->redundant_reason->insn));
2837 delete_dead_store_insn (ptr);
2839 free_store_info (ptr);
2841 else
2843 store_info_t s_info;
2845 /* Free at least the positions_needed bitmaps. */
2846 for (s_info = ptr->store_rec; s_info; s_info = s_info->next)
2847 if (s_info->is_large)
2849 BITMAP_FREE (s_info->positions_needed.large.bmap);
2850 s_info->is_large = false;
2853 ptr = ptr->prev_insn;
2856 free_alloc_pool (cse_store_info_pool);
2858 bb_info->regs_live = NULL;
2861 BITMAP_FREE (regs_live);
2862 cselib_finish ();
2863 rtx_group_table->empty ();
2867 /*----------------------------------------------------------------------------
2868 Second step.
2870 Assign each byte position in the stores that we are going to
2871 analyze globally to a position in the bitmaps. Returns true if
2872 there are any bit positions assigned.
2873 ----------------------------------------------------------------------------*/
2875 static void
2876 dse_step2_init (void)
2878 unsigned int i;
2879 group_info_t group;
2881 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
2883 /* For all non stack related bases, we only consider a store to
2884 be deletable if there are two or more stores for that
2885 position. This is because it takes one store to make the
2886 other store redundant. However, for the stores that are
2887 stack related, we consider them if there is only one store
2888 for the position. We do this because the stack related
2889 stores can be deleted if there is no read between them and
2890 the end of the function.
2892 To make this work in the current framework, we take the stack
2893 related bases and add all of the bits from store1 into store2.
2894 This has the effect of making them eligible even if there is
2895 only one store. */
2897 if (stores_off_frame_dead_at_return && group->frame_related)
2899 bitmap_ior_into (group->store2_n, group->store1_n);
2900 bitmap_ior_into (group->store2_p, group->store1_p);
2901 if (dump_file && (dump_flags & TDF_DETAILS))
2902 fprintf (dump_file, "group %d is frame related ", i);
2905 group->offset_map_size_n++;
2906 group->offset_map_n = XOBNEWVEC (&dse_obstack, int,
2907 group->offset_map_size_n);
2908 group->offset_map_size_p++;
2909 group->offset_map_p = XOBNEWVEC (&dse_obstack, int,
2910 group->offset_map_size_p);
2911 group->process_globally = false;
2912 if (dump_file && (dump_flags & TDF_DETAILS))
2914 fprintf (dump_file, "group %d(%d+%d): ", i,
2915 (int)bitmap_count_bits (group->store2_n),
2916 (int)bitmap_count_bits (group->store2_p));
2917 bitmap_print (dump_file, group->store2_n, "n ", " ");
2918 bitmap_print (dump_file, group->store2_p, "p ", "\n");
2924 /* Init the offset tables for the normal case. */
2926 static bool
2927 dse_step2_nospill (void)
2929 unsigned int i;
2930 group_info_t group;
2931 /* Position 0 is never assigned because 0 is used in the maps to
2932 mean "no position assigned". */
2933 current_position = 1;
2934 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
2936 bitmap_iterator bi;
2937 unsigned int j;
2939 if (group == clear_alias_group)
2940 continue;
2942 memset (group->offset_map_n, 0, sizeof (int) * group->offset_map_size_n);
2943 memset (group->offset_map_p, 0, sizeof (int) * group->offset_map_size_p);
2944 bitmap_clear (group->group_kill);
2946 EXECUTE_IF_SET_IN_BITMAP (group->store2_n, 0, j, bi)
2948 bitmap_set_bit (group->group_kill, current_position);
2949 if (bitmap_bit_p (group->escaped_n, j))
2950 bitmap_set_bit (kill_on_calls, current_position);
2951 group->offset_map_n[j] = current_position++;
2952 group->process_globally = true;
2954 EXECUTE_IF_SET_IN_BITMAP (group->store2_p, 0, j, bi)
2956 bitmap_set_bit (group->group_kill, current_position);
2957 if (bitmap_bit_p (group->escaped_p, j))
2958 bitmap_set_bit (kill_on_calls, current_position);
2959 group->offset_map_p[j] = current_position++;
2960 group->process_globally = true;
2963 return current_position != 1;
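/* Example with a hypothetical group: if group 0 has store2_n bit 8
   (offset -8) and store2_p bits {0, 4}, the loops above assign

     offset_map_n[8] = 1, offset_map_p[0] = 2, offset_map_p[4] = 3

   leaving group_kill = {1, 2, 3} and current_position == 4; position
   0 is reserved to mean "no bit assigned".  */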
2968 /*----------------------------------------------------------------------------
2969 Third step.
2971 Build the bit vectors for the transfer functions.
2972 ----------------------------------------------------------------------------*/
2975 /* Look up the bitmap index for OFFSET in GROUP_INFO. If it is not
2976 there, return 0. */
2978 static int
2979 get_bitmap_index (group_info_t group_info, HOST_WIDE_INT offset)
2981 if (offset < 0)
2983 HOST_WIDE_INT offset_p = -offset;
2984 if (offset_p >= group_info->offset_map_size_n)
2985 return 0;
2986 return group_info->offset_map_n[offset_p];
2988 else
2990 if (offset >= group_info->offset_map_size_p)
2991 return 0;
2992 return group_info->offset_map_p[offset];
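/* Continuing the hypothetical example above: get_bitmap_index (group,
   -8) == 1 and get_bitmap_index (group, 0) == 2, while any offset
   that was never assigned, or that lies outside the offset maps,
   returns 0.  */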
2997 /* Process the STORE_INFOs into the GEN and KILL bitmaps. KILL
2998 may be NULL. */
3000 static void
3001 scan_stores_nospill (store_info_t store_info, bitmap gen, bitmap kill)
3003 while (store_info)
3005 HOST_WIDE_INT i;
3006 group_info_t group_info
3007 = rtx_group_vec[store_info->group_id];
3008 if (group_info->process_globally)
3009 for (i = store_info->begin; i < store_info->end; i++)
3011 int index = get_bitmap_index (group_info, i);
3012 if (index != 0)
3014 bitmap_set_bit (gen, index);
3015 if (kill)
3016 bitmap_clear_bit (kill, index);
3019 store_info = store_info->next;
3024 /* Process the STORE_INFOs into the GEN and KILL bitmaps. KILL
3025 may be NULL. */
3027 static void
3028 scan_stores_spill (store_info_t store_info, bitmap gen, bitmap kill)
3030 while (store_info)
3032 if (store_info->alias_set)
3034 int index = get_bitmap_index (clear_alias_group,
3035 store_info->alias_set);
3036 if (index != 0)
3038 bitmap_set_bit (gen, index);
3039 if (kill)
3040 bitmap_clear_bit (kill, index);
3043 store_info = store_info->next;
3048 /* Process the READ_INFOs into the GEN and KILL bitmaps. KILL
3049 may be NULL. */
3051 static void
3052 scan_reads_nospill (insn_info_t insn_info, bitmap gen, bitmap kill)
3054 read_info_t read_info = insn_info->read_rec;
3055 int i;
3056 group_info_t group;
3058 /* If this insn reads the frame, kill all the frame related stores. */
3059 if (insn_info->frame_read)
3061 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
3062 if (group->process_globally && group->frame_related)
3064 if (kill)
3065 bitmap_ior_into (kill, group->group_kill);
3066 bitmap_and_compl_into (gen, group->group_kill);
3069 if (insn_info->non_frame_wild_read)
3071 /* Kill all non-frame related stores. Kill all stores of variables that
3072 escape. */
3073 if (kill)
3074 bitmap_ior_into (kill, kill_on_calls);
3075 bitmap_and_compl_into (gen, kill_on_calls);
3076 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
3077 if (group->process_globally && !group->frame_related)
3079 if (kill)
3080 bitmap_ior_into (kill, group->group_kill);
3081 bitmap_and_compl_into (gen, group->group_kill);
3084 while (read_info)
3086 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
3088 if (group->process_globally)
3090 if (i == read_info->group_id)
3092 if (read_info->begin > read_info->end)
3094 /* Begin > end for block mode reads. */
3095 if (kill)
3096 bitmap_ior_into (kill, group->group_kill);
3097 bitmap_and_compl_into (gen, group->group_kill);
3099 else
3101 /* The groups are the same, just process the
3102 offsets. */
3103 HOST_WIDE_INT j;
3104 for (j = read_info->begin; j < read_info->end; j++)
3106 int index = get_bitmap_index (group, j);
3107 if (index != 0)
3109 if (kill)
3110 bitmap_set_bit (kill, index);
3111 bitmap_clear_bit (gen, index);
3116 else
3118 /* The groups are different; if the alias sets
3119 conflict, clear the entire group. We only need
3120 to apply this test if the read_info is a cselib
3121 read. Anything with a constant base cannot alias
3122 something else with a different constant
3123 base. */
3124 if ((read_info->group_id < 0)
3125 && canon_true_dependence (group->base_mem,
3126 GET_MODE (group->base_mem),
3127 group->canon_base_addr,
3128 read_info->mem, NULL_RTX))
3130 if (kill)
3131 bitmap_ior_into (kill, group->group_kill);
3132 bitmap_and_compl_into (gen, group->group_kill);
3138 read_info = read_info->next;
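/* Illustrative effect (hypothetical read): a read of group 0 bytes
   [0, 4) clears the matching positions from GEN and, when KILL is
   supplied, sets them in KILL, so earlier stores to those bytes are
   no longer considered dead at this point.  Frame reads, non-frame
   wild reads and conflicting cselib reads instead clear whole groups
   via group_kill.  */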
3142 /* Process the READ_INFOs into the GEN and KILL bitmaps. KILL
3143 may be NULL. */
3145 static void
3146 scan_reads_spill (read_info_t read_info, bitmap gen, bitmap kill)
3148 while (read_info)
3150 if (read_info->alias_set)
3152 int index = get_bitmap_index (clear_alias_group,
3153 read_info->alias_set);
3154 if (index != 0)
3156 if (kill)
3157 bitmap_set_bit (kill, index);
3158 bitmap_clear_bit (gen, index);
3162 read_info = read_info->next;
3167 /* Return the insn in BB_INFO before the first wild read or, if there
3168 are no wild reads in the block, the last insn. */
3170 static insn_info_t
3171 find_insn_before_first_wild_read (bb_info_t bb_info)
3173 insn_info_t insn_info = bb_info->last_insn;
3174 insn_info_t last_wild_read = NULL;
3176 while (insn_info)
3178 if (insn_info->wild_read)
3180 last_wild_read = insn_info->prev_insn;
3181 /* Block starts with wild read. */
3182 if (!last_wild_read)
3183 return NULL;
3186 insn_info = insn_info->prev_insn;
3189 if (last_wild_read)
3190 return last_wild_read;
3191 else
3192 return bb_info->last_insn;
3196 /* Scan the insns in BB_INFO starting at PTR and going to the top of
3197 the block in order to build the gen and kill sets for the block.
3198 We start at ptr which may be the last insn in the block or may be
3199 the first insn with a wild read. In the latter case we are able to
3200 skip the rest of the block because it just does not matter:
3201 anything that happens is hidden by the wild read. */
3203 static void
3204 dse_step3_scan (bool for_spills, basic_block bb)
3206 bb_info_t bb_info = bb_table[bb->index];
3207 insn_info_t insn_info;
3209 if (for_spills)
3210 /* There are no wild reads in the spill case. */
3211 insn_info = bb_info->last_insn;
3212 else
3213 insn_info = find_insn_before_first_wild_read (bb_info);
3215 /* In the spill case, or in the no-spill case when there is no wild
3216 read in the block, we will need a kill set. */
3217 if (insn_info == bb_info->last_insn)
3219 if (bb_info->kill)
3220 bitmap_clear (bb_info->kill);
3221 else
3222 bb_info->kill = BITMAP_ALLOC (&dse_bitmap_obstack);
3224 else
3225 if (bb_info->kill)
3226 BITMAP_FREE (bb_info->kill);
3228 while (insn_info)
3230 /* There may have been code deleted by the dce pass run before
3231 this phase. */
3232 if (insn_info->insn && INSN_P (insn_info->insn))
3234 /* Process the read(s) last. */
3235 if (for_spills)
3237 scan_stores_spill (insn_info->store_rec, bb_info->gen, bb_info->kill);
3238 scan_reads_spill (insn_info->read_rec, bb_info->gen, bb_info->kill);
3240 else
3242 scan_stores_nospill (insn_info->store_rec, bb_info->gen, bb_info->kill);
3243 scan_reads_nospill (insn_info, bb_info->gen, bb_info->kill);
3247 insn_info = insn_info->prev_insn;
3252 /* Set the gen set of the exit block, and also any block with no
3253 successors that does not have a wild read. */
3255 static void
3256 dse_step3_exit_block_scan (bb_info_t bb_info)
3258 /* The gen set is all 0's for the exit block except for the
3259 frame_pointer_group. */
3261 if (stores_off_frame_dead_at_return)
3263 unsigned int i;
3264 group_info_t group;
3266 FOR_EACH_VEC_ELT (rtx_group_vec, i, group)
3268 if (group->process_globally && group->frame_related)
3269 bitmap_ior_into (bb_info->gen, group->group_kill);
3275 /* Find all of the blocks that are not backwards reachable from the
3276 exit block or any block with no successors (BB). These are the
3277 infinite loops or infinite self loops. These blocks will still
3278 have their bits set in UNREACHABLE_BLOCKS. */
3280 static void
3281 mark_reachable_blocks (sbitmap unreachable_blocks, basic_block bb)
3283 edge e;
3284 edge_iterator ei;
3286 if (bitmap_bit_p (unreachable_blocks, bb->index))
3288 bitmap_clear_bit (unreachable_blocks, bb->index);
3289 FOR_EACH_EDGE (e, ei, bb->preds)
3291 mark_reachable_blocks (unreachable_blocks, e->src);
3296 /* Build the transfer functions for the function. */
3298 static void
3299 dse_step3 (bool for_spills)
3301 basic_block bb;
3302 sbitmap unreachable_blocks = sbitmap_alloc (last_basic_block_for_fn (cfun));
3303 sbitmap_iterator sbi;
3304 bitmap all_ones = NULL;
3305 unsigned int i;
3307 bitmap_ones (unreachable_blocks);
3309 FOR_ALL_BB_FN (bb, cfun)
3311 bb_info_t bb_info = bb_table[bb->index];
3312 if (bb_info->gen)
3313 bitmap_clear (bb_info->gen);
3314 else
3315 bb_info->gen = BITMAP_ALLOC (&dse_bitmap_obstack);
3317 if (bb->index == ENTRY_BLOCK)
3319 else if (bb->index == EXIT_BLOCK)
3320 dse_step3_exit_block_scan (bb_info);
3321 else
3322 dse_step3_scan (for_spills, bb);
3323 if (EDGE_COUNT (bb->succs) == 0)
3324 mark_reachable_blocks (unreachable_blocks, bb);
3326 /* If this is the second time dataflow is run, delete the old
3327 sets. */
3328 if (bb_info->in)
3329 BITMAP_FREE (bb_info->in);
3330 if (bb_info->out)
3331 BITMAP_FREE (bb_info->out);
3334 /* For any block in an infinite loop, we must initialize the out set
3335 to all ones. This could be expensive, but almost never occurs in
3336 practice. However, it is common in regression tests. */
3337 EXECUTE_IF_SET_IN_BITMAP (unreachable_blocks, 0, i, sbi)
3339 if (bitmap_bit_p (all_blocks, i))
3341 bb_info_t bb_info = bb_table[i];
3342 if (!all_ones)
3344 unsigned int j;
3345 group_info_t group;
3347 all_ones = BITMAP_ALLOC (&dse_bitmap_obstack);
3348 FOR_EACH_VEC_ELT (rtx_group_vec, j, group)
3349 bitmap_ior_into (all_ones, group->group_kill);
3351 if (!bb_info->out)
3353 bb_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
3354 bitmap_copy (bb_info->out, all_ones);
3359 if (all_ones)
3360 BITMAP_FREE (all_ones);
3361 sbitmap_free (unreachable_blocks);
3366 /*----------------------------------------------------------------------------
3367 Fourth step.
3369 Solve the bitvector equations.
3370 ----------------------------------------------------------------------------*/
3373 /* Confluence function for blocks with no successors. Create an out
3374 set from the gen set of the exit block. This block logically has
3375 the exit block as a successor. */
3379 static void
3380 dse_confluence_0 (basic_block bb)
3382 bb_info_t bb_info = bb_table[bb->index];
3384 if (bb->index == EXIT_BLOCK)
3385 return;
3387 if (!bb_info->out)
3389 bb_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
3390 bitmap_copy (bb_info->out, bb_table[EXIT_BLOCK]->gen);
3394 /* Propagate the information from the in set of the dest of E to the
3395 out set of the src of E. If the various in or out sets are not
3396 there, that means they are all ones. */
3398 static bool
3399 dse_confluence_n (edge e)
3401 bb_info_t src_info = bb_table[e->src->index];
3402 bb_info_t dest_info = bb_table[e->dest->index];
3404 if (dest_info->in)
3406 if (src_info->out)
3407 bitmap_and_into (src_info->out, dest_info->in);
3408 else
3410 src_info->out = BITMAP_ALLOC (&dse_bitmap_obstack);
3411 bitmap_copy (src_info->out, dest_info->in);
3414 return true;
3418 /* Propagate the info from the out to the in set of BB_INDEX's basic
3419 block. There are three cases:
3421 1) The block has no kill set. In this case the kill set is all
3422 ones. It does not matter what the out set of the block is, none of
3423 the info can reach the top. The only thing that reaches the top is
3424 the gen set and we just copy the set.
3426 2) There is a kill set but no out set and bb has successors. In
3427 this case we just return. Eventually an out set will be created and
3428 it is better to wait than to create a set of ones.
3430 3) There is both a kill and out set. We apply the obvious transfer
3431 function.
3434 static bool
3435 dse_transfer_function (int bb_index)
3437 bb_info_t bb_info = bb_table[bb_index];
3439 if (bb_info->kill)
3441 if (bb_info->out)
3443 /* Case 3 above. */
3444 if (bb_info->in)
3445 return bitmap_ior_and_compl (bb_info->in, bb_info->gen,
3446 bb_info->out, bb_info->kill);
3447 else
3449 bb_info->in = BITMAP_ALLOC (&dse_bitmap_obstack);
3450 bitmap_ior_and_compl (bb_info->in, bb_info->gen,
3451 bb_info->out, bb_info->kill);
3452 return true;
3455 else
3456 /* Case 2 above. */
3457 return false;
3459 else
3461 /* Case 1 above. If there is already an in set, nothing
3462 happens. */
3463 if (bb_info->in)
3464 return false;
3465 else
3467 bb_info->in = BITMAP_ALLOC (&dse_bitmap_obstack);
3468 bitmap_copy (bb_info->in, bb_info->gen);
3469 return true;
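/* Case 3 above in equation form:

     in = gen | (out & ~kill)

   which is exactly what bitmap_ior_and_compl computes; it returns
   true when IN changes, and that return value drives the iteration
   performed by df_simple_dataflow in dse_step4.  */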
3474 /* Solve the dataflow equations. */
3476 static void
3477 dse_step4 (void)
3479 df_simple_dataflow (DF_BACKWARD, NULL, dse_confluence_0,
3480 dse_confluence_n, dse_transfer_function,
3481 all_blocks, df_get_postorder (DF_BACKWARD),
3482 df_get_n_blocks (DF_BACKWARD));
3483 if (dump_file && (dump_flags & TDF_DETAILS))
3485 basic_block bb;
3487 fprintf (dump_file, "\n\n*** Global dataflow info after analysis.\n");
3488 FOR_ALL_BB_FN (bb, cfun)
3490 bb_info_t bb_info = bb_table[bb->index];
3492 df_print_bb_index (bb, dump_file);
3493 if (bb_info->in)
3494 bitmap_print (dump_file, bb_info->in, " in: ", "\n");
3495 else
3496 fprintf (dump_file, " in: *MISSING*\n");
3497 if (bb_info->gen)
3498 bitmap_print (dump_file, bb_info->gen, " gen: ", "\n");
3499 else
3500 fprintf (dump_file, " gen: *MISSING*\n");
3501 if (bb_info->kill)
3502 bitmap_print (dump_file, bb_info->kill, " kill: ", "\n");
3503 else
3504 fprintf (dump_file, " kill: *MISSING*\n");
3505 if (bb_info->out)
3506 bitmap_print (dump_file, bb_info->out, " out: ", "\n");
3507 else
3508 fprintf (dump_file, " out: *MISSING*\n\n");
3515 /*----------------------------------------------------------------------------
3516 Fifth step.
3518 Delete the stores that can only be deleted using the global information.
3519 ----------------------------------------------------------------------------*/
3522 static void
3523 dse_step5_nospill (void)
3525 basic_block bb;
3526 FOR_EACH_BB_FN (bb, cfun)
3528 bb_info_t bb_info = bb_table[bb->index];
3529 insn_info_t insn_info = bb_info->last_insn;
3530 bitmap v = bb_info->out;
3532 while (insn_info)
3534 bool deleted = false;
3535 if (dump_file && insn_info->insn)
3537 fprintf (dump_file, "starting to process insn %d\n",
3538 INSN_UID (insn_info->insn));
3539 bitmap_print (dump_file, v, " v: ", "\n");
3542 /* There may have been code deleted by the dce pass run before
3543 this phase. */
3544 if (insn_info->insn
3545 && INSN_P (insn_info->insn)
3546 && (!insn_info->cannot_delete)
3547 && (!bitmap_empty_p (v)))
3549 store_info_t store_info = insn_info->store_rec;
3551 /* Try to delete the current insn. */
3552 deleted = true;
3554 /* Skip the clobbers. */
3555 while (!store_info->is_set)
3556 store_info = store_info->next;
3558 if (store_info->alias_set)
3559 deleted = false;
3560 else
3562 HOST_WIDE_INT i;
3563 group_info_t group_info
3564 = rtx_group_vec[store_info->group_id];
3566 for (i = store_info->begin; i < store_info->end; i++)
3568 int index = get_bitmap_index (group_info, i);
3570 if (dump_file && (dump_flags & TDF_DETAILS))
3571 fprintf (dump_file, "i = %d, index = %d\n", (int)i, index);
3572 if (index == 0 || !bitmap_bit_p (v, index))
3574 if (dump_file && (dump_flags & TDF_DETAILS))
3575 fprintf (dump_file, "failing at i = %d\n", (int)i);
3576 deleted = false;
3577 break;
3581 if (deleted)
3583 if (dbg_cnt (dse)
3584 && check_for_inc_dec_1 (insn_info))
3586 delete_insn (insn_info->insn);
3587 insn_info->insn = NULL;
3588 globally_deleted++;
3592 /* We do not want to process the local info if the insn was
3593 deleted. For instance, if the insn did a wild read, we
3594 no longer need to trash the info. */
3595 if (insn_info->insn
3596 && INSN_P (insn_info->insn)
3597 && (!deleted))
3599 scan_stores_nospill (insn_info->store_rec, v, NULL);
3600 if (insn_info->wild_read)
3602 if (dump_file && (dump_flags & TDF_DETAILS))
3603 fprintf (dump_file, "wild read\n");
3604 bitmap_clear (v);
3606 else if (insn_info->read_rec
3607 || insn_info->non_frame_wild_read)
3609 if (dump_file && !insn_info->non_frame_wild_read)
3610 fprintf (dump_file, "regular read\n");
3611 else if (dump_file && (dump_flags & TDF_DETAILS))
3612 fprintf (dump_file, "non-frame wild read\n");
3613 scan_reads_nospill (insn_info, v, NULL);
3617 insn_info = insn_info->prev_insn;
3624 /*----------------------------------------------------------------------------
3625 Sixth step.
3627 Delete stores made redundant by earlier stores (which store the same
3628 value) that couldn't be eliminated.
3629 ----------------------------------------------------------------------------*/
3631 static void
3632 dse_step6 (void)
3634 basic_block bb;
3636 FOR_ALL_BB_FN (bb, cfun)
3638 bb_info_t bb_info = bb_table[bb->index];
3639 insn_info_t insn_info = bb_info->last_insn;
3641 while (insn_info)
3643 /* There may have been code deleted by the dce pass run before
3644 this phase. */
3645 if (insn_info->insn
3646 && INSN_P (insn_info->insn)
3647 && !insn_info->cannot_delete)
3649 store_info_t s_info = insn_info->store_rec;
3651 while (s_info && !s_info->is_set)
3652 s_info = s_info->next;
3653 if (s_info
3654 && s_info->redundant_reason
3655 && s_info->redundant_reason->insn
3656 && INSN_P (s_info->redundant_reason->insn))
3658 rtx_insn *rinsn = s_info->redundant_reason->insn;
3659 if (dump_file && (dump_flags & TDF_DETAILS))
3660 fprintf (dump_file, "Locally deleting insn %d "
3661 "because insn %d stores the "
3662 "same value and couldn't be "
3663 "eliminated\n",
3664 INSN_UID (insn_info->insn),
3665 INSN_UID (rinsn));
3666 delete_dead_store_insn (insn_info);
3669 insn_info = insn_info->prev_insn;
3674 /*----------------------------------------------------------------------------
3675 Seventh step.
3677 Destroy everything left standing.
3678 ----------------------------------------------------------------------------*/
3680 static void
3681 dse_step7 (void)
3683 bitmap_obstack_release (&dse_bitmap_obstack);
3684 obstack_free (&dse_obstack, NULL);
3686 end_alias_analysis ();
3687 free (bb_table);
3688 delete rtx_group_table;
3689 rtx_group_table = NULL;
3690 rtx_group_vec.release ();
3691 BITMAP_FREE (all_blocks);
3692 BITMAP_FREE (scratch);
3694 free_alloc_pool (rtx_store_info_pool);
3695 free_alloc_pool (read_info_pool);
3696 free_alloc_pool (insn_info_pool);
3697 free_alloc_pool (bb_info_pool);
3698 free_alloc_pool (rtx_group_info_pool);
3699 free_alloc_pool (deferred_change_pool);
3703 /* -------------------------------------------------------------------------
3705 ------------------------------------------------------------------------- */
3707 /* Callback for running pass_rtl_dse. */
3709 static unsigned int
3710 rest_of_handle_dse (void)
3712 df_set_flags (DF_DEFER_INSN_RESCAN);
3714 /* Need the notes since we must track live hardregs in the forwards
3715 direction. */
3716 df_note_add_problem ();
3717 df_analyze ();
3719 dse_step0 ();
3720 dse_step1 ();
3721 dse_step2_init ();
3722 if (dse_step2_nospill ())
3724 df_set_flags (DF_LR_RUN_DCE);
3725 df_analyze ();
3726 if (dump_file && (dump_flags & TDF_DETAILS))
3727 fprintf (dump_file, "doing global processing\n");
3728 dse_step3 (false);
3729 dse_step4 ();
3730 dse_step5_nospill ();
3733 dse_step6 ();
3734 dse_step7 ();
3736 if (dump_file)
3737 fprintf (dump_file, "dse: local deletions = %d, global deletions = %d, spill deletions = %d\n",
3738 locally_deleted, globally_deleted, spill_deleted);
3739 return 0;
3742 namespace {
3744 const pass_data pass_data_rtl_dse1 =
3746 RTL_PASS, /* type */
3747 "dse1", /* name */
3748 OPTGROUP_NONE, /* optinfo_flags */
3749 TV_DSE1, /* tv_id */
3750 0, /* properties_required */
3751 0, /* properties_provided */
3752 0, /* properties_destroyed */
3753 0, /* todo_flags_start */
3754 TODO_df_finish, /* todo_flags_finish */
3757 class pass_rtl_dse1 : public rtl_opt_pass
3759 public:
3760 pass_rtl_dse1 (gcc::context *ctxt)
3761 : rtl_opt_pass (pass_data_rtl_dse1, ctxt)
3764 /* opt_pass methods: */
3765 virtual bool gate (function *)
3767 return optimize > 0 && flag_dse && dbg_cnt (dse1);
3770 virtual unsigned int execute (function *) { return rest_of_handle_dse (); }
3772 }; // class pass_rtl_dse1
3774 } // anon namespace
3776 rtl_opt_pass *
3777 make_pass_rtl_dse1 (gcc::context *ctxt)
3779 return new pass_rtl_dse1 (ctxt);
3782 namespace {
3784 const pass_data pass_data_rtl_dse2 =
3786 RTL_PASS, /* type */
3787 "dse2", /* name */
3788 OPTGROUP_NONE, /* optinfo_flags */
3789 TV_DSE2, /* tv_id */
3790 0, /* properties_required */
3791 0, /* properties_provided */
3792 0, /* properties_destroyed */
3793 0, /* todo_flags_start */
3794 TODO_df_finish, /* todo_flags_finish */
3797 class pass_rtl_dse2 : public rtl_opt_pass
3799 public:
3800 pass_rtl_dse2 (gcc::context *ctxt)
3801 : rtl_opt_pass (pass_data_rtl_dse2, ctxt)
3804 /* opt_pass methods: */
3805 virtual bool gate (function *)
3807 return optimize > 0 && flag_dse && dbg_cnt (dse2);
3810 virtual unsigned int execute (function *) { return rest_of_handle_dse (); }
3812 }; // class pass_rtl_dse2
3814 } // anon namespace
3816 rtl_opt_pass *
3817 make_pass_rtl_dse2 (gcc::context *ctxt)
3819 return new pass_rtl_dse2 (ctxt);