contrib/gcc-4.7/gcc/dse.c
1 /* RTL dead store elimination.
2 Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011
3 Free Software Foundation, Inc.
5 Contributed by Richard Sandiford <rsandifor@codesourcery.com>
6 and Kenneth Zadeck <zadeck@naturalbridge.com>
8 This file is part of GCC.
10 GCC is free software; you can redistribute it and/or modify it under
11 the terms of the GNU General Public License as published by the Free
12 Software Foundation; either version 3, or (at your option) any later
13 version.
15 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
16 WARRANTY; without even the implied warranty of MERCHANTABILITY or
17 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
18 for more details.
20 You should have received a copy of the GNU General Public License
21 along with GCC; see the file COPYING3. If not see
22 <http://www.gnu.org/licenses/>. */
24 #undef BASELINE
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "hashtab.h"
30 #include "tm.h"
31 #include "rtl.h"
32 #include "tree.h"
33 #include "tm_p.h"
34 #include "regs.h"
35 #include "hard-reg-set.h"
36 #include "regset.h"
37 #include "flags.h"
38 #include "df.h"
39 #include "cselib.h"
40 #include "timevar.h"
41 #include "tree-pass.h"
42 #include "alloc-pool.h"
43 #include "alias.h"
44 #include "insn-config.h"
45 #include "expr.h"
46 #include "recog.h"
47 #include "dse.h"
48 #include "optabs.h"
49 #include "dbgcnt.h"
50 #include "target.h"
51 #include "params.h"
52 #include "tree-flow.h"
54 /* This file contains three techniques for performing Dead Store
55 Elimination (dse).
57 * The first technique performs dse locally on any base address. It
58 is based on cselib, which is a local value numbering technique.
59 This technique is local to a basic block but deals with fairly
60 general addresses.
62 * The second technique performs dse globally but is restricted to
63 base addresses that are either constant or are relative to the
64 frame_pointer.
66 * The third technique (which is only done after register allocation)
67 processes the spill slots. This differs from the second
68 technique because it takes advantage of the fact that spilling is
69 completely free from the effects of aliasing.
71 Logically, dse is a backwards dataflow problem. A store can be
72 deleted if it cannot be reached in the backward direction by any
73 use of the value being stored. However, the local technique uses a
74 forwards scan of the basic block because cselib requires that the
75 block be processed in that order.
77 The pass is logically broken into 7 steps:
79 0) Initialization.
81 1) The local algorithm, as well as scanning the insns for the two
82 global algorithms.
84 2) Analysis to see if the global algs are necessary. In the case
85 of stores based on a constant address, there must be at least two
86 stores to that address, to make it possible to delete some of the
87 stores. In the case of stores off of the frame or spill related
88 stores, only one store to an address is necessary because those
89 stores die at the end of the function.
91 3) Set up the global dataflow equations based on processing the
92 info parsed in the first step.
94 4) Solve the dataflow equations.
96 5) Delete the insns that the global analysis has indicated are
97 unnecessary.
99 6) Delete insns that store the same value as a preceding store
100 where the earlier store couldn't be eliminated.
102 7) Cleanup.
104 Step 1 uses cselib and canon_rtx to build the largest expression
105 possible for each address. This scan is a forwards pass through
106 each basic block. From the point of view of the global technique,
107 the first pass could examine a block in either direction. The
108 forwards ordering is to accommodate cselib.
110 We make a simplifying assumption: addresses fall into four broad
111 categories:
113 1) base has rtx_varies_p == false, offset is constant.
114 2) base has rtx_varies_p == false, offset variable.
115 3) base has rtx_varies_p == true, offset constant.
116 4) base has rtx_varies_p == true, offset variable.
118 The local passes are able to process all 4 kinds of addresses. The
119 global pass only handles (1).
121 The global problem is formulated as follows:
123 A store, S1, to address A, where A is not relative to the stack
124 frame, can be eliminated if all paths from S1 to the end of the
125 function contain another store to A before a read to A.
127 If the address A is relative to the stack frame, a store S2 to A
128 can be eliminated if there are no paths from S2 that reach the
129 end of the function that read A before another store to A. In
130 this case S2 can be deleted if there are paths from S2 to the
131 end of the function that have no reads or writes to A. This
132 second case allows stores to the stack frame to be deleted that
133 would otherwise die when the function returns. This cannot be
134 done if stores_off_frame_dead_at_return is not true. See the doc
135 for that variable for when it is false.
137 The global problem is formulated as a backwards set union
138 dataflow problem where the stores are the gens and reads are the
139 kills. Set union problems are rare and require some special
140 handling given our representation of bitmaps. A straightforward
141 implementation requires a lot of bitmaps filled with 1s.
142 These are expensive and cumbersome in our bitmap formulation so
143 care has been taken to avoid large vectors filled with 1s. See
144 the comments in bb_info and in the dataflow confluence functions
145 for details.
147 There are two places for further enhancements to this algorithm:
149 1) The original dse which was embedded in a pass called flow also
150 did local address forwarding. For example in
152 A <- r100
153 ... <- A
155 flow would replace the right hand side of the second insn with a
156 reference to r100. Most of the information is available to add this
157 to this pass. It has not been done because it is a lot of work in
158 the case that either r100 is assigned to between the first and
159 second insn and/or the second insn is a load of part of the value
160 stored by the first insn.
162 insn 5 in gcc.c-torture/compile/990203-1.c simple case.
163 insn 15 in gcc.c-torture/execute/20001017-2.c simple case.
164 insn 25 in gcc.c-torture/execute/20001026-1.c simple case.
165 insn 44 in gcc.c-torture/execute/20010910-1.c simple case.
167 2) The cleaning up of spill code is quite profitable. It currently
168 depends on reading tea leaves and chicken entrails left by reload.
169 This pass depends on reload creating a singleton alias set for each
170 spill slot and telling the next dse pass which of these alias sets
171 are the singletons. Rather than analyze the addresses of the
172 spills, dse's spill processing just does analysis of the loads and
173 stores that use those alias sets. There are three cases where this
174 falls short:
176 a) Reload sometimes creates the slot for one mode of access, and
177 then inserts loads and/or stores for a smaller mode. In this
178 case, the current code just punts on the slot. The proper thing
179 to do is to back out and use one bit vector position for each
180 byte of the entity associated with the slot. This depends on
181 KNOWING that reload always generates the accesses for each of the
182 bytes in some canonical (read that easy to understand several
183 passes after reload happens) way.
185 b) Reload sometimes decides that the spill slot it allocated was not
186 large enough for the mode and goes back and allocates more slots
187 with the same mode and alias set. The backout in this case is a
188 little more graceful than (a). In this case the slot is unmarked
189 as being a spill slot and if the final address comes out to be based
190 off the frame pointer, the global algorithm handles this slot.
192 c) For any pass that may prespill, there is currently no
193 mechanism to tell the dse pass that the slot being used has the
194 special properties that reload uses. It may be that all that is
195 required is to have those passes make the same calls that reload
196 does, assuming that the alias sets can be manipulated in the same
197 way. */
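/* As a concrete, purely illustrative example of the local technique
   (a sketch, not taken from the original comments):

     p[0] = x;   // dead if nothing reads p[0] before the next store
     p[0] = y;

   cselib gives both MEMs the same base value, record_store notices that
   the second store covers every byte of the first, and the first insn
   is deleted.  The global technique would additionally require the base
   to be a function invariant, i.e. a constant address or one relative
   to the frame pointer.  */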
199 /* There are limits to the size of constant offsets we model for the
200 global problem. There are certainly test cases that exceed this
201 limit; however, it is unlikely that there are important programs
202 that really have constant offsets this size. */
203 #define MAX_OFFSET (64 * 1024)
206 static bitmap scratch = NULL;
207 struct insn_info;
209 /* This structure holds information about a candidate store. */
210 struct store_info
213 /* False means this is a clobber. */
214 bool is_set;
216 /* False if a single HOST_WIDE_INT bitmap is used for positions_needed. */
217 bool is_large;
219 /* The id of the mem group of the base address. If rtx_varies_p is
220 true, this is -1. Otherwise, it is the index into the group
221 table. */
222 int group_id;
224 /* This is the cselib value. */
225 cselib_val *cse_base;
227 /* The canonized mem. */
228 rtx mem;
230 /* Canonized MEM address for use by canon_true_dependence. */
231 rtx mem_addr;
233 /* If this is non-zero, it is the alias set of a spill location. */
234 alias_set_type alias_set;
236 /* The offsets of the first byte and the byte after the last byte
237 associated with the operation. */
238 HOST_WIDE_INT begin, end;
240 union
242 /* A bitmask with one bit per byte of the store; a bit is 1 if that
243 byte may be needed. The store is unused if all of the bits are 0.
244 This is used if IS_LARGE is false. */
245 unsigned HOST_WIDE_INT small_bitmask;
247 struct
249 /* A bitmap with one bit per byte. Cleared bit means the position
250 is needed. Used if IS_LARGE is true. */
251 bitmap bmap;
253 /* Number of set bits (i.e. unneeded bytes) in BITMAP. If it is
254 equal to END - BEGIN, the whole store is unused. */
255 int count;
256 } large;
257 } positions_needed;
259 /* The next store info for this insn. */
260 struct store_info *next;
262 /* The right hand side of the store. This is used if there is a
263 subsequent reload of the mem's address somewhere later in the
264 basic block. */
265 rtx rhs;
267 /* If rhs is or holds a constant, this contains that constant,
268 otherwise NULL. */
269 rtx const_rhs;
271 /* Set if this store stores the same constant value as REDUNDANT_REASON
272 insn stored. These aren't eliminated early, because doing that
273 might prevent the earlier larger store from being eliminated. */
274 struct insn_info *redundant_reason;
277 /* Return a bitmask with the first N low bits set. */
279 static unsigned HOST_WIDE_INT
280 lowpart_bitmask (int n)
282 unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT) 0;
283 return mask >> (HOST_BITS_PER_WIDE_INT - n);
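/* For instance, lowpart_bitmask (3) yields 0x7 and lowpart_bitmask (8)
   yields 0xff; record_store uses this to seed
   positions_needed.small_bitmask with one set bit per byte of a newly
   recorded store.  (Illustrative note; N is assumed to be in the range
   1 .. HOST_BITS_PER_WIDE_INT.)  */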
286 typedef struct store_info *store_info_t;
287 static alloc_pool cse_store_info_pool;
288 static alloc_pool rtx_store_info_pool;
290 /* This structure holds information about a load. These are only
291 built for rtx bases. */
292 struct read_info
294 /* The id of the mem group of the base address. */
295 int group_id;
297 /* If this is non-zero, it is the alias set of a spill location. */
298 alias_set_type alias_set;
300 /* The offsets of the first byte and the byte after the last byte associated
301 with the operation. If begin == end == 0, the read did not have
302 a constant offset. */
303 int begin, end;
305 /* The mem being read. */
306 rtx mem;
308 /* The next read_info for this insn. */
309 struct read_info *next;
311 typedef struct read_info *read_info_t;
312 static alloc_pool read_info_pool;
315 /* One of these records is created for each insn. */
317 struct insn_info
319 /* Set true if the insn contains a store but the insn itself cannot
320 be deleted. This is set if the insn is a parallel and there is
321 more than one non-dead output or if the insn is in some way
322 volatile. */
323 bool cannot_delete;
325 /* This field is only used by the global algorithm. It is set true
326 if the insn contains any read of mem except for a (1). This is
327 also set if the insn is a call or has a clobber mem. If the insn
328 contains a wild read, the read_rec will be null. */
329 bool wild_read;
331 /* This is true only for CALL instructions which could potentially read
332 any non-frame memory location. This field is used by the global
333 algorithm. */
334 bool non_frame_wild_read;
336 /* This field is only used for the processing of const functions.
337 These functions cannot read memory, but they can read the stack
338 because that is where they may get their parms. We need to be
339 this conservative because, like the store motion pass, we don't
340 consider CALL_INSN_FUNCTION_USAGE when processing call insns.
341 Moreover, we need to distinguish two cases:
342 1. Before reload (register elimination), the stores related to
343 outgoing arguments are stack pointer based and thus deemed
344 of non-constant base in this pass. This requires special
345 handling but also means that the frame pointer based stores
346 need not be killed upon encountering a const function call.
347 2. After reload, the stores related to outgoing arguments can be
348 either stack pointer or hard frame pointer based. This means
349 that we have no other choice than also killing all the frame
350 pointer based stores upon encountering a const function call.
351 This field is set after reload for const function calls. Having
352 this set is less severe than a wild read, it just means that all
353 the frame related stores are killed rather than all the stores. */
354 bool frame_read;
356 /* This field is only used for the processing of const functions.
357 It is set if the insn may contain a stack pointer based store. */
358 bool stack_pointer_based;
360 /* This is true if any of the sets within the store contains a
361 cselib base. Such stores can only be deleted by the local
362 algorithm. */
363 bool contains_cselib_groups;
365 /* The insn. */
366 rtx insn;
368 /* The list of mem sets or mem clobbers that are contained in this
369 insn. If the insn is deletable, it contains only one mem set.
370 But it could also contain clobbers. Insns that contain more than
371 one mem set are not deletable, but each of those mems is here in
372 order to provide info to delete other insns. */
373 store_info_t store_rec;
375 /* The linked list of mem uses in this insn. Only the reads from
376 rtx bases are listed here. The reads to cselib bases are
377 completely processed during the first scan and so are never
378 created. */
379 read_info_t read_rec;
381 /* The live fixed registers. We assume only fixed registers can
382 cause trouble by being clobbered from an expanded pattern;
383 storing only the live fixed registers (rather than all registers)
384 means less memory needs to be allocated / copied for the individual
385 stores. */
386 regset fixed_regs_live;
388 /* The prev insn in the basic block. */
389 struct insn_info * prev_insn;
391 /* The linked list of insns that are in consideration for removal in
392 the forwards pass through the basic block. This pointer may be
393 trash as it is not cleared when a wild read occurs. The only
394 time it is guaranteed to be correct is when the traversal starts
395 at active_local_stores. */
396 struct insn_info * next_local_store;
399 typedef struct insn_info *insn_info_t;
400 static alloc_pool insn_info_pool;
402 /* The linked list of stores that are under consideration in this
403 basic block. */
404 static insn_info_t active_local_stores;
405 static int active_local_stores_len;
407 struct bb_info
410 /* Pointer to the insn info for the last insn in the block. These
411 are linked so this is how all of the insns are reached. During
412 scanning this is the current insn being scanned. */
413 insn_info_t last_insn;
415 /* The info for the global dataflow problem. */
418 /* This is set if the transfer function should AND in the wild_read
419 bitmap before applying the kill and gen sets. That vector knocks
420 out most of the bits in the bitmap and thus speeds up the
421 operations. */
422 bool apply_wild_read;
424 /* The following 4 bitvectors hold information about which positions
425 of which stores are live or dead. They are indexed by
426 get_bitmap_index. */
428 /* The set of store positions that exist in this block before a wild read. */
429 bitmap gen;
431 /* The set of load positions that exist in this block above the
432 same position of a store. */
433 bitmap kill;
435 /* The set of stores that reach the top of the block without being
436 killed by a read.
438 Do not represent the in if it is all ones. Note that this is
439 what the bitvector should logically be initialized to for a set
440 intersection problem. However, like the kill set, this is too
441 expensive. So initially, the in set will only be created for the
442 exit block and any block that contains a wild read. */
443 bitmap in;
445 /* The set of stores that reach the bottom of the block from its
446 successors.
448 Do not represent the in if it is all ones. Note that this is
449 what the bitvector should logically be initialized to for a set
450 intersection problem. However, like the kill and in set, this is
451 too expensive. So what is done is that the confluence operator
452 just initializes the vector from one of the out sets of the
453 successors of the block. */
454 bitmap out;
456 /* The following bitvector is indexed by the reg number. It
457 contains the set of regs that are live at the current instruction
458 being processed. While it contains info for all of the
459 registers, only the hard registers are actually examined. It is used
460 to assure that shift and/or add sequences that are inserted do not
461 accidentally clobber live hard regs. */
462 bitmap regs_live;
465 typedef struct bb_info *bb_info_t;
466 static alloc_pool bb_info_pool;
468 /* Table to hold all bb_infos. */
469 static bb_info_t *bb_table;
471 /* There is a group_info for each rtx base that is used to reference
472 memory. There are also not many of the rtx bases because they are
473 very limited in scope. */
475 struct group_info
477 /* The actual base of the address. */
478 rtx rtx_base;
480 /* The sequential id of the base. This allows us to have a
481 canonical ordering of these that is not based on addresses. */
482 int id;
484 /* True if there are any positions that are to be processed
485 globally. */
486 bool process_globally;
488 /* True if the base of this group is either the frame_pointer or
489 hard_frame_pointer. */
490 bool frame_related;
492 /* A mem wrapped around the base pointer for the group in order to do
493 read dependency. It must be given BLKmode in order to encompass all
494 the possible offsets from the base. */
495 rtx base_mem;
497 /* Canonized version of base_mem's address. */
498 rtx canon_base_addr;
500 /* These two sets of two bitmaps are used to keep track of how many
501 stores are actually referencing that position from this base. We
502 only do this for rtx bases as this will be used to assign
503 positions in the bitmaps for the global problem. Bit N is set in
504 store1 on the first store for offset N. Bit N is set in store2
505 for the second store to offset N. This is all we need since we
506 only care about offsets that have two or more stores for them.
508 The "_n" suffix is for offsets less than 0 and the "_p" suffix is
509 for 0 and greater offsets.
511 There is one special case here: for stores into the stack frame,
512 we will OR store1 into store2 before deciding which stores to look
513 at globally. This is because stores to the stack frame that have
514 no other reads before the end of the function can also be
515 deleted. */
516 bitmap store1_n, store1_p, store2_n, store2_p;
518 /* These bitmaps keep track of which offsets in this group escape this function.
519 An offset escapes if it corresponds to a named variable whose
520 addressable flag is set. */
521 bitmap escaped_n, escaped_p;
523 /* The positions in this bitmap have the same assignments as the in,
524 out, gen and kill bitmaps. This bitmap is all zeros except for
525 the positions that are occupied by stores for this group. */
526 bitmap group_kill;
528 /* The offset_map is used to map the offsets from this base into
529 positions in the global bitmaps. It is only created after all of
530 the stores have been scanned and we know which ones we
531 care about. */
532 int *offset_map_n, *offset_map_p;
533 int offset_map_size_n, offset_map_size_p;
535 typedef struct group_info *group_info_t;
536 typedef const struct group_info *const_group_info_t;
537 static alloc_pool rtx_group_info_pool;
539 /* Tables of group_info structures, hashed by base value. */
540 static htab_t rtx_group_table;
542 /* Index into the rtx_group_vec. */
543 static int rtx_group_next_id;
545 DEF_VEC_P(group_info_t);
546 DEF_VEC_ALLOC_P(group_info_t,heap);
548 static VEC(group_info_t,heap) *rtx_group_vec;
551 /* This structure holds the set of changes that are being deferred
552 when removing a read operation. See replace_read. */
553 struct deferred_change
556 /* The mem that is being replaced. */
557 rtx *loc;
559 /* The reg it is being replaced with. */
560 rtx reg;
562 struct deferred_change *next;
565 typedef struct deferred_change *deferred_change_t;
566 static alloc_pool deferred_change_pool;
568 static deferred_change_t deferred_change_list = NULL;
570 /* These are used to hold the alias sets of spill variables. Since
571 these are never aliased and there may be a lot of them, it makes
572 sense to treat them specially. This bitvector is only allocated in
573 calls from dse_record_singleton_alias_set which currently is only
574 made during reload1. So when dse is called before reload this
575 mechanism does nothing. */
577 static bitmap clear_alias_sets = NULL;
579 /* The set of clear_alias_sets that have been disqualified because
580 there are loads or stores using a different mode than the alias set
581 was registered with. */
582 static bitmap disqualified_clear_alias_sets = NULL;
584 /* The group that holds all of the clear_alias_sets. */
585 static group_info_t clear_alias_group;
587 /* The modes of the clear_alias_sets. */
588 static htab_t clear_alias_mode_table;
590 /* Hash table element to look up the mode for an alias set. */
591 struct clear_alias_mode_holder
593 alias_set_type alias_set;
594 enum machine_mode mode;
597 static alloc_pool clear_alias_mode_pool;
599 /* This is true except if cfun->stdarg -- i.e. we cannot do
600 this for vararg functions because they play games with the frame. */
601 static bool stores_off_frame_dead_at_return;
603 /* Counter for stats. */
604 static int globally_deleted;
605 static int locally_deleted;
606 static int spill_deleted;
608 static bitmap all_blocks;
610 /* Locations that are killed by calls in the global phase. */
611 static bitmap kill_on_calls;
613 /* The number of bits used in the global bitmaps. */
614 static unsigned int current_position;
617 static bool gate_dse (void);
618 static bool gate_dse1 (void);
619 static bool gate_dse2 (void);
622 /*----------------------------------------------------------------------------
623 Zeroth step.
625 Initialization.
626 ----------------------------------------------------------------------------*/
628 /* Hashtable callbacks for clear_alias_mode_table, which maps the
629 alias set of a spill slot to the mode it was registered with. */
631 static int
632 clear_alias_mode_eq (const void *p1, const void *p2)
634 const struct clear_alias_mode_holder * h1
635 = (const struct clear_alias_mode_holder *) p1;
636 const struct clear_alias_mode_holder * h2
637 = (const struct clear_alias_mode_holder *) p2;
638 return h1->alias_set == h2->alias_set;
642 static hashval_t
643 clear_alias_mode_hash (const void *p)
645 const struct clear_alias_mode_holder *holder
646 = (const struct clear_alias_mode_holder *) p;
647 return holder->alias_set;
651 /* Find the entry associated with ALIAS_SET. */
653 static struct clear_alias_mode_holder *
654 clear_alias_set_lookup (alias_set_type alias_set)
656 struct clear_alias_mode_holder tmp_holder;
657 void **slot;
659 tmp_holder.alias_set = alias_set;
660 slot = htab_find_slot (clear_alias_mode_table, &tmp_holder, NO_INSERT);
661 gcc_assert (*slot);
663 return (struct clear_alias_mode_holder *) *slot;
667 /* Hashtable callbacks for maintaining the "bases" field of
668 store_group_info, given that the addresses are function invariants. */
670 static int
671 invariant_group_base_eq (const void *p1, const void *p2)
673 const_group_info_t gi1 = (const_group_info_t) p1;
674 const_group_info_t gi2 = (const_group_info_t) p2;
675 return rtx_equal_p (gi1->rtx_base, gi2->rtx_base);
679 static hashval_t
680 invariant_group_base_hash (const void *p)
682 const_group_info_t gi = (const_group_info_t) p;
683 int do_not_record;
684 return hash_rtx (gi->rtx_base, Pmode, &do_not_record, NULL, false);
688 /* Get the GROUP for BASE. Add a new group if it is not there. */
690 static group_info_t
691 get_group_info (rtx base)
693 struct group_info tmp_gi;
694 group_info_t gi;
695 void **slot;
697 if (base)
699 /* Find the group_info structure for BASE, creating a new one
700 if necessary. */
701 tmp_gi.rtx_base = base;
702 slot = htab_find_slot (rtx_group_table, &tmp_gi, INSERT);
703 gi = (group_info_t) *slot;
705 else
707 if (!clear_alias_group)
709 clear_alias_group = gi =
710 (group_info_t) pool_alloc (rtx_group_info_pool);
711 memset (gi, 0, sizeof (struct group_info));
712 gi->id = rtx_group_next_id++;
713 gi->store1_n = BITMAP_ALLOC (NULL);
714 gi->store1_p = BITMAP_ALLOC (NULL);
715 gi->store2_n = BITMAP_ALLOC (NULL);
716 gi->store2_p = BITMAP_ALLOC (NULL);
717 gi->escaped_p = BITMAP_ALLOC (NULL);
718 gi->escaped_n = BITMAP_ALLOC (NULL);
719 gi->group_kill = BITMAP_ALLOC (NULL);
720 gi->process_globally = false;
721 gi->offset_map_size_n = 0;
722 gi->offset_map_size_p = 0;
723 gi->offset_map_n = NULL;
724 gi->offset_map_p = NULL;
725 VEC_safe_push (group_info_t, heap, rtx_group_vec, gi);
727 return clear_alias_group;
730 if (gi == NULL)
732 *slot = gi = (group_info_t) pool_alloc (rtx_group_info_pool);
733 gi->rtx_base = base;
734 gi->id = rtx_group_next_id++;
735 gi->base_mem = gen_rtx_MEM (BLKmode, base);
736 gi->canon_base_addr = canon_rtx (base);
737 gi->store1_n = BITMAP_ALLOC (NULL);
738 gi->store1_p = BITMAP_ALLOC (NULL);
739 gi->store2_n = BITMAP_ALLOC (NULL);
740 gi->store2_p = BITMAP_ALLOC (NULL);
741 gi->escaped_p = BITMAP_ALLOC (NULL);
742 gi->escaped_n = BITMAP_ALLOC (NULL);
743 gi->group_kill = BITMAP_ALLOC (NULL);
744 gi->process_globally = false;
745 gi->frame_related =
746 (base == frame_pointer_rtx) || (base == hard_frame_pointer_rtx);
747 gi->offset_map_size_n = 0;
748 gi->offset_map_size_p = 0;
749 gi->offset_map_n = NULL;
750 gi->offset_map_p = NULL;
751 VEC_safe_push (group_info_t, heap, rtx_group_vec, gi);
754 return gi;
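/* Usage note (illustrative, not from the original comments):
   canon_address calls this with the canonicalized base once
   const_or_frame_p has accepted it, and because the table hashes on
   rtx_equal_p of rtx_base, the same SYMBOL_REF or frame pointer base
   always maps to the same group and hence to stable positions in the
   global bitmaps.  The BASE == NULL case is only exercised when spill
   alias sets are being tracked (see clear_alias_sets).  */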
758 /* Initialization of data structures. */
760 static void
761 dse_step0 (void)
763 locally_deleted = 0;
764 globally_deleted = 0;
765 spill_deleted = 0;
767 scratch = BITMAP_ALLOC (NULL);
768 kill_on_calls = BITMAP_ALLOC (NULL);
770 rtx_store_info_pool
771 = create_alloc_pool ("rtx_store_info_pool",
772 sizeof (struct store_info), 100);
773 read_info_pool
774 = create_alloc_pool ("read_info_pool",
775 sizeof (struct read_info), 100);
776 insn_info_pool
777 = create_alloc_pool ("insn_info_pool",
778 sizeof (struct insn_info), 100);
779 bb_info_pool
780 = create_alloc_pool ("bb_info_pool",
781 sizeof (struct bb_info), 100);
782 rtx_group_info_pool
783 = create_alloc_pool ("rtx_group_info_pool",
784 sizeof (struct group_info), 100);
785 deferred_change_pool
786 = create_alloc_pool ("deferred_change_pool",
787 sizeof (struct deferred_change), 10);
789 rtx_group_table = htab_create (11, invariant_group_base_hash,
790 invariant_group_base_eq, NULL);
792 bb_table = XCNEWVEC (bb_info_t, last_basic_block);
793 rtx_group_next_id = 0;
795 stores_off_frame_dead_at_return = !cfun->stdarg;
797 init_alias_analysis ();
799 if (clear_alias_sets)
800 clear_alias_group = get_group_info (NULL);
801 else
802 clear_alias_group = NULL;
807 /*----------------------------------------------------------------------------
808 First step.
810 Scan all of the insns. Any random ordering of the blocks is fine.
811 Each block is scanned in forward order to accommodate cselib which
812 is used to remove stores with non-constant bases.
813 ----------------------------------------------------------------------------*/
815 /* Delete all of the store_info recs from INSN_INFO. */
817 static void
818 free_store_info (insn_info_t insn_info)
820 store_info_t store_info = insn_info->store_rec;
821 while (store_info)
823 store_info_t next = store_info->next;
824 if (store_info->is_large)
825 BITMAP_FREE (store_info->positions_needed.large.bmap);
826 if (store_info->cse_base)
827 pool_free (cse_store_info_pool, store_info);
828 else
829 pool_free (rtx_store_info_pool, store_info);
830 store_info = next;
833 insn_info->cannot_delete = true;
834 insn_info->contains_cselib_groups = false;
835 insn_info->store_rec = NULL;
838 typedef struct
840 rtx first, current;
841 regset fixed_regs_live;
842 bool failure;
843 } note_add_store_info;
845 /* Callback for emit_inc_dec_insn_before via note_stores.
846 Check if a register is clobbered which is live afterwards. */
848 static void
849 note_add_store (rtx loc, const_rtx expr ATTRIBUTE_UNUSED, void *data)
851 rtx insn;
852 note_add_store_info *info = (note_add_store_info *) data;
853 int r, n;
855 if (!REG_P (loc))
856 return;
858 /* If this register is referenced by the current or an earlier insn,
859 that's OK. E.g. this applies to the register that is being incremented
860 with this addition. */
861 for (insn = info->first;
862 insn != NEXT_INSN (info->current);
863 insn = NEXT_INSN (insn))
864 if (reg_referenced_p (loc, PATTERN (insn)))
865 return;
867 /* If we come here, we have a clobber of a register that's only OK
868 if that register is not live. If we don't have liveness information
869 available, fail now. */
870 if (!info->fixed_regs_live)
872 info->failure = true;
873 return;
875 /* Now check if this is a live fixed register. */
876 r = REGNO (loc);
877 n = hard_regno_nregs[r][GET_MODE (loc)];
878 while (--n >= 0)
879 if (REGNO_REG_SET_P (info->fixed_regs_live, r+n))
880 info->failure = true;
883 /* Callback for for_each_inc_dec that emits an INSN that sets DEST to
884 SRC + SRCOFF before insn ARG. */
886 static int
887 emit_inc_dec_insn_before (rtx mem ATTRIBUTE_UNUSED,
888 rtx op ATTRIBUTE_UNUSED,
889 rtx dest, rtx src, rtx srcoff, void *arg)
891 insn_info_t insn_info = (insn_info_t) arg;
892 rtx insn = insn_info->insn, new_insn, cur;
893 note_add_store_info info;
895 /* We can reuse all operands without copying, because we are about
896 to delete the insn that contained it. */
897 if (srcoff)
899 start_sequence ();
900 emit_insn (gen_add3_insn (dest, src, srcoff));
901 new_insn = get_insns ();
902 end_sequence ();
904 else
905 new_insn = gen_move_insn (dest, src);
906 info.first = new_insn;
907 info.fixed_regs_live = insn_info->fixed_regs_live;
908 info.failure = false;
909 for (cur = new_insn; cur; cur = NEXT_INSN (cur))
911 info.current = cur;
912 note_stores (PATTERN (cur), note_add_store, &info);
915 /* If a failure was flagged above, return 1 so that for_each_inc_dec will
916 return it immediately, communicating the failure to its caller. */
917 if (info.failure)
918 return 1;
920 emit_insn_before (new_insn, insn);
922 return -1;
925 /* Before we delete INSN_INFO->INSN, make sure that the auto inc/dec, if it
926 is there, is split into a separate insn.
927 Return true on success (or if there was nothing to do), false on failure. */
929 static bool
930 check_for_inc_dec_1 (insn_info_t insn_info)
932 rtx insn = insn_info->insn;
933 rtx note = find_reg_note (insn, REG_INC, NULL_RTX);
934 if (note)
935 return for_each_inc_dec (&insn, emit_inc_dec_insn_before, insn_info) == 0;
936 return true;
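/* A sketch of what the splitting above achieves (illustrative, not from
   the original comments): when the dead store looks like

     (set (mem (post_inc (reg R))) (reg V))

   the side effect on R must outlive the store, so
   emit_inc_dec_insn_before materializes a separate

     (set (reg R) (plus (reg R) (const_int <size>)))

   before the doomed insn, and only if that succeeds is the store
   actually deleted.  */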
940 /* Entry point for postreload. If you work on reload_cse, or you need this
941 anywhere else, consider if you can provide register liveness information
942 and add a parameter to this function so that it can be passed down in
943 insn_info.fixed_regs_live. */
944 bool
945 check_for_inc_dec (rtx insn)
947 struct insn_info insn_info;
948 rtx note;
950 insn_info.insn = insn;
951 insn_info.fixed_regs_live = NULL;
952 note = find_reg_note (insn, REG_INC, NULL_RTX);
953 if (note)
954 return for_each_inc_dec (&insn, emit_inc_dec_insn_before, &insn_info) == 0;
955 return true;
958 /* Delete the insn and free all of the fields inside INSN_INFO. */
960 static void
961 delete_dead_store_insn (insn_info_t insn_info)
963 read_info_t read_info;
965 if (!dbg_cnt (dse))
966 return;
968 if (!check_for_inc_dec_1 (insn_info))
969 return;
970 if (dump_file)
972 fprintf (dump_file, "Locally deleting insn %d ",
973 INSN_UID (insn_info->insn));
974 if (insn_info->store_rec->alias_set)
975 fprintf (dump_file, "alias set %d\n",
976 (int) insn_info->store_rec->alias_set);
977 else
978 fprintf (dump_file, "\n");
981 free_store_info (insn_info);
982 read_info = insn_info->read_rec;
984 while (read_info)
986 read_info_t next = read_info->next;
987 pool_free (read_info_pool, read_info);
988 read_info = next;
990 insn_info->read_rec = NULL;
992 delete_insn (insn_info->insn);
993 locally_deleted++;
994 insn_info->insn = NULL;
996 insn_info->wild_read = false;
999 /* Return whether DECL, a local variable, can possibly escape the current
1000 function scope. */
1002 static bool
1003 local_variable_can_escape (tree decl)
1005 if (TREE_ADDRESSABLE (decl))
1006 return true;
1008 /* If this is a partitioned variable, we need to consider all the variables
1009 in the partition. This is necessary because a store into one of them can
1010 be replaced with a store into another and this may not change the outcome
1011 of the escape analysis. */
1012 if (cfun->gimple_df->decls_to_pointers != NULL)
1014 void *namep
1015 = pointer_map_contains (cfun->gimple_df->decls_to_pointers, decl);
1016 if (namep)
1017 return TREE_ADDRESSABLE (*(tree *)namep);
1020 return false;
1023 /* Return whether EXPR can possibly escape the current function scope. */
1025 static bool
1026 can_escape (tree expr)
1028 tree base;
1029 if (!expr)
1030 return true;
1031 base = get_base_address (expr);
1032 if (DECL_P (base)
1033 && !may_be_aliased (base)
1034 && !(TREE_CODE (base) == VAR_DECL
1035 && !DECL_EXTERNAL (base)
1036 && !TREE_STATIC (base)
1037 && local_variable_can_escape (base)))
1038 return false;
1039 return true;
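/* Illustrative consequence (a sketch; the interaction with calls lives
   in later steps of this file): a store into a local whose address is
   never taken makes can_escape return false, so set_usage_bits does not
   add its positions to the escaped_* bitmaps and they stay out of
   kill_on_calls; a store through an addressable variable remains
   conservatively "escaped" and can be killed at call sites in the
   global phase.  */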
1042 /* Set the store* bitmaps and the offset_map_size* fields in GROUP based on
1043 OFFSET and WIDTH. */
1045 static void
1046 set_usage_bits (group_info_t group, HOST_WIDE_INT offset, HOST_WIDE_INT width,
1047 tree expr)
1049 HOST_WIDE_INT i;
1050 bool expr_escapes = can_escape (expr);
1051 if (offset > -MAX_OFFSET && offset + width < MAX_OFFSET)
1052 for (i=offset; i<offset+width; i++)
1054 bitmap store1;
1055 bitmap store2;
1056 bitmap escaped;
1057 int ai;
1058 if (i < 0)
1060 store1 = group->store1_n;
1061 store2 = group->store2_n;
1062 escaped = group->escaped_n;
1063 ai = -i;
1065 else
1067 store1 = group->store1_p;
1068 store2 = group->store2_p;
1069 escaped = group->escaped_p;
1070 ai = i;
1073 if (!bitmap_set_bit (store1, ai))
1074 bitmap_set_bit (store2, ai);
1075 else
1077 if (i < 0)
1079 if (group->offset_map_size_n < ai)
1080 group->offset_map_size_n = ai;
1082 else
1084 if (group->offset_map_size_p < ai)
1085 group->offset_map_size_p = ai;
1088 if (expr_escapes)
1089 bitmap_set_bit (escaped, ai);
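/* For example (illustrative): the first store seen at offset 4 from a
   group base sets bit 4 in store1_p; a second store to the same offset
   also sets bit 4 in store2_p, which is what later qualifies that
   position for the global problem.  bitmap_set_bit returns true only
   when the bit was not already set, hence the negated test above.  */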
1093 static void
1094 reset_active_stores (void)
1096 active_local_stores = NULL;
1097 active_local_stores_len = 0;
1100 /* Free all READ_REC of the LAST_INSN of BB_INFO. */
1102 static void
1103 free_read_records (bb_info_t bb_info)
1105 insn_info_t insn_info = bb_info->last_insn;
1106 read_info_t *ptr = &insn_info->read_rec;
1107 while (*ptr)
1109 read_info_t next = (*ptr)->next;
1110 if ((*ptr)->alias_set == 0)
1112 pool_free (read_info_pool, *ptr);
1113 *ptr = next;
1115 else
1116 ptr = &(*ptr)->next;
1120 /* Set the BB_INFO so that the last insn is marked as a wild read. */
1122 static void
1123 add_wild_read (bb_info_t bb_info)
1125 insn_info_t insn_info = bb_info->last_insn;
1126 insn_info->wild_read = true;
1127 free_read_records (bb_info);
1128 reset_active_stores ();
1131 /* Set the BB_INFO so that the last insn is marked as a wild read of
1132 non-frame locations. */
1134 static void
1135 add_non_frame_wild_read (bb_info_t bb_info)
1137 insn_info_t insn_info = bb_info->last_insn;
1138 insn_info->non_frame_wild_read = true;
1139 free_read_records (bb_info);
1140 reset_active_stores ();
1143 /* Return true if X is a constant or one of the registers that behave
1144 as a constant over the life of a function. This is equivalent to
1145 !rtx_varies_p for memory addresses. */
1147 static bool
1148 const_or_frame_p (rtx x)
1150 switch (GET_CODE (x))
1152 case CONST:
1153 case CONST_INT:
1154 case CONST_DOUBLE:
1155 case CONST_VECTOR:
1156 case SYMBOL_REF:
1157 case LABEL_REF:
1158 return true;
1160 case REG:
1161 /* Note that we have to test for the actual rtx used for the frame
1162 and arg pointers and not just the register number in case we have
1163 eliminated the frame and/or arg pointer and are using it
1164 for pseudos. */
1165 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
1166 /* The arg pointer varies if it is not a fixed register. */
1167 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
1168 || x == pic_offset_table_rtx)
1169 return true;
1170 return false;
1172 default:
1173 return false;
1177 /* Take all reasonable action to put the address of MEM into the form
1178 that we can do analysis on.
1180 The gold standard is to get the address into the form: address +
1181 OFFSET where address is something that rtx_varies_p considers a
1182 constant. When we can get the address in this form, we can do
1183 global analysis on it. Note that for constant bases, address is
1184 not actually returned, only the group_id. The address can be
1185 obtained from that.
1187 If that fails, we try cselib to get a value we can at least use
1188 locally. If that fails we return false.
1190 The GROUP_ID is set to -1 for cselib bases and the index of the
1191 group for non_varying bases. */
1195 static bool
1196 canon_address (rtx mem,
1197 alias_set_type *alias_set_out,
1198 int *group_id,
1199 HOST_WIDE_INT *offset,
1200 cselib_val **base)
1202 enum machine_mode address_mode
1203 = targetm.addr_space.address_mode (MEM_ADDR_SPACE (mem));
1204 rtx mem_address = XEXP (mem, 0);
1205 rtx expanded_address, address;
1206 int expanded;
1208 /* Make sure that cselib has initialized all of the operands of
1209 the address before asking it to do the subst. */
1211 if (clear_alias_sets)
1213 /* If this is a spill, do not do any further processing. */
1214 alias_set_type alias_set = MEM_ALIAS_SET (mem);
1215 if (dump_file)
1216 fprintf (dump_file, "found alias set %d\n", (int) alias_set);
1217 if (bitmap_bit_p (clear_alias_sets, alias_set))
1219 struct clear_alias_mode_holder *entry
1220 = clear_alias_set_lookup (alias_set);
1222 /* If the modes do not match, we cannot process this set. */
1223 if (entry->mode != GET_MODE (mem))
1225 if (dump_file)
1226 fprintf (dump_file,
1227 "disqualifying alias set %d, (%s) != (%s)\n",
1228 (int) alias_set, GET_MODE_NAME (entry->mode),
1229 GET_MODE_NAME (GET_MODE (mem)));
1231 bitmap_set_bit (disqualified_clear_alias_sets, alias_set);
1232 return false;
1235 *alias_set_out = alias_set;
1236 *group_id = clear_alias_group->id;
1237 return true;
1241 *alias_set_out = 0;
1243 cselib_lookup (mem_address, address_mode, 1, GET_MODE (mem));
1245 if (dump_file)
1247 fprintf (dump_file, " mem: ");
1248 print_inline_rtx (dump_file, mem_address, 0);
1249 fprintf (dump_file, "\n");
1252 /* First see if just canon_rtx (mem_address) is const or frame,
1253 if not, try cselib_expand_value_rtx and call canon_rtx on that. */
1254 address = NULL_RTX;
1255 for (expanded = 0; expanded < 2; expanded++)
1257 if (expanded)
1259 /* Use cselib to replace all of the reg references with the full
1260 expression. This will take care of the case where we have
1262 r_x = base + offset;
1263 val = *r_x;
1265 by making it into
1267 val = *(base + offset); */
1269 expanded_address = cselib_expand_value_rtx (mem_address,
1270 scratch, 5);
1272 /* If this fails, just go with the address from first
1273 iteration. */
1274 if (!expanded_address)
1275 break;
1277 else
1278 expanded_address = mem_address;
1280 /* Split the address into canonical BASE + OFFSET terms. */
1281 address = canon_rtx (expanded_address);
1283 *offset = 0;
1285 if (dump_file)
1287 if (expanded)
1289 fprintf (dump_file, "\n after cselib_expand address: ");
1290 print_inline_rtx (dump_file, expanded_address, 0);
1291 fprintf (dump_file, "\n");
1294 fprintf (dump_file, "\n after canon_rtx address: ");
1295 print_inline_rtx (dump_file, address, 0);
1296 fprintf (dump_file, "\n");
1299 if (GET_CODE (address) == CONST)
1300 address = XEXP (address, 0);
1302 if (GET_CODE (address) == PLUS
1303 && CONST_INT_P (XEXP (address, 1)))
1305 *offset = INTVAL (XEXP (address, 1));
1306 address = XEXP (address, 0);
1309 if (ADDR_SPACE_GENERIC_P (MEM_ADDR_SPACE (mem))
1310 && const_or_frame_p (address))
1312 group_info_t group = get_group_info (address);
1314 if (dump_file)
1315 fprintf (dump_file, " gid=%d offset=%d \n",
1316 group->id, (int)*offset);
1317 *base = NULL;
1318 *group_id = group->id;
1319 return true;
1323 *base = cselib_lookup (address, address_mode, true, GET_MODE (mem));
1324 *group_id = -1;
1326 if (*base == NULL)
1328 if (dump_file)
1329 fprintf (dump_file, " no cselib val - should be a wild read.\n");
1330 return false;
1332 if (dump_file)
1333 fprintf (dump_file, " varying cselib base=%u:%u offset = %d\n",
1334 (*base)->uid, (*base)->hash, (int)*offset);
1335 return true;
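/* A worked example (a sketch, not from the original comments): a MEM
   whose address canonicalizes to (plus (reg frame_pointer) (const_int 8))
   passes const_or_frame_p, so canon_address returns true with *group_id
   set to that base's group and *offset == 8.  An address like
   (plus (reg pseudo1) (reg pseudo2)) has no invariant-plus-constant
   form, so it falls back to a cselib value with *group_id == -1, or to
   returning false (a wild read) if cselib cannot produce a value.  */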
1339 /* Clear the rhs field from the active_local_stores array. */
1341 static void
1342 clear_rhs_from_active_local_stores (void)
1344 insn_info_t ptr = active_local_stores;
1346 while (ptr)
1348 store_info_t store_info = ptr->store_rec;
1349 /* Skip the clobbers. */
1350 while (!store_info->is_set)
1351 store_info = store_info->next;
1353 store_info->rhs = NULL;
1354 store_info->const_rhs = NULL;
1356 ptr = ptr->next_local_store;
1361 /* Mark byte POS bytes from the beginning of store S_INFO as unneeded. */
1363 static inline void
1364 set_position_unneeded (store_info_t s_info, int pos)
1366 if (__builtin_expect (s_info->is_large, false))
1368 if (bitmap_set_bit (s_info->positions_needed.large.bmap, pos))
1369 s_info->positions_needed.large.count++;
1371 else
1372 s_info->positions_needed.small_bitmask
1373 &= ~(((unsigned HOST_WIDE_INT) 1) << pos);
1376 /* Mark the whole store S_INFO as unneeded. */
1378 static inline void
1379 set_all_positions_unneeded (store_info_t s_info)
1381 if (__builtin_expect (s_info->is_large, false))
1383 int pos, end = s_info->end - s_info->begin;
1384 for (pos = 0; pos < end; pos++)
1385 bitmap_set_bit (s_info->positions_needed.large.bmap, pos);
1386 s_info->positions_needed.large.count = end;
1388 else
1389 s_info->positions_needed.small_bitmask = (unsigned HOST_WIDE_INT) 0;
1392 /* Return TRUE if any bytes from S_INFO store are needed. */
1394 static inline bool
1395 any_positions_needed_p (store_info_t s_info)
1397 if (__builtin_expect (s_info->is_large, false))
1398 return (s_info->positions_needed.large.count
1399 < s_info->end - s_info->begin);
1400 else
1401 return (s_info->positions_needed.small_bitmask
1402 != (unsigned HOST_WIDE_INT) 0);
1405 /* Return TRUE if all bytes START through START+WIDTH-1 from S_INFO
1406 store are needed. */
1408 static inline bool
1409 all_positions_needed_p (store_info_t s_info, int start, int width)
1411 if (__builtin_expect (s_info->is_large, false))
1413 int end = start + width;
1414 while (start < end)
1415 if (bitmap_bit_p (s_info->positions_needed.large.bmap, start++))
1416 return false;
1417 return true;
1419 else
1421 unsigned HOST_WIDE_INT mask = lowpart_bitmask (width) << start;
1422 return (s_info->positions_needed.small_bitmask & mask) == mask;
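/* A small worked example of the small_bitmask encoding (illustrative):
   an 8-byte store starts out with small_bitmask == lowpart_bitmask (8)
   == 0xff.  If a later store fully covers bytes 2..5,
   set_position_unneeded clears those bits, leaving 0xc3;
   any_positions_needed_p is still true, and
   all_positions_needed_p (s_info, 0, 2) is still true because bits 0
   and 1 remain set, so only the outer bytes keep the insn alive.  */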
1427 static rtx get_stored_val (store_info_t, enum machine_mode, HOST_WIDE_INT,
1428 HOST_WIDE_INT, basic_block, bool);
1431 /* BODY is an instruction pattern that belongs to INSN. Return 1 if
1432 there is a candidate store, after adding it to the appropriate
1433 local store group if so. */
1435 static int
1436 record_store (rtx body, bb_info_t bb_info)
1438 rtx mem, rhs, const_rhs, mem_addr;
1439 HOST_WIDE_INT offset = 0;
1440 HOST_WIDE_INT width = 0;
1441 alias_set_type spill_alias_set;
1442 insn_info_t insn_info = bb_info->last_insn;
1443 store_info_t store_info = NULL;
1444 int group_id;
1445 cselib_val *base = NULL;
1446 insn_info_t ptr, last, redundant_reason;
1447 bool store_is_unused;
1449 if (GET_CODE (body) != SET && GET_CODE (body) != CLOBBER)
1450 return 0;
1452 mem = SET_DEST (body);
1454 /* If this is not used, then this cannot be used to keep the insn
1455 from being deleted. On the other hand, it does provide something
1456 that can be used to prove that another store is dead. */
1457 store_is_unused
1458 = (find_reg_note (insn_info->insn, REG_UNUSED, mem) != NULL);
1460 /* Check whether that value is a suitable memory location. */
1461 if (!MEM_P (mem))
1463 /* If the set or clobber is unused, then it does not affect our
1464 ability to get rid of the entire insn. */
1465 if (!store_is_unused)
1466 insn_info->cannot_delete = true;
1467 return 0;
1470 /* At this point we know mem is a mem. */
1471 if (GET_MODE (mem) == BLKmode)
1473 if (GET_CODE (XEXP (mem, 0)) == SCRATCH)
1475 if (dump_file)
1476 fprintf (dump_file, " adding wild read for (clobber (mem:BLK (scratch))\n");
1477 add_wild_read (bb_info);
1478 insn_info->cannot_delete = true;
1479 return 0;
1481 /* Handle (set (mem:BLK (addr) [... S36 ...]) (const_int 0))
1482 as memset (addr, 0, 36); */
1483 else if (!MEM_SIZE_KNOWN_P (mem)
1484 || MEM_SIZE (mem) <= 0
1485 || MEM_SIZE (mem) > MAX_OFFSET
1486 || GET_CODE (body) != SET
1487 || !CONST_INT_P (SET_SRC (body)))
1489 if (!store_is_unused)
1491 /* If the set or clobber is unused, then it does not affect our
1492 ability to get rid of the entire insn. */
1493 insn_info->cannot_delete = true;
1494 clear_rhs_from_active_local_stores ();
1496 return 0;
1500 /* We can still process a volatile mem, we just cannot delete it. */
1501 if (MEM_VOLATILE_P (mem))
1502 insn_info->cannot_delete = true;
1504 if (!canon_address (mem, &spill_alias_set, &group_id, &offset, &base))
1506 clear_rhs_from_active_local_stores ();
1507 return 0;
1510 if (GET_MODE (mem) == BLKmode)
1511 width = MEM_SIZE (mem);
1512 else
1514 width = GET_MODE_SIZE (GET_MODE (mem));
1515 gcc_assert ((unsigned) width <= HOST_BITS_PER_WIDE_INT);
1518 if (spill_alias_set)
1520 bitmap store1 = clear_alias_group->store1_p;
1521 bitmap store2 = clear_alias_group->store2_p;
1523 gcc_assert (GET_MODE (mem) != BLKmode);
1525 if (!bitmap_set_bit (store1, spill_alias_set))
1526 bitmap_set_bit (store2, spill_alias_set);
1528 if (clear_alias_group->offset_map_size_p < spill_alias_set)
1529 clear_alias_group->offset_map_size_p = spill_alias_set;
1531 store_info = (store_info_t) pool_alloc (rtx_store_info_pool);
1533 if (dump_file)
1534 fprintf (dump_file, " processing spill store %d(%s)\n",
1535 (int) spill_alias_set, GET_MODE_NAME (GET_MODE (mem)));
1537 else if (group_id >= 0)
1539 /* In the restrictive case where the base is a constant or the
1540 frame pointer we can do global analysis. */
1542 group_info_t group
1543 = VEC_index (group_info_t, rtx_group_vec, group_id);
1544 tree expr = MEM_EXPR (mem);
1546 store_info = (store_info_t) pool_alloc (rtx_store_info_pool);
1547 set_usage_bits (group, offset, width, expr);
1549 if (dump_file)
1550 fprintf (dump_file, " processing const base store gid=%d[%d..%d)\n",
1551 group_id, (int)offset, (int)(offset+width));
1553 else
1555 rtx base_term = find_base_term (XEXP (mem, 0));
1556 if (!base_term
1557 || (GET_CODE (base_term) == ADDRESS
1558 && GET_MODE (base_term) == Pmode
1559 && XEXP (base_term, 0) == stack_pointer_rtx))
1560 insn_info->stack_pointer_based = true;
1561 insn_info->contains_cselib_groups = true;
1563 store_info = (store_info_t) pool_alloc (cse_store_info_pool);
1564 group_id = -1;
1566 if (dump_file)
1567 fprintf (dump_file, " processing cselib store [%d..%d)\n",
1568 (int)offset, (int)(offset+width));
1571 const_rhs = rhs = NULL_RTX;
1572 if (GET_CODE (body) == SET
1573 /* No place to keep the value after ra. */
1574 && !reload_completed
1575 && (REG_P (SET_SRC (body))
1576 || GET_CODE (SET_SRC (body)) == SUBREG
1577 || CONSTANT_P (SET_SRC (body)))
1578 && !MEM_VOLATILE_P (mem)
1579 /* Sometimes the store and reload are used for truncation and
1580 rounding. */
1581 && !(FLOAT_MODE_P (GET_MODE (mem)) && (flag_float_store)))
1583 rhs = SET_SRC (body);
1584 if (CONSTANT_P (rhs))
1585 const_rhs = rhs;
1586 else if (body == PATTERN (insn_info->insn))
1588 rtx tem = find_reg_note (insn_info->insn, REG_EQUAL, NULL_RTX);
1589 if (tem && CONSTANT_P (XEXP (tem, 0)))
1590 const_rhs = XEXP (tem, 0);
1592 if (const_rhs == NULL_RTX && REG_P (rhs))
1594 rtx tem = cselib_expand_value_rtx (rhs, scratch, 5);
1596 if (tem && CONSTANT_P (tem))
1597 const_rhs = tem;
1601 /* Check to see if this store causes some other stores to be
1602 dead. */
1603 ptr = active_local_stores;
1604 last = NULL;
1605 redundant_reason = NULL;
1606 mem = canon_rtx (mem);
1607 /* For alias_set != 0 canon_true_dependence should never be called. */
1608 if (spill_alias_set)
1609 mem_addr = NULL_RTX;
1610 else
1612 if (group_id < 0)
1613 mem_addr = base->val_rtx;
1614 else
1616 group_info_t group
1617 = VEC_index (group_info_t, rtx_group_vec, group_id);
1618 mem_addr = group->canon_base_addr;
1620 if (offset)
1621 mem_addr = plus_constant (mem_addr, offset);
1624 while (ptr)
1626 insn_info_t next = ptr->next_local_store;
1627 store_info_t s_info = ptr->store_rec;
1628 bool del = true;
1630 /* Skip the clobbers. We delete the active insn if this insn
1631 shadows the set. To have been put on the active list, it
1632 has exactly one set. */
1633 while (!s_info->is_set)
1634 s_info = s_info->next;
1636 if (s_info->alias_set != spill_alias_set)
1637 del = false;
1638 else if (s_info->alias_set)
1640 struct clear_alias_mode_holder *entry
1641 = clear_alias_set_lookup (s_info->alias_set);
1642 /* Generally, spills cannot be processed if any of the
1643 references to the slot have a different mode. But if
1644 we are in the same block and mode is exactly the same
1645 between this store and one before in the same block,
1646 we can still delete it. */
1647 if ((GET_MODE (mem) == GET_MODE (s_info->mem))
1648 && (GET_MODE (mem) == entry->mode))
1650 del = true;
1651 set_all_positions_unneeded (s_info);
1653 if (dump_file)
1654 fprintf (dump_file, " trying spill store in insn=%d alias_set=%d\n",
1655 INSN_UID (ptr->insn), (int) s_info->alias_set);
1657 else if ((s_info->group_id == group_id)
1658 && (s_info->cse_base == base))
1660 HOST_WIDE_INT i;
1661 if (dump_file)
1662 fprintf (dump_file, " trying store in insn=%d gid=%d[%d..%d)\n",
1663 INSN_UID (ptr->insn), s_info->group_id,
1664 (int)s_info->begin, (int)s_info->end);
1666 /* Even if PTR won't be eliminated as unneeded, if both
1667 PTR and this insn store the same constant value, we might
1668 eliminate this insn instead. */
1669 if (s_info->const_rhs
1670 && const_rhs
1671 && offset >= s_info->begin
1672 && offset + width <= s_info->end
1673 && all_positions_needed_p (s_info, offset - s_info->begin,
1674 width))
1676 if (GET_MODE (mem) == BLKmode)
1678 if (GET_MODE (s_info->mem) == BLKmode
1679 && s_info->const_rhs == const_rhs)
1680 redundant_reason = ptr;
1682 else if (s_info->const_rhs == const0_rtx
1683 && const_rhs == const0_rtx)
1684 redundant_reason = ptr;
1685 else
1687 rtx val;
1688 start_sequence ();
1689 val = get_stored_val (s_info, GET_MODE (mem),
1690 offset, offset + width,
1691 BLOCK_FOR_INSN (insn_info->insn),
1692 true);
1693 if (get_insns () != NULL)
1694 val = NULL_RTX;
1695 end_sequence ();
1696 if (val && rtx_equal_p (val, const_rhs))
1697 redundant_reason = ptr;
1701 for (i = MAX (offset, s_info->begin);
1702 i < offset + width && i < s_info->end;
1703 i++)
1704 set_position_unneeded (s_info, i - s_info->begin);
1706 else if (s_info->rhs)
1707 /* Need to see if it is possible for this store to overwrite
1708 the value of store_info. If it is, set the rhs to NULL to
1709 keep it from being used to remove a load. */
1711 if (canon_true_dependence (s_info->mem,
1712 GET_MODE (s_info->mem),
1713 s_info->mem_addr,
1714 mem, mem_addr))
1716 s_info->rhs = NULL;
1717 s_info->const_rhs = NULL;
1721 /* An insn can be deleted if every position of every one of
1722 its s_infos is zero. */
1723 if (any_positions_needed_p (s_info))
1724 del = false;
1726 if (del)
1728 insn_info_t insn_to_delete = ptr;
1730 active_local_stores_len--;
1731 if (last)
1732 last->next_local_store = ptr->next_local_store;
1733 else
1734 active_local_stores = ptr->next_local_store;
1736 if (!insn_to_delete->cannot_delete)
1737 delete_dead_store_insn (insn_to_delete);
1739 else
1740 last = ptr;
1742 ptr = next;
1745 /* Finish filling in the store_info. */
1746 store_info->next = insn_info->store_rec;
1747 insn_info->store_rec = store_info;
1748 store_info->mem = mem;
1749 store_info->alias_set = spill_alias_set;
1750 store_info->mem_addr = mem_addr;
1751 store_info->cse_base = base;
1752 if (width > HOST_BITS_PER_WIDE_INT)
1754 store_info->is_large = true;
1755 store_info->positions_needed.large.count = 0;
1756 store_info->positions_needed.large.bmap = BITMAP_ALLOC (NULL);
1758 else
1760 store_info->is_large = false;
1761 store_info->positions_needed.small_bitmask = lowpart_bitmask (width);
1763 store_info->group_id = group_id;
1764 store_info->begin = offset;
1765 store_info->end = offset + width;
1766 store_info->is_set = GET_CODE (body) == SET;
1767 store_info->rhs = rhs;
1768 store_info->const_rhs = const_rhs;
1769 store_info->redundant_reason = redundant_reason;
1771 /* If this is a clobber, we return 0. We will only be able to
1772 delete this insn if there is only one used store, but we
1773 can use the clobber to delete other stores earlier. */
1774 return store_info->is_set ? 1 : 0;
1778 static void
1779 dump_insn_info (const char * start, insn_info_t insn_info)
1781 fprintf (dump_file, "%s insn=%d %s\n", start,
1782 INSN_UID (insn_info->insn),
1783 insn_info->store_rec ? "has store" : "naked");
1787 /* If the modes are different and the value's source and target do not
1788 line up, we need to extract the value from lower part of the rhs of
1789 the store, shift it, and then put it into a form that can be shoved
1790 into the read_insn. This function generates a right SHIFT, by SHIFT
1791 bits, of a value that is at least ACCESS_SIZE bytes wide, converting
1792 the result to READ_MODE. The shift sequence is returned, or NULL if
1793 we failed to find a shift. */
1795 static rtx
1796 find_shift_sequence (int access_size,
1797 store_info_t store_info,
1798 enum machine_mode read_mode,
1799 int shift, bool speed, bool require_cst)
1801 enum machine_mode store_mode = GET_MODE (store_info->mem);
1802 enum machine_mode new_mode;
1803 rtx read_reg = NULL;
1805 /* Some machines like the x86 have shift insns for each size of
1806 operand. Other machines like the ppc or the ia-64 may only have
1807 shift insns that shift values within 32 or 64 bit registers.
1808 This loop tries to find the smallest shift insn that will right
1809 justify the value we want to read but is available in one insn on
1810 the machine. */
1812 for (new_mode = smallest_mode_for_size (access_size * BITS_PER_UNIT,
1813 MODE_INT);
1814 GET_MODE_BITSIZE (new_mode) <= BITS_PER_WORD;
1815 new_mode = GET_MODE_WIDER_MODE (new_mode))
1817 rtx target, new_reg, shift_seq, insn, new_lhs;
1818 int cost;
1820 /* If a constant was stored into memory, try to simplify it here,
1821 otherwise the cost of the shift might preclude this optimization
1822 e.g. at -Os, even when no actual shift will be needed. */
1823 if (store_info->const_rhs)
1825 unsigned int byte = subreg_lowpart_offset (new_mode, store_mode);
1826 rtx ret = simplify_subreg (new_mode, store_info->const_rhs,
1827 store_mode, byte);
1828 if (ret && CONSTANT_P (ret))
1830 ret = simplify_const_binary_operation (LSHIFTRT, new_mode,
1831 ret, GEN_INT (shift));
1832 if (ret && CONSTANT_P (ret))
1834 byte = subreg_lowpart_offset (read_mode, new_mode);
1835 ret = simplify_subreg (read_mode, ret, new_mode, byte);
1836 if (ret && CONSTANT_P (ret)
1837 && set_src_cost (ret, speed) <= COSTS_N_INSNS (1))
1838 return ret;
1843 if (require_cst)
1844 return NULL_RTX;
1846 /* Try a wider mode if truncating the store mode to NEW_MODE
1847 requires a real instruction. */
1848 if (GET_MODE_BITSIZE (new_mode) < GET_MODE_BITSIZE (store_mode)
1849 && !TRULY_NOOP_TRUNCATION_MODES_P (new_mode, store_mode))
1850 continue;
1852 /* Also try a wider mode if the necessary punning is either not
1853 desirable or not possible. */
1854 if (!CONSTANT_P (store_info->rhs)
1855 && !MODES_TIEABLE_P (new_mode, store_mode))
1856 continue;
1858 new_reg = gen_reg_rtx (new_mode);
1860 start_sequence ();
1862 /* In theory we could also check for an ashr. Ian Taylor knows
1863 of one dsp where the cost of these two was not the same. But
1864 this really is a rare case anyway. */
1865 target = expand_binop (new_mode, lshr_optab, new_reg,
1866 GEN_INT (shift), new_reg, 1, OPTAB_DIRECT);
1868 shift_seq = get_insns ();
1869 end_sequence ();
1871 if (target != new_reg || shift_seq == NULL)
1872 continue;
1874 cost = 0;
1875 for (insn = shift_seq; insn != NULL_RTX; insn = NEXT_INSN (insn))
1876 if (INSN_P (insn))
1877 cost += insn_rtx_cost (PATTERN (insn), speed);
1879 /* The computation up to here is essentially independent
1880 of the arguments and could be precomputed. It may
1881 not be worth doing so. We could precompute if
1882 worthwhile or at least cache the results. The result
1883 technically depends on both SHIFT and ACCESS_SIZE,
1884 but in practice the answer will depend only on ACCESS_SIZE. */
1886 if (cost > COSTS_N_INSNS (1))
1887 continue;
1889 new_lhs = extract_low_bits (new_mode, store_mode,
1890 copy_rtx (store_info->rhs));
1891 if (new_lhs == NULL_RTX)
1892 continue;
1894 /* We found an acceptable shift. Generate a move to
1895 take the value from the store and put it into the
1896 shift pseudo, then shift it, then generate another
1897      move to put it into the target of the read.  */
1898 emit_move_insn (new_reg, new_lhs);
1899 emit_insn (shift_seq);
1900 read_reg = extract_low_bits (read_mode, new_mode, new_reg);
1901 break;
1904 return read_reg;
1908 /* Call back for note_stores to find the hard regs set or clobbered by
1909 insn. Data is a bitmap of the hardregs set so far. */
1911 static void
1912 look_for_hardregs (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
1914 bitmap regs_set = (bitmap) data;
1916 if (REG_P (x)
1917 && HARD_REGISTER_P (x))
1919 unsigned int regno = REGNO (x);
1920 bitmap_set_range (regs_set, regno,
1921 hard_regno_nregs[regno][GET_MODE (x)]);
1925 /* Helper function for replace_read and record_store.
1926 Attempt to return a value stored in STORE_INFO, from READ_BEGIN
1927 to one before READ_END bytes read in READ_MODE. Return NULL
1928    if not successful.  If REQUIRE_CST is true, always return a constant.  */
1930 static rtx
1931 get_stored_val (store_info_t store_info, enum machine_mode read_mode,
1932 HOST_WIDE_INT read_begin, HOST_WIDE_INT read_end,
1933 basic_block bb, bool require_cst)
1935 enum machine_mode store_mode = GET_MODE (store_info->mem);
1936 int shift;
1937 int access_size; /* In bytes. */
1938 rtx read_reg;
1940 /* To get here the read is within the boundaries of the write so
1941 shift will never be negative. Start out with the shift being in
1942 bytes. */
1943 if (store_mode == BLKmode)
1944 shift = 0;
1945 else if (BYTES_BIG_ENDIAN)
1946 shift = store_info->end - read_end;
1947 else
1948 shift = read_begin - store_info->begin;
1950 access_size = shift + GET_MODE_SIZE (read_mode);
1952 /* From now on it is bits. */
1953 shift *= BITS_PER_UNIT;
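   /* Worked example (little endian): a DImode store covering bytes
      [16, 24) followed by an SImode read of bytes [20, 24) gives
      shift = 20 - 16 = 4 bytes, access_size = 4 + 4 = 8 bytes, and the
      shift passed on to find_shift_sequence is then 32 bits.  */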
1955 if (shift)
1956 read_reg = find_shift_sequence (access_size, store_info, read_mode, shift,
1957 optimize_bb_for_speed_p (bb),
1958 require_cst);
1959 else if (store_mode == BLKmode)
1961 /* The store is a memset (addr, const_val, const_size). */
1962 gcc_assert (CONST_INT_P (store_info->rhs));
1963 store_mode = int_mode_for_mode (read_mode);
1964 if (store_mode == BLKmode)
1965 read_reg = NULL_RTX;
1966 else if (store_info->rhs == const0_rtx)
1967 read_reg = extract_low_bits (read_mode, store_mode, const0_rtx);
1968 else if (GET_MODE_BITSIZE (store_mode) > HOST_BITS_PER_WIDE_INT
1969 || BITS_PER_UNIT >= HOST_BITS_PER_WIDE_INT)
1970 read_reg = NULL_RTX;
1971 else
1973 unsigned HOST_WIDE_INT c
1974 = INTVAL (store_info->rhs)
1975 & (((HOST_WIDE_INT) 1 << BITS_PER_UNIT) - 1);
1976 int shift = BITS_PER_UNIT;
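        /* Replicate the stored byte across the whole word by repeated
           doubling, e.g. with 8-bit units 0xAB becomes 0xABAB, then
           0xABABABAB, and so on up to HOST_BITS_PER_WIDE_INT bits.  */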
1977 while (shift < HOST_BITS_PER_WIDE_INT)
1979 c |= (c << shift);
1980 shift <<= 1;
1982 read_reg = gen_int_mode (c, store_mode);
1983 read_reg = extract_low_bits (read_mode, store_mode, read_reg);
1986 else if (store_info->const_rhs
1987 && (require_cst
1988 || GET_MODE_CLASS (read_mode) != GET_MODE_CLASS (store_mode)))
1989 read_reg = extract_low_bits (read_mode, store_mode,
1990 copy_rtx (store_info->const_rhs));
1991 else
1992 read_reg = extract_low_bits (read_mode, store_mode,
1993 copy_rtx (store_info->rhs));
1994 if (require_cst && read_reg && !CONSTANT_P (read_reg))
1995 read_reg = NULL_RTX;
1996 return read_reg;
1999 /* Take a sequence of:
2000 A <- r1
2002 ... <- A
2004 and change it into
2005 r2 <- r1
2006 A <- r1
2008    ... <- r2
   or
2012    r3 <- extract (r1)
2013    r3 <- r3 >> shift
2014    r2 <- extract (r3)
2015    ... <- r2
   or
2019    r2 <- extract (r1)
2020 ... <- r2
2022 Depending on the alignment and the mode of the store and
2023 subsequent load.
2026 The STORE_INFO and STORE_INSN are for the store and READ_INFO
2027 and READ_INSN are for the read. Return true if the replacement
2028 went ok. */
2030 static bool
2031 replace_read (store_info_t store_info, insn_info_t store_insn,
2032 read_info_t read_info, insn_info_t read_insn, rtx *loc,
2033 bitmap regs_live)
2035 enum machine_mode store_mode = GET_MODE (store_info->mem);
2036 enum machine_mode read_mode = GET_MODE (read_info->mem);
2037 rtx insns, this_insn, read_reg;
2038 basic_block bb;
2040 if (!dbg_cnt (dse))
2041 return false;
2043 /* Create a sequence of instructions to set up the read register.
2044 This sequence goes immediately before the store and its result
2045 is read by the load.
2047 We need to keep this in perspective. We are replacing a read
2048 with a sequence of insns, but the read will almost certainly be
2049 in cache, so it is not going to be an expensive one. Thus, we
2050 are not willing to do a multi insn shift or worse a subroutine
2051 call to get rid of the read. */
2052 if (dump_file)
2053 fprintf (dump_file, "trying to replace %smode load in insn %d"
2054 " from %smode store in insn %d\n",
2055 GET_MODE_NAME (read_mode), INSN_UID (read_insn->insn),
2056 GET_MODE_NAME (store_mode), INSN_UID (store_insn->insn));
2057 start_sequence ();
2058 bb = BLOCK_FOR_INSN (read_insn->insn);
2059 read_reg = get_stored_val (store_info,
2060 read_mode, read_info->begin, read_info->end,
2061 bb, false);
2062 if (read_reg == NULL_RTX)
2064 end_sequence ();
2065 if (dump_file)
2066 fprintf (dump_file, " -- could not extract bits of stored value\n");
2067 return false;
2069 /* Force the value into a new register so that it won't be clobbered
2070 between the store and the load. */
2071 read_reg = copy_to_mode_reg (read_mode, read_reg);
2072 insns = get_insns ();
2073 end_sequence ();
2075 if (insns != NULL_RTX)
2077 /* Now we have to scan the set of new instructions to see if the
2078      sequence contains any sets of hardregs that happened to be
2079 live at this point. For instance, this can happen if one of
2080 the insns sets the CC and the CC happened to be live at that
2081 point. This does occasionally happen, see PR 37922. */
2082 bitmap regs_set = BITMAP_ALLOC (NULL);
2084 for (this_insn = insns; this_insn != NULL_RTX; this_insn = NEXT_INSN (this_insn))
2085 note_stores (PATTERN (this_insn), look_for_hardregs, regs_set);
2087 bitmap_and_into (regs_set, regs_live);
2088 if (!bitmap_empty_p (regs_set))
2090 if (dump_file)
2092 fprintf (dump_file,
2093 "abandoning replacement because sequence clobbers live hardregs:");
2094 df_print_regset (dump_file, regs_set);
2097 BITMAP_FREE (regs_set);
2098 return false;
2100 BITMAP_FREE (regs_set);
2103 if (validate_change (read_insn->insn, loc, read_reg, 0))
2105 deferred_change_t deferred_change =
2106 (deferred_change_t) pool_alloc (deferred_change_pool);
2108 /* Insert this right before the store insn where it will be safe
2109 from later insns that might change it before the read. */
2110 emit_insn_before (insns, store_insn->insn);
2112 /* And now for the kludge part: cselib croaks if you just
2113 return at this point. There are two reasons for this:
2115 1) Cselib has an idea of how many pseudos there are and
2116 that does not include the new ones we just added.
2118 2) Cselib does not know about the move insn we added
2119 above the store_info, and there is no way to tell it
2120 about it, because it has "moved on".
2122 Problem (1) is fixable with a certain amount of engineering.
2123      Problem (2) requires starting the bb from scratch.  This
2124 could be expensive.
2126 So we are just going to have to lie. The move/extraction
2127 insns are not really an issue, cselib did not see them. But
2128      the use of the new pseudo in read_insn is a real problem because
2129 cselib has not scanned this insn. The way that we solve this
2130 problem is that we are just going to put the mem back for now
2131 and when we are finished with the block, we undo this. We
2132 keep a table of mems to get rid of. At the end of the basic
2133 block we can put them back. */
2135 *loc = read_info->mem;
2136 deferred_change->next = deferred_change_list;
2137 deferred_change_list = deferred_change;
2138 deferred_change->loc = loc;
2139 deferred_change->reg = read_reg;
2141      /* Get rid of the read_info; from the point of view of the
2142         rest of dse, pretend this read never happened.  */
2143 read_insn->read_rec = read_info->next;
2144 pool_free (read_info_pool, read_info);
2145 if (dump_file)
2147 fprintf (dump_file, " -- replaced the loaded MEM with ");
2148 print_simple_rtl (dump_file, read_reg);
2149 fprintf (dump_file, "\n");
2151 return true;
2153 else
2155 if (dump_file)
2157 fprintf (dump_file, " -- replacing the loaded MEM with ");
2158 print_simple_rtl (dump_file, read_reg);
2159 fprintf (dump_file, " led to an invalid instruction\n");
2161 return false;
2165 /* A for_each_rtx callback in which DATA is the bb_info. Check to see
2166    if LOC is a mem and, if it is, look at the address and kill any
2167 appropriate stores that may be active. */
2169 static int
2170 check_mem_read_rtx (rtx *loc, void *data)
2172 rtx mem = *loc, mem_addr;
2173 bb_info_t bb_info;
2174 insn_info_t insn_info;
2175 HOST_WIDE_INT offset = 0;
2176 HOST_WIDE_INT width = 0;
2177 alias_set_type spill_alias_set = 0;
2178 cselib_val *base = NULL;
2179 int group_id;
2180 read_info_t read_info;
2182 if (!mem || !MEM_P (mem))
2183 return 0;
2185 bb_info = (bb_info_t) data;
2186 insn_info = bb_info->last_insn;
2188 if ((MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
2189 || (MEM_VOLATILE_P (mem)))
2191 if (dump_file)
2192 fprintf (dump_file, " adding wild read, volatile or barrier.\n");
2193 add_wild_read (bb_info);
2194 insn_info->cannot_delete = true;
2195 return 0;
2198 /* If it is reading readonly mem, then there can be no conflict with
2199 another write. */
2200 if (MEM_READONLY_P (mem))
2201 return 0;
2203 if (!canon_address (mem, &spill_alias_set, &group_id, &offset, &base))
2205 if (dump_file)
2206 fprintf (dump_file, " adding wild read, canon_address failure.\n");
2207 add_wild_read (bb_info);
2208 return 0;
2211 if (GET_MODE (mem) == BLKmode)
2212 width = -1;
2213 else
2214 width = GET_MODE_SIZE (GET_MODE (mem));
2216 read_info = (read_info_t) pool_alloc (read_info_pool);
2217 read_info->group_id = group_id;
2218 read_info->mem = mem;
2219 read_info->alias_set = spill_alias_set;
2220 read_info->begin = offset;
2221 read_info->end = offset + width;
2222 read_info->next = insn_info->read_rec;
2223 insn_info->read_rec = read_info;
2224   /* For alias_set != 0, canon_true_dependence should never be called.  */
2225 if (spill_alias_set)
2226 mem_addr = NULL_RTX;
2227 else
2229 if (group_id < 0)
2230 mem_addr = base->val_rtx;
2231 else
2233 group_info_t group
2234 = VEC_index (group_info_t, rtx_group_vec, group_id);
2235 mem_addr = group->canon_base_addr;
2237 if (offset)
2238 mem_addr = plus_constant (mem_addr, offset);
2241   /* We ignore the clobbers in store_info.  This is mildly aggressive,
2242 but there really should not be a clobber followed by a read. */
2244 if (spill_alias_set)
2246 insn_info_t i_ptr = active_local_stores;
2247 insn_info_t last = NULL;
2249 if (dump_file)
2250 fprintf (dump_file, " processing spill load %d\n",
2251 (int) spill_alias_set);
2253 while (i_ptr)
2255 store_info_t store_info = i_ptr->store_rec;
2257 /* Skip the clobbers. */
2258 while (!store_info->is_set)
2259 store_info = store_info->next;
2261 if (store_info->alias_set == spill_alias_set)
2263 if (dump_file)
2264 dump_insn_info ("removing from active", i_ptr);
2266 active_local_stores_len--;
2267 if (last)
2268 last->next_local_store = i_ptr->next_local_store;
2269 else
2270 active_local_stores = i_ptr->next_local_store;
2272 else
2273 last = i_ptr;
2274 i_ptr = i_ptr->next_local_store;
2277 else if (group_id >= 0)
2279 /* This is the restricted case where the base is a constant or
2280 the frame pointer and offset is a constant. */
2281 insn_info_t i_ptr = active_local_stores;
2282 insn_info_t last = NULL;
2284 if (dump_file)
2286 if (width == -1)
2287 fprintf (dump_file, " processing const load gid=%d[BLK]\n",
2288 group_id);
2289 else
2290 fprintf (dump_file, " processing const load gid=%d[%d..%d)\n",
2291 group_id, (int)offset, (int)(offset+width));
2294 while (i_ptr)
2296 bool remove = false;
2297 store_info_t store_info = i_ptr->store_rec;
2299 /* Skip the clobbers. */
2300 while (!store_info->is_set)
2301 store_info = store_info->next;
2303 /* There are three cases here. */
2304 if (store_info->group_id < 0)
2305 /* We have a cselib store followed by a read from a
2306 const base. */
2307 remove
2308 = canon_true_dependence (store_info->mem,
2309 GET_MODE (store_info->mem),
2310 store_info->mem_addr,
2311 mem, mem_addr);
2313 else if (group_id == store_info->group_id)
2315 /* This is a block mode load. We may get lucky and
2316 canon_true_dependence may save the day. */
2317 if (width == -1)
2318 remove
2319 = canon_true_dependence (store_info->mem,
2320 GET_MODE (store_info->mem),
2321 store_info->mem_addr,
2322 mem, mem_addr);
2324 /* If this read is just reading back something that we just
2325 stored, rewrite the read. */
2326 else
2328 if (store_info->rhs
2329 && offset >= store_info->begin
2330 && offset + width <= store_info->end
2331 && all_positions_needed_p (store_info,
2332 offset - store_info->begin,
2333 width)
2334 && replace_read (store_info, i_ptr, read_info,
2335 insn_info, loc, bb_info->regs_live))
2336 return 0;
2338 /* The bases are the same, just see if the offsets
2339 overlap. */
2340 if ((offset < store_info->end)
2341 && (offset + width > store_info->begin))
2342 remove = true;
2346 /* else
2347 The else case that is missing here is that the
2348 bases are constant but different. There is nothing
2349 to do here because there is no overlap. */
2351 if (remove)
2353 if (dump_file)
2354 dump_insn_info ("removing from active", i_ptr);
2356 active_local_stores_len--;
2357 if (last)
2358 last->next_local_store = i_ptr->next_local_store;
2359 else
2360 active_local_stores = i_ptr->next_local_store;
2362 else
2363 last = i_ptr;
2364 i_ptr = i_ptr->next_local_store;
2367 else
2369 insn_info_t i_ptr = active_local_stores;
2370 insn_info_t last = NULL;
2371 if (dump_file)
2373 fprintf (dump_file, " processing cselib load mem:");
2374 print_inline_rtx (dump_file, mem, 0);
2375 fprintf (dump_file, "\n");
2378 while (i_ptr)
2380 bool remove = false;
2381 store_info_t store_info = i_ptr->store_rec;
2383 if (dump_file)
2384 fprintf (dump_file, " processing cselib load against insn %d\n",
2385 INSN_UID (i_ptr->insn));
2387 /* Skip the clobbers. */
2388 while (!store_info->is_set)
2389 store_info = store_info->next;
2391 /* If this read is just reading back something that we just
2392 stored, rewrite the read. */
2393 if (store_info->rhs
2394 && store_info->group_id == -1
2395 && store_info->cse_base == base
2396 && width != -1
2397 && offset >= store_info->begin
2398 && offset + width <= store_info->end
2399 && all_positions_needed_p (store_info,
2400 offset - store_info->begin, width)
2401 && replace_read (store_info, i_ptr, read_info, insn_info, loc,
2402 bb_info->regs_live))
2403 return 0;
2405 if (!store_info->alias_set)
2406 remove = canon_true_dependence (store_info->mem,
2407 GET_MODE (store_info->mem),
2408 store_info->mem_addr,
2409 mem, mem_addr);
2411 if (remove)
2413 if (dump_file)
2414 dump_insn_info ("removing from active", i_ptr);
2416 active_local_stores_len--;
2417 if (last)
2418 last->next_local_store = i_ptr->next_local_store;
2419 else
2420 active_local_stores = i_ptr->next_local_store;
2422 else
2423 last = i_ptr;
2424 i_ptr = i_ptr->next_local_store;
2427 return 0;
2430 /* A note_uses callback in which DATA points to the bb_info, as for
2431    check_mem_read_rtx.  Run check_mem_read_rtx on every part
2432    of *LOC.  */
2434 static void
2435 check_mem_read_use (rtx *loc, void *data)
2437 for_each_rtx (loc, check_mem_read_rtx, data);
2441 /* Get arguments passed to CALL_INSN. Return TRUE if successful.
2442 So far it only handles arguments passed in registers. */
2444 static bool
2445 get_call_args (rtx call_insn, tree fn, rtx *args, int nargs)
2447 CUMULATIVE_ARGS args_so_far_v;
2448 cumulative_args_t args_so_far;
2449 tree arg;
2450 int idx;
2452 INIT_CUMULATIVE_ARGS (args_so_far_v, TREE_TYPE (fn), NULL_RTX, 0, 3);
2453 args_so_far = pack_cumulative_args (&args_so_far_v);
2455 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
2456 for (idx = 0;
2457 arg != void_list_node && idx < nargs;
2458 arg = TREE_CHAIN (arg), idx++)
2460 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
2461 rtx reg, link, tmp;
2462 reg = targetm.calls.function_arg (args_so_far, mode, NULL_TREE, true);
2463 if (!reg || !REG_P (reg) || GET_MODE (reg) != mode
2464 || GET_MODE_CLASS (mode) != MODE_INT)
2465 return false;
2467 for (link = CALL_INSN_FUNCTION_USAGE (call_insn);
2468 link;
2469 link = XEXP (link, 1))
2470 if (GET_CODE (XEXP (link, 0)) == USE)
2472 args[idx] = XEXP (XEXP (link, 0), 0);
2473 if (REG_P (args[idx])
2474 && REGNO (args[idx]) == REGNO (reg)
2475 && (GET_MODE (args[idx]) == mode
2476 || (GET_MODE_CLASS (GET_MODE (args[idx])) == MODE_INT
2477 && (GET_MODE_SIZE (GET_MODE (args[idx]))
2478 <= UNITS_PER_WORD)
2479 && (GET_MODE_SIZE (GET_MODE (args[idx]))
2480 > GET_MODE_SIZE (mode)))))
2481 break;
2483 if (!link)
2484 return false;
2486 tmp = cselib_expand_value_rtx (args[idx], scratch, 5);
2487 if (GET_MODE (args[idx]) != mode)
2489 if (!tmp || !CONST_INT_P (tmp))
2490 return false;
2491 tmp = gen_int_mode (INTVAL (tmp), mode);
2493 if (tmp)
2494 args[idx] = tmp;
2496 targetm.calls.function_arg_advance (args_so_far, mode, NULL_TREE, true);
2498 if (arg != void_list_node || idx != nargs)
2499 return false;
2500 return true;
2503 /* Return a bitmap of the fixed registers contained in IN. */
2505 static bitmap
2506 copy_fixed_regs (const_bitmap in)
2508 bitmap ret;
2510 ret = ALLOC_REG_SET (NULL);
2511 bitmap_and (ret, in, fixed_reg_set_regset);
2512 return ret;
2515 /* Apply record_store to all candidate stores in INSN. Mark INSN
2516 if some part of it is not a candidate store and assigns to a
2517 non-register target. */
2519 static void
2520 scan_insn (bb_info_t bb_info, rtx insn)
2522 rtx body;
2523 insn_info_t insn_info = (insn_info_t) pool_alloc (insn_info_pool);
2524 int mems_found = 0;
2525 memset (insn_info, 0, sizeof (struct insn_info));
2527 if (dump_file)
2528 fprintf (dump_file, "\n**scanning insn=%d\n",
2529 INSN_UID (insn));
2531 insn_info->prev_insn = bb_info->last_insn;
2532 insn_info->insn = insn;
2533 bb_info->last_insn = insn_info;
2535 if (DEBUG_INSN_P (insn))
2537 insn_info->cannot_delete = true;
2538 return;
2541 /* Cselib clears the table for this case, so we have to essentially
2542 do the same. */
2543 if (NONJUMP_INSN_P (insn)
2544 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
2545 && MEM_VOLATILE_P (PATTERN (insn)))
2547 add_wild_read (bb_info);
2548 insn_info->cannot_delete = true;
2549 return;
2552 /* Look at all of the uses in the insn. */
2553 note_uses (&PATTERN (insn), check_mem_read_use, bb_info);
2555 if (CALL_P (insn))
2557 bool const_call;
2558 tree memset_call = NULL_TREE;
2560 insn_info->cannot_delete = true;
2562      /* Const functions cannot do anything bad, i.e. read memory;
2563         however, they can read their parameters, which may have
2564         been pushed onto the stack.
2565         memset and bzero don't read memory either.  */
2566 const_call = RTL_CONST_CALL_P (insn);
2567 if (!const_call)
2569 rtx call = PATTERN (insn);
2570 if (GET_CODE (call) == PARALLEL)
2571 call = XVECEXP (call, 0, 0);
2572 if (GET_CODE (call) == SET)
2573 call = SET_SRC (call);
2574 if (GET_CODE (call) == CALL
2575 && MEM_P (XEXP (call, 0))
2576 && GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
2578 rtx symbol = XEXP (XEXP (call, 0), 0);
2579 if (SYMBOL_REF_DECL (symbol)
2580 && TREE_CODE (SYMBOL_REF_DECL (symbol)) == FUNCTION_DECL)
2582 if ((DECL_BUILT_IN_CLASS (SYMBOL_REF_DECL (symbol))
2583 == BUILT_IN_NORMAL
2584 && (DECL_FUNCTION_CODE (SYMBOL_REF_DECL (symbol))
2585 == BUILT_IN_MEMSET))
2586 || SYMBOL_REF_DECL (symbol) == block_clear_fn)
2587 memset_call = SYMBOL_REF_DECL (symbol);
2591 if (const_call || memset_call)
2593 insn_info_t i_ptr = active_local_stores;
2594 insn_info_t last = NULL;
2596 if (dump_file)
2597 fprintf (dump_file, "%s call %d\n",
2598 const_call ? "const" : "memset", INSN_UID (insn));
2600 /* See the head comment of the frame_read field. */
2601 if (reload_completed)
2602 insn_info->frame_read = true;
2604 /* Loop over the active stores and remove those which are
2605 killed by the const function call. */
2606 while (i_ptr)
2608 bool remove_store = false;
2610 /* The stack pointer based stores are always killed. */
2611 if (i_ptr->stack_pointer_based)
2612 remove_store = true;
2614 /* If the frame is read, the frame related stores are killed. */
2615 else if (insn_info->frame_read)
2617 store_info_t store_info = i_ptr->store_rec;
2619 /* Skip the clobbers. */
2620 while (!store_info->is_set)
2621 store_info = store_info->next;
2623 if (store_info->group_id >= 0
2624 && VEC_index (group_info_t, rtx_group_vec,
2625 store_info->group_id)->frame_related)
2626 remove_store = true;
2629 if (remove_store)
2631 if (dump_file)
2632 dump_insn_info ("removing from active", i_ptr);
2634 active_local_stores_len--;
2635 if (last)
2636 last->next_local_store = i_ptr->next_local_store;
2637 else
2638 active_local_stores = i_ptr->next_local_store;
2640 else
2641 last = i_ptr;
2643 i_ptr = i_ptr->next_local_store;
2646 if (memset_call)
2648 rtx args[3];
2649 if (get_call_args (insn, memset_call, args, 3)
2650 && CONST_INT_P (args[1])
2651 && CONST_INT_P (args[2])
2652 && INTVAL (args[2]) > 0)
2654 rtx mem = gen_rtx_MEM (BLKmode, args[0]);
2655 set_mem_size (mem, INTVAL (args[2]));
2656 body = gen_rtx_SET (VOIDmode, mem, args[1]);
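          /* For example, a call to memset (p, 0x5a, 32) is modeled here as a
             single BLKmode store of the constant 0x5a to a 32-byte MEM at p,
             so that later reads from the region can be rewritten from the
             constant by get_stored_val/replace_read.  */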
2657 mems_found += record_store (body, bb_info);
2658 if (dump_file)
2659 fprintf (dump_file, "handling memset as BLKmode store\n");
2660 if (mems_found == 1)
2662 if (active_local_stores_len++
2663 >= PARAM_VALUE (PARAM_MAX_DSE_ACTIVE_LOCAL_STORES))
2665 active_local_stores_len = 1;
2666 active_local_stores = NULL;
2668 insn_info->fixed_regs_live
2669 = copy_fixed_regs (bb_info->regs_live);
2670 insn_info->next_local_store = active_local_stores;
2671 active_local_stores = insn_info;
2677 else
2678 /* Every other call, including pure functions, may read any memory
2679 that is not relative to the frame. */
2680 add_non_frame_wild_read (bb_info);
2682 return;
2685 /* Assuming that there are sets in these insns, we cannot delete
2686 them. */
2687 if ((GET_CODE (PATTERN (insn)) == CLOBBER)
2688 || volatile_refs_p (PATTERN (insn))
2689 || insn_could_throw_p (insn)
2690 || (RTX_FRAME_RELATED_P (insn))
2691 || find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX))
2692 insn_info->cannot_delete = true;
2694 body = PATTERN (insn);
2695 if (GET_CODE (body) == PARALLEL)
2697 int i;
2698 for (i = 0; i < XVECLEN (body, 0); i++)
2699 mems_found += record_store (XVECEXP (body, 0, i), bb_info);
2701 else
2702 mems_found += record_store (body, bb_info);
2704 if (dump_file)
2705 fprintf (dump_file, "mems_found = %d, cannot_delete = %s\n",
2706 mems_found, insn_info->cannot_delete ? "true" : "false");
2708   /* If we found exactly one set of a mem, add the insn to active_local_stores
2709      so that it can be locally deleted if found dead, or used for replace_read
2710      and redundant constant store elimination.  Otherwise mark it as cannot
2711      delete.  This simplifies the processing later.  */
2712 if (mems_found == 1)
2714 if (active_local_stores_len++
2715 >= PARAM_VALUE (PARAM_MAX_DSE_ACTIVE_LOCAL_STORES))
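          /* Once the list of active stores grows past
             PARAM_MAX_DSE_ACTIVE_LOCAL_STORES, flush it and start over so
             that the scans over active_local_stores stay bounded.  */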
2717 active_local_stores_len = 1;
2718 active_local_stores = NULL;
2720 insn_info->fixed_regs_live = copy_fixed_regs (bb_info->regs_live);
2721 insn_info->next_local_store = active_local_stores;
2722 active_local_stores = insn_info;
2724 else
2725 insn_info->cannot_delete = true;
2729 /* Remove BASE from the set of active_local_stores. This is a
2730 callback from cselib that is used to get rid of the stores in
2731 active_local_stores. */
2733 static void
2734 remove_useless_values (cselib_val *base)
2736 insn_info_t insn_info = active_local_stores;
2737 insn_info_t last = NULL;
2739 while (insn_info)
2741 store_info_t store_info = insn_info->store_rec;
2742 bool del = false;
2744 /* If ANY of the store_infos match the cselib group that is
2745 being deleted, then the insn can not be deleted. */
2746 while (store_info)
2748 if ((store_info->group_id == -1)
2749 && (store_info->cse_base == base))
2751 del = true;
2752 break;
2754 store_info = store_info->next;
2757 if (del)
2759 active_local_stores_len--;
2760 if (last)
2761 last->next_local_store = insn_info->next_local_store;
2762 else
2763 active_local_stores = insn_info->next_local_store;
2764 free_store_info (insn_info);
2766 else
2767 last = insn_info;
2769 insn_info = insn_info->next_local_store;
2774 /* Do all of step 1. */
2776 static void
2777 dse_step1 (void)
2779 basic_block bb;
2780 bitmap regs_live = BITMAP_ALLOC (NULL);
2782 cselib_init (0);
2783 all_blocks = BITMAP_ALLOC (NULL);
2784 bitmap_set_bit (all_blocks, ENTRY_BLOCK);
2785 bitmap_set_bit (all_blocks, EXIT_BLOCK);
2787 FOR_ALL_BB (bb)
2789 insn_info_t ptr;
2790 bb_info_t bb_info = (bb_info_t) pool_alloc (bb_info_pool);
2792 memset (bb_info, 0, sizeof (struct bb_info));
2793 bitmap_set_bit (all_blocks, bb->index);
2794 bb_info->regs_live = regs_live;
2796 bitmap_copy (regs_live, DF_LR_IN (bb));
2797 df_simulate_initialize_forwards (bb, regs_live);
2799 bb_table[bb->index] = bb_info;
2800 cselib_discard_hook = remove_useless_values;
2802 if (bb->index >= NUM_FIXED_BLOCKS)
2804 rtx insn;
2806 cse_store_info_pool
2807 = create_alloc_pool ("cse_store_info_pool",
2808 sizeof (struct store_info), 100);
2809 active_local_stores = NULL;
2810 active_local_stores_len = 0;
2811 cselib_clear_table ();
2813 /* Scan the insns. */
2814 FOR_BB_INSNS (bb, insn)
2816 if (INSN_P (insn))
2817 scan_insn (bb_info, insn);
2818 cselib_process_insn (insn);
2819 if (INSN_P (insn))
2820 df_simulate_one_insn_forwards (bb, insn, regs_live);
2823 /* This is something of a hack, because the global algorithm
2824 is supposed to take care of the case where stores go dead
2825 at the end of the function. However, the global
2826 algorithm must take a more conservative view of block
2827 mode reads than the local alg does. So to get the case
2828      where you have a store to the frame followed by a
2829      non-overlapping block mode read, we look at the active local
2830 stores at the end of the function and delete all of the
2831 frame and spill based ones. */
2832 if (stores_off_frame_dead_at_return
2833 && (EDGE_COUNT (bb->succs) == 0
2834 || (single_succ_p (bb)
2835 && single_succ (bb) == EXIT_BLOCK_PTR
2836 && ! crtl->calls_eh_return)))
2838 insn_info_t i_ptr = active_local_stores;
2839 while (i_ptr)
2841 store_info_t store_info = i_ptr->store_rec;
2843 /* Skip the clobbers. */
2844 while (!store_info->is_set)
2845 store_info = store_info->next;
2846 if (store_info->alias_set && !i_ptr->cannot_delete)
2847 delete_dead_store_insn (i_ptr);
2848 else
2849 if (store_info->group_id >= 0)
2851 group_info_t group
2852 = VEC_index (group_info_t, rtx_group_vec, store_info->group_id);
2853 if (group->frame_related && !i_ptr->cannot_delete)
2854 delete_dead_store_insn (i_ptr);
2857 i_ptr = i_ptr->next_local_store;
2861 /* Get rid of the loads that were discovered in
2862 replace_read. Cselib is finished with this block. */
2863 while (deferred_change_list)
2865 deferred_change_t next = deferred_change_list->next;
2867 /* There is no reason to validate this change. That was
2868 done earlier. */
2869 *deferred_change_list->loc = deferred_change_list->reg;
2870 pool_free (deferred_change_pool, deferred_change_list);
2871 deferred_change_list = next;
2874 /* Get rid of all of the cselib based store_infos in this
2875 block and mark the containing insns as not being
2876 deletable. */
2877 ptr = bb_info->last_insn;
2878 while (ptr)
2880 if (ptr->contains_cselib_groups)
2882 store_info_t s_info = ptr->store_rec;
2883 while (s_info && !s_info->is_set)
2884 s_info = s_info->next;
2885 if (s_info
2886 && s_info->redundant_reason
2887 && s_info->redundant_reason->insn
2888 && !ptr->cannot_delete)
2890 if (dump_file)
2891 fprintf (dump_file, "Locally deleting insn %d "
2892 "because insn %d stores the "
2893 "same value and couldn't be "
2894 "eliminated\n",
2895 INSN_UID (ptr->insn),
2896 INSN_UID (s_info->redundant_reason->insn));
2897 delete_dead_store_insn (ptr);
2899 if (s_info)
2900 s_info->redundant_reason = NULL;
2901 free_store_info (ptr);
2903 else
2905 store_info_t s_info;
2907 /* Free at least positions_needed bitmaps. */
2908 for (s_info = ptr->store_rec; s_info; s_info = s_info->next)
2909 if (s_info->is_large)
2911 BITMAP_FREE (s_info->positions_needed.large.bmap);
2912 s_info->is_large = false;
2915 ptr = ptr->prev_insn;
2918 free_alloc_pool (cse_store_info_pool);
2920 bb_info->regs_live = NULL;
2923 BITMAP_FREE (regs_live);
2924 cselib_finish ();
2925 htab_empty (rtx_group_table);
2929 /*----------------------------------------------------------------------------
2930 Second step.
2932 Assign each byte position in the stores that we are going to
2933 analyze globally to a position in the bitmaps. Returns true if
2934 there are any bit positions assigned.
2935 ----------------------------------------------------------------------------*/
2937 static void
2938 dse_step2_init (void)
2940 unsigned int i;
2941 group_info_t group;
2943 FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
2945 /* For all non stack related bases, we only consider a store to
2946 be deletable if there are two or more stores for that
2947 position. This is because it takes one store to make the
2948 other store redundant. However, for the stores that are
2949 stack related, we consider them if there is only one store
2950 for the position. We do this because the stack related
2951    stores can be deleted if there is no read between them and
2952 the end of the function.
2954 To make this work in the current framework, we take the stack
2955    related bases and add all of the bits from store1 into store2.
2956    This has the effect of making them eligible even if there is
2957 only one store. */
2959 if (stores_off_frame_dead_at_return && group->frame_related)
2961 bitmap_ior_into (group->store2_n, group->store1_n);
2962 bitmap_ior_into (group->store2_p, group->store1_p);
2963 if (dump_file)
2964 fprintf (dump_file, "group %d is frame related ", i);
2967 group->offset_map_size_n++;
2968 group->offset_map_n = XNEWVEC (int, group->offset_map_size_n);
2969 group->offset_map_size_p++;
2970 group->offset_map_p = XNEWVEC (int, group->offset_map_size_p);
2971 group->process_globally = false;
2972 if (dump_file)
2974 fprintf (dump_file, "group %d(%d+%d): ", i,
2975 (int)bitmap_count_bits (group->store2_n),
2976 (int)bitmap_count_bits (group->store2_p));
2977 bitmap_print (dump_file, group->store2_n, "n ", " ");
2978 bitmap_print (dump_file, group->store2_p, "p ", "\n");
2984 /* Init the offset tables for the normal case. */
2986 static bool
2987 dse_step2_nospill (void)
2989 unsigned int i;
2990 group_info_t group;
2991 /* Position 0 is unused because 0 is used in the maps to mean
2992 unused. */
2993 current_position = 1;
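   /* For example, a group whose store2_p contains byte offsets 4 and 5
      (with store2_n empty) gets offset_map_p[4] = 1 and offset_map_p[5] = 2,
      and bits 1 and 2 set in its group_kill.  */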
2994 FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
2996 bitmap_iterator bi;
2997 unsigned int j;
2999 if (group == clear_alias_group)
3000 continue;
3002 memset (group->offset_map_n, 0, sizeof(int) * group->offset_map_size_n);
3003 memset (group->offset_map_p, 0, sizeof(int) * group->offset_map_size_p);
3004 bitmap_clear (group->group_kill);
3006 EXECUTE_IF_SET_IN_BITMAP (group->store2_n, 0, j, bi)
3008 bitmap_set_bit (group->group_kill, current_position);
3009 if (bitmap_bit_p (group->escaped_n, j))
3010 bitmap_set_bit (kill_on_calls, current_position);
3011 group->offset_map_n[j] = current_position++;
3012 group->process_globally = true;
3014 EXECUTE_IF_SET_IN_BITMAP (group->store2_p, 0, j, bi)
3016 bitmap_set_bit (group->group_kill, current_position);
3017 if (bitmap_bit_p (group->escaped_p, j))
3018 bitmap_set_bit (kill_on_calls, current_position);
3019 group->offset_map_p[j] = current_position++;
3020 group->process_globally = true;
3023 return current_position != 1;
3027 /* Init the offset tables for the spill case. */
3029 static bool
3030 dse_step2_spill (void)
3032 unsigned int j;
3033 group_info_t group = clear_alias_group;
3034 bitmap_iterator bi;
3036 /* Position 0 is unused because 0 is used in the maps to mean
3037 unused. */
3038 current_position = 1;
3040 if (dump_file)
3042 bitmap_print (dump_file, clear_alias_sets,
3043 "clear alias sets ", "\n");
3044 bitmap_print (dump_file, disqualified_clear_alias_sets,
3045 "disqualified clear alias sets ", "\n");
3048 memset (group->offset_map_n, 0, sizeof(int) * group->offset_map_size_n);
3049 memset (group->offset_map_p, 0, sizeof(int) * group->offset_map_size_p);
3050 bitmap_clear (group->group_kill);
3052 /* Remove the disqualified positions from the store2_p set. */
3053 bitmap_and_compl_into (group->store2_p, disqualified_clear_alias_sets);
3055 /* We do not need to process the store2_n set because
3056 alias_sets are always positive. */
3057 EXECUTE_IF_SET_IN_BITMAP (group->store2_p, 0, j, bi)
3059 bitmap_set_bit (group->group_kill, current_position);
3060 group->offset_map_p[j] = current_position++;
3061 group->process_globally = true;
3064 return current_position != 1;
3069 /*----------------------------------------------------------------------------
3070 Third step.
3072 Build the bit vectors for the transfer functions.
3073 ----------------------------------------------------------------------------*/
3076 /* Note that this is NOT a general purpose function. Any mem that has
3077    an alias set registered here is expected to be COMPLETELY unaliased:
3078    i.e. its addresses are not and need not be examined.
3080 It is known that all references to this address will have this
3081 alias set and there are NO other references to this address in the
3082 function.
3084 Currently the only place that is known to be clean enough to use
3085 this interface is the code that assigns the spill locations.
3087 All of the mems that have alias_sets registered are subjected to a
3088 very powerful form of dse where function calls, volatile reads and
3089    writes, and reads from random locations are not taken into account.
3091 It is also assumed that these locations go dead when the function
3092 returns. This assumption could be relaxed if there were found to
3093 be places that this assumption was not correct.
3095 The MODE is passed in and saved. The mode of each load or store to
3096    a mem with ALIAS_SET is checked against MODE.  If the size of that
3097    load or store is different from MODE, processing is halted on this
3098    alias set.  For the vast majority of alias sets, all of the loads
3099 and stores will use the same mode. But vectors are treated
3100 differently: the alias set is established for the entire vector,
3101 but reload will insert loads and stores for individual elements and
3102 we do not necessarily have the information to track those separate
3103 elements. So when we see a mode mismatch, we just bail. */
3106 void
3107 dse_record_singleton_alias_set (alias_set_type alias_set,
3108 enum machine_mode mode)
3110 struct clear_alias_mode_holder tmp_holder;
3111 struct clear_alias_mode_holder *entry;
3112 void **slot;
3114 /* If we are not going to run dse, we need to return now or there
3115 will be problems with allocating the bitmaps. */
3116 if ((!gate_dse()) || !alias_set)
3117 return;
3119 if (!clear_alias_sets)
3121 clear_alias_sets = BITMAP_ALLOC (NULL);
3122 disqualified_clear_alias_sets = BITMAP_ALLOC (NULL);
3123 clear_alias_mode_table = htab_create (11, clear_alias_mode_hash,
3124 clear_alias_mode_eq, NULL);
3125 clear_alias_mode_pool = create_alloc_pool ("clear_alias_mode_pool",
3126 sizeof (struct clear_alias_mode_holder), 100);
3129 bitmap_set_bit (clear_alias_sets, alias_set);
3131 tmp_holder.alias_set = alias_set;
3133 slot = htab_find_slot (clear_alias_mode_table, &tmp_holder, INSERT);
3134 gcc_assert (*slot == NULL);
3136 *slot = entry =
3137 (struct clear_alias_mode_holder *) pool_alloc (clear_alias_mode_pool);
3138 entry->alias_set = alias_set;
3139 entry->mode = mode;
3143 /* Remove ALIAS_SET from the sets of stack slots being considered. */
3145 void
3146 dse_invalidate_singleton_alias_set (alias_set_type alias_set)
3148 if ((!gate_dse()) || !alias_set)
3149 return;
3151 bitmap_clear_bit (clear_alias_sets, alias_set);
3155 /* Look up the bitmap index for OFFSET in GROUP_INFO. If it is not
3156 there, return 0. */
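/* Negative offsets are indexed by their absolute value in offset_map_n
   and non-negative offsets by offset_map_p; e.g. offset -4 uses
   offset_map_n[4] and offset 8 uses offset_map_p[8].  A result of 0
   means the position is not tracked globally.  */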
3158 static int
3159 get_bitmap_index (group_info_t group_info, HOST_WIDE_INT offset)
3161 if (offset < 0)
3163 HOST_WIDE_INT offset_p = -offset;
3164 if (offset_p >= group_info->offset_map_size_n)
3165 return 0;
3166 return group_info->offset_map_n[offset_p];
3168 else
3170 if (offset >= group_info->offset_map_size_p)
3171 return 0;
3172 return group_info->offset_map_p[offset];
3177 /* Process the STORE_INFOs into the GEN and KILL bitmaps.  KILL
3178    may be NULL.  */
3180 static void
3181 scan_stores_nospill (store_info_t store_info, bitmap gen, bitmap kill)
3183 while (store_info)
3185 HOST_WIDE_INT i;
3186 group_info_t group_info
3187 = VEC_index (group_info_t, rtx_group_vec, store_info->group_id);
3188 if (group_info->process_globally)
3189 for (i = store_info->begin; i < store_info->end; i++)
3191 int index = get_bitmap_index (group_info, i);
3192 if (index != 0)
3194 bitmap_set_bit (gen, index);
3195 if (kill)
3196 bitmap_clear_bit (kill, index);
3199 store_info = store_info->next;
3204 /* Process the STORE_INFOs into the GEN and KILL bitmaps.  KILL
3205    may be NULL.  */
3207 static void
3208 scan_stores_spill (store_info_t store_info, bitmap gen, bitmap kill)
3210 while (store_info)
3212 if (store_info->alias_set)
3214 int index = get_bitmap_index (clear_alias_group,
3215 store_info->alias_set);
3216 if (index != 0)
3218 bitmap_set_bit (gen, index);
3219 if (kill)
3220 bitmap_clear_bit (kill, index);
3223 store_info = store_info->next;
3228 /* Process the READ_INFOs into the GEN and KILL bitmaps.  KILL
3229    may be NULL.  */
3231 static void
3232 scan_reads_nospill (insn_info_t insn_info, bitmap gen, bitmap kill)
3234 read_info_t read_info = insn_info->read_rec;
3235 int i;
3236 group_info_t group;
3238 /* If this insn reads the frame, kill all the frame related stores. */
3239 if (insn_info->frame_read)
3241 FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
3242 if (group->process_globally && group->frame_related)
3244 if (kill)
3245 bitmap_ior_into (kill, group->group_kill);
3246 bitmap_and_compl_into (gen, group->group_kill);
3249 if (insn_info->non_frame_wild_read)
3251 /* Kill all non-frame related stores. Kill all stores of variables that
3252 escape. */
3253 if (kill)
3254 bitmap_ior_into (kill, kill_on_calls);
3255 bitmap_and_compl_into (gen, kill_on_calls);
3256 FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
3257 if (group->process_globally && !group->frame_related)
3259 if (kill)
3260 bitmap_ior_into (kill, group->group_kill);
3261 bitmap_and_compl_into (gen, group->group_kill);
3264 while (read_info)
3266 FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
3268 if (group->process_globally)
3270 if (i == read_info->group_id)
3272 if (read_info->begin > read_info->end)
3274 /* Begin > end for block mode reads. */
3275 if (kill)
3276 bitmap_ior_into (kill, group->group_kill);
3277 bitmap_and_compl_into (gen, group->group_kill);
3279 else
3281 /* The groups are the same, just process the
3282 offsets. */
3283 HOST_WIDE_INT j;
3284 for (j = read_info->begin; j < read_info->end; j++)
3286 int index = get_bitmap_index (group, j);
3287 if (index != 0)
3289 if (kill)
3290 bitmap_set_bit (kill, index);
3291 bitmap_clear_bit (gen, index);
3296 else
3298              /* The groups are different.  If the alias sets
3299 conflict, clear the entire group. We only need
3300 to apply this test if the read_info is a cselib
3301 read. Anything with a constant base cannot alias
3302 something else with a different constant
3303 base. */
3304 if ((read_info->group_id < 0)
3305 && canon_true_dependence (group->base_mem,
3306 GET_MODE (group->base_mem),
3307 group->canon_base_addr,
3308 read_info->mem, NULL_RTX))
3310 if (kill)
3311 bitmap_ior_into (kill, group->group_kill);
3312 bitmap_and_compl_into (gen, group->group_kill);
3318 read_info = read_info->next;
3322 /* Process the READ_INFOs into the GEN and KILL bitmaps.  KILL
3323    may be NULL.  */
3325 static void
3326 scan_reads_spill (read_info_t read_info, bitmap gen, bitmap kill)
3328 while (read_info)
3330 if (read_info->alias_set)
3332 int index = get_bitmap_index (clear_alias_group,
3333 read_info->alias_set);
3334 if (index != 0)
3336 if (kill)
3337 bitmap_set_bit (kill, index);
3338 bitmap_clear_bit (gen, index);
3342 read_info = read_info->next;
3347 /* Return the insn in BB_INFO before the first wild read or if there
3348 are no wild reads in the block, return the last insn. */
3350 static insn_info_t
3351 find_insn_before_first_wild_read (bb_info_t bb_info)
3353 insn_info_t insn_info = bb_info->last_insn;
3354 insn_info_t last_wild_read = NULL;
3356 while (insn_info)
3358 if (insn_info->wild_read)
3360 last_wild_read = insn_info->prev_insn;
3361 /* Block starts with wild read. */
3362 if (!last_wild_read)
3363 return NULL;
3366 insn_info = insn_info->prev_insn;
3369 if (last_wild_read)
3370 return last_wild_read;
3371 else
3372 return bb_info->last_insn;
3376 /* Scan the insns in BB_INFO starting at PTR and going to the top of
3377 the block in order to build the gen and kill sets for the block.
3378 We start at ptr which may be the last insn in the block or may be
3379 the first insn with a wild read. In the latter case we are able to
3380 skip the rest of the block because it just does not matter:
3381 anything that happens is hidden by the wild read. */
3383 static void
3384 dse_step3_scan (bool for_spills, basic_block bb)
3386 bb_info_t bb_info = bb_table[bb->index];
3387 insn_info_t insn_info;
3389 if (for_spills)
3390 /* There are no wild reads in the spill case. */
3391 insn_info = bb_info->last_insn;
3392 else
3393 insn_info = find_insn_before_first_wild_read (bb_info);
3395 /* In the spill case or in the no_spill case if there is no wild
3396 read in the block, we will need a kill set. */
3397 if (insn_info == bb_info->last_insn)
3399 if (bb_info->kill)
3400 bitmap_clear (bb_info->kill);
3401 else
3402 bb_info->kill = BITMAP_ALLOC (NULL);
3404 else
3405 if (bb_info->kill)
3406 BITMAP_FREE (bb_info->kill);
3408 while (insn_info)
3410 /* There may have been code deleted by the dce pass run before
3411 this phase. */
3412 if (insn_info->insn && INSN_P (insn_info->insn))
3414 /* Process the read(s) last. */
3415 if (for_spills)
3417 scan_stores_spill (insn_info->store_rec, bb_info->gen, bb_info->kill);
3418 scan_reads_spill (insn_info->read_rec, bb_info->gen, bb_info->kill);
3420 else
3422 scan_stores_nospill (insn_info->store_rec, bb_info->gen, bb_info->kill);
3423 scan_reads_nospill (insn_info, bb_info->gen, bb_info->kill);
3427 insn_info = insn_info->prev_insn;
3432 /* Set the gen set of the exit block, and also any block with no
3433 successors that does not have a wild read. */
3435 static void
3436 dse_step3_exit_block_scan (bb_info_t bb_info)
3438 /* The gen set is all 0's for the exit block except for the
3439 frame_pointer_group. */
3441 if (stores_off_frame_dead_at_return)
3443 unsigned int i;
3444 group_info_t group;
3446 FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
3448 if (group->process_globally && group->frame_related)
3449 bitmap_ior_into (bb_info->gen, group->group_kill);
3455 /* Find all of the blocks that are not backwards reachable from the
3456 exit block or any block with no successors (BB). These are the
3457 infinite loops or infinite self loops. These blocks will still
3458 have their bits set in UNREACHABLE_BLOCKS. */
3460 static void
3461 mark_reachable_blocks (sbitmap unreachable_blocks, basic_block bb)
3463 edge e;
3464 edge_iterator ei;
3466 if (TEST_BIT (unreachable_blocks, bb->index))
3468 RESET_BIT (unreachable_blocks, bb->index);
3469 FOR_EACH_EDGE (e, ei, bb->preds)
3471 mark_reachable_blocks (unreachable_blocks, e->src);
3476 /* Build the transfer functions for the function. */
3478 static void
3479 dse_step3 (bool for_spills)
3481 basic_block bb;
3482 sbitmap unreachable_blocks = sbitmap_alloc (last_basic_block);
3483 sbitmap_iterator sbi;
3484 bitmap all_ones = NULL;
3485 unsigned int i;
3487 sbitmap_ones (unreachable_blocks);
3489 FOR_ALL_BB (bb)
3491 bb_info_t bb_info = bb_table[bb->index];
3492 if (bb_info->gen)
3493 bitmap_clear (bb_info->gen);
3494 else
3495 bb_info->gen = BITMAP_ALLOC (NULL);
3497 if (bb->index == ENTRY_BLOCK)
3499 else if (bb->index == EXIT_BLOCK)
3500 dse_step3_exit_block_scan (bb_info);
3501 else
3502 dse_step3_scan (for_spills, bb);
3503 if (EDGE_COUNT (bb->succs) == 0)
3504 mark_reachable_blocks (unreachable_blocks, bb);
3506 /* If this is the second time dataflow is run, delete the old
3507 sets. */
3508 if (bb_info->in)
3509 BITMAP_FREE (bb_info->in);
3510 if (bb_info->out)
3511 BITMAP_FREE (bb_info->out);
3514 /* For any block in an infinite loop, we must initialize the out set
3515 to all ones. This could be expensive, but almost never occurs in
3516 practice. However, it is common in regression tests. */
3517 EXECUTE_IF_SET_IN_SBITMAP (unreachable_blocks, 0, i, sbi)
3519 if (bitmap_bit_p (all_blocks, i))
3521 bb_info_t bb_info = bb_table[i];
3522 if (!all_ones)
3524 unsigned int j;
3525 group_info_t group;
3527 all_ones = BITMAP_ALLOC (NULL);
3528 FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, j, group)
3529 bitmap_ior_into (all_ones, group->group_kill);
3531 if (!bb_info->out)
3533 bb_info->out = BITMAP_ALLOC (NULL);
3534 bitmap_copy (bb_info->out, all_ones);
3539 if (all_ones)
3540 BITMAP_FREE (all_ones);
3541 sbitmap_free (unreachable_blocks);
3546 /*----------------------------------------------------------------------------
3547 Fourth step.
3549 Solve the bitvector equations.
3550 ----------------------------------------------------------------------------*/
3553 /* Confluence function for blocks with no successors. Create an out
3554 set from the gen set of the exit block. This block logically has
3555 the exit block as a successor. */
3559 static void
3560 dse_confluence_0 (basic_block bb)
3562 bb_info_t bb_info = bb_table[bb->index];
3564 if (bb->index == EXIT_BLOCK)
3565 return;
3567 if (!bb_info->out)
3569 bb_info->out = BITMAP_ALLOC (NULL);
3570 bitmap_copy (bb_info->out, bb_table[EXIT_BLOCK]->gen);
3574 /* Propagate the information from the in set of the dest of E to the
3575 out set of the src of E. If the various in or out sets are not
3576 there, that means they are all ones. */
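/* The net effect is that OUT (E->src) becomes the intersection of the
   IN sets of all of E->src's successors, with a missing IN treated as
   all ones.  */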
3578 static bool
3579 dse_confluence_n (edge e)
3581 bb_info_t src_info = bb_table[e->src->index];
3582 bb_info_t dest_info = bb_table[e->dest->index];
3584 if (dest_info->in)
3586 if (src_info->out)
3587 bitmap_and_into (src_info->out, dest_info->in);
3588 else
3590 src_info->out = BITMAP_ALLOC (NULL);
3591 bitmap_copy (src_info->out, dest_info->in);
3594 return true;
3598 /* Propagate the info from the out to the in set of BB_INDEX's basic
3599 block. There are three cases:
3601 1) The block has no kill set. In this case the kill set is all
3602 ones. It does not matter what the out set of the block is, none of
3603 the info can reach the top. The only thing that reaches the top is
3604 the gen set and we just copy the set.
3606 2) There is a kill set but no out set and bb has successors. In
3607 this case we just return. Eventually an out set will be created and
3608 it is better to wait than to create a set of ones.
3610 3) There is both a kill and out set. We apply the obvious transfer
3611 function.
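   /* In case 3 this is the standard backward transfer function:
      IN = GEN | (OUT & ~KILL).  */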
3614 static bool
3615 dse_transfer_function (int bb_index)
3617 bb_info_t bb_info = bb_table[bb_index];
3619 if (bb_info->kill)
3621 if (bb_info->out)
3623 /* Case 3 above. */
3624 if (bb_info->in)
3625 return bitmap_ior_and_compl (bb_info->in, bb_info->gen,
3626 bb_info->out, bb_info->kill);
3627 else
3629 bb_info->in = BITMAP_ALLOC (NULL);
3630 bitmap_ior_and_compl (bb_info->in, bb_info->gen,
3631 bb_info->out, bb_info->kill);
3632 return true;
3635 else
3636 /* Case 2 above. */
3637 return false;
3639 else
3641 /* Case 1 above. If there is already an in set, nothing
3642 happens. */
3643 if (bb_info->in)
3644 return false;
3645 else
3647 bb_info->in = BITMAP_ALLOC (NULL);
3648 bitmap_copy (bb_info->in, bb_info->gen);
3649 return true;
3654 /* Solve the dataflow equations. */
3656 static void
3657 dse_step4 (void)
3659 df_simple_dataflow (DF_BACKWARD, NULL, dse_confluence_0,
3660 dse_confluence_n, dse_transfer_function,
3661 all_blocks, df_get_postorder (DF_BACKWARD),
3662 df_get_n_blocks (DF_BACKWARD));
3663 if (dump_file)
3665 basic_block bb;
3667 fprintf (dump_file, "\n\n*** Global dataflow info after analysis.\n");
3668 FOR_ALL_BB (bb)
3670 bb_info_t bb_info = bb_table[bb->index];
3672 df_print_bb_index (bb, dump_file);
3673 if (bb_info->in)
3674 bitmap_print (dump_file, bb_info->in, " in: ", "\n");
3675 else
3676 fprintf (dump_file, " in: *MISSING*\n");
3677 if (bb_info->gen)
3678 bitmap_print (dump_file, bb_info->gen, " gen: ", "\n");
3679 else
3680 fprintf (dump_file, " gen: *MISSING*\n");
3681 if (bb_info->kill)
3682 bitmap_print (dump_file, bb_info->kill, " kill: ", "\n");
3683 else
3684 fprintf (dump_file, " kill: *MISSING*\n");
3685 if (bb_info->out)
3686 bitmap_print (dump_file, bb_info->out, " out: ", "\n");
3687 else
3688 fprintf (dump_file, " out: *MISSING*\n\n");
3695 /*----------------------------------------------------------------------------
3696 Fifth step.
3698 Delete the stores that can only be deleted using the global information.
3699 ----------------------------------------------------------------------------*/
3702 static void
3703 dse_step5_nospill (void)
3705 basic_block bb;
3706 FOR_EACH_BB (bb)
3708 bb_info_t bb_info = bb_table[bb->index];
3709 insn_info_t insn_info = bb_info->last_insn;
3710 bitmap v = bb_info->out;
3712 while (insn_info)
3714 bool deleted = false;
3715 if (dump_file && insn_info->insn)
3717 fprintf (dump_file, "starting to process insn %d\n",
3718 INSN_UID (insn_info->insn));
3719 bitmap_print (dump_file, v, " v: ", "\n");
3722 /* There may have been code deleted by the dce pass run before
3723 this phase. */
3724 if (insn_info->insn
3725 && INSN_P (insn_info->insn)
3726 && (!insn_info->cannot_delete)
3727 && (!bitmap_empty_p (v)))
3729 store_info_t store_info = insn_info->store_rec;
3731 /* Try to delete the current insn. */
3732 deleted = true;
3734 /* Skip the clobbers. */
3735 while (!store_info->is_set)
3736 store_info = store_info->next;
3738 if (store_info->alias_set)
3739 deleted = false;
3740 else
3742 HOST_WIDE_INT i;
3743 group_info_t group_info
3744 = VEC_index (group_info_t, rtx_group_vec, store_info->group_id);
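              /* The store is dead only if every byte position it writes is
                 still set in V, i.e. on every path to exit that byte is
                 overwritten again before any read of it.  */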
3746 for (i = store_info->begin; i < store_info->end; i++)
3748 int index = get_bitmap_index (group_info, i);
3750 if (dump_file)
3751 fprintf (dump_file, "i = %d, index = %d\n", (int)i, index);
3752 if (index == 0 || !bitmap_bit_p (v, index))
3754 if (dump_file)
3755 fprintf (dump_file, "failing at i = %d\n", (int)i);
3756 deleted = false;
3757 break;
3761 if (deleted)
3763 if (dbg_cnt (dse)
3764 && check_for_inc_dec_1 (insn_info))
3766 delete_insn (insn_info->insn);
3767 insn_info->insn = NULL;
3768 globally_deleted++;
3772      /* We do not want to process the local info if the insn was
3773         deleted.  For instance, if the insn did a wild read, we
3774         no longer need to trash the info.  */
3775 if (insn_info->insn
3776 && INSN_P (insn_info->insn)
3777 && (!deleted))
3779 scan_stores_nospill (insn_info->store_rec, v, NULL);
3780 if (insn_info->wild_read)
3782 if (dump_file)
3783 fprintf (dump_file, "wild read\n");
3784 bitmap_clear (v);
3786 else if (insn_info->read_rec
3787 || insn_info->non_frame_wild_read)
3789 if (dump_file && !insn_info->non_frame_wild_read)
3790 fprintf (dump_file, "regular read\n");
3791 else if (dump_file)
3792 fprintf (dump_file, "non-frame wild read\n");
3793 scan_reads_nospill (insn_info, v, NULL);
3797 insn_info = insn_info->prev_insn;
3803 static void
3804 dse_step5_spill (void)
3806 basic_block bb;
3807 FOR_EACH_BB (bb)
3809 bb_info_t bb_info = bb_table[bb->index];
3810 insn_info_t insn_info = bb_info->last_insn;
3811 bitmap v = bb_info->out;
3813 while (insn_info)
3815 bool deleted = false;
3816 /* There may have been code deleted by the dce pass run before
3817 this phase. */
3818 if (insn_info->insn
3819 && INSN_P (insn_info->insn)
3820 && (!insn_info->cannot_delete)
3821 && (!bitmap_empty_p (v)))
3823 /* Try to delete the current insn. */
3824 store_info_t store_info = insn_info->store_rec;
3825 deleted = true;
3827 while (store_info)
3829 if (store_info->alias_set)
3831 int index = get_bitmap_index (clear_alias_group,
3832 store_info->alias_set);
3833 if (index == 0 || !bitmap_bit_p (v, index))
3835 deleted = false;
3836 break;
3839 else
3840 deleted = false;
3841 store_info = store_info->next;
3843 if (deleted && dbg_cnt (dse)
3844 && check_for_inc_dec_1 (insn_info))
3846 if (dump_file)
3847 fprintf (dump_file, "Spill deleting insn %d\n",
3848 INSN_UID (insn_info->insn));
3849 delete_insn (insn_info->insn);
3850 spill_deleted++;
3851 insn_info->insn = NULL;
3855 if (insn_info->insn
3856 && INSN_P (insn_info->insn)
3857 && (!deleted))
3859 scan_stores_spill (insn_info->store_rec, v, NULL);
3860 scan_reads_spill (insn_info->read_rec, v, NULL);
3863 insn_info = insn_info->prev_insn;
3870 /*----------------------------------------------------------------------------
3871 Sixth step.
3873 Delete stores made redundant by earlier stores (which store the same
3874 value) that couldn't be eliminated.
3875 ----------------------------------------------------------------------------*/
3877 static void
3878 dse_step6 (void)
3880 basic_block bb;
3882 FOR_ALL_BB (bb)
3884 bb_info_t bb_info = bb_table[bb->index];
3885 insn_info_t insn_info = bb_info->last_insn;
3887 while (insn_info)
3889 /* There may have been code deleted by the dce pass run before
3890 this phase. */
3891 if (insn_info->insn
3892 && INSN_P (insn_info->insn)
3893 && !insn_info->cannot_delete)
3895 store_info_t s_info = insn_info->store_rec;
3897 while (s_info && !s_info->is_set)
3898 s_info = s_info->next;
3899 if (s_info
3900 && s_info->redundant_reason
3901 && s_info->redundant_reason->insn
3902 && INSN_P (s_info->redundant_reason->insn))
3904 rtx rinsn = s_info->redundant_reason->insn;
3905 if (dump_file)
3906 fprintf (dump_file, "Locally deleting insn %d "
3907 "because insn %d stores the "
3908 "same value and couldn't be "
3909 "eliminated\n",
3910 INSN_UID (insn_info->insn),
3911 INSN_UID (rinsn));
3912 delete_dead_store_insn (insn_info);
3915 insn_info = insn_info->prev_insn;
3920 /*----------------------------------------------------------------------------
3921 Seventh step.
3923 Destroy everything left standing.
3924 ----------------------------------------------------------------------------*/
static void
dse_step7 (bool global_done)
{
  unsigned int i;
  group_info_t group;
  basic_block bb;

  FOR_EACH_VEC_ELT (group_info_t, rtx_group_vec, i, group)
    {
      free (group->offset_map_n);
      free (group->offset_map_p);
      BITMAP_FREE (group->store1_n);
      BITMAP_FREE (group->store1_p);
      BITMAP_FREE (group->store2_n);
      BITMAP_FREE (group->store2_p);
      BITMAP_FREE (group->escaped_n);
      BITMAP_FREE (group->escaped_p);
      BITMAP_FREE (group->group_kill);
    }

  if (global_done)
    FOR_ALL_BB (bb)
      {
	bb_info_t bb_info = bb_table[bb->index];
	BITMAP_FREE (bb_info->gen);
	if (bb_info->kill)
	  BITMAP_FREE (bb_info->kill);
	if (bb_info->in)
	  BITMAP_FREE (bb_info->in);
	if (bb_info->out)
	  BITMAP_FREE (bb_info->out);
      }

  if (clear_alias_sets)
    {
      BITMAP_FREE (clear_alias_sets);
      BITMAP_FREE (disqualified_clear_alias_sets);
      free_alloc_pool (clear_alias_mode_pool);
      htab_delete (clear_alias_mode_table);
    }

  end_alias_analysis ();
  free (bb_table);
  htab_delete (rtx_group_table);
  VEC_free (group_info_t, heap, rtx_group_vec);
  BITMAP_FREE (all_blocks);
  BITMAP_FREE (scratch);
  BITMAP_FREE (kill_on_calls);

  free_alloc_pool (rtx_store_info_pool);
  free_alloc_pool (read_info_pool);
  free_alloc_pool (insn_info_pool);
  free_alloc_pool (bb_info_pool);
  free_alloc_pool (rtx_group_info_pool);
  free_alloc_pool (deferred_change_pool);
}

/* -------------------------------------------------------------------------
   ------------------------------------------------------------------------- */

/* Callback for running pass_rtl_dse.  */
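/* Road map of the calls made below: dse_step0, dse_step1 and
   dse_step2_init initialize and scan the insns; when dse_step2_nospill
   succeeds, dse_step3, dse_step4 and dse_step5_nospill build, solve and
   apply the global dataflow problem; when clear_alias_sets is set and
   dse_step2_spill succeeds, the same machinery is rerun for spill
   slots; dse_step6 and dse_step7 then finish up.  */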
static unsigned int
rest_of_handle_dse (void)
{
  bool did_global = false;

  df_set_flags (DF_DEFER_INSN_RESCAN);

  /* Need the notes since we must track live hardregs in the forwards
     direction.  */
  df_note_add_problem ();
  df_analyze ();

  dse_step0 ();
  dse_step1 ();
  dse_step2_init ();
  if (dse_step2_nospill ())
    {
      df_set_flags (DF_LR_RUN_DCE);
      df_analyze ();
      did_global = true;
      if (dump_file)
	fprintf (dump_file, "doing global processing\n");
      dse_step3 (false);
      dse_step4 ();
      dse_step5_nospill ();
    }

  /* For the instance of dse that runs after reload, we make a special
     pass to process the spills.  These are special in that they are
     totally transparent, i.e., there are no aliasing issues that need
     to be considered.  This means that the wild reads that kill
     everything else do not apply here.  */
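  /* Concretely: unlike dse_step5_nospill, dse_step5_spill above has no
     insn_info->wild_read check that clears V, so a wild read never
     wipes out the set of deletable spill-slot stores.  */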
  if (clear_alias_sets && dse_step2_spill ())
    {
      if (!did_global)
	{
	  df_set_flags (DF_LR_RUN_DCE);
	  df_analyze ();
	}
      did_global = true;
      if (dump_file)
	fprintf (dump_file, "doing global spill processing\n");
      dse_step3 (true);
      dse_step4 ();
      dse_step5_spill ();
    }

  dse_step6 ();
  dse_step7 (did_global);

  if (dump_file)
    fprintf (dump_file, "dse: local deletions = %d, global deletions = %d, spill deletions = %d\n",
	     locally_deleted, globally_deleted, spill_deleted);
  return 0;
}

static bool
gate_dse (void)
{
  return gate_dse1 () || gate_dse2 ();
}

static bool
gate_dse1 (void)
{
  return optimize > 0 && flag_dse
    && dbg_cnt (dse1);
}

static bool
gate_dse2 (void)
{
  return optimize > 0 && flag_dse
    && dbg_cnt (dse2);
}

struct rtl_opt_pass pass_rtl_dse1 =
{
 {
  RTL_PASS,
  "dse1",                               /* name */
  gate_dse1,                            /* gate */
  rest_of_handle_dse,                   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DSE1,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};

struct rtl_opt_pass pass_rtl_dse2 =
{
 {
  RTL_PASS,
  "dse2",                               /* name */
  gate_dse2,                            /* gate */
  rest_of_handle_dse,                   /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_DSE2,                              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect                      /* todo_flags_finish */
 }
};