EnumSet*.class: Regenerate
[official-gcc.git] / gcc / dse.c
blobf8859f6c91d10b13be0182292a771f3c60960e62
1 /* RTL dead store elimination.
2 Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.
4 Contributed by Richard Sandiford <rsandifor@codesourcery.com>
5 and Kenneth Zadeck <zadeck@naturalbridge.com>
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
23 #undef BASELINE
25 #include "config.h"
26 #include "system.h"
27 #include "coretypes.h"
28 #include "hashtab.h"
29 #include "tm.h"
30 #include "rtl.h"
31 #include "tree.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "flags.h"
35 #include "df.h"
36 #include "cselib.h"
37 #include "timevar.h"
38 #include "tree-pass.h"
39 #include "alloc-pool.h"
40 #include "alias.h"
41 #include "insn-config.h"
42 #include "expr.h"
43 #include "recog.h"
44 #include "dse.h"
45 #include "dbgcnt.h"
47 /* This file contains three techniques for performing Dead Store
48 Elimination (dse).
50 * The first technique performs dse locally on any base address. It
51 is based on the cselib which is a local value numbering technique.
52 This technique is local to a basic block but deals with a fairly
53 general addresses.
55 * The second technique performs dse globally but is restricted to
56 base addresses that are either constant or are relative to the
57 frame_pointer.
59 * The third technique, (which is only done after register allocation)
60 processes the spill spill slots. This differs from the second
61 technique because it takes advantage of the fact that spilling is
62 completely free from the effects of aliasing.
64 Logically, dse is a backwards dataflow problem. A store can be
65 deleted if it if cannot be reached in the backward direction by any
66 use of the value being stored. However, the local technique uses a
67 forwards scan of the basic block because cselib requires that the
68 block be processed in that order.
70 The pass is logically broken into 7 steps:
72 0) Initialization.
74 1) The local algorithm, as well as scanning the insns for the two
75 global algorithms.
77 2) Analysis to see if the global algs are necessary. In the case
78 of stores base on a constant address, there must be at least two
79 stores to that address, to make it possible to delete some of the
80 stores. In the case of stores off of the frame or spill related
81 stores, only one store to an address is necessary because those
82 stores die at the end of the function.
84 3) Set up the global dataflow equations based on processing the
85 info parsed in the first step.
87 4) Solve the dataflow equations.
89 5) Delete the insns that the global analysis has indicated are
90 unnecessary.
92 6) Cleanup.
94 This step uses cselib and canon_rtx to build the largest expression
95 possible for each address. This pass is a forwards pass through
96 each basic block. From the point of view of the global technique,
97 the first pass could examine a block in either direction. The
98 forwards ordering is to accommodate cselib.
100 We a simplifying assumption: addresses fall into four broad
101 categories:
103 1) base has rtx_varies_p == false, offset is constant.
104 2) base has rtx_varies_p == false, offset variable.
105 3) base has rtx_varies_p == true, offset constant.
106 4) base has rtx_varies_p == true, offset variable.
108 The local passes are able to process all 4 kinds of addresses. The
109 global pass only handles (1).
111 The global problem is formulated as follows:
113 A store, S1, to address A, where A is not relative to the stack
114 frame, can be eliminated if all paths from S1 to the end of the
115 of the function contain another store to A before a read to A.
117 If the address A is relative to the stack frame, a store S2 to A
118 can be eliminated if there are no paths from S1 that reach the
119 end of the function that read A before another store to A. In
120 this case S2 can be deleted if there are paths to from S2 to the
121 end of the function that have no reads or writes to A. This
122 second case allows stores to the stack frame to be deleted that
123 would otherwise die when the function returns. This cannot be
124 done if stores_off_frame_dead_at_return is not true. See the doc
125 for that variable for when this variable is false.
127 The global problem is formulated as a backwards set union
128 dataflow problem where the stores are the gens and reads are the
129 kills. Set union problems are rare and require some special
130 handling given our representation of bitmaps. A straightforward
131 implementation of requires a lot of bitmaps filled with 1s.
132 These are expensive and cumbersome in our bitmap formulation so
133 care has been taken to avoid large vectors filled with 1s. See
134 the comments in bb_info and in the dataflow confluence functions
135 for details.
137 There are two places for further enhancements to this algorithm:
139 1) The original dse which was embedded in a pass called flow also
140 did local address forwarding. For example in
142 A <- r100
143 ... <- A
145 flow would replace the right hand side of the second insn with a
146 reference to r100. Most of the information is available to add this
147 to this pass. It has not done it because it is a lot of work in
148 the case that either r100 is assigned to between the first and
149 second insn and/or the second insn is a load of part of the value
150 stored by the first insn.
152 insn 5 in gcc.c-torture/compile/990203-1.c simple case.
153 insn 15 in gcc.c-torture/execute/20001017-2.c simple case.
154 insn 25 in gcc.c-torture/execute/20001026-1.c simple case.
155 insn 44 in gcc.c-torture/execute/20010910-1.c simple case.
157 2) The cleaning up of spill code is quite profitable. It currently
158 depends on reading tea leaves and chicken entrails left by reload.
159 This pass depends on reload creating a singleton alias set for each
160 spill slot and telling the next dse pass which of these alias sets
161 are the singletons. Rather than analyze the addresses of the
162 spills, dse's spill processing just does analysis of the loads and
163 stores that use those alias sets. There are three cases where this
164 falls short:
166 a) Reload sometimes creates the slot for one mode of access, and
167 then inserts loads and/or stores for a smaller mode. In this
168 case, the current code just punts on the slot. The proper thing
169 to do is to back out and use one bit vector position for each
170 byte of the entity associated with the slot. This depends on
171 KNOWING that reload always generates the accesses for each of the
172 bytes in some canonical (read that easy to understand several
173 passes after reload happens) way.
175 b) Reload sometimes decides that spill slot it allocated was not
176 large enough for the mode and goes back and allocates more slots
177 with the same mode and alias set. The backout in this case is a
178 little more graceful than (a). In this case the slot is unmarked
179 as being a spill slot and if final address comes out to be based
180 off the frame pointer, the global algorithm handles this slot.
182 c) For any pass that may prespill, there is currently no
183 mechanism to tell the dse pass that the slot being used has the
184 special properties that reload uses. It may be that all that is
185 required is to have those passes make the same calls that reload
186 does, assuming that the alias sets can be manipulated in the same
187 way. */
189 /* There are limits to the size of constant offsets we model for the
190 global problem. There are certainly test cases, that exceed this
191 limit, however, it is unlikely that there are important programs
192 that really have constant offsets this size. */
193 #define MAX_OFFSET (64 * 1024)
196 static bitmap scratch = NULL;
197 struct insn_info;
199 /* This structure holds information about a candidate store. */
200 struct store_info
203 /* False means this is a clobber. */
204 bool is_set;
206 /* The id of the mem group of the base address. If rtx_varies_p is
207 true, this is -1. Otherwise, it is the index into the group
208 table. */
209 int group_id;
211 /* This is the cselib value. */
212 cselib_val *cse_base;
214 /* This canonized mem. */
215 rtx mem;
217 /* The result of get_addr on mem. */
218 rtx mem_addr;
220 /* If this is non-zero, it is the alias set of a spill location. */
221 alias_set_type alias_set;
223 /* The offset of the first and byte before the last byte associated
224 with the operation. */
225 int begin, end;
227 /* An bitmask as wide as the number of bytes in the word that
228 contains a 1 if the byte may be needed. The store is unused if
229 all of the bits are 0. */
230 long positions_needed;
232 /* The next store info for this insn. */
233 struct store_info *next;
235 /* The right hand side of the store. This is used if there is a
236 subsequent reload of the mems address somewhere later in the
237 basic block. */
238 rtx rhs;
241 typedef struct store_info *store_info_t;
242 static alloc_pool cse_store_info_pool;
243 static alloc_pool rtx_store_info_pool;
245 /* This structure holds information about a load. These are only
246 built for rtx bases. */
247 struct read_info
249 /* The id of the mem group of the base address. */
250 int group_id;
252 /* If this is non-zero, it is the alias set of a spill location. */
253 alias_set_type alias_set;
255 /* The offset of the first and byte after the last byte associated
256 with the operation. If begin == end == 0, the read did not have
257 a constant offset. */
258 int begin, end;
260 /* The mem being read. */
261 rtx mem;
263 /* The next read_info for this insn. */
264 struct read_info *next;
266 typedef struct read_info *read_info_t;
267 static alloc_pool read_info_pool;
270 /* One of these records is created for each insn. */
272 struct insn_info
274 /* Set true if the insn contains a store but the insn itself cannot
275 be deleted. This is set if the insn is a parallel and there is
276 more than one non dead output or if the insn is in some way
277 volatile. */
278 bool cannot_delete;
280 /* This field is only used by the global algorithm. It is set true
281 if the insn contains any read of mem except for a (1). This is
282 also set if the insn is a call or has a clobber mem. If the insn
283 contains a wild read, the use_rec will be null. */
284 bool wild_read;
286 /* This field is set for const function calls. Const functions
287 cannot read memory, but they can read the stack because that is
288 where they may get their parms. So having this set is less
289 severe than a wild read, it just means that all of the stores to
290 the stack are killed rather than all stores. */
291 bool stack_read;
293 /* This is true if any of the sets within the store contains a
294 cselib base. Such stores can only be deleted by the local
295 algorithm. */
296 bool contains_cselib_groups;
298 /* The insn. */
299 rtx insn;
301 /* The list of mem sets or mem clobbers that are contained in this
302 insn. If the insn is deletable, it contains only one mem set.
303 But it could also contain clobbers. Insns that contain more than
304 one mem set are not deletable, but each of those mems are here in
305 order to provide info to delete other insns. */
306 store_info_t store_rec;
308 /* The linked list of mem uses in this insn. Only the reads from
309 rtx bases are listed here. The reads to cselib bases are
310 completely processed during the first scan and so are never
311 created. */
312 read_info_t read_rec;
314 /* The prev insn in the basic block. */
315 struct insn_info * prev_insn;
317 /* The linked list of insns that are in consideration for removal in
318 the forwards pass thru the basic block. This pointer may be
319 trash as it is not cleared when a wild read occurs. The only
320 time it is guaranteed to be correct is when the traveral starts
321 at active_local_stores. */
322 struct insn_info * next_local_store;
325 typedef struct insn_info *insn_info_t;
326 static alloc_pool insn_info_pool;
328 /* The linked list of stores that are under consideration in this
329 basic block. */
330 static insn_info_t active_local_stores;
332 struct bb_info
335 /* Pointer to the insn info for the last insn in the block. These
336 are linked so this is how all of the insns are reached. During
337 scanning this is the current insn being scanned. */
338 insn_info_t last_insn;
340 /* The info for the global dataflow problem. */
343 /* This is set if the transfer function should and in the wild_read
344 bitmap before applying the kill and gen sets. That vector knocks
345 out most of the bits in the bitmap and thus speeds up the
346 operations. */
347 bool apply_wild_read;
349 /* The set of store positions that exist in this block before a wild read. */
350 bitmap gen;
352 /* The set of load positions that exist in this block above the
353 same position of a store. */
354 bitmap kill;
356 /* The set of stores that reach the top of the block without being
357 killed by a read.
359 Do not represent the in if it is all ones. Note that this is
360 what the bitvector should logically be initialized to for a set
361 intersection problem. However, like the kill set, this is too
362 expensive. So initially, the in set will only be created for the
363 exit block and any block that contains a wild read. */
364 bitmap in;
366 /* The set of stores that reach the bottom of the block from it's
367 successors.
369 Do not represent the in if it is all ones. Note that this is
370 what the bitvector should logically be initialized to for a set
371 intersection problem. However, like the kill and in set, this is
372 too expensive. So what is done is that the confluence operator
373 just initializes the vector from one of the out sets of the
374 successors of the block. */
375 bitmap out;
378 typedef struct bb_info *bb_info_t;
379 static alloc_pool bb_info_pool;
381 /* Table to hold all bb_infos. */
382 static bb_info_t *bb_table;
384 /* There is a group_info for each rtx base that is used to reference
385 memory. There are also not many of the rtx bases because they are
386 very limited in scope. */
388 struct group_info
390 /* The actual base of the address. */
391 rtx rtx_base;
393 /* The sequential id of the base. This allows us to have a
394 canonical ordering of these that is not based on addresses. */
395 int id;
397 /* A mem wrapped around the base pointer for the group in order to
398 do read dependency. */
399 rtx base_mem;
401 /* Canonized version of base_mem, most likely the same thing. */
402 rtx canon_base_mem;
404 /* These two sets of two bitmaps are used to keep track of how many
405 stores are actually referencing that position from this base. We
406 only do this for rtx bases as this will be used to assign
407 positions in the bitmaps for the global problem. Bit N is set in
408 store1 on the first store for offset N. Bit N is set in store2
409 for the second store to offset N. This is all we need since we
410 only care about offsets that have two or more stores for them.
412 The "_n" suffix is for offsets less than 0 and the "_p" suffix is
413 for 0 and greater offsets.
415 There is one special case here, for stores into the stack frame,
416 we will or store1 into store2 before deciding which stores look
417 at globally. This is because stores to the stack frame that have
418 no other reads before the end of the function can also be
419 deleted. */
420 bitmap store1_n, store1_p, store2_n, store2_p;
422 /* The positions in this bitmap have the same assignments as the in,
423 out, gen and kill bitmaps. This bitmap is all zeros except for
424 the positions that are occupied by stores for this group. */
425 bitmap group_kill;
427 /* True if there are any positions that are to be processed
428 globally. */
429 bool process_globally;
431 /* True if the base of this group is either the frame_pointer or
432 hard_frame_pointer. */
433 bool frame_related;
435 /* The offset_map is used to map the offsets from this base into
436 positions in the global bitmaps. It is only created after all of
437 the all of stores have been scanned and we know which ones we
438 care about. */
439 int *offset_map_n, *offset_map_p;
440 int offset_map_size_n, offset_map_size_p;
442 typedef struct group_info *group_info_t;
443 typedef const struct group_info *const_group_info_t;
444 static alloc_pool rtx_group_info_pool;
446 /* Tables of group_info structures, hashed by base value. */
447 static htab_t rtx_group_table;
449 /* Index into the rtx_group_vec. */
450 static int rtx_group_next_id;
452 DEF_VEC_P(group_info_t);
453 DEF_VEC_ALLOC_P(group_info_t,heap);
455 static VEC(group_info_t,heap) *rtx_group_vec;
458 /* This structure holds the set of changes that are being deferred
459 when removing read operation. See replace_read. */
460 struct deferred_change
463 /* The mem that is being replaced. */
464 rtx *loc;
466 /* The reg it is being replaced with. */
467 rtx reg;
469 struct deferred_change *next;
472 typedef struct deferred_change *deferred_change_t;
473 static alloc_pool deferred_change_pool;
475 static deferred_change_t deferred_change_list = NULL;
477 /* This are used to hold the alias sets of spill variables. Since
478 these are never aliased and there may be a lot of them, it makes
479 sense to treat them specially. This bitvector is only allocated in
480 calls from dse_record_singleton_alias_set which currently is only
481 made during reload1. So when dse is called before reload this
482 mechanism does nothing. */
484 static bitmap clear_alias_sets = NULL;
486 /* The set of clear_alias_sets that have been disqualified because
487 there are loads or stores using a different mode than the alias set
488 was registered with. */
489 static bitmap disqualified_clear_alias_sets = NULL;
491 /* The group that holds all of the clear_alias_sets. */
492 static group_info_t clear_alias_group;
494 /* The modes of the clear_alias_sets. */
495 static htab_t clear_alias_mode_table;
497 /* Hash table element to look up the mode for an alias set. */
498 struct clear_alias_mode_holder
500 alias_set_type alias_set;
501 enum machine_mode mode;
504 static alloc_pool clear_alias_mode_pool;
506 /* This is true except for two cases:
507 (1) current_function_stdarg -- i.e. we cannot do this
508 for vararg functions because they play games with the frame.
509 (2) In ada, it is sometimes not safe to do assume that any stores
510 based off the stack frame go dead at the exit to a function. */
511 static bool stores_off_frame_dead_at_return;
513 /* Counter for stats. */
514 static int globally_deleted;
515 static int locally_deleted;
516 static int spill_deleted;
518 static bitmap all_blocks;
520 /* The number of bits used in the global bitmaps. */
521 static unsigned int current_position;
524 static bool gate_dse (void);
527 /*----------------------------------------------------------------------------
528 Zeroth step.
530 Initialization.
531 ----------------------------------------------------------------------------*/
533 /* Hashtable callbacks for maintaining the "bases" field of
534 store_group_info, given that the addresses are function invariants. */
536 static int
537 clear_alias_mode_eq (const void *p1, const void *p2)
539 const struct clear_alias_mode_holder * h1
540 = (const struct clear_alias_mode_holder *) p1;
541 const struct clear_alias_mode_holder * h2
542 = (const struct clear_alias_mode_holder *) p2;
543 return h1->alias_set == h2->alias_set;
547 static hashval_t
548 clear_alias_mode_hash (const void *p)
550 const struct clear_alias_mode_holder *holder
551 = (const struct clear_alias_mode_holder *) p;
552 return holder->alias_set;
556 /* Find the entry associated with ALIAS_SET. */
558 static struct clear_alias_mode_holder *
559 clear_alias_set_lookup (alias_set_type alias_set)
561 struct clear_alias_mode_holder tmp_holder;
562 void **slot;
564 tmp_holder.alias_set = alias_set;
565 slot = htab_find_slot (clear_alias_mode_table, &tmp_holder, NO_INSERT);
566 gcc_assert (*slot);
568 return *slot;
572 /* Hashtable callbacks for maintaining the "bases" field of
573 store_group_info, given that the addresses are function invariants. */
575 static int
576 invariant_group_base_eq (const void *p1, const void *p2)
578 const_group_info_t gi1 = (const_group_info_t) p1;
579 const_group_info_t gi2 = (const_group_info_t) p2;
580 return rtx_equal_p (gi1->rtx_base, gi2->rtx_base);
584 static hashval_t
585 invariant_group_base_hash (const void *p)
587 const_group_info_t gi = (const_group_info_t) p;
588 int do_not_record;
589 return hash_rtx (gi->rtx_base, Pmode, &do_not_record, NULL, false);
593 /* Get the GROUP for BASE. Add a new group if it is not there. */
595 static group_info_t
596 get_group_info (rtx base)
598 struct group_info tmp_gi;
599 group_info_t gi;
600 void **slot;
602 if (base)
604 /* Find the store_base_info structure for BASE, creating a new one
605 if necessary. */
606 tmp_gi.rtx_base = base;
607 slot = htab_find_slot (rtx_group_table, &tmp_gi, INSERT);
608 gi = (group_info_t) *slot;
610 else
612 if (!clear_alias_group)
614 clear_alias_group = gi = pool_alloc (rtx_group_info_pool);
615 memset (gi, 0, sizeof (struct group_info));
616 gi->id = rtx_group_next_id++;
617 gi->store1_n = BITMAP_ALLOC (NULL);
618 gi->store1_p = BITMAP_ALLOC (NULL);
619 gi->store2_n = BITMAP_ALLOC (NULL);
620 gi->store2_p = BITMAP_ALLOC (NULL);
621 gi->group_kill = BITMAP_ALLOC (NULL);
622 gi->process_globally = false;
623 gi->offset_map_size_n = 0;
624 gi->offset_map_size_p = 0;
625 gi->offset_map_n = NULL;
626 gi->offset_map_p = NULL;
627 VEC_safe_push (group_info_t, heap, rtx_group_vec, gi);
629 return clear_alias_group;
632 if (gi == NULL)
634 *slot = gi = pool_alloc (rtx_group_info_pool);
635 gi->rtx_base = base;
636 gi->id = rtx_group_next_id++;
637 gi->base_mem = gen_rtx_MEM (QImode, base);
638 gi->canon_base_mem = canon_rtx (gi->base_mem);
639 gi->store1_n = BITMAP_ALLOC (NULL);
640 gi->store1_p = BITMAP_ALLOC (NULL);
641 gi->store2_n = BITMAP_ALLOC (NULL);
642 gi->store2_p = BITMAP_ALLOC (NULL);
643 gi->group_kill = BITMAP_ALLOC (NULL);
644 gi->process_globally = false;
645 gi->frame_related =
646 (base == frame_pointer_rtx) || (base == hard_frame_pointer_rtx);
647 gi->offset_map_size_n = 0;
648 gi->offset_map_size_p = 0;
649 gi->offset_map_n = NULL;
650 gi->offset_map_p = NULL;
651 VEC_safe_push (group_info_t, heap, rtx_group_vec, gi);
654 return gi;
658 /* Initialization of data structures. */
660 static void
661 dse_step0 (void)
663 locally_deleted = 0;
664 globally_deleted = 0;
665 spill_deleted = 0;
667 scratch = BITMAP_ALLOC (NULL);
669 rtx_store_info_pool
670 = create_alloc_pool ("rtx_store_info_pool",
671 sizeof (struct store_info), 100);
672 read_info_pool
673 = create_alloc_pool ("read_info_pool",
674 sizeof (struct read_info), 100);
675 insn_info_pool
676 = create_alloc_pool ("insn_info_pool",
677 sizeof (struct insn_info), 100);
678 bb_info_pool
679 = create_alloc_pool ("bb_info_pool",
680 sizeof (struct bb_info), 100);
681 rtx_group_info_pool
682 = create_alloc_pool ("rtx_group_info_pool",
683 sizeof (struct group_info), 100);
684 deferred_change_pool
685 = create_alloc_pool ("deferred_change_pool",
686 sizeof (struct deferred_change), 10);
688 rtx_group_table = htab_create (11, invariant_group_base_hash,
689 invariant_group_base_eq, NULL);
691 bb_table = XCNEWVEC (bb_info_t, last_basic_block);
692 rtx_group_next_id = 0;
694 stores_off_frame_dead_at_return =
695 (!(TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
696 && (TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))))
697 && (!current_function_stdarg);
699 init_alias_analysis ();
701 if (clear_alias_sets)
702 clear_alias_group = get_group_info (NULL);
703 else
704 clear_alias_group = NULL;
709 /*----------------------------------------------------------------------------
710 First step.
712 Scan all of the insns. Any random ordering of the blocks is fine.
713 Each block is scanned in forward order to accommodate cselib which
714 is used to remove stores with non-constant bases.
715 ----------------------------------------------------------------------------*/
717 /* Delete all of the store_info recs from INSN_INFO. */
719 static void
720 free_store_info (insn_info_t insn_info)
722 store_info_t store_info = insn_info->store_rec;
723 while (store_info)
725 store_info_t next = store_info->next;
726 if (store_info->cse_base)
727 pool_free (cse_store_info_pool, store_info);
728 else
729 pool_free (rtx_store_info_pool, store_info);
730 store_info = next;
733 insn_info->cannot_delete = true;
734 insn_info->contains_cselib_groups = false;
735 insn_info->store_rec = NULL;
739 struct insn_size {
740 int size;
741 rtx insn;
745 /* Add an insn to do the add inside a x if it is a
746 PRE/POST-INC/DEC/MODIFY. D is an structure containing the insn and
747 the size of the mode of the MEM that this is inside of. */
749 static int
750 replace_inc_dec (rtx *r, void *d)
752 rtx x = *r;
753 struct insn_size *data = (struct insn_size *)d;
754 switch (GET_CODE (x))
756 case PRE_INC:
757 case POST_INC:
759 rtx r1 = XEXP (x, 0);
760 rtx c = gen_int_mode (Pmode, data->size);
761 add_insn_before (data->insn,
762 gen_rtx_SET (Pmode, r1,
763 gen_rtx_PLUS (Pmode, r1, c)),
764 NULL);
765 return -1;
768 case PRE_DEC:
769 case POST_DEC:
771 rtx r1 = XEXP (x, 0);
772 rtx c = gen_int_mode (Pmode, -data->size);
773 add_insn_before (data->insn,
774 gen_rtx_SET (Pmode, r1,
775 gen_rtx_PLUS (Pmode, r1, c)),
776 NULL);
777 return -1;
780 case PRE_MODIFY:
781 case POST_MODIFY:
783 /* We can reuse the add because we are about to delete the
784 insn that contained it. */
785 rtx add = XEXP (x, 0);
786 rtx r1 = XEXP (add, 0);
787 add_insn_before (data->insn,
788 gen_rtx_SET (Pmode, r1, add), NULL);
789 return -1;
792 default:
793 return 0;
798 /* If X is a MEM, check the address to see if it is PRE/POST-INC/DEC/MODIFY
799 and generate an add to replace that. */
801 static int
802 replace_inc_dec_mem (rtx *r, void *d)
804 rtx x = *r;
805 if (GET_CODE (x) == MEM)
807 struct insn_size data;
809 data.size = GET_MODE_SIZE (GET_MODE (x));
810 data.insn = (rtx)d;
812 for_each_rtx (&XEXP (x, 0), replace_inc_dec, &data);
814 return -1;
816 return 0;
819 /* Before we delete INSN, make sure that the auto inc/dec, if it is
820 there, is split into a separate insn. */
822 static void
823 check_for_inc_dec (rtx insn)
825 rtx note = find_reg_note (insn, REG_INC, NULL_RTX);
826 if (note)
827 for_each_rtx (&insn, replace_inc_dec_mem, insn);
831 /* Delete the insn and free all of the fields inside INSN_INFO. */
833 static void
834 delete_dead_store_insn (insn_info_t insn_info)
836 read_info_t read_info;
838 if (!dbg_cnt (dse))
839 return;
841 check_for_inc_dec (insn_info->insn);
842 if (dump_file)
844 fprintf (dump_file, "Locally deleting insn %d ",
845 INSN_UID (insn_info->insn));
846 if (insn_info->store_rec->alias_set)
847 fprintf (dump_file, "alias set %d\n",
848 (int) insn_info->store_rec->alias_set);
849 else
850 fprintf (dump_file, "\n");
853 free_store_info (insn_info);
854 read_info = insn_info->read_rec;
856 while (read_info)
858 read_info_t next = read_info->next;
859 pool_free (read_info_pool, read_info);
860 read_info = next;
862 insn_info->read_rec = NULL;
864 delete_insn (insn_info->insn);
865 locally_deleted++;
866 insn_info->insn = NULL;
868 insn_info->wild_read = false;
872 /* Set the store* bitmaps offset_map_size* fields in GROUP based on
873 OFFSET and WIDTH. */
875 static void
876 set_usage_bits (group_info_t group, HOST_WIDE_INT offset, HOST_WIDE_INT width)
878 HOST_WIDE_INT i;
880 if ((offset > -MAX_OFFSET) && (offset < MAX_OFFSET))
881 for (i=offset; i<offset+width; i++)
883 bitmap store1;
884 bitmap store2;
885 int ai;
886 if (i < 0)
888 store1 = group->store1_n;
889 store2 = group->store2_n;
890 ai = -i;
892 else
894 store1 = group->store1_p;
895 store2 = group->store2_p;
896 ai = i;
899 if (bitmap_bit_p (store1, ai))
900 bitmap_set_bit (store2, ai);
901 else
903 bitmap_set_bit (store1, ai);
904 if (i < 0)
906 if (group->offset_map_size_n < ai)
907 group->offset_map_size_n = ai;
909 else
911 if (group->offset_map_size_p < ai)
912 group->offset_map_size_p = ai;
919 /* Set the BB_INFO so that the last insn is marked as a wild read. */
921 static void
922 add_wild_read (bb_info_t bb_info)
924 insn_info_t insn_info = bb_info->last_insn;
925 read_info_t *ptr = &insn_info->read_rec;
927 while (*ptr)
929 read_info_t next = (*ptr)->next;
930 if ((*ptr)->alias_set == 0)
932 pool_free (read_info_pool, *ptr);
933 *ptr = next;
935 else
936 ptr = &(*ptr)->next;
938 insn_info->wild_read = true;
939 active_local_stores = NULL;
943 /* Return true if X is a constant or one of the registers that behaves
944 as a constant over the life of a function. */
946 static bool
947 const_or_frame_p (rtx x)
949 switch (GET_CODE (x))
951 case MEM:
952 return MEM_READONLY_P (x);
954 case CONST:
955 case CONST_INT:
956 case CONST_DOUBLE:
957 case CONST_VECTOR:
958 case SYMBOL_REF:
959 case LABEL_REF:
960 return true;
962 case REG:
963 /* Note that we have to test for the actual rtx used for the frame
964 and arg pointers and not just the register number in case we have
965 eliminated the frame and/or arg pointer and are using it
966 for pseudos. */
967 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
968 /* The arg pointer varies if it is not a fixed register. */
969 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
970 || x == pic_offset_table_rtx)
971 return true;
972 return false;
974 default:
975 return false;
979 /* Take all reasonable action to put the address of MEM into the form
980 that we can do analysis on.
982 The gold standard is to get the address into the form: address +
983 OFFSET where address is something that rtx_varies_p considers a
984 constant. When we can get the address in this form, we can do
985 global analysis on it. Note that for constant bases, address is
986 not actually returned, only the group_id. The address can be
987 obtained from that.
989 If that fails, we try cselib to get a value we can at least use
990 locally. If that fails we return false.
992 The GROUP_ID is set to -1 for cselib bases and the index of the
993 group for non_varying bases.
995 FOR_READ is true if this is a mem read and false if not. */
997 static bool
998 canon_address (rtx mem,
999 alias_set_type *alias_set_out,
1000 int *group_id,
1001 HOST_WIDE_INT *offset,
1002 cselib_val **base)
1004 rtx mem_address = XEXP (mem, 0);
1005 rtx expanded_address, address;
1006 /* Make sure that cselib is has initialized all of the operands of
1007 the address before asking it to do the subst. */
1009 if (clear_alias_sets)
1011 /* If this is a spill, do not do any further processing. */
1012 alias_set_type alias_set = MEM_ALIAS_SET (mem);
1013 if (dump_file)
1014 fprintf (dump_file, "found alias set %d\n", (int) alias_set);
1015 if (bitmap_bit_p (clear_alias_sets, alias_set))
1017 struct clear_alias_mode_holder *entry
1018 = clear_alias_set_lookup (alias_set);
1020 /* If the modes do not match, we cannot process this set. */
1021 if (entry->mode != GET_MODE (mem))
1023 if (dump_file)
1024 fprintf (dump_file,
1025 "disqualifying alias set %d, (%s) != (%s)\n",
1026 (int) alias_set, GET_MODE_NAME (entry->mode),
1027 GET_MODE_NAME (GET_MODE (mem)));
1029 bitmap_set_bit (disqualified_clear_alias_sets, alias_set);
1030 return false;
1033 *alias_set_out = alias_set;
1034 *group_id = clear_alias_group->id;
1035 return true;
1039 *alias_set_out = 0;
1041 cselib_lookup (mem_address, Pmode, 1);
1043 if (dump_file)
1045 fprintf (dump_file, " mem: ");
1046 print_inline_rtx (dump_file, mem_address, 0);
1047 fprintf (dump_file, "\n");
1050 /* Use cselib to replace all of the reg references with the full
1051 expression. This will take care of the case where we have
1053 r_x = base + offset;
1054 val = *r_x;
1056 by making it into
1058 val = *(base + offset);
1061 expanded_address = cselib_expand_value_rtx (mem_address, scratch, 5);
1063 /* If this fails, just go with the mem_address. */
1064 if (!expanded_address)
1065 expanded_address = mem_address;
1067 /* Split the address into canonical BASE + OFFSET terms. */
1068 address = canon_rtx (expanded_address);
1070 *offset = 0;
1072 if (dump_file)
1074 fprintf (dump_file, "\n after cselib_expand address: ");
1075 print_inline_rtx (dump_file, expanded_address, 0);
1076 fprintf (dump_file, "\n");
1078 fprintf (dump_file, "\n after canon_rtx address: ");
1079 print_inline_rtx (dump_file, address, 0);
1080 fprintf (dump_file, "\n");
1083 if (GET_CODE (address) == CONST)
1084 address = XEXP (address, 0);
1086 if (GET_CODE (address) == PLUS && GET_CODE (XEXP (address, 1)) == CONST_INT)
1088 *offset = INTVAL (XEXP (address, 1));
1089 address = XEXP (address, 0);
1092 if (const_or_frame_p (address))
1094 group_info_t group = get_group_info (address);
1096 if (dump_file)
1097 fprintf (dump_file, " gid=%d offset=%d \n", group->id, (int)*offset);
1098 *base = NULL;
1099 *group_id = group->id;
1101 else
1103 *base = cselib_lookup (address, Pmode, true);
1104 *group_id = -1;
1106 if (*base == NULL)
1108 if (dump_file)
1109 fprintf (dump_file, " no cselib val - should be a wild read.\n");
1110 return false;
1112 if (dump_file)
1113 fprintf (dump_file, " varying cselib base=%d offset = %d\n",
1114 (*base)->value, (int)*offset);
1116 return true;
1120 /* Clear the rhs field from the active_local_stores array. */
1122 static void
1123 clear_rhs_from_active_local_stores (void)
1125 insn_info_t ptr = active_local_stores;
1127 while (ptr)
1129 store_info_t store_info = ptr->store_rec;
1130 /* Skip the clobbers. */
1131 while (!store_info->is_set)
1132 store_info = store_info->next;
1134 store_info->rhs = NULL;
1136 ptr = ptr->next_local_store;
1141 /* BODY is an instruction pattern that belongs to INSN. Return 1 if
1142 there is a candidate store, after adding it to the appropriate
1143 local store group if so. */
1145 static int
1146 record_store (rtx body, bb_info_t bb_info)
1148 rtx mem;
1149 HOST_WIDE_INT offset = 0;
1150 HOST_WIDE_INT width = 0;
1151 alias_set_type spill_alias_set;
1152 insn_info_t insn_info = bb_info->last_insn;
1153 store_info_t store_info = NULL;
1154 int group_id;
1155 cselib_val *base = NULL;
1156 insn_info_t ptr, last;
1157 bool store_is_unused;
1159 if (GET_CODE (body) != SET && GET_CODE (body) != CLOBBER)
1160 return 0;
1162 /* If this is not used, then this cannot be used to keep the insn
1163 from being deleted. On the other hand, it does provide something
1164 that can be used to prove that another store is dead. */
1165 store_is_unused
1166 = (find_reg_note (insn_info->insn, REG_UNUSED, body) != NULL);
1168 /* Check whether that value is a suitable memory location. */
1169 mem = SET_DEST (body);
1170 if (!MEM_P (mem))
1172 /* If the set or clobber is unused, then it does not effect our
1173 ability to get rid of the entire insn. */
1174 if (!store_is_unused)
1175 insn_info->cannot_delete = true;
1176 return 0;
1179 /* At this point we know mem is a mem. */
1180 if (GET_MODE (mem) == BLKmode)
1182 if (GET_CODE (XEXP (mem, 0)) == SCRATCH)
1184 if (dump_file)
1185 fprintf (dump_file, " adding wild read for (clobber (mem:BLK (scratch))\n");
1186 add_wild_read (bb_info);
1187 insn_info->cannot_delete = true;
1189 else if (!store_is_unused)
1191 /* If the set or clobber is unused, then it does not effect our
1192 ability to get rid of the entire insn. */
1193 insn_info->cannot_delete = true;
1194 clear_rhs_from_active_local_stores ();
1196 return 0;
1199 /* We can still process a volatile mem, we just cannot delete it. */
1200 if (MEM_VOLATILE_P (mem))
1201 insn_info->cannot_delete = true;
1203 if (!canon_address (mem, &spill_alias_set, &group_id, &offset, &base))
1205 clear_rhs_from_active_local_stores ();
1206 return 0;
1209 width = GET_MODE_SIZE (GET_MODE (mem));
1211 if (spill_alias_set)
1213 bitmap store1 = clear_alias_group->store1_p;
1214 bitmap store2 = clear_alias_group->store2_p;
1216 if (bitmap_bit_p (store1, spill_alias_set))
1217 bitmap_set_bit (store2, spill_alias_set);
1218 else
1219 bitmap_set_bit (store1, spill_alias_set);
1221 if (clear_alias_group->offset_map_size_p < spill_alias_set)
1222 clear_alias_group->offset_map_size_p = spill_alias_set;
1224 store_info = pool_alloc (rtx_store_info_pool);
1226 if (dump_file)
1227 fprintf (dump_file, " processing spill store %d(%s)\n",
1228 (int) spill_alias_set, GET_MODE_NAME (GET_MODE (mem)));
1230 else if (group_id >= 0)
1232 /* In the restrictive case where the base is a constant or the
1233 frame pointer we can do global analysis. */
1235 group_info_t group
1236 = VEC_index (group_info_t, rtx_group_vec, group_id);
1238 store_info = pool_alloc (rtx_store_info_pool);
1239 set_usage_bits (group, offset, width);
1241 if (dump_file)
1242 fprintf (dump_file, " processing const base store gid=%d[%d..%d)\n",
1243 group_id, (int)offset, (int)(offset+width));
1245 else
1247 store_info = pool_alloc (cse_store_info_pool);
1248 insn_info->contains_cselib_groups = true;
1249 group_id = -1;
1251 if (dump_file)
1252 fprintf (dump_file, " processing cselib store [%d..%d)\n",
1253 (int)offset, (int)(offset+width));
1256 /* Check to see if this stores causes some other stores to be
1257 dead. */
1258 ptr = active_local_stores;
1259 last = NULL;
1261 while (ptr)
1263 insn_info_t next = ptr->next_local_store;
1264 store_info_t s_info = ptr->store_rec;
1265 bool delete = true;
1267 /* Skip the clobbers. We delete the active insn if this insn
1268 shadows the set. To have been put on the active list, it
1269 has exactly on set. */
1270 while (!s_info->is_set)
1271 s_info = s_info->next;
1273 if (s_info->alias_set != spill_alias_set)
1274 delete = false;
1275 else if (s_info->alias_set)
1277 struct clear_alias_mode_holder *entry
1278 = clear_alias_set_lookup (s_info->alias_set);
1279 /* Generally, spills cannot be processed if and of the
1280 references to the slot have a different mode. But if
1281 we are in the same block and mode is exactly the same
1282 between this store and one before in the same block,
1283 we can still delete it. */
1284 if ((GET_MODE (mem) == GET_MODE (s_info->mem))
1285 && (GET_MODE (mem) == entry->mode))
1287 delete = true;
1288 s_info->positions_needed = 0;
1290 if (dump_file)
1291 fprintf (dump_file, " trying spill store in insn=%d alias_set=%d\n",
1292 INSN_UID (ptr->insn), (int) s_info->alias_set);
1294 else if ((s_info->group_id == group_id)
1295 && (s_info->cse_base == base))
1297 HOST_WIDE_INT i;
1298 if (dump_file)
1299 fprintf (dump_file, " trying store in insn=%d gid=%d[%d..%d)\n",
1300 INSN_UID (ptr->insn), s_info->group_id,
1301 (int)s_info->begin, (int)s_info->end);
1302 for (i = offset; i < offset+width; i++)
1303 if (i >= s_info->begin && i < s_info->end)
1304 s_info->positions_needed &= ~(1L << (i - s_info->begin));
1306 else if (s_info->rhs)
1307 /* Need to see if it is possible for this store to overwrite
1308 the value of store_info. If it is, set the rhs to NULL to
1309 keep it from being used to remove a load. */
1311 if (canon_true_dependence (s_info->mem,
1312 GET_MODE (s_info->mem),
1313 s_info->mem_addr,
1314 mem, rtx_varies_p))
1315 s_info->rhs = NULL;
1318 /* An insn can be deleted if every position of every one of
1319 its s_infos is zero. */
1320 if (s_info->positions_needed != 0)
1321 delete = false;
1323 if (delete)
1325 insn_info_t insn_to_delete = ptr;
1327 if (last)
1328 last->next_local_store = ptr->next_local_store;
1329 else
1330 active_local_stores = ptr->next_local_store;
1332 delete_dead_store_insn (insn_to_delete);
1334 else
1335 last = ptr;
1337 ptr = next;
1340 gcc_assert ((unsigned) width < sizeof (store_info->positions_needed) * CHAR_BIT);
1342 /* Finish filling in the store_info. */
1343 store_info->next = insn_info->store_rec;
1344 insn_info->store_rec = store_info;
1345 store_info->mem = canon_rtx (mem);
1346 store_info->alias_set = spill_alias_set;
1347 store_info->mem_addr = get_addr (XEXP (mem, 0));
1348 store_info->cse_base = base;
1349 store_info->positions_needed = (1L << width) - 1;
1350 store_info->group_id = group_id;
1351 store_info->begin = offset;
1352 store_info->end = offset + width;
1353 store_info->is_set = GET_CODE (body) == SET;
1355 if (store_info->is_set
1356 /* No place to keep the value after ra. */
1357 && !reload_completed
1358 /* The careful reviewer may wish to comment my checking that the
1359 rhs of a store is always a reg. */
1360 && REG_P (SET_SRC (body))
1361 /* Sometimes the store and reload is used for truncation and
1362 rounding. */
1363 && !(FLOAT_MODE_P (GET_MODE (mem)) && (flag_float_store)))
1364 store_info->rhs = SET_SRC (body);
1365 else
1366 store_info->rhs = NULL;
1368 /* If this is a clobber, we return 0. We will only be able to
1369 delete this insn if there is only one store USED store, but we
1370 can use the clobber to delete other stores earlier. */
1371 return store_info->is_set ? 1 : 0;
1375 static void
1376 dump_insn_info (const char * start, insn_info_t insn_info)
1378 fprintf (dump_file, "%s insn=%d %s\n", start,
1379 INSN_UID (insn_info->insn),
1380 insn_info->store_rec ? "has store" : "naked");
1384 /* Take a sequence of:
1385 A <- r1
1387 ... <- A
1389 and change it into
1390 r2 <- r1
1391 A <- r1
1393 ... <- r2
1395 The STORE_INFO and STORE_INFO are for the store and the READ_INFO
1396 and READ_INSN are for the read. Return true if the replacement
1397 went ok. */
1399 static bool
1400 replace_read (store_info_t store_info, insn_info_t store_insn,
1401 read_info_t read_info, insn_info_t read_insn, rtx *loc)
1403 if (!dbg_cnt (dse))
1404 return false;
1406 if (dump_file)
1407 fprintf (dump_file, "generating move to replace load at %d from store at %d\n",
1408 INSN_UID (read_insn->insn), INSN_UID (store_insn->insn));
1409 if (GET_MODE (store_info->mem) == GET_MODE (read_info->mem))
1411 rtx new_reg = gen_reg_rtx (GET_MODE (store_info->mem));
1412 if (validate_change (read_insn->insn, loc, new_reg, 0))
1414 rtx insns;
1415 deferred_change_t deferred_change = pool_alloc (deferred_change_pool);
1417 start_sequence ();
1418 emit_move_insn (new_reg, store_info->rhs);
1419 insns = get_insns ();
1420 end_sequence ();
1421 emit_insn_before (insns, store_insn->insn);
1423 if (dump_file)
1424 fprintf (dump_file, " -- adding move insn %d: r%d = r%d\n",
1425 INSN_UID (insns), REGNO (new_reg), REGNO (store_info->rhs));
1427 /* And now for the cludge part: cselib croaks if you just
1428 return at this point. There are two reasons for this:
1430 1) Cselib has an idea of how many pseudos there are and
1431 that does not include the new one we just added.
1433 2) Cselib does not know about the move insn we added
1434 above the store_info, and there is no way to tell it
1435 about it, because it has "moved on".
1437 So we are just going to have to lie. The move insn is
1438 not really an issue, cselib did not see it. But the use
1439 of the new pseudo read_insn is a real problem. The way
1440 that we solve this problem is that we are just going to
1441 put the mem back keep a table of mems to get rid of. At
1442 the end of the basic block we can put it back. */
1444 *loc = read_info->mem;
1445 deferred_change->next = deferred_change_list;
1446 deferred_change_list = deferred_change;
1447 deferred_change->loc = loc;
1448 deferred_change->reg = new_reg;
1450 /* Get rid of the read_info, from the point of view of the
1451 rest of dse, play like this read never happened. */
1452 read_insn->read_rec = read_info->next;
1453 pool_free (read_info_pool, read_info);
1454 return true;
1456 else
1458 if (dump_file)
1459 fprintf (dump_file, " -- validation failure\n");
1460 return false;
1463 else
1465 /* Someone with excellent rtl skills needs to fill this in. You
1466 are guaranteed that the read is of the same size or smaller
1467 than the store, and that the read does not hang off one of
1468 the ends of the store. But the offsets of each must be
1469 checked because the read does not have to line up on either
1470 end of the store so the begin fields need to be examined in
1471 both the store_info and read_info. */
1472 if (dump_file)
1473 fprintf (dump_file, " -- complex load, currently unsupported.\n");
1474 return false;
1479 /* A for_each_rtx callback in which DATA is the bb_info. Check to see
1480 if LOC is a mem and if it is look at the address and kill any
1481 appropriate stores that may be active. */
1483 static int
1484 check_mem_read_rtx (rtx *loc, void *data)
1486 rtx mem = *loc;
1487 bb_info_t bb_info;
1488 insn_info_t insn_info;
1489 HOST_WIDE_INT offset = 0;
1490 HOST_WIDE_INT width = 0;
1491 alias_set_type spill_alias_set = 0;
1492 cselib_val *base = NULL;
1493 int group_id;
1494 read_info_t read_info;
1496 if (!mem || !MEM_P (mem))
1497 return 0;
1499 bb_info = (bb_info_t) data;
1500 insn_info = bb_info->last_insn;
1502 if ((MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
1503 || (MEM_VOLATILE_P (mem)))
1505 if (dump_file)
1506 fprintf (dump_file, " adding wild read, volatile or barrier.\n");
1507 add_wild_read (bb_info);
1508 insn_info->cannot_delete = true;
1509 return 0;
1512 /* If it is reading readonly mem, then there can be no conflict with
1513 another write. */
1514 if (MEM_READONLY_P (mem))
1515 return 0;
1517 if (!canon_address (mem, &spill_alias_set, &group_id, &offset, &base))
1519 if (dump_file)
1520 fprintf (dump_file, " adding wild read, canon_address failure.\n");
1521 add_wild_read (bb_info);
1522 return 0;
1525 if (GET_MODE (mem) == BLKmode)
1526 width = -1;
1527 else
1528 width = GET_MODE_SIZE (GET_MODE (mem));
1530 read_info = pool_alloc (read_info_pool);
1531 read_info->group_id = group_id;
1532 read_info->mem = mem;
1533 read_info->alias_set = spill_alias_set;
1534 read_info->begin = offset;
1535 read_info->end = offset + width;
1536 read_info->next = insn_info->read_rec;
1537 insn_info->read_rec = read_info;
1539 /* We ignore the clobbers in store_info. The is mildly aggressive,
1540 but there really should not be a clobber followed by a read. */
1542 if (spill_alias_set)
1544 insn_info_t i_ptr = active_local_stores;
1545 insn_info_t last = NULL;
1547 if (dump_file)
1548 fprintf (dump_file, " processing spill load %d\n",
1549 (int) spill_alias_set);
1551 while (i_ptr)
1553 store_info_t store_info = i_ptr->store_rec;
1555 /* Skip the clobbers. */
1556 while (!store_info->is_set)
1557 store_info = store_info->next;
1559 if (store_info->alias_set == spill_alias_set)
1561 if (dump_file)
1562 dump_insn_info ("removing from active", i_ptr);
1564 if (last)
1565 last->next_local_store = i_ptr->next_local_store;
1566 else
1567 active_local_stores = i_ptr->next_local_store;
1569 else
1570 last = i_ptr;
1571 i_ptr = i_ptr->next_local_store;
1574 else if (group_id >= 0)
1576 /* This is the restricted case where the base is a constant or
1577 the frame pointer and offset is a constant. */
1578 insn_info_t i_ptr = active_local_stores;
1579 insn_info_t last = NULL;
1581 if (dump_file)
1583 if (width == -1)
1584 fprintf (dump_file, " processing const load gid=%d[BLK]\n",
1585 group_id);
1586 else
1587 fprintf (dump_file, " processing const load gid=%d[%d..%d)\n",
1588 group_id, (int)offset, (int)(offset+width));
1591 while (i_ptr)
1593 bool remove = false;
1594 store_info_t store_info = i_ptr->store_rec;
1596 /* Skip the clobbers. */
1597 while (!store_info->is_set)
1598 store_info = store_info->next;
1600 /* There are three cases here. */
1601 if (store_info->group_id < 0)
1602 /* We have a cselib store followed by a read from a
1603 const base. */
1604 remove
1605 = canon_true_dependence (store_info->mem,
1606 GET_MODE (store_info->mem),
1607 store_info->mem_addr,
1608 mem, rtx_varies_p);
1610 else if (group_id == store_info->group_id)
1612 /* This is a block mode load. We may get lucky and
1613 canon_true_dependence may save the day. */
1614 if (width == -1)
1615 remove
1616 = canon_true_dependence (store_info->mem,
1617 GET_MODE (store_info->mem),
1618 store_info->mem_addr,
1619 mem, rtx_varies_p);
1621 /* If this read is just reading back something that we just
1622 stored, rewrite the read. */
1623 else
1625 if (store_info->rhs
1626 && (offset >= store_info->begin)
1627 && (offset + width <= store_info->end))
1629 int mask = ((1L << width) - 1) << (offset - store_info->begin);
1631 if ((store_info->positions_needed & mask) == mask
1632 && replace_read (store_info, i_ptr,
1633 read_info, insn_info, loc))
1634 return 0;
1636 /* The bases are the same, just see if the offsets
1637 overlap. */
1638 if ((offset < store_info->end)
1639 && (offset + width > store_info->begin))
1640 remove = true;
1644 /* else
1645 The else case that is missing here is that the
1646 bases are constant but different. There is nothing
1647 to do here because there is no overlap. */
1649 if (remove)
1651 if (dump_file)
1652 dump_insn_info ("removing from active", i_ptr);
1654 if (last)
1655 last->next_local_store = i_ptr->next_local_store;
1656 else
1657 active_local_stores = i_ptr->next_local_store;
1659 else
1660 last = i_ptr;
1661 i_ptr = i_ptr->next_local_store;
1664 else
1666 insn_info_t i_ptr = active_local_stores;
1667 insn_info_t last = NULL;
1668 if (dump_file)
1670 fprintf (dump_file, " processing cselib load mem:");
1671 print_inline_rtx (dump_file, mem, 0);
1672 fprintf (dump_file, "\n");
1675 while (i_ptr)
1677 bool remove = false;
1678 store_info_t store_info = i_ptr->store_rec;
1680 if (dump_file)
1681 fprintf (dump_file, " processing cselib load against insn %d\n",
1682 INSN_UID (i_ptr->insn));
1684 /* Skip the clobbers. */
1685 while (!store_info->is_set)
1686 store_info = store_info->next;
1688 /* If this read is just reading back something that we just
1689 stored, rewrite the read. */
1690 if (store_info->rhs
1691 && store_info->group_id == -1
1692 && store_info->cse_base == base
1693 && (offset >= store_info->begin)
1694 && (offset + width <= store_info->end))
1696 int mask = ((1L << width) - 1) << (offset - store_info->begin);
1698 if ((store_info->positions_needed & mask) == mask
1699 && replace_read (store_info, i_ptr,
1700 read_info, insn_info, loc))
1701 return 0;
1704 if (!store_info->alias_set)
1705 remove = canon_true_dependence (store_info->mem,
1706 GET_MODE (store_info->mem),
1707 store_info->mem_addr,
1708 mem, rtx_varies_p);
1710 if (remove)
1712 if (dump_file)
1713 dump_insn_info ("removing from active", i_ptr);
1715 if (last)
1716 last->next_local_store = i_ptr->next_local_store;
1717 else
1718 active_local_stores = i_ptr->next_local_store;
1720 else
1721 last = i_ptr;
1722 i_ptr = i_ptr->next_local_store;
1725 return 0;
1728 /* A for_each_rtx callback in which DATA points the INSN_INFO for
1729 as check_mem_read_rtx. Nullify the pointer if i_m_r_m_r returns
1730 true for any part of *LOC. */
1732 static void
1733 check_mem_read_use (rtx *loc, void *data)
1735 for_each_rtx (loc, check_mem_read_rtx, data);
1738 /* Apply record_store to all candidate stores in INSN. Mark INSN
1739 if some part of it is not a candidate store and assigns to a
1740 non-register target. */
1742 static void
1743 scan_insn (bb_info_t bb_info, rtx insn)
1745 rtx body;
1746 insn_info_t insn_info = pool_alloc (insn_info_pool);
1747 int mems_found = 0;
1748 memset (insn_info, 0, sizeof (struct insn_info));
1750 if (dump_file)
1751 fprintf (dump_file, "\n**scanning insn=%d\n",
1752 INSN_UID (insn));
1754 insn_info->prev_insn = bb_info->last_insn;
1755 insn_info->insn = insn;
1756 bb_info->last_insn = insn_info;
1759 /* Cselib clears the table for this case, so we have to essentially
1760 do the same. */
1761 if (NONJUMP_INSN_P (insn)
1762 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
1763 && MEM_VOLATILE_P (PATTERN (insn)))
1765 add_wild_read (bb_info);
1766 insn_info->cannot_delete = true;
1767 return;
1770 /* Look at all of the uses in the insn. */
1771 note_uses (&PATTERN (insn), check_mem_read_use, bb_info);
1773 if (CALL_P (insn))
1775 insn_info->cannot_delete = true;
1776 /* Const functions cannot do anything bad i.e. read memory,
1777 however, they can read their parameters which may have been
1778 pushed onto the stack. */
1779 if (CONST_OR_PURE_CALL_P (insn) && !pure_call_p (insn))
1781 insn_info_t i_ptr = active_local_stores;
1782 insn_info_t last = NULL;
1784 if (dump_file)
1785 fprintf (dump_file, "const call %d\n", INSN_UID (insn));
1787 while (i_ptr)
1789 store_info_t store_info = i_ptr->store_rec;
1791 /* Skip the clobbers. */
1792 while (!store_info->is_set)
1793 store_info = store_info->next;
1795 /* Remove the frame related stores. */
1796 if (store_info->group_id >= 0
1797 && VEC_index (group_info_t, rtx_group_vec, store_info->group_id)->frame_related)
1799 if (dump_file)
1800 dump_insn_info ("removing from active", i_ptr);
1802 if (last)
1803 last->next_local_store = i_ptr->next_local_store;
1804 else
1805 active_local_stores = i_ptr->next_local_store;
1807 else
1808 last = i_ptr;
1809 i_ptr = i_ptr->next_local_store;
1812 insn_info->stack_read = true;
1814 return;
1817 /* Every other call, including pure functions may read memory. */
1818 add_wild_read (bb_info);
1819 return;
1822 /* Assuming that there are sets in these insns, we cannot delete
1823 them. */
1824 if ((GET_CODE (PATTERN (insn)) == CLOBBER)
1825 || volatile_insn_p (PATTERN (insn))
1826 || (flag_non_call_exceptions && may_trap_p (PATTERN (insn)))
1827 || (RTX_FRAME_RELATED_P (insn))
1828 || find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX))
1829 insn_info->cannot_delete = true;
1831 body = PATTERN (insn);
1832 if (GET_CODE (body) == PARALLEL)
1834 int i;
1835 for (i = 0; i < XVECLEN (body, 0); i++)
1836 mems_found += record_store (XVECEXP (body, 0, i), bb_info);
1838 else
1839 mems_found += record_store (body, bb_info);
1841 if (dump_file)
1842 fprintf (dump_file, "mems_found = %d, cannot_delete = %s\n",
1843 mems_found, insn_info->cannot_delete ? "true" : "false");
1845 /* If we found some sets of mems, and the insn has not been marked
1846 cannot delete, add it into the active_local_stores so that it can
1847 be locally deleted if found dead. Otherwise mark it as cannot
1848 delete. This simplifies the processing later. */
1849 if (mems_found == 1 && !insn_info->cannot_delete)
1851 insn_info->next_local_store = active_local_stores;
1852 active_local_stores = insn_info;
1854 else
1855 insn_info->cannot_delete = true;
1859 /* Remove BASE from the set of active_local_stores. This is a
1860 callback from cselib that is used to get rid of the stores in
1861 active_local_stores. */
1863 static void
1864 remove_useless_values (cselib_val *base)
1866 insn_info_t insn_info = active_local_stores;
1867 insn_info_t last = NULL;
1869 while (insn_info)
1871 store_info_t store_info = insn_info->store_rec;
1872 bool delete = false;
1874 /* If ANY of the store_infos match the cselib group that is
1875 being deleted, then the insn can not be deleted. */
1876 while (store_info)
1878 if ((store_info->group_id == -1)
1879 && (store_info->cse_base == base))
1881 delete = true;
1882 break;
1884 store_info = store_info->next;
1887 if (delete)
1889 if (last)
1890 last->next_local_store = insn_info->next_local_store;
1891 else
1892 active_local_stores = insn_info->next_local_store;
1893 free_store_info (insn_info);
1895 else
1896 last = insn_info;
1898 insn_info = insn_info->next_local_store;
1903 /* Do all of step 1. */
1905 static void
1906 dse_step1 (void)
1908 basic_block bb;
1910 cselib_init (false);
1911 all_blocks = BITMAP_ALLOC (NULL);
1912 bitmap_set_bit (all_blocks, ENTRY_BLOCK);
1913 bitmap_set_bit (all_blocks, EXIT_BLOCK);
1915 FOR_ALL_BB (bb)
1917 insn_info_t ptr;
1918 bb_info_t bb_info = pool_alloc (bb_info_pool);
1920 memset (bb_info, 0, sizeof (struct bb_info));
1921 bitmap_set_bit (all_blocks, bb->index);
1923 bb_table[bb->index] = bb_info;
1924 cselib_discard_hook = remove_useless_values;
1926 if (bb->index >= NUM_FIXED_BLOCKS)
1928 rtx insn;
1930 cse_store_info_pool
1931 = create_alloc_pool ("cse_store_info_pool",
1932 sizeof (struct store_info), 100);
1933 active_local_stores = NULL;
1934 cselib_clear_table ();
1936 /* Scan the insns. */
1937 FOR_BB_INSNS (bb, insn)
1939 if (INSN_P (insn))
1940 scan_insn (bb_info, insn);
1941 cselib_process_insn (insn);
1944 /* This is something of a hack, because the global algorithm
1945 is supposed to take care of the case where stores go dead
1946 at the end of the function. However, the global
1947 algorithm must take a more conservative view of block
1948 mode reads than the local alg does. So to get the case
1949 where you have a store to the frame followed by a non
1950 overlapping block more read, we look at the active local
1951 stores at the end of the function and delete all of the
1952 frame and spill based ones. */
1953 if (stores_off_frame_dead_at_return
1954 && (EDGE_COUNT (bb->succs) == 0
1955 || (single_succ_p (bb)
1956 && single_succ (bb) == EXIT_BLOCK_PTR
1957 && ! current_function_calls_eh_return)))
1959 insn_info_t i_ptr = active_local_stores;
1960 while (i_ptr)
1962 store_info_t store_info = i_ptr->store_rec;
1964 /* Skip the clobbers. */
1965 while (!store_info->is_set)
1966 store_info = store_info->next;
1967 if (store_info->alias_set)
1968 delete_dead_store_insn (i_ptr);
1969 else
1970 if (store_info->group_id >= 0)
1972 group_info_t group
1973 = VEC_index (group_info_t, rtx_group_vec, store_info->group_id);
1974 if (group->frame_related)
1975 delete_dead_store_insn (i_ptr);
1978 i_ptr = i_ptr->next_local_store;
1982 /* Get rid of the loads that were discovered in
1983 replace_read. Cselib is finished with this block. */
1984 while (deferred_change_list)
1986 deferred_change_t next = deferred_change_list->next;
1988 /* There is no reason to validate this change. That was
1989 done earlier. */
1990 *deferred_change_list->loc = deferred_change_list->reg;
1991 pool_free (deferred_change_pool, deferred_change_list);
1992 deferred_change_list = next;
1995 /* Get rid of all of the cselib based store_infos in this
1996 block and mark the containing insns as not being
1997 deletable. */
1998 ptr = bb_info->last_insn;
1999 while (ptr)
2001 if (ptr->contains_cselib_groups)
2002 free_store_info (ptr);
2003 ptr = ptr->prev_insn;
2006 free_alloc_pool (cse_store_info_pool);
2010 cselib_finish ();
2011 htab_empty (rtx_group_table);
2015 /*----------------------------------------------------------------------------
2016 Second step.
2018 Assign each byte position in the stores that we are going to
2019 analyze globally to a position in the bitmaps. Returns true if
2020 there are any bit positions assigned.
2021 ----------------------------------------------------------------------------*/
2023 static void
2024 dse_step2_init (void)
2026 unsigned int i;
2027 group_info_t group;
2029 for (i = 0; VEC_iterate (group_info_t, rtx_group_vec, i, group); i++)
2031 /* For all non stack related bases, we only consider a store to
2032 be deletable if there are two or more stores for that
2033 position. This is because it takes one store to make the
2034 other store redundant. However, for the stores that are
2035 stack related, we consider them if there is only one store
2036 for the position. We do this because the stack related
2037 stores can be deleted if their is no read between them and
2038 the end of the function.
2040 To make this work in the current framework, we take the stack
2041 related bases add all of the bits from store1 into store2.
2042 This has the effect of making the eligible even if there is
2043 only one store. */
2045 if (stores_off_frame_dead_at_return && group->frame_related)
2047 bitmap_ior_into (group->store2_n, group->store1_n);
2048 bitmap_ior_into (group->store2_p, group->store1_p);
2049 if (dump_file)
2050 fprintf (dump_file, "group %d is frame related ", i);
2053 group->offset_map_size_n++;
2054 group->offset_map_n = XNEWVEC (int, group->offset_map_size_n);
2055 group->offset_map_size_p++;
2056 group->offset_map_p = XNEWVEC (int, group->offset_map_size_p);
2057 group->process_globally = false;
2058 if (dump_file)
2060 fprintf (dump_file, "group %d(%d+%d): ", i,
2061 (int)bitmap_count_bits (group->store2_n),
2062 (int)bitmap_count_bits (group->store2_p));
2063 bitmap_print (dump_file, group->store2_n, "n ", " ");
2064 bitmap_print (dump_file, group->store2_p, "p ", "\n");
2070 /* Init the offset tables for the normal case. */
2072 static bool
2073 dse_step2_nospill (void)
2075 unsigned int i;
2076 group_info_t group;
2077 /* Position 0 is unused because 0 is used in the maps to mean
2078 unused. */
2079 current_position = 1;
2081 for (i = 0; VEC_iterate (group_info_t, rtx_group_vec, i, group); i++)
2083 bitmap_iterator bi;
2084 unsigned int j;
2086 if (group == clear_alias_group)
2087 continue;
2089 memset (group->offset_map_n, 0, sizeof(int) * group->offset_map_size_n);
2090 memset (group->offset_map_p, 0, sizeof(int) * group->offset_map_size_p);
2091 bitmap_clear (group->group_kill);
2093 EXECUTE_IF_SET_IN_BITMAP (group->store2_n, 0, j, bi)
2095 bitmap_set_bit (group->group_kill, current_position);
2096 group->offset_map_n[j] = current_position++;
2097 group->process_globally = true;
2099 EXECUTE_IF_SET_IN_BITMAP (group->store2_p, 0, j, bi)
2101 bitmap_set_bit (group->group_kill, current_position);
2102 group->offset_map_p[j] = current_position++;
2103 group->process_globally = true;
2106 return current_position != 1;
2110 /* Init the offset tables for the spill case. */
2112 static bool
2113 dse_step2_spill (void)
2115 unsigned int j;
2116 group_info_t group = clear_alias_group;
2117 bitmap_iterator bi;
2119 /* Position 0 is unused because 0 is used in the maps to mean
2120 unused. */
2121 current_position = 1;
2123 if (dump_file)
2125 bitmap_print (dump_file, clear_alias_sets,
2126 "clear alias sets ", "\n");
2127 bitmap_print (dump_file, disqualified_clear_alias_sets,
2128 "disqualified clear alias sets ", "\n");
2131 memset (group->offset_map_n, 0, sizeof(int) * group->offset_map_size_n);
2132 memset (group->offset_map_p, 0, sizeof(int) * group->offset_map_size_p);
2133 bitmap_clear (group->group_kill);
2135 /* Remove the disqualified positions from the store2_p set. */
2136 bitmap_and_compl_into (group->store2_p, disqualified_clear_alias_sets);
2138 /* We do not need to process the store2_n set because
2139 alias_sets are always positive. */
2140 EXECUTE_IF_SET_IN_BITMAP (group->store2_p, 0, j, bi)
2142 bitmap_set_bit (group->group_kill, current_position);
2143 group->offset_map_p[j] = current_position++;
2144 group->process_globally = true;
2147 return current_position != 1;
2152 /*----------------------------------------------------------------------------
2153 Third step.
2155 Build the bit vectors for the transfer functions.
2156 ----------------------------------------------------------------------------*/
2159 /* Note that this is NOT a general purpose function. Any mem that has
2160 an alias set registered here expected to be COMPLETELY unaliased:
2161 i.e it's addresses are not and need not be examined.
2163 It is known that all references to this address will have this
2164 alias set and there are NO other references to this address in the
2165 function.
2167 Currently the only place that is known to be clean enough to use
2168 this interface is the code that assigns the spill locations.
2170 All of the mems that have alias_sets registered are subjected to a
2171 very powerful form of dse where function calls, volatile reads and
2172 writes, and reads from random location are not taken into account.
2174 It is also assumed that these locations go dead when the function
2175 returns. This assumption could be relaxed if there were found to
2176 be places that this assumption was not correct.
2178 The MODE is passed in and saved. The mode of each load or store to
2179 a mem with ALIAS_SET is checked against MEM. If the size of that
2180 load or store is different from MODE, processing is halted on this
2181 alias set. For the vast majority of aliases sets, all of the loads
2182 and stores will use the same mode. But vectors are treated
2183 differently: the alias set is established for the entire vector,
2184 but reload will insert loads and stores for individual elements and
2185 we do not necessarily have the information to track those separate
2186 elements. So when we see a mode mismatch, we just bail. */
2189 void
2190 dse_record_singleton_alias_set (alias_set_type alias_set,
2191 enum machine_mode mode)
2193 struct clear_alias_mode_holder tmp_holder;
2194 struct clear_alias_mode_holder *entry;
2195 void **slot;
2197 /* If we are not going to run dse, we need to return now or there
2198 will be problems with allocating the bitmaps. */
2199 if ((!gate_dse()) || !alias_set)
2200 return;
2202 if (!clear_alias_sets)
2204 clear_alias_sets = BITMAP_ALLOC (NULL);
2205 disqualified_clear_alias_sets = BITMAP_ALLOC (NULL);
2206 clear_alias_mode_table = htab_create (11, clear_alias_mode_hash,
2207 clear_alias_mode_eq, NULL);
2208 clear_alias_mode_pool = create_alloc_pool ("clear_alias_mode_pool",
2209 sizeof (struct clear_alias_mode_holder), 100);
2212 bitmap_set_bit (clear_alias_sets, alias_set);
2214 tmp_holder.alias_set = alias_set;
2216 slot = htab_find_slot (clear_alias_mode_table, &tmp_holder, INSERT);
2217 gcc_assert (*slot == NULL);
2219 *slot = entry = pool_alloc (clear_alias_mode_pool);
2220 entry->alias_set = alias_set;
2221 entry->mode = mode;
2225 /* Remove ALIAS_SET from the sets of stack slots being considered. */
2227 void
2228 dse_invalidate_singleton_alias_set (alias_set_type alias_set)
2230 if ((!gate_dse()) || !alias_set)
2231 return;
2233 bitmap_clear_bit (clear_alias_sets, alias_set);
2237 /* Look up the bitmap index for OFFSET in GROUP_INFO. If it is not
2238 there, return 0. */
2240 static int
2241 get_bitmap_index (group_info_t group_info, HOST_WIDE_INT offset)
2243 if (offset < 0)
2245 HOST_WIDE_INT offset_p = -offset;
2246 if (offset_p >= group_info->offset_map_size_n)
2247 return 0;
2248 return group_info->offset_map_n[offset_p];
2250 else
2252 if (offset >= group_info->offset_map_size_p)
2253 return 0;
2254 return group_info->offset_map_p[offset];
2259 /* Process the STORE_INFOs into the bitmaps into GEN and KILL. KILL
2260 may be NULL. */
2262 static void
2263 scan_stores_nospill (store_info_t store_info, bitmap gen, bitmap kill)
2265 while (store_info)
2267 HOST_WIDE_INT i;
2268 group_info_t group_info
2269 = VEC_index (group_info_t, rtx_group_vec, store_info->group_id);
2270 if (group_info->process_globally)
2271 for (i = store_info->begin; i < store_info->end; i++)
2273 int index = get_bitmap_index (group_info, i);
2274 if (index != 0)
2276 bitmap_set_bit (gen, index);
2277 if (kill)
2278 bitmap_clear_bit (kill, index);
2281 store_info = store_info->next;
2286 /* Process the STORE_INFOs into the bitmaps into GEN and KILL. KILL
2287 may be NULL. */
2289 static void
2290 scan_stores_spill (store_info_t store_info, bitmap gen, bitmap kill)
2292 while (store_info)
2294 if (store_info->alias_set)
2296 int index = get_bitmap_index (clear_alias_group,
2297 store_info->alias_set);
2298 if (index != 0)
2300 bitmap_set_bit (gen, index);
2301 if (kill)
2302 bitmap_clear_bit (kill, index);
2305 store_info = store_info->next;
2310 /* Process the READ_INFOs into the bitmaps into GEN and KILL. KILL
2311 may be NULL. */
2313 static void
2314 scan_reads_nospill (insn_info_t insn_info, bitmap gen, bitmap kill)
2316 read_info_t read_info = insn_info->read_rec;
2317 int i;
2318 group_info_t group;
2320 /* For const function calls kill the stack related stores. */
2321 if (insn_info->stack_read)
2323 for (i = 0; VEC_iterate (group_info_t, rtx_group_vec, i, group); i++)
2324 if (group->process_globally && group->frame_related)
2326 if (kill)
2327 bitmap_ior_into (kill, group->group_kill);
2328 bitmap_and_compl_into (gen, group->group_kill);
2332 while (read_info)
2334 for (i = 0; VEC_iterate (group_info_t, rtx_group_vec, i, group); i++)
2336 if (group->process_globally)
2338 if (i == read_info->group_id)
2340 if (read_info->begin > read_info->end)
2342 /* Begin > end for block mode reads. */
2343 if (kill)
2344 bitmap_ior_into (kill, group->group_kill);
2345 bitmap_and_compl_into (gen, group->group_kill);
2347 else
2349 /* The groups are the same, just process the
2350 offsets. */
2351 HOST_WIDE_INT j;
2352 for (j = read_info->begin; j < read_info->end; j++)
2354 int index = get_bitmap_index (group, j);
2355 if (index != 0)
2357 if (kill)
2358 bitmap_set_bit (kill, index);
2359 bitmap_clear_bit (gen, index);
2364 else
2366 /* The groups are different, if the alias sets
2367 conflict, clear the entire group. We only need
2368 to apply this test if the read_info is a cselib
2369 read. Anything with a constant base cannot alias
2370 something else with a different constant
2371 base. */
2372 if ((read_info->group_id < 0)
2373 && canon_true_dependence (group->base_mem,
2374 QImode,
2375 group->canon_base_mem,
2376 read_info->mem, rtx_varies_p))
2378 if (kill)
2379 bitmap_ior_into (kill, group->group_kill);
2380 bitmap_and_compl_into (gen, group->group_kill);
2386 read_info = read_info->next;
2390 /* Process the READ_INFOs into the bitmaps into GEN and KILL. KILL
2391 may be NULL. */
2393 static void
2394 scan_reads_spill (read_info_t read_info, bitmap gen, bitmap kill)
2396 while (read_info)
2398 if (read_info->alias_set)
2400 int index = get_bitmap_index (clear_alias_group,
2401 read_info->alias_set);
2402 if (index != 0)
2404 if (kill)
2405 bitmap_set_bit (kill, index);
2406 bitmap_clear_bit (gen, index);
2410 read_info = read_info->next;
2415 /* Return the insn in BB_INFO before the first wild read or if there
2416 are no wild reads in the block, return the last insn. */
2418 static insn_info_t
2419 find_insn_before_first_wild_read (bb_info_t bb_info)
2421 insn_info_t insn_info = bb_info->last_insn;
2422 insn_info_t last_wild_read = NULL;
2424 while (insn_info)
2426 if (insn_info->wild_read)
2428 last_wild_read = insn_info->prev_insn;
2429 /* Block starts with wild read. */
2430 if (!last_wild_read)
2431 return NULL;
2434 insn_info = insn_info->prev_insn;
2437 if (last_wild_read)
2438 return last_wild_read;
2439 else
2440 return bb_info->last_insn;
2444 /* Scan the insns in BB_INFO starting at PTR and going to the top of
2445 the block in order to build the gen and kill sets for the block.
2446 We start at ptr which may be the last insn in the block or may be
2447 the first insn with a wild read. In the latter case we are able to
2448 skip the rest of the block because it just does not matter:
2449 anything that happens is hidden by the wild read. */
2451 static void
2452 dse_step3_scan (bool for_spills, basic_block bb)
2454 bb_info_t bb_info = bb_table[bb->index];
2455 insn_info_t insn_info;
2457 if (for_spills)
2458 /* There are no wild reads in the spill case. */
2459 insn_info = bb_info->last_insn;
2460 else
2461 insn_info = find_insn_before_first_wild_read (bb_info);
2463 /* In the spill case or in the no_spill case if there is no wild
2464 read in the block, we will need a kill set. */
2465 if (insn_info == bb_info->last_insn)
2467 if (bb_info->kill)
2468 bitmap_clear (bb_info->kill);
2469 else
2470 bb_info->kill = BITMAP_ALLOC (NULL);
2472 else
2473 if (bb_info->kill)
2474 BITMAP_FREE (bb_info->kill);
2476 while (insn_info)
2478 /* There may have been code deleted by the dce pass run before
2479 this phase. */
2480 if (insn_info->insn && INSN_P (insn_info->insn))
2482 /* Process the read(s) last. */
2483 if (for_spills)
2485 scan_stores_spill (insn_info->store_rec, bb_info->gen, bb_info->kill);
2486 scan_reads_spill (insn_info->read_rec, bb_info->gen, bb_info->kill);
2488 else
2490 scan_stores_nospill (insn_info->store_rec, bb_info->gen, bb_info->kill);
2491 scan_reads_nospill (insn_info, bb_info->gen, bb_info->kill);
2495 insn_info = insn_info->prev_insn;
2500 /* Set the gen set of the exit block, and also any block with no
2501 successors that does not have a wild read. */
2503 static void
2504 dse_step3_exit_block_scan (bb_info_t bb_info)
2506 /* The gen set is all 0's for the exit block except for the
2507 frame_pointer_group. */
2509 if (stores_off_frame_dead_at_return)
2511 unsigned int i;
2512 group_info_t group;
2514 for (i = 0; VEC_iterate (group_info_t, rtx_group_vec, i, group); i++)
2516 if (group->process_globally && group->frame_related)
2517 bitmap_ior_into (bb_info->gen, group->group_kill);
2523 /* Find all of the blocks that are not backwards reachable from the
2524 exit block or any block with no successors (BB). These are the
2525 infinite loops or infinite self loops. These blocks will still
2526 have their bits set in UNREACHABLE_BLOCKS. */
2528 static void
2529 mark_reachable_blocks (sbitmap unreachable_blocks, basic_block bb)
2531 edge e;
2532 edge_iterator ei;
2534 if (TEST_BIT (unreachable_blocks, bb->index))
2536 RESET_BIT (unreachable_blocks, bb->index);
2537 FOR_EACH_EDGE (e, ei, bb->preds)
2539 mark_reachable_blocks (unreachable_blocks, e->src);
2544 /* Build the transfer functions for the function. */
2546 static void
2547 dse_step3 (bool for_spills)
2549 basic_block bb;
2550 sbitmap unreachable_blocks = sbitmap_alloc (last_basic_block);
2551 sbitmap_iterator sbi;
2552 bitmap all_ones = NULL;
2553 unsigned int i;
2555 sbitmap_ones (unreachable_blocks);
2557 FOR_ALL_BB (bb)
2559 bb_info_t bb_info = bb_table[bb->index];
2560 if (bb_info->gen)
2561 bitmap_clear (bb_info->gen);
2562 else
2563 bb_info->gen = BITMAP_ALLOC (NULL);
2565 if (bb->index == ENTRY_BLOCK)
2567 else if (bb->index == EXIT_BLOCK)
2568 dse_step3_exit_block_scan (bb_info);
2569 else
2570 dse_step3_scan (for_spills, bb);
2571 if (EDGE_COUNT (bb->succs) == 0)
2572 mark_reachable_blocks (unreachable_blocks, bb);
2574 /* If this is the second time dataflow is run, delete the old
2575 sets. */
2576 if (bb_info->in)
2577 BITMAP_FREE (bb_info->in);
2578 if (bb_info->out)
2579 BITMAP_FREE (bb_info->out);
2582 /* For any block in an infinite loop, we must initialize the out set
2583 to all ones. This could be expensive, but almost never occurs in
2584 practice. However, it is common in regression tests. */
2585 EXECUTE_IF_SET_IN_SBITMAP (unreachable_blocks, 0, i, sbi)
2587 if (bitmap_bit_p (all_blocks, i))
2589 bb_info_t bb_info = bb_table[i];
2590 if (!all_ones)
2592 unsigned int j;
2593 group_info_t group;
2595 all_ones = BITMAP_ALLOC (NULL);
2596 for (j = 0; VEC_iterate (group_info_t, rtx_group_vec, j, group); j++)
2597 bitmap_ior_into (all_ones, group->group_kill);
2599 if (!bb_info->out)
2601 bb_info->out = BITMAP_ALLOC (NULL);
2602 bitmap_copy (bb_info->out, all_ones);
2607 if (all_ones)
2608 BITMAP_FREE (all_ones);
2609 sbitmap_free (unreachable_blocks);
2614 /*----------------------------------------------------------------------------
2615 Fourth step.
2617 Solve the bitvector equations.
2618 ----------------------------------------------------------------------------*/
2621 /* Confluence function for blocks with no successors. Create an out
2622 set from the gen set of the exit block. This block logically has
2623 the exit block as a successor. */
2627 static void
2628 dse_confluence_0 (basic_block bb)
2630 bb_info_t bb_info = bb_table[bb->index];
2632 if (bb->index == EXIT_BLOCK)
2633 return;
2635 if (!bb_info->out)
2637 bb_info->out = BITMAP_ALLOC (NULL);
2638 bitmap_copy (bb_info->out, bb_table[EXIT_BLOCK]->gen);
2642 /* Propagate the information from the in set of the dest of E to the
2643 out set of the src of E. If the various in or out sets are not
2644 there, that means they are all ones. */
2646 static void
2647 dse_confluence_n (edge e)
2649 bb_info_t src_info = bb_table[e->src->index];
2650 bb_info_t dest_info = bb_table[e->dest->index];
2652 if (dest_info->in)
2654 if (src_info->out)
2655 bitmap_and_into (src_info->out, dest_info->in);
2656 else
2658 src_info->out = BITMAP_ALLOC (NULL);
2659 bitmap_copy (src_info->out, dest_info->in);
2665 /* Propagate the info from the out to the in set of BB_INDEX's basic
2666 block. There are three cases:
2668 1) The block has no kill set. In this case the kill set is all
2669 ones. It does not matter what the out set of the block is, none of
2670 the info can reach the top. The only thing that reaches the top is
2671 the gen set and we just copy the set.
2673 2) There is a kill set but no out set and bb has successors. In
2674 this case we just return. Eventually an out set will be created and
2675 it is better to wait than to create a set of ones.
2677 3) There is both a kill and out set. We apply the obvious transfer
2678 function.
2681 static bool
2682 dse_transfer_function (int bb_index)
2684 bb_info_t bb_info = bb_table[bb_index];
2686 if (bb_info->kill)
2688 if (bb_info->out)
2690 /* Case 3 above. */
2691 if (bb_info->in)
2692 return bitmap_ior_and_compl (bb_info->in, bb_info->gen,
2693 bb_info->out, bb_info->kill);
2694 else
2696 bb_info->in = BITMAP_ALLOC (NULL);
2697 bitmap_ior_and_compl (bb_info->in, bb_info->gen,
2698 bb_info->out, bb_info->kill);
2699 return true;
2702 else
2703 /* Case 2 above. */
2704 return false;
2706 else
2708 /* Case 1 above. If there is already an in set, nothing
2709 happens. */
2710 if (bb_info->in)
2711 return false;
2712 else
2714 bb_info->in = BITMAP_ALLOC (NULL);
2715 bitmap_copy (bb_info->in, bb_info->gen);
2716 return true;
2721 /* Solve the dataflow equations. */
2723 static void
2724 dse_step4 (void)
2726 df_simple_dataflow (DF_BACKWARD, NULL, dse_confluence_0,
2727 dse_confluence_n, dse_transfer_function,
2728 all_blocks, df_get_postorder (DF_BACKWARD),
2729 df_get_n_blocks (DF_BACKWARD));
2730 if (dump_file)
2732 basic_block bb;
2734 fprintf (dump_file, "\n\n*** Global dataflow info after analysis.\n");
2735 FOR_ALL_BB (bb)
2737 bb_info_t bb_info = bb_table[bb->index];
2739 df_print_bb_index (bb, dump_file);
2740 if (bb_info->in)
2741 bitmap_print (dump_file, bb_info->in, " in: ", "\n");
2742 else
2743 fprintf (dump_file, " in: *MISSING*\n");
2744 if (bb_info->gen)
2745 bitmap_print (dump_file, bb_info->gen, " gen: ", "\n");
2746 else
2747 fprintf (dump_file, " gen: *MISSING*\n");
2748 if (bb_info->kill)
2749 bitmap_print (dump_file, bb_info->kill, " kill: ", "\n");
2750 else
2751 fprintf (dump_file, " kill: *MISSING*\n");
2752 if (bb_info->out)
2753 bitmap_print (dump_file, bb_info->out, " out: ", "\n");
2754 else
2755 fprintf (dump_file, " out: *MISSING*\n\n");
2762 /*----------------------------------------------------------------------------
2763 Fifth step.
2765 Delete the stores that can only be deleted using the global information.
2766 ----------------------------------------------------------------------------*/
2769 static void
2770 dse_step5_nospill (void)
2772 basic_block bb;
2773 FOR_EACH_BB (bb)
2775 bb_info_t bb_info = bb_table[bb->index];
2776 insn_info_t insn_info = bb_info->last_insn;
2777 bitmap v = bb_info->out;
2779 while (insn_info)
2781 bool deleted = false;
2782 if (dump_file && insn_info->insn)
2784 fprintf (dump_file, "starting to process insn %d\n",
2785 INSN_UID (insn_info->insn));
2786 bitmap_print (dump_file, v, " v: ", "\n");
2789 /* There may have been code deleted by the dce pass run before
2790 this phase. */
2791 if (insn_info->insn
2792 && INSN_P (insn_info->insn)
2793 && (!insn_info->cannot_delete)
2794 && (!bitmap_empty_p (v)))
2796 store_info_t store_info = insn_info->store_rec;
2798 /* Try to delete the current insn. */
2799 deleted = true;
2801 /* Skip the clobbers. */
2802 while (!store_info->is_set)
2803 store_info = store_info->next;
2805 if (store_info->alias_set)
2806 deleted = false;
2807 else
2809 HOST_WIDE_INT i;
2810 group_info_t group_info
2811 = VEC_index (group_info_t, rtx_group_vec, store_info->group_id);
2813 for (i = store_info->begin; i < store_info->end; i++)
2815 int index = get_bitmap_index (group_info, i);
2817 if (dump_file)
2818 fprintf (dump_file, "i = %d, index = %d\n", (int)i, index);
2819 if (index == 0 || !bitmap_bit_p (v, index))
2821 if (dump_file)
2822 fprintf (dump_file, "failing at i = %d\n", (int)i);
2823 deleted = false;
2824 break;
2828 if (deleted)
2830 if (dbg_cnt (dse))
2832 check_for_inc_dec (insn_info->insn);
2833 delete_insn (insn_info->insn);
2834 insn_info->insn = NULL;
2835 globally_deleted++;
2839 /* We do want to process the local info if the insn was
2840 deleted. For instance, if the insn did a wild read, we
2841 no longer need to trash the info. */
2842 if (insn_info->insn
2843 && INSN_P (insn_info->insn)
2844 && (!deleted))
2846 scan_stores_nospill (insn_info->store_rec, v, NULL);
2847 if (insn_info->wild_read)
2849 if (dump_file)
2850 fprintf (dump_file, "wild read\n");
2851 bitmap_clear (v);
2853 else if (insn_info->read_rec)
2855 if (dump_file)
2856 fprintf (dump_file, "regular read\n");
2857 scan_reads_nospill (insn_info, v, NULL);
2861 insn_info = insn_info->prev_insn;
2867 static void
2868 dse_step5_spill (void)
2870 basic_block bb;
2871 FOR_EACH_BB (bb)
2873 bb_info_t bb_info = bb_table[bb->index];
2874 insn_info_t insn_info = bb_info->last_insn;
2875 bitmap v = bb_info->out;
2877 while (insn_info)
2879 bool deleted = false;
2880 /* There may have been code deleted by the dce pass run before
2881 this phase. */
2882 if (insn_info->insn
2883 && INSN_P (insn_info->insn)
2884 && (!insn_info->cannot_delete)
2885 && (!bitmap_empty_p (v)))
2887 /* Try to delete the current insn. */
2888 store_info_t store_info = insn_info->store_rec;
2889 deleted = true;
2891 while (store_info)
2893 if (store_info->alias_set)
2895 int index = get_bitmap_index (clear_alias_group,
2896 store_info->alias_set);
2897 if (index == 0 || !bitmap_bit_p (v, index))
2899 deleted = false;
2900 break;
2903 else
2904 deleted = false;
2905 store_info = store_info->next;
2907 if (deleted && dbg_cnt (dse))
2909 if (dump_file)
2910 fprintf (dump_file, "Spill deleting insn %d\n",
2911 INSN_UID (insn_info->insn));
2912 check_for_inc_dec (insn_info->insn);
2913 delete_insn (insn_info->insn);
2914 spill_deleted++;
2915 insn_info->insn = NULL;
2919 if (insn_info->insn
2920 && INSN_P (insn_info->insn)
2921 && (!deleted))
2923 scan_stores_spill (insn_info->store_rec, v, NULL);
2924 scan_reads_spill (insn_info->read_rec, v, NULL);
2927 insn_info = insn_info->prev_insn;
2934 /*----------------------------------------------------------------------------
2935 Sixth step.
2937 Destroy everything left standing.
2938 ----------------------------------------------------------------------------*/
2940 static void
2941 dse_step6 (bool global_done)
2943 unsigned int i;
2944 group_info_t group;
2945 basic_block bb;
2947 if (global_done)
2949 for (i = 0; VEC_iterate (group_info_t, rtx_group_vec, i, group); i++)
2951 free (group->offset_map_n);
2952 free (group->offset_map_p);
2953 BITMAP_FREE (group->store1_n);
2954 BITMAP_FREE (group->store1_p);
2955 BITMAP_FREE (group->store2_n);
2956 BITMAP_FREE (group->store2_p);
2957 BITMAP_FREE (group->group_kill);
2960 FOR_ALL_BB (bb)
2962 bb_info_t bb_info = bb_table[bb->index];
2963 BITMAP_FREE (bb_info->gen);
2964 if (bb_info->kill)
2965 BITMAP_FREE (bb_info->kill);
2966 if (bb_info->in)
2967 BITMAP_FREE (bb_info->in);
2968 if (bb_info->out)
2969 BITMAP_FREE (bb_info->out);
2972 else
2974 for (i = 0; VEC_iterate (group_info_t, rtx_group_vec, i, group); i++)
2976 BITMAP_FREE (group->store1_n);
2977 BITMAP_FREE (group->store1_p);
2978 BITMAP_FREE (group->store2_n);
2979 BITMAP_FREE (group->store2_p);
2980 BITMAP_FREE (group->group_kill);
2984 if (clear_alias_sets)
2986 BITMAP_FREE (clear_alias_sets);
2987 BITMAP_FREE (disqualified_clear_alias_sets);
2988 free_alloc_pool (clear_alias_mode_pool);
2989 htab_delete (clear_alias_mode_table);
2992 end_alias_analysis ();
2993 free (bb_table);
2994 htab_delete (rtx_group_table);
2995 VEC_free (group_info_t, heap, rtx_group_vec);
2996 BITMAP_FREE (all_blocks);
2997 BITMAP_FREE (scratch);
2999 free_alloc_pool (rtx_store_info_pool);
3000 free_alloc_pool (read_info_pool);
3001 free_alloc_pool (insn_info_pool);
3002 free_alloc_pool (bb_info_pool);
3003 free_alloc_pool (rtx_group_info_pool);
3004 free_alloc_pool (deferred_change_pool);
3009 /* -------------------------------------------------------------------------
3011 ------------------------------------------------------------------------- */
3013 /* Callback for running pass_rtl_dse. */
3015 static unsigned int
3016 rest_of_handle_dse (void)
3018 bool did_global = false;
3020 df_set_flags (DF_DEFER_INSN_RESCAN);
3022 dse_step0 ();
3023 dse_step1 ();
3024 dse_step2_init ();
3025 if (dse_step2_nospill ())
3027 df_set_flags (DF_LR_RUN_DCE);
3028 df_analyze ();
3029 did_global = true;
3030 if (dump_file)
3031 fprintf (dump_file, "doing global processing\n");
3032 dse_step3 (false);
3033 dse_step4 ();
3034 dse_step5_nospill ();
3037 /* For the instance of dse that runs after reload, we make a special
3038 pass to process the spills. These are special in that they are
3039 totally transparent, i.e, there is no aliasing issues that need
3040 to be considered. This means that the wild reads that kill
3041 everything else do not apply here. */
3042 if (clear_alias_sets && dse_step2_spill ())
3044 if (!did_global)
3046 df_set_flags (DF_LR_RUN_DCE);
3047 df_analyze ();
3049 did_global = true;
3050 if (dump_file)
3051 fprintf (dump_file, "doing global spill processing\n");
3052 dse_step3 (true);
3053 dse_step4 ();
3054 dse_step5_spill ();
3057 dse_step6 (did_global);
3059 if (dump_file)
3060 fprintf (dump_file, "dse: local deletions = %d, global deletions = %d, spill deletions = %d\n",
3061 locally_deleted, globally_deleted, spill_deleted);
3062 return 0;
3065 static bool
3066 gate_dse (void)
3068 return optimize > 0 && flag_dse;
3071 struct tree_opt_pass pass_rtl_dse1 =
3073 "dse1", /* name */
3074 gate_dse, /* gate */
3075 rest_of_handle_dse, /* execute */
3076 NULL, /* sub */
3077 NULL, /* next */
3078 0, /* static_pass_number */
3079 TV_DSE1, /* tv_id */
3080 0, /* properties_required */
3081 0, /* properties_provided */
3082 0, /* properties_destroyed */
3083 0, /* todo_flags_start */
3084 TODO_dump_func |
3085 TODO_df_finish |
3086 TODO_ggc_collect, /* todo_flags_finish */
3087 'w' /* letter */
3090 struct tree_opt_pass pass_rtl_dse2 =
3092 "dse2", /* name */
3093 gate_dse, /* gate */
3094 rest_of_handle_dse, /* execute */
3095 NULL, /* sub */
3096 NULL, /* next */
3097 0, /* static_pass_number */
3098 TV_DSE2, /* tv_id */
3099 0, /* properties_required */
3100 0, /* properties_provided */
3101 0, /* properties_destroyed */
3102 0, /* todo_flags_start */
3103 TODO_dump_func |
3104 TODO_df_finish |
3105 TODO_ggc_collect, /* todo_flags_finish */
3106 'w' /* letter */