/* RTL dead store elimination.
   Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.

   Contributed by Richard Sandiford <rsandifor@codesourcery.com>
   and Kenneth Zadeck <zadeck@naturalbridge.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#undef BASELINE

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hashtab.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "df.h"
#include "cselib.h"
#include "timevar.h"
#include "tree-pass.h"
#include "alloc-pool.h"
#include "alias.h"
#include "insn-config.h"
#include "expr.h"
#include "recog.h"
#include "dse.h"
#include "optabs.h"
#include "dbgcnt.h"
/* This file contains three techniques for performing Dead Store
   Elimination (dse).

   * The first technique performs dse locally on any base address.  It
   is based on cselib, which is a local value numbering technique.
   This technique is local to a basic block but deals with fairly
   general addresses.

   * The second technique performs dse globally but is restricted to
   base addresses that are either constant or are relative to the
   frame_pointer.

   * The third technique (which is only done after register allocation)
   processes the spill slots.  This differs from the second
   technique because it takes advantage of the fact that spilling is
   completely free from the effects of aliasing.

   Logically, dse is a backwards dataflow problem.  A store can be
   deleted if it cannot be reached in the backward direction by any
   use of the value being stored.  However, the local technique uses a
   forwards scan of the basic block because cselib requires that the
   block be processed in that order.
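
   For example (an illustrative sketch, not taken from any test case),
   in

     A <- r100
     ...
     A <- r101

   where no insn between the two stores reads A, the first store is
   dead and can be deleted.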
   The pass is logically broken into 7 steps:

   0) Initialization.

   1) The local algorithm, as well as scanning the insns for the two
   global algorithms.

   2) Analysis to see if the global algs are necessary.  In the case
   of stores based on a constant address, there must be at least two
   stores to that address, to make it possible to delete some of the
   stores.  In the case of stores off of the frame or spill related
   stores, only one store to an address is necessary because those
   stores die at the end of the function.

   3) Set up the global dataflow equations based on processing the
   info parsed in the first step.

   4) Solve the dataflow equations.

   5) Delete the insns that the global analysis has indicated are
   unnecessary.

   6) Cleanup.

   Step 1 uses cselib and canon_rtx to build the largest expression
   possible for each address.  It is a forwards pass through
   each basic block.  From the point of view of the global technique,
   this first pass could examine a block in either direction.  The
   forwards ordering is to accommodate cselib.

   We make a simplifying assumption: addresses fall into four broad
   categories:

   1) base has rtx_varies_p == false, offset is constant.
   2) base has rtx_varies_p == false, offset is variable.
   3) base has rtx_varies_p == true, offset is constant.
   4) base has rtx_varies_p == true, offset is variable.

   The local passes are able to process all 4 kinds of addresses.  The
   global pass only handles (1).
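
   As illustrative examples of the categories: a constant symbol plus
   a constant is (1), a constant symbol indexed by a register is (2),
   and a base held in an ordinary pseudo gives (3) or (4) depending on
   whether the offset from it is constant.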
   The global problem is formulated as follows:

     A store, S1, to address A, where A is not relative to the stack
     frame, can be eliminated if all paths from S1 to the end of the
     function contain another store to A before a read of A.

     If the address A is relative to the stack frame, a store S2 to A
     can be eliminated if there are no paths from S2 that reach the
     end of the function that read A before another store to A.  In
     this case S2 can be deleted if there are paths from S2 to the
     end of the function that have no reads or writes to A.  This
     second case allows stores to the stack frame to be deleted that
     would otherwise die when the function returns.  This cannot be
     done if stores_off_frame_dead_at_return is not true.  See the
     documentation of that variable for when it is false.

   The global problem is formulated as a backwards set union
   dataflow problem where the stores are the gens and reads are the
   kills.  Set union problems are rare and require some special
   handling given our representation of bitmaps.  A straightforward
   implementation requires a lot of bitmaps filled with 1s.
   These are expensive and cumbersome in our bitmap formulation so
   care has been taken to avoid large vectors filled with 1s.  See
   the comments in bb_info and in the dataflow confluence functions
   for details.
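
   As a sketch of the resulting equations (using the bb_info fields
   defined below, and omitting the wild read adjustment):

     in(bb)  = gen(bb) | (out(bb) & ~kill(bb))
     out(bb) = intersection of in(s) over the successors s of bb

   where each bit position names a (group, offset) store position.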
   There are two places for further enhancements to this algorithm:

   1) The original dse which was embedded in a pass called flow also
   did local address forwarding.  For example in

   A <- r100
   ... <- A

   flow would replace the right hand side of the second insn with a
   reference to r100.  Most of the information is available to add this
   to this pass.  It has not been done because it is a lot of work in
   the case that either r100 is assigned to between the first and
   second insn and/or the second insn is a load of part of the value
   stored by the first insn.

   insn 5 in gcc.c-torture/compile/990203-1.c simple case.
   insn 15 in gcc.c-torture/execute/20001017-2.c simple case.
   insn 25 in gcc.c-torture/execute/20001026-1.c simple case.
   insn 44 in gcc.c-torture/execute/20010910-1.c simple case.

   2) The cleaning up of spill code is quite profitable.  It currently
   depends on reading tea leaves and chicken entrails left by reload.
   This pass depends on reload creating a singleton alias set for each
   spill slot and telling the next dse pass which of these alias sets
   are the singletons.  Rather than analyze the addresses of the
   spills, dse's spill processing just does analysis of the loads and
   stores that use those alias sets.  There are three cases where this
   falls short:

   a) Reload sometimes creates the slot for one mode of access, and
   then inserts loads and/or stores for a smaller mode.  In this
   case, the current code just punts on the slot.  The proper thing
   to do is to back out and use one bit vector position for each
   byte of the entity associated with the slot.  This depends on
   KNOWING that reload always generates the accesses for each of the
   bytes in some canonical (read that easy to understand several
   passes after reload happens) way.

   b) Reload sometimes decides that the spill slot it allocated was not
   large enough for the mode and goes back and allocates more slots
   with the same mode and alias set.  The backout in this case is a
   little more graceful than (a).  In this case the slot is unmarked
   as being a spill slot and if the final address comes out to be based
   off the frame pointer, the global algorithm handles this slot.

   c) For any pass that may prespill, there is currently no
   mechanism to tell the dse pass that the slot being used has the
   special properties that reload uses.  It may be that all that is
   required is to have those passes make the same calls that reload
   does, assuming that the alias sets can be manipulated in the same
   way.  */
/* There are limits to the size of constant offsets we model for the
   global problem.  There are certainly test cases that exceed this
   limit; however, it is unlikely that there are important programs
   that really have constant offsets this size.  */
#define MAX_OFFSET (64 * 1024)


static bitmap scratch = NULL;
struct insn_info;
/* This structure holds information about a candidate store.  */
struct store_info
{

  /* False means this is a clobber.  */
  bool is_set;

  /* The id of the mem group of the base address.  If rtx_varies_p is
     true, this is -1.  Otherwise, it is the index into the group
     table.  */
  int group_id;

  /* This is the cselib value.  */
  cselib_val *cse_base;

  /* The canonized mem.  */
  rtx mem;

  /* The result of get_addr on mem.  */
  rtx mem_addr;

  /* If this is non-zero, it is the alias set of a spill location.  */
  alias_set_type alias_set;

  /* The offset of the first byte and the offset one past the last
     byte associated with the operation.  */
  int begin, end;

  /* A bitmask as wide as the number of bytes in the word that
     contains a 1 if the byte may be needed.  The store is unused if
     all of the bits are 0.  */
  long positions_needed;

  /* The next store info for this insn.  */
  struct store_info *next;

  /* The right hand side of the store.  This is used if there is a
     subsequent reload of the mem's address somewhere later in the
     basic block.  */
  rtx rhs;
};
typedef struct store_info *store_info_t;
static alloc_pool cse_store_info_pool;
static alloc_pool rtx_store_info_pool;
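
/* As an illustrative example of positions_needed: a 4 byte store
   starts with positions_needed == 0xf.  If later stores in the block
   overwrite the second and third bytes, bits 1 and 2 are cleared;
   once positions_needed reaches 0 every byte has been shadowed and
   the store is dead.  */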
/* This structure holds information about a load.  These are only
   built for rtx bases.  */
struct read_info
{
  /* The id of the mem group of the base address.  */
  int group_id;

  /* If this is non-zero, it is the alias set of a spill location.  */
  alias_set_type alias_set;

  /* The offset of the first byte and the offset one past the last
     byte associated with the operation.  If begin == end == 0, the
     read did not have a constant offset.  */
  int begin, end;

  /* The mem being read.  */
  rtx mem;

  /* The next read_info for this insn.  */
  struct read_info *next;
};
typedef struct read_info *read_info_t;
static alloc_pool read_info_pool;
/* One of these records is created for each insn.  */

struct insn_info
{
  /* Set true if the insn contains a store but the insn itself cannot
     be deleted.  This is set if the insn is a parallel and there is
     more than one non dead output or if the insn is in some way
     volatile.  */
  bool cannot_delete;

  /* This field is only used by the global algorithm.  It is set true
     if the insn contains any read of mem except for a category (1)
     address.  This is also set if the insn is a call or has a clobber
     mem.  If the insn contains a wild read, the use_rec will be
     null.  */
  bool wild_read;

  /* This field is only used for the processing of const functions.
     These functions cannot read memory, but they can read the stack
     because that is where they may get their parms.  We need to be
     this conservative because, like the store motion pass, we don't
     consider CALL_INSN_FUNCTION_USAGE when processing call insns.
     Moreover, we need to distinguish two cases:
     1. Before reload (register elimination), the stores related to
	outgoing arguments are stack pointer based and thus deemed
	of non-constant base in this pass.  This requires special
	handling but also means that the frame pointer based stores
	need not be killed upon encountering a const function call.
     2. After reload, the stores related to outgoing arguments can be
	either stack pointer or hard frame pointer based.  This means
	that we have no other choice than also killing all the frame
	pointer based stores upon encountering a const function call.
     This field is set after reload for const function calls.  Having
     this set is less severe than a wild read, it just means that all
     the frame related stores are killed rather than all the stores.  */
  bool frame_read;

  /* This field is only used for the processing of const functions.
     It is set if the insn may contain a stack pointer based store.  */
  bool stack_pointer_based;

  /* This is true if any of the sets within the store contains a
     cselib base.  Such stores can only be deleted by the local
     algorithm.  */
  bool contains_cselib_groups;

  /* The insn.  */
  rtx insn;

  /* The list of mem sets or mem clobbers that are contained in this
     insn.  If the insn is deletable, it contains only one mem set.
     But it could also contain clobbers.  Insns that contain more than
     one mem set are not deletable, but each of those mems is recorded
     here in order to provide info to delete other insns.  */
  store_info_t store_rec;

  /* The linked list of mem uses in this insn.  Only the reads from
     rtx bases are listed here.  The reads to cselib bases are
     completely processed during the first scan and so are never
     created.  */
  read_info_t read_rec;

  /* The prev insn in the basic block.  */
  struct insn_info * prev_insn;

  /* The linked list of insns that are in consideration for removal in
     the forwards pass thru the basic block.  This pointer may be
     trash as it is not cleared when a wild read occurs.  The only
     time it is guaranteed to be correct is when the traversal starts
     at active_local_stores.  */
  struct insn_info * next_local_store;
};

typedef struct insn_info *insn_info_t;
static alloc_pool insn_info_pool;
/* The linked list of stores that are under consideration in this
   basic block.  */
static insn_info_t active_local_stores;
struct bb_info
{

  /* Pointer to the insn info for the last insn in the block.  These
     are linked so this is how all of the insns are reached.  During
     scanning this is the current insn being scanned.  */
  insn_info_t last_insn;

  /* The info for the global dataflow problem.  */

  /* This is set if the transfer function should AND in the wild_read
     bitmap before applying the kill and gen sets.  That vector knocks
     out most of the bits in the bitmap and thus speeds up the
     operations.  */
  bool apply_wild_read;

  /* The set of store positions that exist in this block before a wild read.  */
  bitmap gen;

  /* The set of load positions that exist in this block above the
     same position of a store.  */
  bitmap kill;

  /* The set of stores that reach the top of the block without being
     killed by a read.

     Do not represent this set if it is all ones.  Note that this is
     what the bitvector should logically be initialized to for a set
     intersection problem.  However, like the kill set, this is too
     expensive.  So initially, the in set will only be created for the
     exit block and any block that contains a wild read.  */
  bitmap in;

  /* The set of stores that reach the bottom of the block from its
     successors.

     Do not represent this set if it is all ones.  Note that this is
     what the bitvector should logically be initialized to for a set
     intersection problem.  However, like the kill and in set, this is
     too expensive.  So what is done is that the confluence operator
     just initializes the vector from one of the out sets of the
     successors of the block.  */
  bitmap out;
};

typedef struct bb_info *bb_info_t;
static alloc_pool bb_info_pool;
/* Table to hold all bb_infos.  */
static bb_info_t *bb_table;
/* There is a group_info for each rtx base that is used to reference
   memory.  There are also not many of the rtx bases because they are
   very limited in scope.  */

struct group_info
{
  /* The actual base of the address.  */
  rtx rtx_base;

  /* The sequential id of the base.  This allows us to have a
     canonical ordering of these that is not based on addresses.  */
  int id;

  /* A mem wrapped around the base pointer for the group in order to
     do read dependency.  */
  rtx base_mem;

  /* Canonized version of base_mem, most likely the same thing.  */
  rtx canon_base_mem;

  /* These two sets of two bitmaps are used to keep track of how many
     stores are actually referencing that position from this base.  We
     only do this for rtx bases as this will be used to assign
     positions in the bitmaps for the global problem.  Bit N is set in
     store1 on the first store for offset N.  Bit N is set in store2
     for the second store to offset N.  This is all we need since we
     only care about offsets that have two or more stores for them.

     The "_n" suffix is for offsets less than 0 and the "_p" suffix is
     for 0 and greater offsets.

     There is one special case here, for stores into the stack frame:
     we will OR store1 into store2 before deciding which stores to
     look at globally.  This is because stores to the stack frame that
     have no other reads before the end of the function can also be
     deleted.  */
  bitmap store1_n, store1_p, store2_n, store2_p;

  /* The positions in this bitmap have the same assignments as the in,
     out, gen and kill bitmaps.  This bitmap is all zeros except for
     the positions that are occupied by stores for this group.  */
  bitmap group_kill;

  /* True if there are any positions that are to be processed
     globally.  */
  bool process_globally;

  /* True if the base of this group is either the frame_pointer or
     hard_frame_pointer.  */
  bool frame_related;

  /* The offset_map is used to map the offsets from this base into
     positions in the global bitmaps.  It is only created after all of
     the stores have been scanned and we know which ones we
     care about.  */
  int *offset_map_n, *offset_map_p;
  int offset_map_size_n, offset_map_size_p;
};
typedef struct group_info *group_info_t;
typedef const struct group_info *const_group_info_t;
static alloc_pool rtx_group_info_pool;
/* Tables of group_info structures, hashed by base value.  */
static htab_t rtx_group_table;

/* Index into the rtx_group_vec.  */
static int rtx_group_next_id;

DEF_VEC_P(group_info_t);
DEF_VEC_ALLOC_P(group_info_t,heap);

static VEC(group_info_t,heap) *rtx_group_vec;
/* This structure holds the set of changes that are being deferred
   when removing a read operation.  See replace_read.  */
struct deferred_change
{

  /* The mem that is being replaced.  */
  rtx *loc;

  /* The reg it is being replaced with.  */
  rtx reg;

  struct deferred_change *next;
};

typedef struct deferred_change *deferred_change_t;
static alloc_pool deferred_change_pool;

static deferred_change_t deferred_change_list = NULL;
/* These are used to hold the alias sets of spill variables.  Since
   these are never aliased and there may be a lot of them, it makes
   sense to treat them specially.  This bitvector is only allocated in
   calls from dse_record_singleton_alias_set which currently is only
   made during reload1.  So when dse is called before reload this
   mechanism does nothing.  */

static bitmap clear_alias_sets = NULL;

/* The set of clear_alias_sets that have been disqualified because
   there are loads or stores using a different mode than the alias set
   was registered with.  */
static bitmap disqualified_clear_alias_sets = NULL;

/* The group that holds all of the clear_alias_sets.  */
static group_info_t clear_alias_group;

/* The modes of the clear_alias_sets.  */
static htab_t clear_alias_mode_table;

/* Hash table element to look up the mode for an alias set.  */
struct clear_alias_mode_holder
{
  alias_set_type alias_set;
  enum machine_mode mode;
};

static alloc_pool clear_alias_mode_pool;
/* This is true except for two cases:
   (1) current_function_stdarg -- i.e. we cannot do this
       for vararg functions because they play games with the frame.
   (2) In Ada, it is sometimes not safe to assume that any stores
       based off the stack frame go dead at the exit to a function.  */
static bool stores_off_frame_dead_at_return;

/* Counter for stats.  */
static int globally_deleted;
static int locally_deleted;
static int spill_deleted;

static bitmap all_blocks;

/* The number of bits used in the global bitmaps.  */
static unsigned int current_position;


static bool gate_dse (void);
static bool gate_dse1 (void);
static bool gate_dse2 (void);

/*----------------------------------------------------------------------------
   Zeroth step.

   Initialization.
----------------------------------------------------------------------------*/
/* Hashtable callbacks for maintaining the "bases" field of
   store_group_info, given that the addresses are function invariants.  */

static int
clear_alias_mode_eq (const void *p1, const void *p2)
{
  const struct clear_alias_mode_holder * h1
    = (const struct clear_alias_mode_holder *) p1;
  const struct clear_alias_mode_holder * h2
    = (const struct clear_alias_mode_holder *) p2;
  return h1->alias_set == h2->alias_set;
}


static hashval_t
clear_alias_mode_hash (const void *p)
{
  const struct clear_alias_mode_holder *holder
    = (const struct clear_alias_mode_holder *) p;
  return holder->alias_set;
}
/* Find the entry associated with ALIAS_SET.  */

static struct clear_alias_mode_holder *
clear_alias_set_lookup (alias_set_type alias_set)
{
  struct clear_alias_mode_holder tmp_holder;
  void **slot;

  tmp_holder.alias_set = alias_set;
  slot = htab_find_slot (clear_alias_mode_table, &tmp_holder, NO_INSERT);
  gcc_assert (*slot);

  return *slot;
}
/* Hashtable callbacks for maintaining the "bases" field of
   store_group_info, given that the addresses are function invariants.  */

static int
invariant_group_base_eq (const void *p1, const void *p2)
{
  const_group_info_t gi1 = (const_group_info_t) p1;
  const_group_info_t gi2 = (const_group_info_t) p2;
  return rtx_equal_p (gi1->rtx_base, gi2->rtx_base);
}


static hashval_t
invariant_group_base_hash (const void *p)
{
  const_group_info_t gi = (const_group_info_t) p;
  int do_not_record;
  return hash_rtx (gi->rtx_base, Pmode, &do_not_record, NULL, false);
}
/* Get the GROUP for BASE.  Add a new group if it is not there.  */

static group_info_t
get_group_info (rtx base)
{
  struct group_info tmp_gi;
  group_info_t gi;
  void **slot;

  if (base)
    {
      /* Find the store_base_info structure for BASE, creating a new one
	 if necessary.  */
      tmp_gi.rtx_base = base;
      slot = htab_find_slot (rtx_group_table, &tmp_gi, INSERT);
      gi = (group_info_t) *slot;
    }
  else
    {
      if (!clear_alias_group)
	{
	  clear_alias_group = gi = pool_alloc (rtx_group_info_pool);
	  memset (gi, 0, sizeof (struct group_info));
	  gi->id = rtx_group_next_id++;
	  gi->store1_n = BITMAP_ALLOC (NULL);
	  gi->store1_p = BITMAP_ALLOC (NULL);
	  gi->store2_n = BITMAP_ALLOC (NULL);
	  gi->store2_p = BITMAP_ALLOC (NULL);
	  gi->group_kill = BITMAP_ALLOC (NULL);
	  gi->process_globally = false;
	  gi->offset_map_size_n = 0;
	  gi->offset_map_size_p = 0;
	  gi->offset_map_n = NULL;
	  gi->offset_map_p = NULL;
	  VEC_safe_push (group_info_t, heap, rtx_group_vec, gi);
	}
      return clear_alias_group;
    }

  if (gi == NULL)
    {
      *slot = gi = pool_alloc (rtx_group_info_pool);
      gi->rtx_base = base;
      gi->id = rtx_group_next_id++;
      gi->base_mem = gen_rtx_MEM (QImode, base);
      gi->canon_base_mem = canon_rtx (gi->base_mem);
      gi->store1_n = BITMAP_ALLOC (NULL);
      gi->store1_p = BITMAP_ALLOC (NULL);
      gi->store2_n = BITMAP_ALLOC (NULL);
      gi->store2_p = BITMAP_ALLOC (NULL);
      gi->group_kill = BITMAP_ALLOC (NULL);
      gi->process_globally = false;
      gi->frame_related =
	(base == frame_pointer_rtx) || (base == hard_frame_pointer_rtx);
      gi->offset_map_size_n = 0;
      gi->offset_map_size_p = 0;
      gi->offset_map_n = NULL;
      gi->offset_map_p = NULL;
      VEC_safe_push (group_info_t, heap, rtx_group_vec, gi);
    }

  return gi;
}
/* Initialization of data structures.  */

static void
dse_step0 (void)
{
  locally_deleted = 0;
  globally_deleted = 0;
  spill_deleted = 0;

  scratch = BITMAP_ALLOC (NULL);

  rtx_store_info_pool
    = create_alloc_pool ("rtx_store_info_pool",
			 sizeof (struct store_info), 100);
  read_info_pool
    = create_alloc_pool ("read_info_pool",
			 sizeof (struct read_info), 100);
  insn_info_pool
    = create_alloc_pool ("insn_info_pool",
			 sizeof (struct insn_info), 100);
  bb_info_pool
    = create_alloc_pool ("bb_info_pool",
			 sizeof (struct bb_info), 100);
  rtx_group_info_pool
    = create_alloc_pool ("rtx_group_info_pool",
			 sizeof (struct group_info), 100);
  deferred_change_pool
    = create_alloc_pool ("deferred_change_pool",
			 sizeof (struct deferred_change), 10);

  rtx_group_table = htab_create (11, invariant_group_base_hash,
				 invariant_group_base_eq, NULL);

  bb_table = XCNEWVEC (bb_info_t, last_basic_block);
  rtx_group_next_id = 0;

  stores_off_frame_dead_at_return =
    (!(TREE_CODE (TREE_TYPE (current_function_decl)) == FUNCTION_TYPE
       && (TYPE_RETURNS_STACK_DEPRESSED (TREE_TYPE (current_function_decl)))))
    && (!current_function_stdarg);

  init_alias_analysis ();

  if (clear_alias_sets)
    clear_alias_group = get_group_info (NULL);
  else
    clear_alias_group = NULL;
}

/*----------------------------------------------------------------------------
   First step.

   Scan all of the insns.  Any random ordering of the blocks is fine.
   Each block is scanned in forward order to accommodate cselib which
   is used to remove stores with non-constant bases.
----------------------------------------------------------------------------*/
/* Delete all of the store_info recs from INSN_INFO.  */

static void
free_store_info (insn_info_t insn_info)
{
  store_info_t store_info = insn_info->store_rec;
  while (store_info)
    {
      store_info_t next = store_info->next;
      if (store_info->cse_base)
	pool_free (cse_store_info_pool, store_info);
      else
	pool_free (rtx_store_info_pool, store_info);
      store_info = next;
    }

  insn_info->cannot_delete = true;
  insn_info->contains_cselib_groups = false;
  insn_info->store_rec = NULL;
}
struct insn_size {
  int size;
  rtx insn;
};


/* Add an insn to do the add inside X if it is a
   PRE/POST-INC/DEC/MODIFY.  D is a structure containing the insn and
   the size of the mode of the MEM that this is inside of.  */

static int
replace_inc_dec (rtx *r, void *d)
{
  rtx x = *r;
  struct insn_size *data = (struct insn_size *)d;
  switch (GET_CODE (x))
    {
    case PRE_INC:
    case POST_INC:
      {
	rtx r1 = XEXP (x, 0);
	rtx c = gen_int_mode (data->size, Pmode);
	add_insn_before (data->insn,
			 gen_rtx_SET (Pmode, r1,
				      gen_rtx_PLUS (Pmode, r1, c)),
			 NULL);
	return -1;
      }

    case PRE_DEC:
    case POST_DEC:
      {
	rtx r1 = XEXP (x, 0);
	rtx c = gen_int_mode (-data->size, Pmode);
	add_insn_before (data->insn,
			 gen_rtx_SET (Pmode, r1,
				      gen_rtx_PLUS (Pmode, r1, c)),
			 NULL);
	return -1;
      }

    case PRE_MODIFY:
    case POST_MODIFY:
      {
	/* We can reuse the add because we are about to delete the
	   insn that contained it.  */
	rtx add = XEXP (x, 1);
	rtx r1 = XEXP (x, 0);
	add_insn_before (data->insn,
			 gen_rtx_SET (Pmode, r1, add), NULL);
	return -1;
      }

    default:
      return 0;
    }
}
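
/* As an illustrative example: when deleting a dead SImode store whose
   address is (post_inc (reg:SI r1)), the side effect is preserved by
   emitting

     (set (reg:SI r1) (plus:SI (reg:SI r1) (const_int 4)))

   just before the dying insn.  */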
/* If X is a MEM, check the address to see if it is PRE/POST-INC/DEC/MODIFY
   and generate an add to replace that.  */

static int
replace_inc_dec_mem (rtx *r, void *d)
{
  rtx x = *r;
  if (GET_CODE (x) == MEM)
    {
      struct insn_size data;

      data.size = GET_MODE_SIZE (GET_MODE (x));
      data.insn = (rtx)d;

      for_each_rtx (&XEXP (x, 0), replace_inc_dec, &data);

      return -1;
    }
  return 0;
}

/* Before we delete INSN, make sure that the auto inc/dec, if it is
   there, is split into a separate insn.  */

static void
check_for_inc_dec (rtx insn)
{
  rtx note = find_reg_note (insn, REG_INC, NULL_RTX);
  if (note)
    for_each_rtx (&insn, replace_inc_dec_mem, insn);
}
/* Delete the insn and free all of the fields inside INSN_INFO.  */

static void
delete_dead_store_insn (insn_info_t insn_info)
{
  read_info_t read_info;

  if (!dbg_cnt (dse))
    return;

  check_for_inc_dec (insn_info->insn);
  if (dump_file)
    {
      fprintf (dump_file, "Locally deleting insn %d ",
	       INSN_UID (insn_info->insn));
      if (insn_info->store_rec->alias_set)
	fprintf (dump_file, "alias set %d\n",
		 (int) insn_info->store_rec->alias_set);
      else
	fprintf (dump_file, "\n");
    }

  free_store_info (insn_info);
  read_info = insn_info->read_rec;

  while (read_info)
    {
      read_info_t next = read_info->next;
      pool_free (read_info_pool, read_info);
      read_info = next;
    }
  insn_info->read_rec = NULL;

  delete_insn (insn_info->insn);
  locally_deleted++;
  insn_info->insn = NULL;

  insn_info->wild_read = false;
}
/* Set the store* bitmaps and offset_map_size* fields in GROUP based
   on OFFSET and WIDTH.  */

static void
set_usage_bits (group_info_t group, HOST_WIDE_INT offset, HOST_WIDE_INT width)
{
  HOST_WIDE_INT i;

  if ((offset > -MAX_OFFSET) && (offset < MAX_OFFSET))
    for (i = offset; i < offset + width; i++)
      {
	bitmap store1;
	bitmap store2;
	int ai;
	if (i < 0)
	  {
	    store1 = group->store1_n;
	    store2 = group->store2_n;
	    ai = -i;
	  }
	else
	  {
	    store1 = group->store1_p;
	    store2 = group->store2_p;
	    ai = i;
	  }

	if (bitmap_bit_p (store1, ai))
	  bitmap_set_bit (store2, ai);
	else
	  {
	    bitmap_set_bit (store1, ai);
	    if (i < 0)
	      {
		if (group->offset_map_size_n < ai)
		  group->offset_map_size_n = ai;
	      }
	    else
	      {
		if (group->offset_map_size_p < ai)
		  group->offset_map_size_p = ai;
	      }
	  }
      }
}
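
/* As an illustrative example: a 4 byte store at offset -2 sets bits 2
   and 1 of store1_n (bytes -2 and -1) and bits 0 and 1 of store1_p
   (bytes 0 and 1).  A second 4 byte store at the same offset finds
   those bits already set and so sets the same positions in store2_n
   and store2_p, marking them as interesting for the global problem.  */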
/* Set the BB_INFO so that the last insn is marked as a wild read.  */

static void
add_wild_read (bb_info_t bb_info)
{
  insn_info_t insn_info = bb_info->last_insn;
  read_info_t *ptr = &insn_info->read_rec;

  while (*ptr)
    {
      read_info_t next = (*ptr)->next;
      if ((*ptr)->alias_set == 0)
	{
	  pool_free (read_info_pool, *ptr);
	  *ptr = next;
	}
      else
	ptr = &(*ptr)->next;
    }
  insn_info->wild_read = true;
  active_local_stores = NULL;
}
/* Return true if X is a constant or one of the registers that behave
   as a constant over the life of a function.  This is equivalent to
   !rtx_varies_p for memory addresses.  */

static bool
const_or_frame_p (rtx x)
{
  switch (GET_CODE (x))
    {
    case MEM:
      return MEM_READONLY_P (x);

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return true;

    case REG:
      /* Note that we have to test for the actual rtx used for the frame
	 and arg pointers and not just the register number in case we have
	 eliminated the frame and/or arg pointer and are using it
	 for pseudos.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
	  /* The arg pointer varies if it is not a fixed register.  */
	  || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
	  || x == pic_offset_table_rtx)
	return true;
      return false;

    default:
      return false;
    }
}
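
/* As illustrative examples: (symbol_ref "x") and frame_pointer_rtx
   are accepted here, while an ordinary pseudo such as (reg 100) is
   rejected because its value can change over the life of the
   function.  */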
/* Take all reasonable action to put the address of MEM into the form
   that we can do analysis on.

   The gold standard is to get the address into the form: address +
   OFFSET where address is something that rtx_varies_p considers a
   constant.  When we can get the address in this form, we can do
   global analysis on it.  Note that for constant bases, address is
   not actually returned, only the group_id.  The address can be
   obtained from that.

   If that fails, we try cselib to get a value we can at least use
   locally.  If that fails we return false.

   The GROUP_ID is set to -1 for cselib bases and the index of the
   group for non_varying bases.  */

static bool
canon_address (rtx mem,
	       alias_set_type *alias_set_out,
	       int *group_id,
	       HOST_WIDE_INT *offset,
	       cselib_val **base)
{
  rtx mem_address = XEXP (mem, 0);
  rtx expanded_address, address;
  /* Make sure that cselib has initialized all of the operands of
     the address before asking it to do the subst.  */

  if (clear_alias_sets)
    {
      /* If this is a spill, do not do any further processing.  */
      alias_set_type alias_set = MEM_ALIAS_SET (mem);
      if (dump_file)
	fprintf (dump_file, "found alias set %d\n", (int) alias_set);
      if (bitmap_bit_p (clear_alias_sets, alias_set))
	{
	  struct clear_alias_mode_holder *entry
	    = clear_alias_set_lookup (alias_set);

	  /* If the modes do not match, we cannot process this set.  */
	  if (entry->mode != GET_MODE (mem))
	    {
	      if (dump_file)
		fprintf (dump_file,
			 "disqualifying alias set %d, (%s) != (%s)\n",
			 (int) alias_set, GET_MODE_NAME (entry->mode),
			 GET_MODE_NAME (GET_MODE (mem)));

	      bitmap_set_bit (disqualified_clear_alias_sets, alias_set);
	      return false;
	    }

	  *alias_set_out = alias_set;
	  *group_id = clear_alias_group->id;
	  return true;
	}
    }

  *alias_set_out = 0;

  cselib_lookup (mem_address, Pmode, 1);

  if (dump_file)
    {
      fprintf (dump_file, "  mem: ");
      print_inline_rtx (dump_file, mem_address, 0);
      fprintf (dump_file, "\n");
    }

  /* Use cselib to replace all of the reg references with the full
     expression.  This will take care of the case where we have

     r_x = base + offset;
     val = *r_x;

     by making it into

     val = *(base + offset);  */

  expanded_address = cselib_expand_value_rtx (mem_address, scratch, 5);

  /* If this fails, just go with the mem_address.  */
  if (!expanded_address)
    expanded_address = mem_address;

  /* Split the address into canonical BASE + OFFSET terms.  */
  address = canon_rtx (expanded_address);

  *offset = 0;

  if (dump_file)
    {
      fprintf (dump_file, "\n   after cselib_expand address: ");
      print_inline_rtx (dump_file, expanded_address, 0);
      fprintf (dump_file, "\n");

      fprintf (dump_file, "\n   after canon_rtx address: ");
      print_inline_rtx (dump_file, address, 0);
      fprintf (dump_file, "\n");
    }

  if (GET_CODE (address) == CONST)
    address = XEXP (address, 0);

  if (GET_CODE (address) == PLUS && GET_CODE (XEXP (address, 1)) == CONST_INT)
    {
      *offset = INTVAL (XEXP (address, 1));
      address = XEXP (address, 0);
    }

  if (const_or_frame_p (address))
    {
      group_info_t group = get_group_info (address);

      if (dump_file)
	fprintf (dump_file, "  gid=%d offset=%d \n", group->id, (int)*offset);
      *base = NULL;
      *group_id = group->id;
    }
  else
    {
      *base = cselib_lookup (address, Pmode, true);
      *group_id = -1;

      if (*base == NULL)
	{
	  if (dump_file)
	    fprintf (dump_file, " no cselib val - should be a wild read.\n");
	  return false;
	}
      if (dump_file)
	fprintf (dump_file, "  varying cselib base=%d offset = %d\n",
		 (*base)->value, (int)*offset);
    }
  return true;
}
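
/* As an illustrative example: for (mem:SI (plus:SI (reg/f:SI fp)
   (const_int 8))), the address canonicalizes to fp + 8;
   const_or_frame_p accepts fp, so *GROUP_ID becomes the frame
   pointer's group and *OFFSET is 8.  An address such as
   (plus:SI (reg:SI 100) (reg:SI 101)) instead gets a cselib value
   with *GROUP_ID == -1.  */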
/* Clear the rhs field from the active_local_stores array.  */

static void
clear_rhs_from_active_local_stores (void)
{
  insn_info_t ptr = active_local_stores;

  while (ptr)
    {
      store_info_t store_info = ptr->store_rec;
      /* Skip the clobbers.  */
      while (!store_info->is_set)
	store_info = store_info->next;

      store_info->rhs = NULL;

      ptr = ptr->next_local_store;
    }
}
/* BODY is an instruction pattern that belongs to INSN.  Return 1 if
   there is a candidate store, after adding it to the appropriate
   local store group if so.  */

static int
record_store (rtx body, bb_info_t bb_info)
{
  rtx mem;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT width = 0;
  alias_set_type spill_alias_set;
  insn_info_t insn_info = bb_info->last_insn;
  store_info_t store_info = NULL;
  int group_id;
  cselib_val *base = NULL;
  insn_info_t ptr, last;
  bool store_is_unused;

  if (GET_CODE (body) != SET && GET_CODE (body) != CLOBBER)
    return 0;

  /* If this is not used, then this cannot be used to keep the insn
     from being deleted.  On the other hand, it does provide something
     that can be used to prove that another store is dead.  */
  store_is_unused
    = (find_reg_note (insn_info->insn, REG_UNUSED, body) != NULL);

  /* Check whether that value is a suitable memory location.  */
  mem = SET_DEST (body);
  if (!MEM_P (mem))
    {
      /* If the set or clobber is unused, then it does not affect our
	 ability to get rid of the entire insn.  */
      if (!store_is_unused)
	insn_info->cannot_delete = true;
      return 0;
    }

  /* At this point we know mem is a mem.  */
  if (GET_MODE (mem) == BLKmode)
    {
      if (GET_CODE (XEXP (mem, 0)) == SCRATCH)
	{
	  if (dump_file)
	    fprintf (dump_file, " adding wild read for (clobber (mem:BLK (scratch))\n");
	  add_wild_read (bb_info);
	  insn_info->cannot_delete = true;
	}
      else if (!store_is_unused)
	{
	  /* If the set or clobber is unused, then it does not affect our
	     ability to get rid of the entire insn.  */
	  insn_info->cannot_delete = true;
	  clear_rhs_from_active_local_stores ();
	}
      return 0;
    }

  /* We can still process a volatile mem, we just cannot delete it.  */
  if (MEM_VOLATILE_P (mem))
    insn_info->cannot_delete = true;

  if (!canon_address (mem, &spill_alias_set, &group_id, &offset, &base))
    {
      clear_rhs_from_active_local_stores ();
      return 0;
    }

  width = GET_MODE_SIZE (GET_MODE (mem));

  if (spill_alias_set)
    {
      bitmap store1 = clear_alias_group->store1_p;
      bitmap store2 = clear_alias_group->store2_p;

      if (bitmap_bit_p (store1, spill_alias_set))
	bitmap_set_bit (store2, spill_alias_set);
      else
	bitmap_set_bit (store1, spill_alias_set);

      if (clear_alias_group->offset_map_size_p < spill_alias_set)
	clear_alias_group->offset_map_size_p = spill_alias_set;

      store_info = pool_alloc (rtx_store_info_pool);

      if (dump_file)
	fprintf (dump_file, " processing spill store %d(%s)\n",
		 (int) spill_alias_set, GET_MODE_NAME (GET_MODE (mem)));
    }
  else if (group_id >= 0)
    {
      /* In the restrictive case where the base is a constant or the
	 frame pointer we can do global analysis.  */

      group_info_t group
	= VEC_index (group_info_t, rtx_group_vec, group_id);

      store_info = pool_alloc (rtx_store_info_pool);
      set_usage_bits (group, offset, width);

      if (dump_file)
	fprintf (dump_file, " processing const base store gid=%d[%d..%d)\n",
		 group_id, (int)offset, (int)(offset+width));
    }
  else
    {
      rtx base_term = find_base_term (XEXP (mem, 0));
      if (!base_term
	  || (GET_CODE (base_term) == ADDRESS
	      && GET_MODE (base_term) == Pmode
	      && XEXP (base_term, 0) == stack_pointer_rtx))
	insn_info->stack_pointer_based = true;
      insn_info->contains_cselib_groups = true;

      store_info = pool_alloc (cse_store_info_pool);
      group_id = -1;

      if (dump_file)
	fprintf (dump_file, " processing cselib store [%d..%d)\n",
		 (int)offset, (int)(offset+width));
    }

  /* Check to see if this store causes some other stores to be
     dead.  */
  ptr = active_local_stores;
  last = NULL;

  while (ptr)
    {
      insn_info_t next = ptr->next_local_store;
      store_info_t s_info = ptr->store_rec;
      bool delete = true;

      /* Skip the clobbers.  We delete the active insn if this insn
	 shadows the set.  To have been put on the active list, it
	 has exactly one set.  */
      while (!s_info->is_set)
	s_info = s_info->next;

      if (s_info->alias_set != spill_alias_set)
	delete = false;
      else if (s_info->alias_set)
	{
	  struct clear_alias_mode_holder *entry
	    = clear_alias_set_lookup (s_info->alias_set);
	  /* Generally, spills cannot be processed if any of the
	     references to the slot have a different mode.  But if
	     we are in the same block and mode is exactly the same
	     between this store and one before in the same block,
	     we can still delete it.  */
	  if ((GET_MODE (mem) == GET_MODE (s_info->mem))
	      && (GET_MODE (mem) == entry->mode))
	    {
	      delete = true;
	      s_info->positions_needed = 0;
	    }
	  if (dump_file)
	    fprintf (dump_file, "    trying spill store in insn=%d alias_set=%d\n",
		     INSN_UID (ptr->insn), (int) s_info->alias_set);
	}
      else if ((s_info->group_id == group_id)
	       && (s_info->cse_base == base))
	{
	  HOST_WIDE_INT i;
	  if (dump_file)
	    fprintf (dump_file, "    trying store in insn=%d gid=%d[%d..%d)\n",
		     INSN_UID (ptr->insn), s_info->group_id,
		     (int)s_info->begin, (int)s_info->end);
	  for (i = offset; i < offset+width; i++)
	    if (i >= s_info->begin && i < s_info->end)
	      s_info->positions_needed &= ~(1L << (i - s_info->begin));
	}
      else if (s_info->rhs)
	/* Need to see if it is possible for this store to overwrite
	   the value of store_info.  If it is, set the rhs to NULL to
	   keep it from being used to remove a load.  */
	{
	  if (canon_true_dependence (s_info->mem,
				     GET_MODE (s_info->mem),
				     s_info->mem_addr,
				     mem, rtx_varies_p))
	    s_info->rhs = NULL;
	}

      /* An insn can be deleted if every position of every one of
	 its s_infos is zero.  */
      if (s_info->positions_needed != 0)
	delete = false;

      if (delete)
	{
	  insn_info_t insn_to_delete = ptr;

	  if (last)
	    last->next_local_store = ptr->next_local_store;
	  else
	    active_local_stores = ptr->next_local_store;

	  delete_dead_store_insn (insn_to_delete);
	}
      else
	last = ptr;
      ptr = next;
    }

  gcc_assert ((unsigned) width < sizeof (store_info->positions_needed) * CHAR_BIT);

  /* Finish filling in the store_info.  */
  store_info->next = insn_info->store_rec;
  insn_info->store_rec = store_info;
  store_info->mem = canon_rtx (mem);
  store_info->alias_set = spill_alias_set;
  store_info->mem_addr = get_addr (XEXP (mem, 0));
  store_info->cse_base = base;
  store_info->positions_needed = (1L << width) - 1;
  store_info->group_id = group_id;
  store_info->begin = offset;
  store_info->end = offset + width;
  store_info->is_set = GET_CODE (body) == SET;

  if (store_info->is_set
      /* No place to keep the value after ra.  */
      && !reload_completed
      && (REG_P (SET_SRC (body))
	  || GET_CODE (SET_SRC (body)) == SUBREG
	  || CONSTANT_P (SET_SRC (body)))
      /* Sometimes the store and reload is used for truncation and
	 rounding.  */
      && !(FLOAT_MODE_P (GET_MODE (mem)) && (flag_float_store)))
    store_info->rhs = SET_SRC (body);
  else
    store_info->rhs = NULL;

  /* If this is a clobber, we return 0.  We will only be able to
     delete this insn if there is only one store that is actually
     used, but we can use the clobber to delete other stores
     earlier.  */
  return store_info->is_set ? 1 : 0;
}
static void
dump_insn_info (const char * start, insn_info_t insn_info)
{
  fprintf (dump_file, "%s insn=%d %s\n", start,
	   INSN_UID (insn_info->insn),
	   insn_info->store_rec ? "has store" : "naked");
}
/* If the modes are different and the value's source and target do not
   line up, we need to extract the value from the lower part of the rhs
   of the store, shift it, and then put it into a form that can be shoved
   into the read_insn.  This function generates a right SHIFT of a
   value that is at least ACCESS_SIZE bytes wide of READ_MODE.  The
   shift sequence is returned or NULL if we failed to find a
   shift.  */

static rtx
find_shift_sequence (int access_size,
		     store_info_t store_info,
		     read_info_t read_info,
		     int shift)
{
  enum machine_mode store_mode = GET_MODE (store_info->mem);
  enum machine_mode read_mode = GET_MODE (read_info->mem);
  enum machine_mode new_mode;
  rtx read_reg = NULL;

  /* Some machines like the x86 have shift insns for each size of
     operand.  Other machines like the ppc or the ia-64 may only have
     shift insns that shift values within 32 or 64 bit registers.
     This loop tries to find the smallest shift insn that will right
     justify the value we want to read but is available in one insn on
     the machine.  */

  for (new_mode = smallest_mode_for_size (access_size * BITS_PER_UNIT,
					  MODE_INT);
       GET_MODE_BITSIZE (new_mode) <= BITS_PER_WORD;
       new_mode = GET_MODE_WIDER_MODE (new_mode))
    {
      rtx target, new_reg, shift_seq, insn, new_lhs;
      int cost;

      /* Try a wider mode if truncating the store mode to NEW_MODE
	 requires a real instruction.  */
      if (GET_MODE_BITSIZE (new_mode) < GET_MODE_BITSIZE (store_mode)
	  && !TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (new_mode),
				     GET_MODE_BITSIZE (store_mode)))
	continue;

      /* Also try a wider mode if the necessary punning is either not
	 desirable or not possible.  */
      if (!CONSTANT_P (store_info->rhs)
	  && !MODES_TIEABLE_P (new_mode, store_mode))
	continue;

      new_lhs = simplify_gen_subreg (new_mode, copy_rtx (store_info->rhs),
				     store_mode, 0);
      if (new_lhs == NULL_RTX)
	continue;

      new_reg = gen_reg_rtx (new_mode);

      start_sequence ();

      /* In theory we could also check for an ashr.  Ian Taylor knows
	 of one dsp where the cost of these two was not the same.  But
	 this really is a rare case anyway.  */
      target = expand_binop (new_mode, lshr_optab, new_reg,
			     GEN_INT (shift), new_reg, 1, OPTAB_DIRECT);

      shift_seq = get_insns ();
      end_sequence ();

      if (target != new_reg || shift_seq == NULL)
	continue;

      cost = 0;
      for (insn = shift_seq; insn != NULL_RTX; insn = NEXT_INSN (insn))
	if (INSN_P (insn))
	  cost += insn_rtx_cost (PATTERN (insn));

      /* The computation up to here is essentially independent
	 of the arguments and could be precomputed.  It may
	 not be worth doing so.  We could precompute if
	 worthwhile or at least cache the results.  The result
	 technically depends on both SHIFT and ACCESS_SIZE,
	 but in practice the answer will depend only on ACCESS_SIZE.  */

      if (cost > COSTS_N_INSNS (1))
	continue;

      /* We found an acceptable shift.  Generate a move to
	 take the value from the store and put it into the
	 shift pseudo, then shift it, then generate another
	 move to put it into the target of the read.  */
      emit_move_insn (new_reg, new_lhs);
      emit_insn (shift_seq);
      read_reg = extract_low_bits (read_mode, new_mode, new_reg);
      break;
    }

  return read_reg;
}
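
/* As an illustrative example: on a little-endian target that stores
   an SImode value at [A, A+4) and later reads the HImode value at
   [A+2, A+4), SHIFT is 16 and ACCESS_SIZE is 4.  The loop above would
   try an SImode logical right shift by 16 and, if a single cheap insn
   can do it, extract the low HImode bits of the result.  */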
/* Take a sequence of:
     A <- r1
     ...
     ... <- A

   and change it into
     r2 <- r1
     A <- r1
     ...
     ... <- r2

   or

     r3 <- extract (r1)
     r3 <- r3 >> shift
     r2 <- extract (r3)
     ... <- r2

   or

     r2 <- extract (r1)
     ... <- r2

   depending on the alignment and the mode of the store and
   subsequent load.


   The STORE_INFO and STORE_INSN are for the store and READ_INFO
   and READ_INSN are for the read.  Return true if the replacement
   went ok.  */
static bool
replace_read (store_info_t store_info, insn_info_t store_insn,
	      read_info_t read_info, insn_info_t read_insn, rtx *loc)
{
  enum machine_mode store_mode = GET_MODE (store_info->mem);
  enum machine_mode read_mode = GET_MODE (read_info->mem);
  int shift;
  int access_size; /* In bytes.  */
  rtx insns, read_reg;

  if (!dbg_cnt (dse))
    return false;

  /* To get here the read is within the boundaries of the write so
     shift will never be negative.  Start out with the shift being in
     bytes.  */
  if (BYTES_BIG_ENDIAN)
    shift = store_info->end - read_info->end;
  else
    shift = read_info->begin - store_info->begin;

  access_size = shift + GET_MODE_SIZE (read_mode);

  /* From now on it is bits.  */
  shift *= BITS_PER_UNIT;

  /* Create a sequence of instructions to set up the read register.
     This sequence goes immediately before the store and its result
     is read by the load.

     We need to keep this in perspective.  We are replacing a read
     with a sequence of insns, but the read will almost certainly be
     in cache, so it is not going to be an expensive one.  Thus, we
     are not willing to do a multi insn shift or worse a subroutine
     call to get rid of the read.  */
  if (dump_file)
    fprintf (dump_file, "trying to replace %smode load in insn %d"
	     " from %smode store in insn %d\n",
	     GET_MODE_NAME (read_mode), INSN_UID (read_insn->insn),
	     GET_MODE_NAME (store_mode), INSN_UID (store_insn->insn));

  start_sequence ();
  if (shift)
    read_reg = find_shift_sequence (access_size, store_info, read_info, shift);
  else
    read_reg = extract_low_bits (read_mode, store_mode,
				 copy_rtx (store_info->rhs));
  if (read_reg == NULL_RTX)
    {
      end_sequence ();
      if (dump_file)
	fprintf (dump_file, " -- could not extract bits of stored value\n");
      return false;
    }

  /* Force the value into a new register so that it won't be clobbered
     between the store and the load.  */
  read_reg = copy_to_mode_reg (read_mode, read_reg);
  insns = get_insns ();
  end_sequence ();

  if (validate_change (read_insn->insn, loc, read_reg, 0))
    {
      deferred_change_t deferred_change = pool_alloc (deferred_change_pool);

      /* Insert this right before the store insn where it will be safe
	 from later insns that might change it before the read.  */
      emit_insn_before (insns, store_insn->insn);

      /* And now for the kludge part: cselib croaks if you just
	 return at this point.  There are two reasons for this:

	 1) Cselib has an idea of how many pseudos there are and
	 that does not include the new ones we just added.

	 2) Cselib does not know about the move insn we added
	 above the store_info, and there is no way to tell it
	 about it, because it has "moved on".

	 Problem (1) is fixable with a certain amount of engineering.
	 Problem (2) requires starting the bb from scratch.  This
	 could be expensive.

	 So we are just going to have to lie.  The move/extraction
	 insns are not really an issue, cselib did not see them.  But
	 the use of the new pseudo read_insn is a real problem because
	 cselib has not scanned this insn.  The way that we solve this
	 problem is that we are just going to put the mem back for now
	 and when we are finished with the block, we undo this.  We
	 keep a table of mems to get rid of.  At the end of the basic
	 block we can put them back.  */

      *loc = read_info->mem;
      deferred_change->next = deferred_change_list;
      deferred_change_list = deferred_change;
      deferred_change->loc = loc;
      deferred_change->reg = read_reg;

      /* Get rid of the read_info, from the point of view of the
	 rest of dse, play like this read never happened.  */
      read_insn->read_rec = read_info->next;
      pool_free (read_info_pool, read_info);
      if (dump_file)
	{
	  fprintf (dump_file, " -- replaced the loaded MEM with ");
	  print_simple_rtl (dump_file, read_reg);
	  fprintf (dump_file, "\n");
	}
      return true;
    }
  else
    {
      if (dump_file)
	{
	  fprintf (dump_file, " -- replacing the loaded MEM with ");
	  print_simple_rtl (dump_file, read_reg);
	  fprintf (dump_file, " led to an invalid instruction\n");
	}
      return false;
    }
}
1660 /* A for_each_rtx callback in which DATA is the bb_info. Check to see
1661 if LOC is a mem and if it is look at the address and kill any
1662 appropriate stores that may be active. */
1664 static int
1665 check_mem_read_rtx (rtx *loc, void *data)
1667 rtx mem = *loc;
1668 bb_info_t bb_info;
1669 insn_info_t insn_info;
1670 HOST_WIDE_INT offset = 0;
1671 HOST_WIDE_INT width = 0;
1672 alias_set_type spill_alias_set = 0;
1673 cselib_val *base = NULL;
1674 int group_id;
1675 read_info_t read_info;
1677 if (!mem || !MEM_P (mem))
1678 return 0;
1680 bb_info = (bb_info_t) data;
1681 insn_info = bb_info->last_insn;
1683 if ((MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
1684 || (MEM_VOLATILE_P (mem)))
1686 if (dump_file)
1687 fprintf (dump_file, " adding wild read, volatile or barrier.\n");
1688 add_wild_read (bb_info);
1689 insn_info->cannot_delete = true;
1690 return 0;
1693 /* If it is reading readonly mem, then there can be no conflict with
1694 another write. */
1695 if (MEM_READONLY_P (mem))
1696 return 0;
1698 if (!canon_address (mem, &spill_alias_set, &group_id, &offset, &base))
1700 if (dump_file)
1701 fprintf (dump_file, " adding wild read, canon_address failure.\n");
1702 add_wild_read (bb_info);
1703 return 0;
1706 if (GET_MODE (mem) == BLKmode)
1707 width = -1;
1708 else
1709 width = GET_MODE_SIZE (GET_MODE (mem));
1711 read_info = pool_alloc (read_info_pool);
1712 read_info->group_id = group_id;
1713 read_info->mem = mem;
1714 read_info->alias_set = spill_alias_set;
1715 read_info->begin = offset;
1716 read_info->end = offset + width;
1717 read_info->next = insn_info->read_rec;
1718 insn_info->read_rec = read_info;
1720 /* We ignore the clobbers in store_info. The is mildly aggressive,
1721 but there really should not be a clobber followed by a read. */
1723 if (spill_alias_set)
1725 insn_info_t i_ptr = active_local_stores;
1726 insn_info_t last = NULL;
1728 if (dump_file)
1729 fprintf (dump_file, " processing spill load %d\n",
1730 (int) spill_alias_set);
1732 while (i_ptr)
1734 store_info_t store_info = i_ptr->store_rec;
1736 /* Skip the clobbers. */
1737 while (!store_info->is_set)
1738 store_info = store_info->next;
1740 if (store_info->alias_set == spill_alias_set)
1742 if (dump_file)
1743 dump_insn_info ("removing from active", i_ptr);
1745 if (last)
1746 last->next_local_store = i_ptr->next_local_store;
1747 else
1748 active_local_stores = i_ptr->next_local_store;
1750 else
1751 last = i_ptr;
1752 i_ptr = i_ptr->next_local_store;
1755 else if (group_id >= 0)
1757 /* This is the restricted case where the base is a constant or
1758 the frame pointer and offset is a constant. */
1759 insn_info_t i_ptr = active_local_stores;
1760 insn_info_t last = NULL;
1762 if (dump_file)
1764 if (width == -1)
1765 fprintf (dump_file, " processing const load gid=%d[BLK]\n",
1766 group_id);
1767 else
1768 fprintf (dump_file, " processing const load gid=%d[%d..%d)\n",
1769 group_id, (int)offset, (int)(offset+width));
1772 while (i_ptr)
1774 bool remove = false;
1775 store_info_t store_info = i_ptr->store_rec;
1777 /* Skip the clobbers. */
1778 while (!store_info->is_set)
1779 store_info = store_info->next;
1781 /* There are three cases here. */
1782 if (store_info->group_id < 0)
1783 /* We have a cselib store followed by a read from a
1784 const base. */
1785 remove
1786 = canon_true_dependence (store_info->mem,
1787 GET_MODE (store_info->mem),
1788 store_info->mem_addr,
1789 mem, rtx_varies_p);
1791 else if (group_id == store_info->group_id)
1793 /* This is a block mode load. We may get lucky and
1794 canon_true_dependence may save the day. */
1795 if (width == -1)
1796 remove
1797 = canon_true_dependence (store_info->mem,
1798 GET_MODE (store_info->mem),
1799 store_info->mem_addr,
1800 mem, rtx_varies_p);
1802 /* If this read is just reading back something that we just
1803 stored, rewrite the read. */
1804 else
1806 if (store_info->rhs
1807 && (offset >= store_info->begin)
1808 && (offset + width <= store_info->end))
1810 int mask = ((1L << width) - 1) << (offset - store_info->begin);
1812 if ((store_info->positions_needed & mask) == mask
1813 && replace_read (store_info, i_ptr,
1814 read_info, insn_info, loc))
1815 return 0;
1817 /* The bases are the same, just see if the offsets
1818 overlap. */
1819 if ((offset < store_info->end)
1820 && (offset + width > store_info->begin))
1821 remove = true;
1825 /* else
1826 The else case that is missing here is that the
1827 bases are constant but different. There is nothing
1828 to do here because there is no overlap. */
1830 if (remove)
1832 if (dump_file)
1833 dump_insn_info ("removing from active", i_ptr);
1835 if (last)
1836 last->next_local_store = i_ptr->next_local_store;
1837 else
1838 active_local_stores = i_ptr->next_local_store;
1840 else
1841 last = i_ptr;
1842 i_ptr = i_ptr->next_local_store;
1845 else
1847 insn_info_t i_ptr = active_local_stores;
1848 insn_info_t last = NULL;
1849 if (dump_file)
1851 fprintf (dump_file, " processing cselib load mem:");
1852 print_inline_rtx (dump_file, mem, 0);
1853 fprintf (dump_file, "\n");
1856 while (i_ptr)
1858 bool remove = false;
1859 store_info_t store_info = i_ptr->store_rec;
1861 if (dump_file)
1862 fprintf (dump_file, " processing cselib load against insn %d\n",
1863 INSN_UID (i_ptr->insn));
1865 /* Skip the clobbers. */
1866 while (!store_info->is_set)
1867 store_info = store_info->next;
1869 /* If this read is just reading back something that we just
1870 stored, rewrite the read. */
1871 if (store_info->rhs
1872 && store_info->group_id == -1
1873 && store_info->cse_base == base
1874 && (offset >= store_info->begin)
1875 && (offset + width <= store_info->end))
1877 int mask = ((1L << width) - 1) << (offset - store_info->begin);
1879 if ((store_info->positions_needed & mask) == mask
1880 && replace_read (store_info, i_ptr,
1881 read_info, insn_info, loc))
1882 return 0;
1885 if (!store_info->alias_set)
1886 remove = canon_true_dependence (store_info->mem,
1887 GET_MODE (store_info->mem),
1888 store_info->mem_addr,
1889 mem, rtx_varies_p);
1891 if (remove)
1893 if (dump_file)
1894 dump_insn_info ("removing from active", i_ptr);
1896 if (last)
1897 last->next_local_store = i_ptr->next_local_store;
1898 else
1899 active_local_stores = i_ptr->next_local_store;
1901 else
1902 last = i_ptr;
1903 i_ptr = i_ptr->next_local_store;
1906 return 0;
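/* Illustrative note (not part of the original source): the
   positions_needed tests above are pure byte-mask checks.  Suppose a
   store covered bytes [4, 8), so its positions_needed is 0xf, and a
   later read asks for offset == 5 and width == 2.  Then

     int mask = ((1L << 2) - 1) << (5 - 4);      => mask == 0x6

   and (0xf & 0x6) == 0x6, so every byte the read needs is still
   provided by the store, and replace_read may rewrite the read to
   use the store's rhs directly.  */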
1909 /* A note_uses callback in which DATA points to the bb_info for the
1910 current basic block. Apply check_mem_read_rtx, via for_each_rtx,
1911 to every part of *LOC. */
1913 static void
1914 check_mem_read_use (rtx *loc, void *data)
1916 for_each_rtx (loc, check_mem_read_rtx, data);
1919 /* Apply record_store to all candidate stores in INSN. Mark INSN
1920 as non-deletable if some part of it is not a candidate store
1921 and assigns to a non-register target. */
1923 static void
1924 scan_insn (bb_info_t bb_info, rtx insn)
1926 rtx body;
1927 insn_info_t insn_info = pool_alloc (insn_info_pool);
1928 int mems_found = 0;
1929 memset (insn_info, 0, sizeof (struct insn_info));
1931 if (dump_file)
1932 fprintf (dump_file, "\n**scanning insn=%d\n",
1933 INSN_UID (insn));
1935 insn_info->prev_insn = bb_info->last_insn;
1936 insn_info->insn = insn;
1937 bb_info->last_insn = insn_info;
1940 /* Cselib clears the table for this case, so we have to essentially
1941 do the same. */
1942 if (NONJUMP_INSN_P (insn)
1943 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
1944 && MEM_VOLATILE_P (PATTERN (insn)))
1946 add_wild_read (bb_info);
1947 insn_info->cannot_delete = true;
1948 return;
1951 /* Look at all of the uses in the insn. */
1952 note_uses (&PATTERN (insn), check_mem_read_use, bb_info);
1954 if (CALL_P (insn))
1956 insn_info->cannot_delete = true;
1958 /* Const functions cannot do anything bad, i.e. they cannot read
1959 memory. However, they can read their parameters, which may
1960 have been pushed onto the stack. */
1961 if (CONST_OR_PURE_CALL_P (insn) && !pure_call_p (insn))
1963 insn_info_t i_ptr = active_local_stores;
1964 insn_info_t last = NULL;
1966 if (dump_file)
1967 fprintf (dump_file, "const call %d\n", INSN_UID (insn));
1969 /* See the head comment of the frame_read field. */
1970 if (reload_completed)
1971 insn_info->frame_read = true;
1973 /* Loop over the active stores and remove those which are
1974 killed by the const function call. */
1975 while (i_ptr)
1977 bool remove_store = false;
1979 /* The stack pointer based stores are always killed. */
1980 if (i_ptr->stack_pointer_based)
1981 remove_store = true;
1983 /* If the frame is read, the frame related stores are killed. */
1984 else if (insn_info->frame_read)
1986 store_info_t store_info = i_ptr->store_rec;
1988 /* Skip the clobbers. */
1989 while (!store_info->is_set)
1990 store_info = store_info->next;
1992 if (store_info->group_id >= 0
1993 && VEC_index (group_info_t, rtx_group_vec,
1994 store_info->group_id)->frame_related)
1995 remove_store = true;
1998 if (remove_store)
2000 if (dump_file)
2001 dump_insn_info ("removing from active", i_ptr);
2003 if (last)
2004 last->next_local_store = i_ptr->next_local_store;
2005 else
2006 active_local_stores = i_ptr->next_local_store;
2008 else
2009 last = i_ptr;
2011 i_ptr = i_ptr->next_local_store;
2015 else
2016 /* Every other call, including pure functions, may read memory. */
2017 add_wild_read (bb_info);
2019 return;
2022 /* Assuming that there are sets in these insns, we cannot delete
2023 them. */
2024 if ((GET_CODE (PATTERN (insn)) == CLOBBER)
2025 || volatile_refs_p (PATTERN (insn))
2026 || (flag_non_call_exceptions && may_trap_p (PATTERN (insn)))
2027 || (RTX_FRAME_RELATED_P (insn))
2028 || find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX))
2029 insn_info->cannot_delete = true;
2031 body = PATTERN (insn);
2032 if (GET_CODE (body) == PARALLEL)
2034 int i;
2035 for (i = 0; i < XVECLEN (body, 0); i++)
2036 mems_found += record_store (XVECEXP (body, 0, i), bb_info);
2038 else
2039 mems_found += record_store (body, bb_info);
2041 if (dump_file)
2042 fprintf (dump_file, "mems_found = %d, cannot_delete = %s\n",
2043 mems_found, insn_info->cannot_delete ? "true" : "false");
2045 /* If we found exactly one set of a mem, and the insn has not
2046 been marked as non-deletable, add it to active_local_stores so
2047 that it can be locally deleted if found dead. Otherwise mark
2048 it as non-deletable. This simplifies the processing later. */
2049 if (mems_found == 1 && !insn_info->cannot_delete)
2051 insn_info->next_local_store = active_local_stores;
2052 active_local_stores = insn_info;
2054 else
2055 insn_info->cannot_delete = true;
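/* For example (hypothetical RTL, for illustration only): an insn
   whose pattern is

     (parallel [(set (mem:SI (reg/f:SI 54)) (reg:SI 55))
                (clobber (reg:CC 17))])

   has record_store applied to each element of the PARALLEL.  Only
   the SET of the MEM counts toward mems_found, so mems_found == 1
   and, if nothing above set cannot_delete, the insn is pushed onto
   active_local_stores.  */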
2059 /* Remove BASE from the set of active_local_stores. This is a
2060 callback from cselib that is used to get rid of the stores in
2061 active_local_stores. */
2063 static void
2064 remove_useless_values (cselib_val *base)
2066 insn_info_t insn_info = active_local_stores;
2067 insn_info_t last = NULL;
2069 while (insn_info)
2071 store_info_t store_info = insn_info->store_rec;
2072 bool delete = false;
2074 /* If ANY of the store_infos match the cselib base that is
2075 being discarded, then the insn cannot be deleted. */
2076 while (store_info)
2078 if ((store_info->group_id == -1)
2079 && (store_info->cse_base == base))
2081 delete = true;
2082 break;
2084 store_info = store_info->next;
2087 if (delete)
2089 if (last)
2090 last->next_local_store = insn_info->next_local_store;
2091 else
2092 active_local_stores = insn_info->next_local_store;
2093 free_store_info (insn_info);
2095 else
2096 last = insn_info;
2098 insn_info = insn_info->next_local_store;
2103 /* Do all of step 1. */
2105 static void
2106 dse_step1 (void)
2108 basic_block bb;
2110 cselib_init (false);
2111 all_blocks = BITMAP_ALLOC (NULL);
2112 bitmap_set_bit (all_blocks, ENTRY_BLOCK);
2113 bitmap_set_bit (all_blocks, EXIT_BLOCK);
2115 FOR_ALL_BB (bb)
2117 insn_info_t ptr;
2118 bb_info_t bb_info = pool_alloc (bb_info_pool);
2120 memset (bb_info, 0, sizeof (struct bb_info));
2121 bitmap_set_bit (all_blocks, bb->index);
2123 bb_table[bb->index] = bb_info;
2124 cselib_discard_hook = remove_useless_values;
2126 if (bb->index >= NUM_FIXED_BLOCKS)
2128 rtx insn;
2130 cse_store_info_pool
2131 = create_alloc_pool ("cse_store_info_pool",
2132 sizeof (struct store_info), 100);
2133 active_local_stores = NULL;
2134 cselib_clear_table ();
2136 /* Scan the insns. */
2137 FOR_BB_INSNS (bb, insn)
2139 if (INSN_P (insn))
2140 scan_insn (bb_info, insn);
2141 cselib_process_insn (insn);
2144 /* This is something of a hack, because the global algorithm
2145 is supposed to take care of the case where stores go dead
2146 at the end of the function. However, the global
2147 algorithm must take a more conservative view of block
2148 mode reads than the local alg does. So to get the case
2149 where you have a store to the frame followed by a
2150 non-overlapping block mode read, we look at the active local
2151 stores at the end of the function and delete all of the
2152 frame and spill based ones. */
2153 if (stores_off_frame_dead_at_return
2154 && (EDGE_COUNT (bb->succs) == 0
2155 || (single_succ_p (bb)
2156 && single_succ (bb) == EXIT_BLOCK_PTR
2157 && ! current_function_calls_eh_return)))
2159 insn_info_t i_ptr = active_local_stores;
2160 while (i_ptr)
2162 store_info_t store_info = i_ptr->store_rec;
2164 /* Skip the clobbers. */
2165 while (!store_info->is_set)
2166 store_info = store_info->next;
2167 if (store_info->alias_set)
2168 delete_dead_store_insn (i_ptr);
2169 else
2170 if (store_info->group_id >= 0)
2172 group_info_t group
2173 = VEC_index (group_info_t, rtx_group_vec, store_info->group_id);
2174 if (group->frame_related)
2175 delete_dead_store_insn (i_ptr);
2178 i_ptr = i_ptr->next_local_store;
2182 /* Get rid of the loads that were discovered in
2183 replace_read. Cselib is finished with this block. */
2184 while (deferred_change_list)
2186 deferred_change_t next = deferred_change_list->next;
2188 /* There is no reason to validate this change. That was
2189 done earlier. */
2190 *deferred_change_list->loc = deferred_change_list->reg;
2191 pool_free (deferred_change_pool, deferred_change_list);
2192 deferred_change_list = next;
2195 /* Get rid of all of the cselib based store_infos in this
2196 block and mark the containing insns as not being
2197 deletable. */
2198 ptr = bb_info->last_insn;
2199 while (ptr)
2201 if (ptr->contains_cselib_groups)
2202 free_store_info (ptr);
2203 ptr = ptr->prev_insn;
2206 free_alloc_pool (cse_store_info_pool);
2210 cselib_finish ();
2211 htab_empty (rtx_group_table);
2215 /*----------------------------------------------------------------------------
2216 Second step.
2218 Assign each byte position in the stores that we are going to
2219 analyze globally to a position in the bitmaps. Returns true if
2220 there are any bit positions assigned.
2221 ----------------------------------------------------------------------------*/
2223 static void
2224 dse_step2_init (void)
2226 unsigned int i;
2227 group_info_t group;
2229 for (i = 0; VEC_iterate (group_info_t, rtx_group_vec, i, group); i++)
2231 /* For all non stack related bases, we only consider a store to
2232 be deletable if there are two or more stores for that
2233 position. This is because it takes one store to make the
2234 other store redundant. However, for the stores that are
2235 stack related, we consider them if there is only one store
2236 for the position. We do this because the stack related
2237 stores can be deleted if there is no read between them and
2238 the end of the function.
2240 To make this work in the current framework, for the stack
2241 related bases we add all of the bits from store1 into store2.
2242 This has the effect of making them eligible even if there is
2243 only one store. */
2245 if (stores_off_frame_dead_at_return && group->frame_related)
2247 bitmap_ior_into (group->store2_n, group->store1_n);
2248 bitmap_ior_into (group->store2_p, group->store1_p);
2249 if (dump_file)
2250 fprintf (dump_file, "group %d is frame related ", i);
2253 group->offset_map_size_n++;
2254 group->offset_map_n = XNEWVEC (int, group->offset_map_size_n);
2255 group->offset_map_size_p++;
2256 group->offset_map_p = XNEWVEC (int, group->offset_map_size_p);
2257 group->process_globally = false;
2258 if (dump_file)
2260 fprintf (dump_file, "group %d(%d+%d): ", i,
2261 (int)bitmap_count_bits (group->store2_n),
2262 (int)bitmap_count_bits (group->store2_p));
2263 bitmap_print (dump_file, group->store2_n, "n ", " ");
2264 bitmap_print (dump_file, group->store2_p, "p ", "\n");
2270 /* Init the offset tables for the normal case. */
2272 static bool
2273 dse_step2_nospill (void)
2275 unsigned int i;
2276 group_info_t group;
2277 /* Position 0 is reserved: a 0 entry in the offset maps means
2278 the position was never assigned. */
2279 current_position = 1;
2281 for (i = 0; VEC_iterate (group_info_t, rtx_group_vec, i, group); i++)
2283 bitmap_iterator bi;
2284 unsigned int j;
2286 if (group == clear_alias_group)
2287 continue;
2289 memset (group->offset_map_n, 0, sizeof(int) * group->offset_map_size_n);
2290 memset (group->offset_map_p, 0, sizeof(int) * group->offset_map_size_p);
2291 bitmap_clear (group->group_kill);
2293 EXECUTE_IF_SET_IN_BITMAP (group->store2_n, 0, j, bi)
2295 bitmap_set_bit (group->group_kill, current_position);
2296 group->offset_map_n[j] = current_position++;
2297 group->process_globally = true;
2299 EXECUTE_IF_SET_IN_BITMAP (group->store2_p, 0, j, bi)
2301 bitmap_set_bit (group->group_kill, current_position);
2302 group->offset_map_p[j] = current_position++;
2303 group->process_globally = true;
2306 return current_position != 1;
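/* A worked example (not from the original source): if a group's
   store2_n is empty and its store2_p contains {16, 17, 18, 19}, the
   loops above assign

     offset_map_p[16] = 1, offset_map_p[17] = 2,
     offset_map_p[18] = 3, offset_map_p[19] = 4,

   set bits 1 through 4 in group->group_kill, and leave
   current_position at 5 for the next group.  Positions are global
   across groups, so every byte considered anywhere in the function
   gets a unique bit in the dataflow bitmaps.  */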
2310 /* Init the offset tables for the spill case. */
2312 static bool
2313 dse_step2_spill (void)
2315 unsigned int j;
2316 group_info_t group = clear_alias_group;
2317 bitmap_iterator bi;
2319 /* Position 0 is reserved: a 0 entry in the offset maps means
2320 the position was never assigned. */
2321 current_position = 1;
2323 if (dump_file)
2325 bitmap_print (dump_file, clear_alias_sets,
2326 "clear alias sets ", "\n");
2327 bitmap_print (dump_file, disqualified_clear_alias_sets,
2328 "disqualified clear alias sets ", "\n");
2331 memset (group->offset_map_n, 0, sizeof(int) * group->offset_map_size_n);
2332 memset (group->offset_map_p, 0, sizeof(int) * group->offset_map_size_p);
2333 bitmap_clear (group->group_kill);
2335 /* Remove the disqualified positions from the store2_p set. */
2336 bitmap_and_compl_into (group->store2_p, disqualified_clear_alias_sets);
2338 /* We do not need to process the store2_n set because
2339 alias_sets are always positive. */
2340 EXECUTE_IF_SET_IN_BITMAP (group->store2_p, 0, j, bi)
2342 bitmap_set_bit (group->group_kill, current_position);
2343 group->offset_map_p[j] = current_position++;
2344 group->process_globally = true;
2347 return current_position != 1;
2352 /*----------------------------------------------------------------------------
2353 Third step.
2355 Build the bit vectors for the transfer functions.
2356 ----------------------------------------------------------------------------*/
2359 /* Note that this is NOT a general purpose function. Any mem that has
2360 an alias set registered here is expected to be COMPLETELY unaliased:
2361 i.e. its addresses are not and need not be examined.
2363 It is known that all references to this address will have this
2364 alias set and there are NO other references to this address in the
2365 function.
2367 Currently the only place that is known to be clean enough to use
2368 this interface is the code that assigns the spill locations.
2370 All of the mems that have alias_sets registered are subjected to a
2371 very powerful form of dse where function calls, volatile reads and
2372 writes, and reads from random locations are not taken into account.
2374 It is also assumed that these locations go dead when the function
2375 returns. This assumption could be relaxed if places were found
2376 where it does not hold.
2378 The MODE is passed in and saved. The mode of each load or store to
2379 a mem with ALIAS_SET is checked against MODE. If the size of that
2380 load or store is different from MODE, processing is halted on this
2381 alias set. For the vast majority of alias sets, all of the loads
2382 and stores will use the same mode. But vectors are treated
2383 differently: the alias set is established for the entire vector,
2384 but reload will insert loads and stores for individual elements and
2385 we do not necessarily have the information to track those separate
2386 elements. So when we see a mode mismatch, we just bail. */
2389 void
2390 dse_record_singleton_alias_set (alias_set_type alias_set,
2391 enum machine_mode mode)
2393 struct clear_alias_mode_holder tmp_holder;
2394 struct clear_alias_mode_holder *entry;
2395 void **slot;
2397 /* If we are not going to run dse, we need to return now or there
2398 will be problems with allocating the bitmaps. */
2399 if ((!gate_dse()) || !alias_set)
2400 return;
2402 if (!clear_alias_sets)
2404 clear_alias_sets = BITMAP_ALLOC (NULL);
2405 disqualified_clear_alias_sets = BITMAP_ALLOC (NULL);
2406 clear_alias_mode_table = htab_create (11, clear_alias_mode_hash,
2407 clear_alias_mode_eq, NULL);
2408 clear_alias_mode_pool = create_alloc_pool ("clear_alias_mode_pool",
2409 sizeof (struct clear_alias_mode_holder), 100);
2412 bitmap_set_bit (clear_alias_sets, alias_set);
2414 tmp_holder.alias_set = alias_set;
2416 slot = htab_find_slot (clear_alias_mode_table, &tmp_holder, INSERT);
2417 gcc_assert (*slot == NULL);
2419 *slot = entry = pool_alloc (clear_alias_mode_pool);
2420 entry->alias_set = alias_set;
2421 entry->mode = mode;
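/* A usage sketch (hedged: per the comment above, the real caller is
   the reload code that assigns spill slots, not shown here; the mem
   name SPILL_MEM is hypothetical).  Registering a 4-byte slot would
   look something like:

     alias_set_type set = new_alias_set ();
     set_mem_alias_set (spill_mem, set);
     dse_record_singleton_alias_set (set, SImode);

   after which every load and store to SPILL_MEM is tracked purely by
   its alias set, with no address analysis at all.  */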
2425 /* Remove ALIAS_SET from the sets of stack slots being considered. */
2427 void
2428 dse_invalidate_singleton_alias_set (alias_set_type alias_set)
2430 if ((!gate_dse()) || !alias_set)
2431 return;
2433 bitmap_clear_bit (clear_alias_sets, alias_set);
2437 /* Look up the bitmap index for OFFSET in GROUP_INFO. If it is not
2438 there, return 0. */
2440 static int
2441 get_bitmap_index (group_info_t group_info, HOST_WIDE_INT offset)
2443 if (offset < 0)
2445 HOST_WIDE_INT offset_p = -offset;
2446 if (offset_p >= group_info->offset_map_size_n)
2447 return 0;
2448 return group_info->offset_map_n[offset_p];
2450 else
2452 if (offset >= group_info->offset_map_size_p)
2453 return 0;
2454 return group_info->offset_map_p[offset];
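/* For instance, get_bitmap_index (group, -3) looks up
   group->offset_map_n[3], while get_bitmap_index (group, 12) looks
   up group->offset_map_p[12]; either returns 0 when step 2 never
   assigned that offset a bit position.  */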
2459 /* Process the STORE_INFOs into the bitmaps GEN and KILL. KILL
2460 may be NULL. */
2462 static void
2463 scan_stores_nospill (store_info_t store_info, bitmap gen, bitmap kill)
2465 while (store_info)
2467 HOST_WIDE_INT i;
2468 group_info_t group_info
2469 = VEC_index (group_info_t, rtx_group_vec, store_info->group_id);
2470 if (group_info->process_globally)
2471 for (i = store_info->begin; i < store_info->end; i++)
2473 int index = get_bitmap_index (group_info, i);
2474 if (index != 0)
2476 bitmap_set_bit (gen, index);
2477 if (kill)
2478 bitmap_clear_bit (kill, index);
2481 store_info = store_info->next;
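/* Example (illustrative): a store to bytes [8, 12) of a globally
   processed group sets the bits returned by
   get_bitmap_index (group, 8) ... get_bitmap_index (group, 11) in
   GEN and clears the same bits in KILL.  Because the block is
   scanned backwards, the insn nearest the top of the block gets the
   last word on each byte position.  */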
2486 /* Process the STORE_INFOs into the bitmaps GEN and KILL. KILL
2487 may be NULL. */
2489 static void
2490 scan_stores_spill (store_info_t store_info, bitmap gen, bitmap kill)
2492 while (store_info)
2494 if (store_info->alias_set)
2496 int index = get_bitmap_index (clear_alias_group,
2497 store_info->alias_set);
2498 if (index != 0)
2500 bitmap_set_bit (gen, index);
2501 if (kill)
2502 bitmap_clear_bit (kill, index);
2505 store_info = store_info->next;
2510 /* Process the READ_INFOs into the bitmaps GEN and KILL. KILL
2511 may be NULL. */
2513 static void
2514 scan_reads_nospill (insn_info_t insn_info, bitmap gen, bitmap kill)
2516 read_info_t read_info = insn_info->read_rec;
2517 int i;
2518 group_info_t group;
2520 /* If this insn reads the frame, kill all the frame related stores. */
2521 if (insn_info->frame_read)
2523 for (i = 0; VEC_iterate (group_info_t, rtx_group_vec, i, group); i++)
2524 if (group->process_globally && group->frame_related)
2526 if (kill)
2527 bitmap_ior_into (kill, group->group_kill);
2528 bitmap_and_compl_into (gen, group->group_kill);
2532 while (read_info)
2534 for (i = 0; VEC_iterate (group_info_t, rtx_group_vec, i, group); i++)
2536 if (group->process_globally)
2538 if (i == read_info->group_id)
2540 if (read_info->begin > read_info->end)
2542 /* Begin > end for block mode reads. */
2543 if (kill)
2544 bitmap_ior_into (kill, group->group_kill);
2545 bitmap_and_compl_into (gen, group->group_kill);
2547 else
2549 /* The groups are the same, just process the
2550 offsets. */
2551 HOST_WIDE_INT j;
2552 for (j = read_info->begin; j < read_info->end; j++)
2554 int index = get_bitmap_index (group, j);
2555 if (index != 0)
2557 if (kill)
2558 bitmap_set_bit (kill, index);
2559 bitmap_clear_bit (gen, index);
2564 else
2566 /* The groups are different. If the alias sets
2567 conflict, clear the entire group. We only need
2568 to apply this test if the read_info is a cselib
2569 read. Anything with a constant base cannot alias
2570 something else with a different constant
2571 base. */
2572 if ((read_info->group_id < 0)
2573 && canon_true_dependence (group->base_mem,
2574 QImode,
2575 group->canon_base_mem,
2576 read_info->mem, rtx_varies_p))
2578 if (kill)
2579 bitmap_ior_into (kill, group->group_kill);
2580 bitmap_and_compl_into (gen, group->group_kill);
2586 read_info = read_info->next;
2590 /* Process the READ_INFOs into the bitmaps GEN and KILL. KILL
2591 may be NULL. */
2593 static void
2594 scan_reads_spill (read_info_t read_info, bitmap gen, bitmap kill)
2596 while (read_info)
2598 if (read_info->alias_set)
2600 int index = get_bitmap_index (clear_alias_group,
2601 read_info->alias_set);
2602 if (index != 0)
2604 if (kill)
2605 bitmap_set_bit (kill, index);
2606 bitmap_clear_bit (gen, index);
2610 read_info = read_info->next;
2615 /* Return the insn in BB_INFO before the first wild read or, if
2616 there are no wild reads in the block, the last insn. */
2618 static insn_info_t
2619 find_insn_before_first_wild_read (bb_info_t bb_info)
2621 insn_info_t insn_info = bb_info->last_insn;
2622 insn_info_t last_wild_read = NULL;
2624 while (insn_info)
2626 if (insn_info->wild_read)
2628 last_wild_read = insn_info->prev_insn;
2629 /* Block starts with wild read. */
2630 if (!last_wild_read)
2631 return NULL;
2634 insn_info = insn_info->prev_insn;
2637 if (last_wild_read)
2638 return last_wild_read;
2639 else
2640 return bb_info->last_insn;
2644 /* Scan the insns in BB_INFO starting at PTR and going to the top of
2645 the block in order to build the gen and kill sets for the block.
2646 We start at PTR, which may be the last insn in the block or may be
2647 the insn just before the first wild read. In the latter case we can
2648 skip the rest of the block because it just does not matter:
2649 anything that happens is hidden by the wild read. */
2651 static void
2652 dse_step3_scan (bool for_spills, basic_block bb)
2654 bb_info_t bb_info = bb_table[bb->index];
2655 insn_info_t insn_info;
2657 if (for_spills)
2658 /* There are no wild reads in the spill case. */
2659 insn_info = bb_info->last_insn;
2660 else
2661 insn_info = find_insn_before_first_wild_read (bb_info);
2663 /* In the spill case, or in the no_spill case when there is no
2664 wild read in the block, we will need a kill set. */
2665 if (insn_info == bb_info->last_insn)
2667 if (bb_info->kill)
2668 bitmap_clear (bb_info->kill);
2669 else
2670 bb_info->kill = BITMAP_ALLOC (NULL);
2672 else
2673 if (bb_info->kill)
2674 BITMAP_FREE (bb_info->kill);
2676 while (insn_info)
2678 /* There may have been code deleted by the dce pass run before
2679 this phase. */
2680 if (insn_info->insn && INSN_P (insn_info->insn))
2682 /* Process the read(s) last. */
2683 if (for_spills)
2685 scan_stores_spill (insn_info->store_rec, bb_info->gen, bb_info->kill);
2686 scan_reads_spill (insn_info->read_rec, bb_info->gen, bb_info->kill);
2688 else
2690 scan_stores_nospill (insn_info->store_rec, bb_info->gen, bb_info->kill);
2691 scan_reads_nospill (insn_info, bb_info->gen, bb_info->kill);
2695 insn_info = insn_info->prev_insn;
2700 /* Set the gen set of the exit block, and also any block with no
2701 successors that does not have a wild read. */
2703 static void
2704 dse_step3_exit_block_scan (bb_info_t bb_info)
2706 /* The gen set is all 0's for the exit block except for the
2707 frame_pointer_group. */
2709 if (stores_off_frame_dead_at_return)
2711 unsigned int i;
2712 group_info_t group;
2714 for (i = 0; VEC_iterate (group_info_t, rtx_group_vec, i, group); i++)
2716 if (group->process_globally && group->frame_related)
2717 bitmap_ior_into (bb_info->gen, group->group_kill);
2723 /* Find all of the blocks that are not backwards reachable from the
2724 exit block or any block with no successors (BB). These are the
2725 blocks in infinite loops or infinite self loops; such blocks will
2726 still have their bits set in UNREACHABLE_BLOCKS afterwards. */
2728 static void
2729 mark_reachable_blocks (sbitmap unreachable_blocks, basic_block bb)
2731 edge e;
2732 edge_iterator ei;
2734 if (TEST_BIT (unreachable_blocks, bb->index))
2736 RESET_BIT (unreachable_blocks, bb->index);
2737 FOR_EACH_EDGE (e, ei, bb->preds)
2739 mark_reachable_blocks (unreachable_blocks, e->src);
2744 /* Build the transfer functions for the function. */
2746 static void
2747 dse_step3 (bool for_spills)
2749 basic_block bb;
2750 sbitmap unreachable_blocks = sbitmap_alloc (last_basic_block);
2751 sbitmap_iterator sbi;
2752 bitmap all_ones = NULL;
2753 unsigned int i;
2755 sbitmap_ones (unreachable_blocks);
2757 FOR_ALL_BB (bb)
2759 bb_info_t bb_info = bb_table[bb->index];
2760 if (bb_info->gen)
2761 bitmap_clear (bb_info->gen);
2762 else
2763 bb_info->gen = BITMAP_ALLOC (NULL);
2765 if (bb->index == ENTRY_BLOCK)
2767 else if (bb->index == EXIT_BLOCK)
2768 dse_step3_exit_block_scan (bb_info);
2769 else
2770 dse_step3_scan (for_spills, bb);
2771 if (EDGE_COUNT (bb->succs) == 0)
2772 mark_reachable_blocks (unreachable_blocks, bb);
2774 /* If this is the second time dataflow is run, delete the old
2775 sets. */
2776 if (bb_info->in)
2777 BITMAP_FREE (bb_info->in);
2778 if (bb_info->out)
2779 BITMAP_FREE (bb_info->out);
2782 /* For any block in an infinite loop, we must initialize the out set
2783 to all ones. This could be expensive, but almost never occurs in
2784 practice. However, it is common in regression tests. */
2785 EXECUTE_IF_SET_IN_SBITMAP (unreachable_blocks, 0, i, sbi)
2787 if (bitmap_bit_p (all_blocks, i))
2789 bb_info_t bb_info = bb_table[i];
2790 if (!all_ones)
2792 unsigned int j;
2793 group_info_t group;
2795 all_ones = BITMAP_ALLOC (NULL);
2796 for (j = 0; VEC_iterate (group_info_t, rtx_group_vec, j, group); j++)
2797 bitmap_ior_into (all_ones, group->group_kill);
2799 if (!bb_info->out)
2801 bb_info->out = BITMAP_ALLOC (NULL);
2802 bitmap_copy (bb_info->out, all_ones);
2807 if (all_ones)
2808 BITMAP_FREE (all_ones);
2809 sbitmap_free (unreachable_blocks);
2814 /*----------------------------------------------------------------------------
2815 Fourth step.
2817 Solve the bitvector equations.
2818 ----------------------------------------------------------------------------*/
2821 /* Confluence function for blocks with no successors. Create an out
2822 set from the gen set of the exit block. This block logically has
2823 the exit block as a successor. */
2827 static void
2828 dse_confluence_0 (basic_block bb)
2830 bb_info_t bb_info = bb_table[bb->index];
2832 if (bb->index == EXIT_BLOCK)
2833 return;
2835 if (!bb_info->out)
2837 bb_info->out = BITMAP_ALLOC (NULL);
2838 bitmap_copy (bb_info->out, bb_table[EXIT_BLOCK]->gen);
2842 /* Propagate the information from the in set of the dest of E to the
2843 out set of the src of E. If the various in or out sets are not
2844 there, that means they are all ones. */
2846 static void
2847 dse_confluence_n (edge e)
2849 bb_info_t src_info = bb_table[e->src->index];
2850 bb_info_t dest_info = bb_table[e->dest->index];
2852 if (dest_info->in)
2854 if (src_info->out)
2855 bitmap_and_into (src_info->out, dest_info->in);
2856 else
2858 src_info->out = BITMAP_ALLOC (NULL);
2859 bitmap_copy (src_info->out, dest_info->in);
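/* A minimal sketch of the confluence: if a block has two successors
   whose in sets are {1, 2, 3} and {2, 3, 4}, its out set becomes
   {2, 3} -- a byte position is dead on exit only if it is dead along
   every outgoing edge.  A missing in set stands for all ones and so
   never narrows the result.  */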
2865 /* Propagate the info from the out to the in set of BB_INDEX's basic
2866 block. There are three cases:
2868 1) The block has no kill set. In this case the kill set is all
2869 ones. It does not matter what the out set of the block is, none of
2870 the info can reach the top. The only thing that reaches the top is
2871 the gen set and we just copy the set.
2873 2) There is a kill set but no out set and bb has successors. In
2874 this case we just return false. Eventually an out set will be created;
2875 it is better to wait than to create a set of ones.
2877 3) There is both a kill and out set. We apply the obvious transfer
2878 function.
2881 static bool
2882 dse_transfer_function (int bb_index)
2884 bb_info_t bb_info = bb_table[bb_index];
2886 if (bb_info->kill)
2888 if (bb_info->out)
2890 /* Case 3 above. */
2891 if (bb_info->in)
2892 return bitmap_ior_and_compl (bb_info->in, bb_info->gen,
2893 bb_info->out, bb_info->kill);
2894 else
2896 bb_info->in = BITMAP_ALLOC (NULL);
2897 bitmap_ior_and_compl (bb_info->in, bb_info->gen,
2898 bb_info->out, bb_info->kill);
2899 return true;
2902 else
2903 /* Case 2 above. */
2904 return false;
2906 else
2908 /* Case 1 above. If there is already an in set, nothing
2909 happens. */
2910 if (bb_info->in)
2911 return false;
2912 else
2914 bb_info->in = BITMAP_ALLOC (NULL);
2915 bitmap_copy (bb_info->in, bb_info->gen);
2916 return true;
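/* Worked example (not from the original source): with gen == {1},
   kill == {2} and out == {2, 3}, case 3 computes

     in = gen | (out & ~kill) == {1} | ({2, 3} & ~{2}) == {1, 3}

   i.e. a store is dead at the top of the block if the block itself
   kills it (gen) or if it is dead below the block and nothing in the
   block reads it (out & ~kill).  */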
2921 /* Solve the dataflow equations. */
2923 static void
2924 dse_step4 (void)
2926 df_simple_dataflow (DF_BACKWARD, NULL, dse_confluence_0,
2927 dse_confluence_n, dse_transfer_function,
2928 all_blocks, df_get_postorder (DF_BACKWARD),
2929 df_get_n_blocks (DF_BACKWARD));
2930 if (dump_file)
2932 basic_block bb;
2934 fprintf (dump_file, "\n\n*** Global dataflow info after analysis.\n");
2935 FOR_ALL_BB (bb)
2937 bb_info_t bb_info = bb_table[bb->index];
2939 df_print_bb_index (bb, dump_file);
2940 if (bb_info->in)
2941 bitmap_print (dump_file, bb_info->in, " in: ", "\n");
2942 else
2943 fprintf (dump_file, " in: *MISSING*\n");
2944 if (bb_info->gen)
2945 bitmap_print (dump_file, bb_info->gen, " gen: ", "\n");
2946 else
2947 fprintf (dump_file, " gen: *MISSING*\n");
2948 if (bb_info->kill)
2949 bitmap_print (dump_file, bb_info->kill, " kill: ", "\n");
2950 else
2951 fprintf (dump_file, " kill: *MISSING*\n");
2952 if (bb_info->out)
2953 bitmap_print (dump_file, bb_info->out, " out: ", "\n");
2954 else
2955 fprintf (dump_file, " out: *MISSING*\n\n");
2962 /*----------------------------------------------------------------------------
2963 Fifth step.
2965 Delete the stores that can only be deleted using the global information.
2966 ----------------------------------------------------------------------------*/
2969 static void
2970 dse_step5_nospill (void)
2972 basic_block bb;
2973 FOR_EACH_BB (bb)
2975 bb_info_t bb_info = bb_table[bb->index];
2976 insn_info_t insn_info = bb_info->last_insn;
2977 bitmap v = bb_info->out;
2979 while (insn_info)
2981 bool deleted = false;
2982 if (dump_file && insn_info->insn)
2984 fprintf (dump_file, "starting to process insn %d\n",
2985 INSN_UID (insn_info->insn));
2986 bitmap_print (dump_file, v, " v: ", "\n");
2989 /* There may have been code deleted by the dce pass run before
2990 this phase. */
2991 if (insn_info->insn
2992 && INSN_P (insn_info->insn)
2993 && (!insn_info->cannot_delete)
2994 && (!bitmap_empty_p (v)))
2996 store_info_t store_info = insn_info->store_rec;
2998 /* Try to delete the current insn. */
2999 deleted = true;
3001 /* Skip the clobbers. */
3002 while (!store_info->is_set)
3003 store_info = store_info->next;
3005 if (store_info->alias_set)
3006 deleted = false;
3007 else
3009 HOST_WIDE_INT i;
3010 group_info_t group_info
3011 = VEC_index (group_info_t, rtx_group_vec, store_info->group_id);
3013 for (i = store_info->begin; i < store_info->end; i++)
3015 int index = get_bitmap_index (group_info, i);
3017 if (dump_file)
3018 fprintf (dump_file, "i = %d, index = %d\n", (int)i, index);
3019 if (index == 0 || !bitmap_bit_p (v, index))
3021 if (dump_file)
3022 fprintf (dump_file, "failing at i = %d\n", (int)i);
3023 deleted = false;
3024 break;
3028 if (deleted)
3030 if (dbg_cnt (dse))
3032 check_for_inc_dec (insn_info->insn);
3033 delete_insn (insn_info->insn);
3034 insn_info->insn = NULL;
3035 globally_deleted++;
3039 /* We do not want to process the local info if the insn was
3040 deleted. For instance, if the insn did a wild read, we
3041 no longer need to trash the info. */
3042 if (insn_info->insn
3043 && INSN_P (insn_info->insn)
3044 && (!deleted))
3046 scan_stores_nospill (insn_info->store_rec, v, NULL);
3047 if (insn_info->wild_read)
3049 if (dump_file)
3050 fprintf (dump_file, "wild read\n");
3051 bitmap_clear (v);
3053 else if (insn_info->read_rec)
3055 if (dump_file)
3056 fprintf (dump_file, "regular read\n");
3057 scan_reads_nospill (insn_info, v, NULL);
3061 insn_info = insn_info->prev_insn;
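/* A deletion example (illustrative): suppose an insn stores bytes
   [0, 4) of some group and the current v bitmap contains the
   positions of all four bytes.  Every get_bitmap_index lookup then
   succeeds and every bitmap_bit_p test passes, meaning the stored
   bytes are dead along all paths from here, so the insn is deleted.
   If even one byte is missing from v, some path may read it and the
   store must stay.  */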
3067 static void
3068 dse_step5_spill (void)
3070 basic_block bb;
3071 FOR_EACH_BB (bb)
3073 bb_info_t bb_info = bb_table[bb->index];
3074 insn_info_t insn_info = bb_info->last_insn;
3075 bitmap v = bb_info->out;
3077 while (insn_info)
3079 bool deleted = false;
3080 /* There may have been code deleted by the dce pass run before
3081 this phase. */
3082 if (insn_info->insn
3083 && INSN_P (insn_info->insn)
3084 && (!insn_info->cannot_delete)
3085 && (!bitmap_empty_p (v)))
3087 /* Try to delete the current insn. */
3088 store_info_t store_info = insn_info->store_rec;
3089 deleted = true;
3091 while (store_info)
3093 if (store_info->alias_set)
3095 int index = get_bitmap_index (clear_alias_group,
3096 store_info->alias_set);
3097 if (index == 0 || !bitmap_bit_p (v, index))
3099 deleted = false;
3100 break;
3103 else
3104 deleted = false;
3105 store_info = store_info->next;
3107 if (deleted && dbg_cnt (dse))
3109 if (dump_file)
3110 fprintf (dump_file, "Spill deleting insn %d\n",
3111 INSN_UID (insn_info->insn));
3112 check_for_inc_dec (insn_info->insn);
3113 delete_insn (insn_info->insn);
3114 spill_deleted++;
3115 insn_info->insn = NULL;
3119 if (insn_info->insn
3120 && INSN_P (insn_info->insn)
3121 && (!deleted))
3123 scan_stores_spill (insn_info->store_rec, v, NULL);
3124 scan_reads_spill (insn_info->read_rec, v, NULL);
3127 insn_info = insn_info->prev_insn;
3134 /*----------------------------------------------------------------------------
3135 Sixth step.
3137 Destroy everything left standing.
3138 ----------------------------------------------------------------------------*/
3140 static void
3141 dse_step6 (bool global_done)
3143 unsigned int i;
3144 group_info_t group;
3145 basic_block bb;
3147 if (global_done)
3149 for (i = 0; VEC_iterate (group_info_t, rtx_group_vec, i, group); i++)
3151 free (group->offset_map_n);
3152 free (group->offset_map_p);
3153 BITMAP_FREE (group->store1_n);
3154 BITMAP_FREE (group->store1_p);
3155 BITMAP_FREE (group->store2_n);
3156 BITMAP_FREE (group->store2_p);
3157 BITMAP_FREE (group->group_kill);
3160 FOR_ALL_BB (bb)
3162 bb_info_t bb_info = bb_table[bb->index];
3163 BITMAP_FREE (bb_info->gen);
3164 if (bb_info->kill)
3165 BITMAP_FREE (bb_info->kill);
3166 if (bb_info->in)
3167 BITMAP_FREE (bb_info->in);
3168 if (bb_info->out)
3169 BITMAP_FREE (bb_info->out);
3172 else
3174 for (i = 0; VEC_iterate (group_info_t, rtx_group_vec, i, group); i++)
3176 BITMAP_FREE (group->store1_n);
3177 BITMAP_FREE (group->store1_p);
3178 BITMAP_FREE (group->store2_n);
3179 BITMAP_FREE (group->store2_p);
3180 BITMAP_FREE (group->group_kill);
3184 if (clear_alias_sets)
3186 BITMAP_FREE (clear_alias_sets);
3187 BITMAP_FREE (disqualified_clear_alias_sets);
3188 free_alloc_pool (clear_alias_mode_pool);
3189 htab_delete (clear_alias_mode_table);
3192 end_alias_analysis ();
3193 free (bb_table);
3194 htab_delete (rtx_group_table);
3195 VEC_free (group_info_t, heap, rtx_group_vec);
3196 BITMAP_FREE (all_blocks);
3197 BITMAP_FREE (scratch);
3199 free_alloc_pool (rtx_store_info_pool);
3200 free_alloc_pool (read_info_pool);
3201 free_alloc_pool (insn_info_pool);
3202 free_alloc_pool (bb_info_pool);
3203 free_alloc_pool (rtx_group_info_pool);
3204 free_alloc_pool (deferred_change_pool);
3209 /* -------------------------------------------------------------------------
3211 ------------------------------------------------------------------------- */
3213 /* Callback for running pass_rtl_dse. */
3215 static unsigned int
3216 rest_of_handle_dse (void)
3218 bool did_global = false;
3220 df_set_flags (DF_DEFER_INSN_RESCAN);
3222 dse_step0 ();
3223 dse_step1 ();
3224 dse_step2_init ();
3225 if (dse_step2_nospill ())
3227 df_set_flags (DF_LR_RUN_DCE);
3228 df_analyze ();
3229 did_global = true;
3230 if (dump_file)
3231 fprintf (dump_file, "doing global processing\n");
3232 dse_step3 (false);
3233 dse_step4 ();
3234 dse_step5_nospill ();
3237 /* For the instance of dse that runs after reload, we make a special
3238 pass to process the spills. These are special in that they are
3239 totally transparent, i.e., there are no aliasing issues that need
3240 to be considered. This means that the wild reads that kill
3241 everything else do not apply here. */
3242 if (clear_alias_sets && dse_step2_spill ())
3244 if (!did_global)
3246 df_set_flags (DF_LR_RUN_DCE);
3247 df_analyze ();
3249 did_global = true;
3250 if (dump_file)
3251 fprintf (dump_file, "doing global spill processing\n");
3252 dse_step3 (true);
3253 dse_step4 ();
3254 dse_step5_spill ();
3257 dse_step6 (did_global);
3259 if (dump_file)
3260 fprintf (dump_file, "dse: local deletions = %d, global deletions = %d, spill deletions = %d\n",
3261 locally_deleted, globally_deleted, spill_deleted);
3262 return 0;
3265 static bool
3266 gate_dse (void)
3268 return gate_dse1 () || gate_dse2 ();
3271 static bool
3272 gate_dse1 (void)
3274 return optimize > 0 && flag_dse
3275 && dbg_cnt (dse1);
3278 static bool
3279 gate_dse2 (void)
3281 return optimize > 0 && flag_dse
3282 && dbg_cnt (dse2);
3285 struct rtl_opt_pass pass_rtl_dse1 =
3288 RTL_PASS,
3289 "dse1", /* name */
3290 gate_dse1, /* gate */
3291 rest_of_handle_dse, /* execute */
3292 NULL, /* sub */
3293 NULL, /* next */
3294 0, /* static_pass_number */
3295 TV_DSE1, /* tv_id */
3296 0, /* properties_required */
3297 0, /* properties_provided */
3298 0, /* properties_destroyed */
3299 0, /* todo_flags_start */
3300 TODO_dump_func |
3301 TODO_df_finish | TODO_verify_rtl_sharing |
3302 TODO_ggc_collect /* todo_flags_finish */
3306 struct rtl_opt_pass pass_rtl_dse2 =
3309 RTL_PASS,
3310 "dse2", /* name */
3311 gate_dse2, /* gate */
3312 rest_of_handle_dse, /* execute */
3313 NULL, /* sub */
3314 NULL, /* next */
3315 0, /* static_pass_number */
3316 TV_DSE2, /* tv_id */
3317 0, /* properties_required */
3318 0, /* properties_provided */
3319 0, /* properties_destroyed */
3320 0, /* todo_flags_start */
3321 TODO_dump_func |
3322 TODO_df_finish | TODO_verify_rtl_sharing |
3323 TODO_ggc_collect /* todo_flags_finish */