/* RTL dead store elimination.
   Copyright (C) 2005, 2006, 2007 Free Software Foundation, Inc.

   Contributed by Richard Sandiford <rsandifor@codesourcery.com>
   and Kenneth Zadeck <zadeck@naturalbridge.com>

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#undef BASELINE

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hashtab.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "df.h"
#include "cselib.h"
#include "timevar.h"
#include "tree-pass.h"
#include "alloc-pool.h"
#include "alias.h"
#include "insn-config.h"
#include "expr.h"
#include "recog.h"
#include "dse.h"
#include "optabs.h"
#include "dbgcnt.h"

/* This file contains three techniques for performing Dead Store
   Elimination (dse).

   * The first technique performs dse locally on any base address.  It
   is based on cselib, which is a local value numbering technique.
   This technique is local to a basic block but deals with fairly
   general addresses.

   * The second technique performs dse globally but is restricted to
   base addresses that are either constant or are relative to the
   frame_pointer.

   * The third technique (which is only done after register allocation)
   processes the spill slots.  This differs from the second
   technique because it takes advantage of the fact that spilling is
   completely free from the effects of aliasing.

   Logically, dse is a backwards dataflow problem.  A store can be
   deleted if it cannot be reached in the backward direction by any
   use of the value being stored.  However, the local technique uses a
   forwards scan of the basic block because cselib requires that the
   block be processed in that order.

   The pass is logically broken into 7 steps:

   0) Initialization.

   1) The local algorithm, as well as scanning the insns for the two
   global algorithms.

   2) Analysis to see if the global algs are necessary.  In the case
   of stores based on a constant address, there must be at least two
   stores to that address to make it possible to delete some of the
   stores.  In the case of stores off of the frame or spill related
   stores, only one store to an address is necessary because those
   stores die at the end of the function.

   3) Set up the global dataflow equations based on processing the
   info parsed in the first step.

   4) Solve the dataflow equations.

   5) Delete the insns that the global analysis has indicated are
   unnecessary.

   6) Cleanup.

   Step 1 uses cselib and canon_rtx to build the largest expression
   possible for each address.  It is a forwards pass through each
   basic block.  From the point of view of the global technique, the
   first step could examine a block in either direction; the forwards
   ordering is to accommodate cselib.

   We make a simplifying assumption: addresses fall into four broad
   categories:

   1) base has rtx_varies_p == false, offset is constant.
   2) base has rtx_varies_p == false, offset variable.
   3) base has rtx_varies_p == true, offset constant.
   4) base has rtx_varies_p == true, offset variable.

   The local passes are able to process all 4 kinds of addresses.  The
   global pass only handles (1).

   The global problem is formulated as follows:

     A store, S1, to address A, where A is not relative to the stack
     frame, can be eliminated if all paths from S1 to the end of the
     function contain another store to A before a read of A.

     If the address A is relative to the stack frame, a store S2 to A
     can be eliminated if there are no paths from S2 that reach the
     end of the function and read A before another store to A.  In
     this case S2 can be deleted if there are paths from S2 to the
     end of the function that have no reads or writes of A.  This
     second case allows stores to the stack frame to be deleted that
     would otherwise die when the function returns.  This cannot be
     done if stores_off_frame_dead_at_return is not true.  See the doc
     for that variable for when it is false.

   The global problem is formulated as a backwards set union
   dataflow problem where the stores are the gens and the reads are
   the kills.  Set union problems are rare and require some special
   handling given our representation of bitmaps.  A straightforward
   implementation requires a lot of bitmaps filled with 1s.
   These are expensive and cumbersome in our bitmap formulation, so
   care has been taken to avoid large vectors filled with 1s.  See
   the comments in bb_info and in the dataflow confluence functions
   for details.
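
   As a source-level illustration of this formulation (a hypothetical
   example, not taken from the testsuite): in

     static int g;
     void f (int cond)
     {
       g = 1;
       if (cond) g = 2; else g = 3;
     }

   the store g = 1 is dead because every path from it to the end of
   the function stores to g again before any read of g.  Since g has
   a constant (symbol_ref) base, the global technique can see this.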

   There are two places for further enhancements to this algorithm:

   1) The original dse which was embedded in a pass called flow also
   did local address forwarding.  For example in

   A <- r100
   ... <- A

   flow would replace the right hand side of the second insn with a
   reference to r100.  Most of the information is available to add this
   to this pass.  It has not been done because it is a lot of work in
   the case that either r100 is assigned to between the first and
   second insn and/or the second insn is a load of part of the value
   stored by the first insn.

   insn 5 in gcc.c-torture/compile/990203-1.c simple case.
   insn 15 in gcc.c-torture/execute/20001017-2.c simple case.
   insn 25 in gcc.c-torture/execute/20001026-1.c simple case.
   insn 44 in gcc.c-torture/execute/20010910-1.c simple case.

   2) The cleaning up of spill code is quite profitable.  It currently
   depends on reading tea leaves and chicken entrails left by reload.
   This pass depends on reload creating a singleton alias set for each
   spill slot and telling the next dse pass which of these alias sets
   are the singletons.  Rather than analyze the addresses of the
   spills, dse's spill processing just does analysis of the loads and
   stores that use those alias sets.  There are three cases where this
   falls short:

   a) Reload sometimes creates the slot for one mode of access, and
   then inserts loads and/or stores for a smaller mode.  In this
   case, the current code just punts on the slot.  The proper thing
   to do is to back out and use one bit vector position for each
   byte of the entity associated with the slot.  This depends on
   KNOWING that reload always generates the accesses for each of the
   bytes in some canonical (read that easy to understand several
   passes after reload happens) way.

   b) Reload sometimes decides that the spill slot it allocated was
   not large enough for the mode and goes back and allocates more
   slots with the same mode and alias set.  The backout in this case
   is a little more graceful than (a).  In this case the slot is
   unmarked as being a spill slot and if the final address comes out
   to be based off the frame pointer, the global algorithm handles
   this slot.

   c) For any pass that may prespill, there is currently no
   mechanism to tell the dse pass that the slot being used has the
   special properties that reload uses.  It may be that all that is
   required is to have those passes make the same calls that reload
   does, assuming that the alias sets can be manipulated in the same
   way.  */

/* There are limits to the size of constant offsets we model for the
   global problem.  There are certainly test cases that exceed this
   limit; however, it is unlikely that there are important programs
   that really have constant offsets this size.  */
#define MAX_OFFSET (64 * 1024)

static bitmap scratch = NULL;
struct insn_info;

/* This structure holds information about a candidate store.  */
struct store_info
{
  /* False means this is a clobber.  */
  bool is_set;

  /* The id of the mem group of the base address.  If rtx_varies_p is
     true, this is -1.  Otherwise, it is the index into the group
     table.  */
  int group_id;

  /* This is the cselib value.  */
  cselib_val *cse_base;

  /* The canonized mem.  */
  rtx mem;

  /* The result of get_addr on mem.  */
  rtx mem_addr;

  /* If this is non-zero, it is the alias set of a spill location.  */
  alias_set_type alias_set;

  /* The offset of the first byte and the byte after the last byte
     associated with the operation.  */
  int begin, end;

  /* A bitmask as wide as the number of bytes in the word that
     contains a 1 if the byte may be needed.  The store is unused if
     all of the bits are 0.  */
  long positions_needed;

  /* The next store info for this insn.  */
  struct store_info *next;

  /* The right hand side of the store.  This is used if there is a
     subsequent reload of the mem's address somewhere later in the
     basic block.  */
  rtx rhs;
};

typedef struct store_info *store_info_t;
static alloc_pool cse_store_info_pool;
static alloc_pool rtx_store_info_pool;
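
/* A minimal sketch (illustration only; this function is not part of
   dse.c) of the lifecycle of positions_needed as record_store below
   uses it.  WIDTH is the store size in bytes; a later store covering
   bytes [LO, HI) of this one clears those bits, and the store is dead
   once no bits remain.  */

static bool
positions_needed_example (int width, int lo, int hi)
{
  long positions_needed = (1L << width) - 1;  /* every stored byte live */
  int i;

  for (i = lo; i < hi; i++)
    positions_needed &= ~(1L << i);           /* byte I overwritten */

  return positions_needed == 0;               /* true => store is dead */
}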

/* This structure holds information about a load.  These are only
   built for rtx bases.  */
struct read_info
{
  /* The id of the mem group of the base address.  */
  int group_id;

  /* If this is non-zero, it is the alias set of a spill location.  */
  alias_set_type alias_set;

  /* The offset of the first byte and the byte after the last byte
     associated with the operation.  If begin == end == 0, the read
     did not have a constant offset.  */
  int begin, end;

  /* The mem being read.  */
  rtx mem;

  /* The next read_info for this insn.  */
  struct read_info *next;
};
typedef struct read_info *read_info_t;
static alloc_pool read_info_pool;

/* One of these records is created for each insn.  */

struct insn_info
{
  /* Set true if the insn contains a store but the insn itself cannot
     be deleted.  This is set if the insn is a parallel and there is
     more than one non dead output or if the insn is in some way
     volatile.  */
  bool cannot_delete;

  /* This field is only used by the global algorithm.  It is set true
     if the insn contains any read of mem except for a read of a
     category (1) address.  This is also set if the insn is a call or
     has a clobber mem.  If the insn contains a wild read, the use_rec
     will be null.  */
  bool wild_read;

  /* This field is only used for the processing of const functions.
     These functions cannot read memory, but they can read the stack
     because that is where they may get their parms.  We need to be
     this conservative because, like the store motion pass, we don't
     consider CALL_INSN_FUNCTION_USAGE when processing call insns.
     Moreover, we need to distinguish two cases:
     1. Before reload (register elimination), the stores related to
        outgoing arguments are stack pointer based and thus deemed
        of non-constant base in this pass.  This requires special
        handling but also means that the frame pointer based stores
        need not be killed upon encountering a const function call.
     2. After reload, the stores related to outgoing arguments can be
        either stack pointer or hard frame pointer based.  This means
        that we have no other choice than also killing all the frame
        pointer based stores upon encountering a const function call.
     This field is set after reload for const function calls.  Having
     this set is less severe than a wild read, it just means that all
     the frame related stores are killed rather than all the stores.  */
  bool frame_read;

  /* This field is only used for the processing of const functions.
     It is set if the insn may contain a stack pointer based store.  */
  bool stack_pointer_based;

  /* This is true if any of the sets within the store contains a
     cselib base.  Such stores can only be deleted by the local
     algorithm.  */
  bool contains_cselib_groups;

  /* The insn.  */
  rtx insn;

  /* The list of mem sets or mem clobbers that are contained in this
     insn.  If the insn is deletable, it contains only one mem set.
     But it could also contain clobbers.  Insns that contain more than
     one mem set are not deletable, but each of those mems are here in
     order to provide info to delete other insns.  */
  store_info_t store_rec;

  /* The linked list of mem uses in this insn.  Only the reads from
     rtx bases are listed here.  The reads to cselib bases are
     completely processed during the first scan and so are never
     created.  */
  read_info_t read_rec;

  /* The prev insn in the basic block.  */
  struct insn_info * prev_insn;

  /* The linked list of insns that are in consideration for removal in
     the forwards pass thru the basic block.  This pointer may be
     trash as it is not cleared when a wild read occurs.  The only
     time it is guaranteed to be correct is when the traversal starts
     at active_local_stores.  */
  struct insn_info * next_local_store;
};

typedef struct insn_info *insn_info_t;
static alloc_pool insn_info_pool;

/* The linked list of stores that are under consideration in this
   basic block.  */
static insn_info_t active_local_stores;

struct bb_info
{
  /* Pointer to the insn info for the last insn in the block.  These
     are linked so this is how all of the insns are reached.  During
     scanning this is the current insn being scanned.  */
  insn_info_t last_insn;

  /* The info for the global dataflow problem.  */

  /* This is set if the transfer function should AND in the wild_read
     bitmap before applying the kill and gen sets.  That vector knocks
     out most of the bits in the bitmap and thus speeds up the
     operations.  */
  bool apply_wild_read;

  /* The set of store positions that exist in this block before a wild read.  */
  bitmap gen;

  /* The set of load positions that exist in this block above the
     same position of a store.  */
  bitmap kill;

  /* The set of stores that reach the top of the block without being
     killed by a read.

     Do not represent the in if it is all ones.  Note that this is
     what the bitvector should logically be initialized to for a set
     intersection problem.  However, like the kill set, this is too
     expensive.  So initially, the in set will only be created for the
     exit block and any block that contains a wild read.  */
  bitmap in;

  /* The set of stores that reach the bottom of the block from its
     successors.

     Do not represent the out if it is all ones.  Note that this is
     what the bitvector should logically be initialized to for a set
     intersection problem.  However, like the kill and in sets, this
     is too expensive.  So what is done is that the confluence
     operator just initializes the vector from one of the out sets of
     the successors of the block.  */
  bitmap out;
};

typedef struct bb_info *bb_info_t;
static alloc_pool bb_info_pool;
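
/* A minimal sketch (an assumption based on the comments above, not
   code from this excerpt) of the backwards transfer function these
   sets imply: stores are gens and reads are kills, so
   in = gen | (out & ~kill), with the confluence operator
   intersecting the successors' in sets into out.  */

static bool
transfer_function_sketch (bb_info_t bb_info)
{
  /* Returns true if IN changed, i.e. the solver must iterate.  */
  return bitmap_ior_and_compl (bb_info->in, bb_info->gen,
                               bb_info->out, bb_info->kill);
}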

/* Table to hold all bb_infos.  */
static bb_info_t *bb_table;

/* There is a group_info for each rtx base that is used to reference
   memory.  There are not many of these rtx bases because they are
   very limited in scope.  */

struct group_info
{
  /* The actual base of the address.  */
  rtx rtx_base;

  /* The sequential id of the base.  This allows us to have a
     canonical ordering of these that is not based on addresses.  */
  int id;

  /* A mem wrapped around the base pointer for the group in order to
     do read dependency.  */
  rtx base_mem;

  /* Canonized version of base_mem, most likely the same thing.  */
  rtx canon_base_mem;

  /* These two sets of two bitmaps are used to keep track of how many
     stores are actually referencing that position from this base.  We
     only do this for rtx bases as this will be used to assign
     positions in the bitmaps for the global problem.  Bit N is set in
     store1 on the first store for offset N.  Bit N is set in store2
     for the second store to offset N.  This is all we need since we
     only care about offsets that have two or more stores for them.

     The "_n" suffix is for offsets less than 0 and the "_p" suffix is
     for 0 and greater offsets.

     There is one special case here, for stores into the stack frame:
     we will OR store1 into store2 before deciding which stores to
     look at globally.  This is because stores to the stack frame that
     have no other reads before the end of the function can also be
     deleted.  */
  bitmap store1_n, store1_p, store2_n, store2_p;

  /* The positions in this bitmap have the same assignments as the in,
     out, gen and kill bitmaps.  This bitmap is all zeros except for
     the positions that are occupied by stores for this group.  */
  bitmap group_kill;

  /* True if there are any positions that are to be processed
     globally.  */
  bool process_globally;

  /* True if the base of this group is either the frame_pointer or
     hard_frame_pointer.  */
  bool frame_related;

  /* The offset_map is used to map the offsets from this base into
     positions in the global bitmaps.  It is only created after all of
     the stores have been scanned and we know which ones we care
     about.  */
  int *offset_map_n, *offset_map_p;
  int offset_map_size_n, offset_map_size_p;
};
typedef struct group_info *group_info_t;
typedef const struct group_info *const_group_info_t;
static alloc_pool rtx_group_info_pool;

/* Tables of group_info structures, hashed by base value.  */
static htab_t rtx_group_table;

/* Index into the rtx_group_vec.  */
static int rtx_group_next_id;

DEF_VEC_P(group_info_t);
DEF_VEC_ALLOC_P(group_info_t,heap);

static VEC(group_info_t,heap) *rtx_group_vec;

/* This structure holds the set of changes that are being deferred
   when removing a read operation.  See replace_read.  */
struct deferred_change
{
  /* The mem that is being replaced.  */
  rtx *loc;

  /* The reg it is being replaced with.  */
  rtx reg;

  struct deferred_change *next;
};

typedef struct deferred_change *deferred_change_t;
static alloc_pool deferred_change_pool;

static deferred_change_t deferred_change_list = NULL;

/* These are used to hold the alias sets of spill variables.  Since
   these are never aliased and there may be a lot of them, it makes
   sense to treat them specially.  This bitvector is only allocated in
   calls from dse_record_singleton_alias_set which currently is only
   made during reload1.  So when dse is called before reload this
   mechanism does nothing.  */

static bitmap clear_alias_sets = NULL;

/* The set of clear_alias_sets that have been disqualified because
   there are loads or stores using a different mode than the alias set
   was registered with.  */
static bitmap disqualified_clear_alias_sets = NULL;

/* The group that holds all of the clear_alias_sets.  */
static group_info_t clear_alias_group;

/* The modes of the clear_alias_sets.  */
static htab_t clear_alias_mode_table;

/* Hash table element to look up the mode for an alias set.  */
struct clear_alias_mode_holder
{
  alias_set_type alias_set;
  enum machine_mode mode;
};

static alloc_pool clear_alias_mode_pool;

/* This is true except if current_function_stdarg -- i.e. we cannot do
   this for vararg functions because they play games with the frame.  */
static bool stores_off_frame_dead_at_return;

/* Counters for stats.  */
static int globally_deleted;
static int locally_deleted;
static int spill_deleted;

static bitmap all_blocks;

/* The number of bits used in the global bitmaps.  */
static unsigned int current_position;

static bool gate_dse (void);
static bool gate_dse1 (void);
static bool gate_dse2 (void);

/*----------------------------------------------------------------------------
   Zeroth step.

   Initialization.
----------------------------------------------------------------------------*/

/* Hashtable callbacks for the clear_alias_mode_table, which maps an
   alias set to the mode it was registered with.  */

static int
clear_alias_mode_eq (const void *p1, const void *p2)
{
  const struct clear_alias_mode_holder * h1
    = (const struct clear_alias_mode_holder *) p1;
  const struct clear_alias_mode_holder * h2
    = (const struct clear_alias_mode_holder *) p2;
  return h1->alias_set == h2->alias_set;
}

static hashval_t
clear_alias_mode_hash (const void *p)
{
  const struct clear_alias_mode_holder *holder
    = (const struct clear_alias_mode_holder *) p;
  return holder->alias_set;
}

/* Find the entry associated with ALIAS_SET.  */

static struct clear_alias_mode_holder *
clear_alias_set_lookup (alias_set_type alias_set)
{
  struct clear_alias_mode_holder tmp_holder;
  void **slot;

  tmp_holder.alias_set = alias_set;
  slot = htab_find_slot (clear_alias_mode_table, &tmp_holder, NO_INSERT);
  gcc_assert (*slot);

  return *slot;
}

/* Hashtable callbacks for maintaining the "bases" field of
   store_group_info, given that the addresses are function invariants.  */

static int
invariant_group_base_eq (const void *p1, const void *p2)
{
  const_group_info_t gi1 = (const_group_info_t) p1;
  const_group_info_t gi2 = (const_group_info_t) p2;
  return rtx_equal_p (gi1->rtx_base, gi2->rtx_base);
}

static hashval_t
invariant_group_base_hash (const void *p)
{
  const_group_info_t gi = (const_group_info_t) p;
  int do_not_record;
  return hash_rtx (gi->rtx_base, Pmode, &do_not_record, NULL, false);
}

/* Get the GROUP for BASE.  Add a new group if it is not there.  */

static group_info_t
get_group_info (rtx base)
{
  struct group_info tmp_gi;
  group_info_t gi;
  void **slot;

  if (base)
    {
      /* Find the store_base_info structure for BASE, creating a new one
         if necessary.  */
      tmp_gi.rtx_base = base;
      slot = htab_find_slot (rtx_group_table, &tmp_gi, INSERT);
      gi = (group_info_t) *slot;
    }
  else
    {
      if (!clear_alias_group)
        {
          clear_alias_group = gi = pool_alloc (rtx_group_info_pool);
          memset (gi, 0, sizeof (struct group_info));
          gi->id = rtx_group_next_id++;
          gi->store1_n = BITMAP_ALLOC (NULL);
          gi->store1_p = BITMAP_ALLOC (NULL);
          gi->store2_n = BITMAP_ALLOC (NULL);
          gi->store2_p = BITMAP_ALLOC (NULL);
          gi->group_kill = BITMAP_ALLOC (NULL);
          gi->process_globally = false;
          gi->offset_map_size_n = 0;
          gi->offset_map_size_p = 0;
          gi->offset_map_n = NULL;
          gi->offset_map_p = NULL;
          VEC_safe_push (group_info_t, heap, rtx_group_vec, gi);
        }
      return clear_alias_group;
    }

  if (gi == NULL)
    {
      *slot = gi = pool_alloc (rtx_group_info_pool);
      gi->rtx_base = base;
      gi->id = rtx_group_next_id++;
      gi->base_mem = gen_rtx_MEM (QImode, base);
      gi->canon_base_mem = canon_rtx (gi->base_mem);
      gi->store1_n = BITMAP_ALLOC (NULL);
      gi->store1_p = BITMAP_ALLOC (NULL);
      gi->store2_n = BITMAP_ALLOC (NULL);
      gi->store2_p = BITMAP_ALLOC (NULL);
      gi->group_kill = BITMAP_ALLOC (NULL);
      gi->process_globally = false;
      gi->frame_related =
        (base == frame_pointer_rtx) || (base == hard_frame_pointer_rtx);
      gi->offset_map_size_n = 0;
      gi->offset_map_size_p = 0;
      gi->offset_map_n = NULL;
      gi->offset_map_p = NULL;
      VEC_safe_push (group_info_t, heap, rtx_group_vec, gi);
    }

  return gi;
}

/* Initialization of data structures.  */

static void
dse_step0 (void)
{
  locally_deleted = 0;
  globally_deleted = 0;
  spill_deleted = 0;

  scratch = BITMAP_ALLOC (NULL);

  rtx_store_info_pool
    = create_alloc_pool ("rtx_store_info_pool",
                         sizeof (struct store_info), 100);
  read_info_pool
    = create_alloc_pool ("read_info_pool",
                         sizeof (struct read_info), 100);
  insn_info_pool
    = create_alloc_pool ("insn_info_pool",
                         sizeof (struct insn_info), 100);
  bb_info_pool
    = create_alloc_pool ("bb_info_pool",
                         sizeof (struct bb_info), 100);
  rtx_group_info_pool
    = create_alloc_pool ("rtx_group_info_pool",
                         sizeof (struct group_info), 100);
  deferred_change_pool
    = create_alloc_pool ("deferred_change_pool",
                         sizeof (struct deferred_change), 10);

  rtx_group_table = htab_create (11, invariant_group_base_hash,
                                 invariant_group_base_eq, NULL);

  bb_table = XCNEWVEC (bb_info_t, last_basic_block);
  rtx_group_next_id = 0;

  stores_off_frame_dead_at_return = !current_function_stdarg;

  init_alias_analysis ();

  if (clear_alias_sets)
    clear_alias_group = get_group_info (NULL);
  else
    clear_alias_group = NULL;
}

/*----------------------------------------------------------------------------
   First step.

   Scan all of the insns.  Any random ordering of the blocks is fine.
   Each block is scanned in forward order to accommodate cselib which
   is used to remove stores with non-constant bases.
----------------------------------------------------------------------------*/

/* Delete all of the store_info recs from INSN_INFO.  */

static void
free_store_info (insn_info_t insn_info)
{
  store_info_t store_info = insn_info->store_rec;
  while (store_info)
    {
      store_info_t next = store_info->next;
      if (store_info->cse_base)
        pool_free (cse_store_info_pool, store_info);
      else
        pool_free (rtx_store_info_pool, store_info);
      store_info = next;
    }

  insn_info->cannot_delete = true;
  insn_info->contains_cselib_groups = false;
  insn_info->store_rec = NULL;
}

struct insn_size {
  int size;
  rtx insn;
};


/* Add an insn to do the add inside X if it is a
   PRE/POST-INC/DEC/MODIFY.  D is a structure containing the insn and
   the size of the mode of the MEM that this is inside of.  */

static int
replace_inc_dec (rtx *r, void *d)
{
  rtx x = *r;
  struct insn_size *data = (struct insn_size *)d;
  switch (GET_CODE (x))
    {
    case PRE_INC:
    case POST_INC:
      {
        rtx r1 = XEXP (x, 0);
        rtx c = gen_int_mode (data->size, Pmode);
        add_insn_before (data->insn,
                         gen_rtx_SET (Pmode, r1,
                                      gen_rtx_PLUS (Pmode, r1, c)),
                         NULL);
        return -1;
      }

    case PRE_DEC:
    case POST_DEC:
      {
        rtx r1 = XEXP (x, 0);
        rtx c = gen_int_mode (-data->size, Pmode);
        add_insn_before (data->insn,
                         gen_rtx_SET (Pmode, r1,
                                      gen_rtx_PLUS (Pmode, r1, c)),
                         NULL);
        return -1;
      }

    case PRE_MODIFY:
    case POST_MODIFY:
      {
        /* We can reuse the add because we are about to delete the
           insn that contained it.  */
        rtx add = XEXP (x, 1);
        rtx r1 = XEXP (add, 0);
        add_insn_before (data->insn,
                         gen_rtx_SET (Pmode, r1, add), NULL);
        return -1;
      }

    default:
      return 0;
    }
}

/* If X is a MEM, check the address to see if it is PRE/POST-INC/DEC/MODIFY
   and generate an add to replace that.  */

static int
replace_inc_dec_mem (rtx *r, void *d)
{
  rtx x = *r;
  if (GET_CODE (x) == MEM)
    {
      struct insn_size data;

      data.size = GET_MODE_SIZE (GET_MODE (x));
      data.insn = (rtx)d;

      for_each_rtx (&XEXP (x, 0), replace_inc_dec, &data);

      return -1;
    }
  return 0;
}

/* Before we delete INSN, make sure that the auto inc/dec, if it is
   there, is split into a separate insn.  */

static void
check_for_inc_dec (rtx insn)
{
  rtx note = find_reg_note (insn, REG_INC, NULL_RTX);
  if (note)
    for_each_rtx (&insn, replace_inc_dec_mem, insn);
}
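
/* For instance (hypothetical RTL), before deleting

     (set (mem (pre_inc (reg R))) (reg V))

   check_for_inc_dec preserves the side effect by emitting

     (set (reg R) (plus (reg R) (const_int SIZE)))

   ahead of the doomed insn, where SIZE is the mode size of the mem.  */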

/* Delete the insn and free all of the fields inside INSN_INFO.  */

static void
delete_dead_store_insn (insn_info_t insn_info)
{
  read_info_t read_info;

  if (!dbg_cnt (dse))
    return;

  check_for_inc_dec (insn_info->insn);
  if (dump_file)
    {
      fprintf (dump_file, "Locally deleting insn %d ",
               INSN_UID (insn_info->insn));
      if (insn_info->store_rec->alias_set)
        fprintf (dump_file, "alias set %d\n",
                 (int) insn_info->store_rec->alias_set);
      else
        fprintf (dump_file, "\n");
    }

  free_store_info (insn_info);
  read_info = insn_info->read_rec;

  while (read_info)
    {
      read_info_t next = read_info->next;
      pool_free (read_info_pool, read_info);
      read_info = next;
    }
  insn_info->read_rec = NULL;

  delete_insn (insn_info->insn);
  locally_deleted++;
  insn_info->insn = NULL;

  insn_info->wild_read = false;
}

/* Set the store* bitmaps and the offset_map_size* fields in GROUP
   based on OFFSET and WIDTH.  */

static void
set_usage_bits (group_info_t group, HOST_WIDE_INT offset, HOST_WIDE_INT width)
{
  HOST_WIDE_INT i;

  if ((offset > -MAX_OFFSET) && (offset < MAX_OFFSET))
    for (i = offset; i < offset + width; i++)
      {
        bitmap store1;
        bitmap store2;
        int ai;
        if (i < 0)
          {
            store1 = group->store1_n;
            store2 = group->store2_n;
            ai = -i;
          }
        else
          {
            store1 = group->store1_p;
            store2 = group->store2_p;
            ai = i;
          }

        if (bitmap_bit_p (store1, ai))
          bitmap_set_bit (store2, ai);
        else
          {
            bitmap_set_bit (store1, ai);
            if (i < 0)
              {
                if (group->offset_map_size_n < ai)
                  group->offset_map_size_n = ai;
              }
            else
              {
                if (group->offset_map_size_p < ai)
                  group->offset_map_size_p = ai;
              }
          }
      }
}
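
/* Example (illustration only): two successive 4-byte stores at offset
   0 from the same base,

     set_usage_bits (group, 0, 4);    first call: store1_p gets bits 0..3
     set_usage_bits (group, 0, 4);    second call: store2_p gets bits 0..3

   leave bits 0..3 set in both store1_p and store2_p, marking those
   positions as candidates for the global problem; a single store
   would leave them only in store1_p.  */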

/* Set the BB_INFO so that the last insn is marked as a wild read.  */

static void
add_wild_read (bb_info_t bb_info)
{
  insn_info_t insn_info = bb_info->last_insn;
  read_info_t *ptr = &insn_info->read_rec;

  while (*ptr)
    {
      read_info_t next = (*ptr)->next;
      if ((*ptr)->alias_set == 0)
        {
          pool_free (read_info_pool, *ptr);
          *ptr = next;
        }
      else
        ptr = &(*ptr)->next;
    }
  insn_info->wild_read = true;
  active_local_stores = NULL;
}

/* Return true if X is a constant or one of the registers that behave
   as a constant over the life of a function.  This is equivalent to
   !rtx_varies_p for memory addresses.  */

static bool
const_or_frame_p (rtx x)
{
  switch (GET_CODE (x))
    {
    case MEM:
      return MEM_READONLY_P (x);

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return true;

    case REG:
      /* Note that we have to test for the actual rtx used for the frame
         and arg pointers and not just the register number in case we have
         eliminated the frame and/or arg pointer and are using it
         for pseudos.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
          || x == pic_offset_table_rtx)
        return true;
      return false;

    default:
      return false;
    }
}

/* Take all reasonable action to put the address of MEM into the form
   that we can do analysis on.

   The gold standard is to get the address into the form: address +
   OFFSET where address is something that rtx_varies_p considers a
   constant.  When we can get the address in this form, we can do
   global analysis on it.  Note that for constant bases, address is
   not actually returned, only the group_id.  The address can be
   obtained from that.

   If that fails, we try cselib to get a value we can at least use
   locally.  If that fails we return false.

   The GROUP_ID is set to -1 for cselib bases and the index of the
   group for non_varying bases.

   FOR_READ is true if this is a mem read and false if not.  */

static bool
canon_address (rtx mem,
               alias_set_type *alias_set_out,
               int *group_id,
               HOST_WIDE_INT *offset,
               cselib_val **base)
{
  rtx mem_address = XEXP (mem, 0);
  rtx expanded_address, address;
  /* Make sure that cselib has initialized all of the operands of
     the address before asking it to do the subst.  */

  if (clear_alias_sets)
    {
      /* If this is a spill, do not do any further processing.  */
      alias_set_type alias_set = MEM_ALIAS_SET (mem);
      if (dump_file)
        fprintf (dump_file, "found alias set %d\n", (int) alias_set);
      if (bitmap_bit_p (clear_alias_sets, alias_set))
        {
          struct clear_alias_mode_holder *entry
            = clear_alias_set_lookup (alias_set);

          /* If the modes do not match, we cannot process this set.  */
          if (entry->mode != GET_MODE (mem))
            {
              if (dump_file)
                fprintf (dump_file,
                         "disqualifying alias set %d, (%s) != (%s)\n",
                         (int) alias_set, GET_MODE_NAME (entry->mode),
                         GET_MODE_NAME (GET_MODE (mem)));

              bitmap_set_bit (disqualified_clear_alias_sets, alias_set);
              return false;
            }

          *alias_set_out = alias_set;
          *group_id = clear_alias_group->id;
          return true;
        }
    }

  *alias_set_out = 0;

  cselib_lookup (mem_address, Pmode, 1);

  if (dump_file)
    {
      fprintf (dump_file, "  mem: ");
      print_inline_rtx (dump_file, mem_address, 0);
      fprintf (dump_file, "\n");
    }

  /* Use cselib to replace all of the reg references with the full
     expression.  This will take care of the case where we have

     r_x = base + offset;
     val = *r_x;

     by making it into

     val = *(base + offset);  */

  expanded_address = cselib_expand_value_rtx (mem_address, scratch, 5);

  /* If this fails, just go with the mem_address.  */
  if (!expanded_address)
    expanded_address = mem_address;

  /* Split the address into canonical BASE + OFFSET terms.  */
  address = canon_rtx (expanded_address);

  *offset = 0;

  if (dump_file)
    {
      fprintf (dump_file, "\n   after cselib_expand address: ");
      print_inline_rtx (dump_file, expanded_address, 0);
      fprintf (dump_file, "\n");

      fprintf (dump_file, "\n   after canon_rtx address: ");
      print_inline_rtx (dump_file, address, 0);
      fprintf (dump_file, "\n");
    }

  if (GET_CODE (address) == CONST)
    address = XEXP (address, 0);

  if (GET_CODE (address) == PLUS && GET_CODE (XEXP (address, 1)) == CONST_INT)
    {
      *offset = INTVAL (XEXP (address, 1));
      address = XEXP (address, 0);
    }

  if (const_or_frame_p (address))
    {
      group_info_t group = get_group_info (address);

      if (dump_file)
        fprintf (dump_file, "  gid=%d offset=%d \n", group->id, (int)*offset);
      *base = NULL;
      *group_id = group->id;
    }
  else
    {
      *base = cselib_lookup (address, Pmode, true);
      *group_id = -1;

      if (*base == NULL)
        {
          if (dump_file)
            fprintf (dump_file, " no cselib val - should be a wild read.\n");
          return false;
        }
      if (dump_file)
        fprintf (dump_file, "  varying cselib base=%d offset = %d\n",
                 (*base)->value, (int)*offset);
    }
  return true;
}
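
/* Worked example for canon_address (hypothetical): an address that
   canonicalizes to (plus (reg/f frame_pointer) (const_int -8)) passes
   const_or_frame_p, so *group_id becomes the frame pointer group's id
   and *offset becomes -8.  An address based on an arbitrary pseudo
   instead takes the cselib path, returning *group_id == -1 and a
   non-null *base, or false (a wild read) if cselib has no value for
   it.  */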

/* Clear the rhs field from the active_local_stores list.  */

static void
clear_rhs_from_active_local_stores (void)
{
  insn_info_t ptr = active_local_stores;

  while (ptr)
    {
      store_info_t store_info = ptr->store_rec;
      /* Skip the clobbers.  */
      while (!store_info->is_set)
        store_info = store_info->next;

      store_info->rhs = NULL;

      ptr = ptr->next_local_store;
    }
}

/* BODY is an instruction pattern that belongs to INSN.  Return 1 if
   there is a candidate store, after adding it to the appropriate
   local store group if so.  */

static int
record_store (rtx body, bb_info_t bb_info)
{
  rtx mem;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT width = 0;
  alias_set_type spill_alias_set;
  insn_info_t insn_info = bb_info->last_insn;
  store_info_t store_info = NULL;
  int group_id;
  cselib_val *base = NULL;
  insn_info_t ptr, last;
  bool store_is_unused;

  if (GET_CODE (body) != SET && GET_CODE (body) != CLOBBER)
    return 0;

  /* If this is not used, then this cannot be used to keep the insn
     from being deleted.  On the other hand, it does provide something
     that can be used to prove that another store is dead.  */
  store_is_unused
    = (find_reg_note (insn_info->insn, REG_UNUSED, body) != NULL);

  /* Check whether that value is a suitable memory location.  */
  mem = SET_DEST (body);
  if (!MEM_P (mem))
    {
      /* If the set or clobber is unused, then it does not affect our
         ability to get rid of the entire insn.  */
      if (!store_is_unused)
        insn_info->cannot_delete = true;
      return 0;
    }

  /* At this point we know mem is a mem.  */
  if (GET_MODE (mem) == BLKmode)
    {
      if (GET_CODE (XEXP (mem, 0)) == SCRATCH)
        {
          if (dump_file)
            fprintf (dump_file, " adding wild read for (clobber (mem:BLK (scratch))\n");
          add_wild_read (bb_info);
          insn_info->cannot_delete = true;
        }
      else if (!store_is_unused)
        {
          /* If the set or clobber is unused, then it does not affect our
             ability to get rid of the entire insn.  */
          insn_info->cannot_delete = true;
          clear_rhs_from_active_local_stores ();
        }
      return 0;
    }

  /* We can still process a volatile mem, we just cannot delete it.  */
  if (MEM_VOLATILE_P (mem))
    insn_info->cannot_delete = true;

  if (!canon_address (mem, &spill_alias_set, &group_id, &offset, &base))
    {
      clear_rhs_from_active_local_stores ();
      return 0;
    }

  width = GET_MODE_SIZE (GET_MODE (mem));

  if (spill_alias_set)
    {
      bitmap store1 = clear_alias_group->store1_p;
      bitmap store2 = clear_alias_group->store2_p;

      if (bitmap_bit_p (store1, spill_alias_set))
        bitmap_set_bit (store2, spill_alias_set);
      else
        bitmap_set_bit (store1, spill_alias_set);

      if (clear_alias_group->offset_map_size_p < spill_alias_set)
        clear_alias_group->offset_map_size_p = spill_alias_set;

      store_info = pool_alloc (rtx_store_info_pool);

      if (dump_file)
        fprintf (dump_file, " processing spill store %d(%s)\n",
                 (int) spill_alias_set, GET_MODE_NAME (GET_MODE (mem)));
    }
  else if (group_id >= 0)
    {
      /* In the restrictive case where the base is a constant or the
         frame pointer we can do global analysis.  */

      group_info_t group
        = VEC_index (group_info_t, rtx_group_vec, group_id);

      store_info = pool_alloc (rtx_store_info_pool);
      set_usage_bits (group, offset, width);

      if (dump_file)
        fprintf (dump_file, " processing const base store gid=%d[%d..%d)\n",
                 group_id, (int)offset, (int)(offset+width));
    }
  else
    {
      rtx base_term = find_base_term (XEXP (mem, 0));
      if (!base_term
          || (GET_CODE (base_term) == ADDRESS
              && GET_MODE (base_term) == Pmode
              && XEXP (base_term, 0) == stack_pointer_rtx))
        insn_info->stack_pointer_based = true;
      insn_info->contains_cselib_groups = true;

      store_info = pool_alloc (cse_store_info_pool);
      group_id = -1;

      if (dump_file)
        fprintf (dump_file, " processing cselib store [%d..%d)\n",
                 (int)offset, (int)(offset+width));
    }

  /* Check to see if this store causes some other stores to be
     dead.  */
  ptr = active_local_stores;
  last = NULL;

  while (ptr)
    {
      insn_info_t next = ptr->next_local_store;
      store_info_t s_info = ptr->store_rec;
      bool delete = true;

      /* Skip the clobbers.  We delete the active insn if this insn
         shadows the set.  To have been put on the active list, it
         has exactly one set.  */
      while (!s_info->is_set)
        s_info = s_info->next;

      if (s_info->alias_set != spill_alias_set)
        delete = false;
      else if (s_info->alias_set)
        {
          struct clear_alias_mode_holder *entry
            = clear_alias_set_lookup (s_info->alias_set);
          /* Generally, spills cannot be processed if any of the
             references to the slot have a different mode.  But if
             we are in the same block and mode is exactly the same
             between this store and one before in the same block,
             we can still delete it.  */
          if ((GET_MODE (mem) == GET_MODE (s_info->mem))
              && (GET_MODE (mem) == entry->mode))
            {
              delete = true;
              s_info->positions_needed = 0;
            }
          if (dump_file)
            fprintf (dump_file, "    trying spill store in insn=%d alias_set=%d\n",
                     INSN_UID (ptr->insn), (int) s_info->alias_set);
        }
      else if ((s_info->group_id == group_id)
               && (s_info->cse_base == base))
        {
          HOST_WIDE_INT i;
          if (dump_file)
            fprintf (dump_file, "    trying store in insn=%d gid=%d[%d..%d)\n",
                     INSN_UID (ptr->insn), s_info->group_id,
                     (int)s_info->begin, (int)s_info->end);
          for (i = offset; i < offset + width; i++)
            if (i >= s_info->begin && i < s_info->end)
              s_info->positions_needed &= ~(1L << (i - s_info->begin));
        }
      else if (s_info->rhs)
        /* Need to see if it is possible for this store to overwrite
           the value of store_info.  If it is, set the rhs to NULL to
           keep it from being used to remove a load.  */
        {
          if (canon_true_dependence (s_info->mem,
                                     GET_MODE (s_info->mem),
                                     s_info->mem_addr,
                                     mem, rtx_varies_p))
            s_info->rhs = NULL;
        }

      /* An insn can be deleted if every position of every one of
         its s_infos is zero.  */
      if (s_info->positions_needed != 0)
        delete = false;

      if (delete)
        {
          insn_info_t insn_to_delete = ptr;

          if (last)
            last->next_local_store = ptr->next_local_store;
          else
            active_local_stores = ptr->next_local_store;

          delete_dead_store_insn (insn_to_delete);
        }
      else
        last = ptr;

      ptr = next;
    }

  gcc_assert ((unsigned) width < sizeof (store_info->positions_needed) * CHAR_BIT);

  /* Finish filling in the store_info.  */
  store_info->next = insn_info->store_rec;
  insn_info->store_rec = store_info;
  store_info->mem = canon_rtx (mem);
  store_info->alias_set = spill_alias_set;
  store_info->mem_addr = get_addr (XEXP (mem, 0));
  store_info->cse_base = base;
  store_info->positions_needed = (1L << width) - 1;
  store_info->group_id = group_id;
  store_info->begin = offset;
  store_info->end = offset + width;
  store_info->is_set = GET_CODE (body) == SET;

  if (store_info->is_set
      /* No place to keep the value after ra.  */
      && !reload_completed
      && (REG_P (SET_SRC (body))
          || GET_CODE (SET_SRC (body)) == SUBREG
          || CONSTANT_P (SET_SRC (body)))
      /* Sometimes the store and reload is used for truncation and
         rounding.  */
      && !(FLOAT_MODE_P (GET_MODE (mem)) && (flag_float_store)))
    store_info->rhs = SET_SRC (body);
  else
    store_info->rhs = NULL;

  /* If this is a clobber, we return 0.  We will only be able to
     delete this insn if there is only one USED store, but we
     can use the clobber to delete other stores earlier.  */
  return store_info->is_set ? 1 : 0;
}
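
/* Hypothetical trace of the shadowing loop above: for two back-to-back
   8-byte stores to the same base and offset, the second record_store
   call clears every positions_needed bit of the first store (i runs
   over [offset, offset+8)), DELETE stays true, and the first insn is
   unlinked from active_local_stores and handed to
   delete_dead_store_insn.  */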

static void
dump_insn_info (const char * start, insn_info_t insn_info)
{
  fprintf (dump_file, "%s insn=%d %s\n", start,
           INSN_UID (insn_info->insn),
           insn_info->store_rec ? "has store" : "naked");
}

/* If the modes are different and the value's source and target do not
   line up, we need to extract the value from the lower part of the
   rhs of the store, shift it, and then put it into a form that can be
   shoved into the read_insn.  This function generates a right SHIFT
   of a value that is at least ACCESS_SIZE bytes wide and returns a
   READ_MODE register holding the shifted value, or NULL if we failed
   to find a usable shift.  */

static rtx
find_shift_sequence (int access_size,
                     store_info_t store_info,
                     read_info_t read_info,
                     int shift)
{
  enum machine_mode store_mode = GET_MODE (store_info->mem);
  enum machine_mode read_mode = GET_MODE (read_info->mem);
  enum machine_mode new_mode;
  rtx read_reg = NULL;

  /* Some machines like the x86 have shift insns for each size of
     operand.  Other machines like the ppc or the ia-64 may only have
     shift insns that shift values within 32 or 64 bit registers.
     This loop tries to find the smallest shift insn that will right
     justify the value we want to read but is available in one insn on
     the machine.  */

  for (new_mode = smallest_mode_for_size (access_size * BITS_PER_UNIT,
                                          MODE_INT);
       GET_MODE_BITSIZE (new_mode) <= BITS_PER_WORD;
       new_mode = GET_MODE_WIDER_MODE (new_mode))
    {
      rtx target, new_reg, shift_seq, insn, new_lhs;
      int cost;

      /* Try a wider mode if truncating the store mode to NEW_MODE
         requires a real instruction.  */
      if (GET_MODE_BITSIZE (new_mode) < GET_MODE_BITSIZE (store_mode)
          && !TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (new_mode),
                                     GET_MODE_BITSIZE (store_mode)))
        continue;

      /* Also try a wider mode if the necessary punning is either not
         desirable or not possible.  */
      if (!CONSTANT_P (store_info->rhs)
          && !MODES_TIEABLE_P (new_mode, store_mode))
        continue;
      new_lhs = simplify_gen_subreg (new_mode, copy_rtx (store_info->rhs),
                                     store_mode, 0);
      if (new_lhs == NULL_RTX)
        continue;

      new_reg = gen_reg_rtx (new_mode);

      start_sequence ();

      /* In theory we could also check for an ashr.  Ian Taylor knows
         of one dsp where the cost of these two was not the same.  But
         this really is a rare case anyway.  */
      target = expand_binop (new_mode, lshr_optab, new_reg,
                             GEN_INT (shift), new_reg, 1, OPTAB_DIRECT);

      shift_seq = get_insns ();
      end_sequence ();

      if (target != new_reg || shift_seq == NULL)
        continue;

      cost = 0;
      for (insn = shift_seq; insn != NULL_RTX; insn = NEXT_INSN (insn))
        if (INSN_P (insn))
          cost += insn_rtx_cost (PATTERN (insn));

      /* The computation up to here is essentially independent
         of the arguments and could be precomputed.  It may
         not be worth doing so.  We could precompute if
         worthwhile or at least cache the results.  The result
         technically depends on both SHIFT and ACCESS_SIZE,
         but in practice the answer will depend only on ACCESS_SIZE.  */

      if (cost > COSTS_N_INSNS (1))
        continue;

      /* We found an acceptable shift.  Generate a move to
         take the value from the store and put it into the
         shift pseudo, then shift it, then generate another
         move to put it into the target of the read.  */
      emit_move_insn (new_reg, new_lhs);
      emit_insn (shift_seq);
      read_reg = extract_low_bits (read_mode, new_mode, new_reg);
      break;
    }

  return read_reg;
}
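
/* Example of the shift computed by replace_read below (little endian,
   hypothetical numbers): an SImode store covering bytes [0..4) and an
   HImode read of bytes [2..4) give shift = 2 - 0 = 2 bytes = 16 bits
   and access_size = 2 + 2 = 4 bytes, so find_shift_sequence looks for
   a one-insn 16-bit logical right shift in a mode of at least 4
   bytes.  */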

/* Take a sequence of:
     A <- r1
     ...
     ... <- A

   and change it into
     r2 <- r1
     A <- r1
     ...
     ... <- r2

   or

     r3 <- extract (r1)
     r3 <- r3 >> shift
     r2 <- extract (r3)
     ... <- r2

   or

     r2 <- extract (r1)
     ... <- r2

   depending on the alignment and the mode of the store and
   subsequent load.


   The STORE_INFO and STORE_INSN are for the store and READ_INFO
   and READ_INSN are for the read.  Return true if the replacement
   went ok.  */

static bool
replace_read (store_info_t store_info, insn_info_t store_insn,
              read_info_t read_info, insn_info_t read_insn, rtx *loc)
{
  enum machine_mode store_mode = GET_MODE (store_info->mem);
  enum machine_mode read_mode = GET_MODE (read_info->mem);
  int shift;
  int access_size; /* In bytes.  */
  rtx insns, read_reg;

  if (!dbg_cnt (dse))
    return false;

  /* To get here the read is within the boundaries of the write so
     shift will never be negative.  Start out with the shift being in
     bytes.  */
  if (BYTES_BIG_ENDIAN)
    shift = store_info->end - read_info->end;
  else
    shift = read_info->begin - store_info->begin;

  access_size = shift + GET_MODE_SIZE (read_mode);

  /* From now on it is bits.  */
  shift *= BITS_PER_UNIT;

  /* Create a sequence of instructions to set up the read register.
     This sequence goes immediately before the store and its result
     is read by the load.

     We need to keep this in perspective.  We are replacing a read
     with a sequence of insns, but the read will almost certainly be
     in cache, so it is not going to be an expensive one.  Thus, we
     are not willing to do a multi insn shift or worse a subroutine
     call to get rid of the read.  */
  if (dump_file)
    fprintf (dump_file, "trying to replace %smode load in insn %d"
             " from %smode store in insn %d\n",
             GET_MODE_NAME (read_mode), INSN_UID (read_insn->insn),
             GET_MODE_NAME (store_mode), INSN_UID (store_insn->insn));
  start_sequence ();
  if (shift)
    read_reg = find_shift_sequence (access_size, store_info, read_info, shift);
  else
    read_reg = extract_low_bits (read_mode, store_mode,
                                 copy_rtx (store_info->rhs));
  if (read_reg == NULL_RTX)
    {
      end_sequence ();
      if (dump_file)
        fprintf (dump_file, " -- could not extract bits of stored value\n");
      return false;
    }
  /* Force the value into a new register so that it won't be clobbered
     between the store and the load.  */
  read_reg = copy_to_mode_reg (read_mode, read_reg);
  insns = get_insns ();
  end_sequence ();

  if (validate_change (read_insn->insn, loc, read_reg, 0))
    {
      deferred_change_t deferred_change = pool_alloc (deferred_change_pool);

      /* Insert this right before the store insn where it will be safe
         from later insns that might change it before the read.  */
      emit_insn_before (insns, store_insn->insn);

      /* And now for the kludge part: cselib croaks if you just
         return at this point.  There are two reasons for this:

         1) Cselib has an idea of how many pseudos there are and
         that does not include the new ones we just added.

         2) Cselib does not know about the move insn we added
         above the store_info, and there is no way to tell it
         about it, because it has "moved on".

         Problem (1) is fixable with a certain amount of engineering.
         Problem (2) requires starting the bb from scratch.  This
         could be expensive.

         So we are just going to have to lie.  The move/extraction
         insns are not really an issue, cselib did not see them.  But
         the use of the new pseudo read_insn is a real problem because
         cselib has not scanned this insn.  The way that we solve this
         problem is that we are just going to put the mem back for now
         and when we are finished with the block, we undo this.  We
         keep a table of mems to get rid of.  At the end of the basic
         block we can put them back.  */

      *loc = read_info->mem;
      deferred_change->next = deferred_change_list;
      deferred_change_list = deferred_change;
      deferred_change->loc = loc;
      deferred_change->reg = read_reg;

      /* Get rid of the read_info, from the point of view of the
         rest of dse, play like this read never happened.  */
      read_insn->read_rec = read_info->next;
      pool_free (read_info_pool, read_info);
      if (dump_file)
        {
          fprintf (dump_file, " -- replaced the loaded MEM with ");
          print_simple_rtl (dump_file, read_reg);
          fprintf (dump_file, "\n");
        }
      return true;
    }
  else
    {
      if (dump_file)
        {
          fprintf (dump_file, " -- replacing the loaded MEM with ");
          print_simple_rtl (dump_file, read_reg);
          fprintf (dump_file, " led to an invalid instruction\n");
        }
      return false;
    }
}
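
/* Concrete instance (hypothetical) of the transformation above: for

     *p = x;   ...   y = *p;

   the sequence emitted before the store leaves the stored value in a
   fresh pseudo and the load is rewritten to use it:

     r = x;  *p = x;   ...   y = r;

   If validate_change rejects the new use, the sequence is never
   emitted and the read is left alone.  */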

/* A for_each_rtx callback in which DATA is the bb_info.  Check to see
   if LOC is a mem and if it is, look at the address and kill any
   appropriate stores that may be active.  */

static int
check_mem_read_rtx (rtx *loc, void *data)
{
  rtx mem = *loc;
  bb_info_t bb_info;
  insn_info_t insn_info;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT width = 0;
  alias_set_type spill_alias_set = 0;
  cselib_val *base = NULL;
  int group_id;
  read_info_t read_info;

  if (!mem || !MEM_P (mem))
    return 0;

  bb_info = (bb_info_t) data;
  insn_info = bb_info->last_insn;

  if ((MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
      || (MEM_VOLATILE_P (mem)))
    {
      if (dump_file)
        fprintf (dump_file, " adding wild read, volatile or barrier.\n");
      add_wild_read (bb_info);
      insn_info->cannot_delete = true;
      return 0;
    }

  /* If it is reading readonly mem, then there can be no conflict with
     another write.  */
  if (MEM_READONLY_P (mem))
    return 0;

  if (!canon_address (mem, &spill_alias_set, &group_id, &offset, &base))
    {
      if (dump_file)
        fprintf (dump_file, " adding wild read, canon_address failure.\n");
      add_wild_read (bb_info);
      return 0;
    }

  if (GET_MODE (mem) == BLKmode)
    width = -1;
  else
    width = GET_MODE_SIZE (GET_MODE (mem));

  read_info = pool_alloc (read_info_pool);
  read_info->group_id = group_id;
  read_info->mem = mem;
  read_info->alias_set = spill_alias_set;
  read_info->begin = offset;
  read_info->end = offset + width;
  read_info->next = insn_info->read_rec;
  insn_info->read_rec = read_info;

  /* We ignore the clobbers in store_info.  This is mildly aggressive,
     but there really should not be a clobber followed by a read.  */

  if (spill_alias_set)
    {
      insn_info_t i_ptr = active_local_stores;
      insn_info_t last = NULL;

      if (dump_file)
        fprintf (dump_file, " processing spill load %d\n",
                 (int) spill_alias_set);

      while (i_ptr)
        {
          store_info_t store_info = i_ptr->store_rec;

          /* Skip the clobbers.  */
          while (!store_info->is_set)
            store_info = store_info->next;

          if (store_info->alias_set == spill_alias_set)
            {
              if (dump_file)
                dump_insn_info ("removing from active", i_ptr);

              if (last)
                last->next_local_store = i_ptr->next_local_store;
              else
                active_local_stores = i_ptr->next_local_store;
            }
          else
            last = i_ptr;
          i_ptr = i_ptr->next_local_store;
        }
    }
  else if (group_id >= 0)
    {
      /* This is the restricted case where the base is a constant or
         the frame pointer and offset is a constant.  */
      insn_info_t i_ptr = active_local_stores;
      insn_info_t last = NULL;

      if (dump_file)
        {
          if (width == -1)
            fprintf (dump_file, " processing const load gid=%d[BLK]\n",
                     group_id);
          else
            fprintf (dump_file, " processing const load gid=%d[%d..%d)\n",
                     group_id, (int)offset, (int)(offset+width));
        }

      while (i_ptr)
        {
          bool remove = false;
          store_info_t store_info = i_ptr->store_rec;

          /* Skip the clobbers.  */
          while (!store_info->is_set)
            store_info = store_info->next;

          /* There are three cases here.  */
          if (store_info->group_id < 0)
            /* We have a cselib store followed by a read from a
               const base.  */
            remove
              = canon_true_dependence (store_info->mem,
                                       GET_MODE (store_info->mem),
                                       store_info->mem_addr,
                                       mem, rtx_varies_p);

          else if (group_id == store_info->group_id)
            {
              /* This is a block mode load.  We may get lucky and
                 canon_true_dependence may save the day.  */
              if (width == -1)
                remove
                  = canon_true_dependence (store_info->mem,
                                           GET_MODE (store_info->mem),
                                           store_info->mem_addr,
                                           mem, rtx_varies_p);

              /* If this read is just reading back something that we just
                 stored, rewrite the read.  */
              else
                {
                  if (store_info->rhs
                      && (offset >= store_info->begin)
                      && (offset + width <= store_info->end))
                    {
                      int mask = ((1L << width) - 1) << (offset - store_info->begin);

                      if ((store_info->positions_needed & mask) == mask
                          && replace_read (store_info, i_ptr,
                                           read_info, insn_info, loc))
                        return 0;
                    }
                  /* The bases are the same, just see if the offsets
                     overlap.  */
                  if ((offset < store_info->end)
                      && (offset + width > store_info->begin))
                    remove = true;
                }
            }

          /* else
             The else case that is missing here is that the
             bases are constant but different.  There is nothing
             to do here because there is no overlap.  */

          if (remove)
            {
              if (dump_file)
                dump_insn_info ("removing from active", i_ptr);

              if (last)
                last->next_local_store = i_ptr->next_local_store;
              else
                active_local_stores = i_ptr->next_local_store;
            }
          else
            last = i_ptr;
          i_ptr = i_ptr->next_local_store;
        }
    }
  else
    {
      insn_info_t i_ptr = active_local_stores;
      insn_info_t last = NULL;
      if (dump_file)
        {
          fprintf (dump_file, " processing cselib load mem:");
          print_inline_rtx (dump_file, mem, 0);
          fprintf (dump_file, "\n");
        }

      while (i_ptr)
        {
          bool remove = false;
          store_info_t store_info = i_ptr->store_rec;

          if (dump_file)
            fprintf (dump_file, " processing cselib load against insn %d\n",
                     INSN_UID (i_ptr->insn));

          /* Skip the clobbers.  */
          while (!store_info->is_set)
            store_info = store_info->next;

          /* If this read is just reading back something that we just
             stored, rewrite the read.  */
          if (store_info->rhs
              && store_info->group_id == -1
              && store_info->cse_base == base
              && (offset >= store_info->begin)
              && (offset + width <= store_info->end))
            {
              int mask = ((1L << width) - 1) << (offset - store_info->begin);

              if ((store_info->positions_needed & mask) == mask
                  && replace_read (store_info, i_ptr,
                                   read_info, insn_info, loc))
                return 0;
            }

          if (!store_info->alias_set)
            remove = canon_true_dependence (store_info->mem,
                                            GET_MODE (store_info->mem),
                                            store_info->mem_addr,
                                            mem, rtx_varies_p);

          if (remove)
            {
              if (dump_file)
                dump_insn_info ("removing from active", i_ptr);

              if (last)
                last->next_local_store = i_ptr->next_local_store;
              else
                active_local_stores = i_ptr->next_local_store;
            }
          else
            last = i_ptr;
          i_ptr = i_ptr->next_local_store;
        }
    }
  return 0;
}
1903 /* A note_uses callback that applies check_mem_read_rtx, via
1904 for_each_rtx, to every part of *LOC. DATA is the bb_info, as
1905 for check_mem_read_rtx. */
1907 static void
1908 check_mem_read_use (rtx *loc, void *data)
1910 for_each_rtx (loc, check_mem_read_rtx, data);
1913 /* Apply record_store to all candidate stores in INSN. Mark INSN
1914 as cannot_delete if some part of it is not a candidate store and
1915 assigns to a non-register target. */
1917 static void
1918 scan_insn (bb_info_t bb_info, rtx insn)
1920 rtx body;
1921 insn_info_t insn_info = pool_alloc (insn_info_pool);
1922 int mems_found = 0;
1923 memset (insn_info, 0, sizeof (struct insn_info));
1925 if (dump_file)
1926 fprintf (dump_file, "\n**scanning insn=%d\n",
1927 INSN_UID (insn));
1929 insn_info->prev_insn = bb_info->last_insn;
1930 insn_info->insn = insn;
1931 bb_info->last_insn = insn_info;
1934 /* Cselib clears the table for this case, so we have to essentially
1935 do the same. */
1936 if (NONJUMP_INSN_P (insn)
1937 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
1938 && MEM_VOLATILE_P (PATTERN (insn)))
1940 add_wild_read (bb_info);
1941 insn_info->cannot_delete = true;
1942 return;
1945 /* Look at all of the uses in the insn. */
1946 note_uses (&PATTERN (insn), check_mem_read_use, bb_info);
1948 if (CALL_P (insn))
1950 insn_info->cannot_delete = true;
1952 /* Const functions cannot do anything bad, i.e. read memory;
1953 however, they can read their parameters, which may have
1954 been pushed onto the stack. */
1955 if (CONST_OR_PURE_CALL_P (insn) && !pure_call_p (insn))
1957 insn_info_t i_ptr = active_local_stores;
1958 insn_info_t last = NULL;
1960 if (dump_file)
1961 fprintf (dump_file, "const call %d\n", INSN_UID (insn));
1963 /* See the head comment of the frame_read field. */
1964 if (reload_completed)
1965 insn_info->frame_read = true;
1967 /* Loop over the active stores and remove those which are
1968 killed by the const function call. */
1969 while (i_ptr)
1971 bool remove_store = false;
1973 /* The stack pointer based stores are always killed. */
1974 if (i_ptr->stack_pointer_based)
1975 remove_store = true;
1977 /* If the frame is read, the frame related stores are killed. */
1978 else if (insn_info->frame_read)
1980 store_info_t store_info = i_ptr->store_rec;
1982 /* Skip the clobbers. */
1983 while (!store_info->is_set)
1984 store_info = store_info->next;
1986 if (store_info->group_id >= 0
1987 && VEC_index (group_info_t, rtx_group_vec,
1988 store_info->group_id)->frame_related)
1989 remove_store = true;
1992 if (remove_store)
1994 if (dump_file)
1995 dump_insn_info ("removing from active", i_ptr);
1997 if (last)
1998 last->next_local_store = i_ptr->next_local_store;
1999 else
2000 active_local_stores = i_ptr->next_local_store;
2002 else
2003 last = i_ptr;
2005 i_ptr = i_ptr->next_local_store;
2009 else
2010 /* Every other call, including pure functions, may read memory. */
2011 add_wild_read (bb_info);
2013 return;
2016 /* Assuming that there are sets in these insns, we cannot delete
2017 them. */
2018 if ((GET_CODE (PATTERN (insn)) == CLOBBER)
2019 || volatile_refs_p (PATTERN (insn))
2020 || (flag_non_call_exceptions && may_trap_p (PATTERN (insn)))
2021 || (RTX_FRAME_RELATED_P (insn))
2022 || find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX))
2023 insn_info->cannot_delete = true;
2025 body = PATTERN (insn);
2026 if (GET_CODE (body) == PARALLEL)
2028 int i;
2029 for (i = 0; i < XVECLEN (body, 0); i++)
2030 mems_found += record_store (XVECEXP (body, 0, i), bb_info);
2032 else
2033 mems_found += record_store (body, bb_info);
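/* For instance (a hypothetical pattern), an insn such as

     (parallel [(set (mem:SI (reg/f:SI 6)) (reg:SI 0))
                (set (reg:SI 1) (const_int 0))])

   calls record_store once per element of the vector; only the
   first element is a candidate store to memory.  */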
2035 if (dump_file)
2036 fprintf (dump_file, "mems_found = %d, cannot_delete = %s\n",
2037 mems_found, insn_info->cannot_delete ? "true" : "false");
2039 /* If we found some sets of mems, and the insn has not been marked
2040 as cannot_delete, add it to active_local_stores so that it can
2041 be locally deleted if found dead. Otherwise mark it as
2042 cannot_delete. This simplifies the processing later. */
2043 if (mems_found == 1 && !insn_info->cannot_delete)
2045 insn_info->next_local_store = active_local_stores;
2046 active_local_stores = insn_info;
2048 else
2049 insn_info->cannot_delete = true;
2053 /* Remove from active_local_stores any insn whose store is based
2054 on BASE. This is a callback from cselib that is used to get rid
2055 of such stores when BASE is discarded. */
2057 static void
2058 remove_useless_values (cselib_val *base)
2060 insn_info_t insn_info = active_local_stores;
2061 insn_info_t last = NULL;
2063 while (insn_info)
2065 store_info_t store_info = insn_info->store_rec;
2066 bool delete = false;
2068 /* If ANY of the store_infos match the cselib group that is
2069 being deleted, then the insn cannot be deleted. */
2070 while (store_info)
2072 if ((store_info->group_id == -1)
2073 && (store_info->cse_base == base))
2075 delete = true;
2076 break;
2078 store_info = store_info->next;
2081 if (delete)
2083 if (last)
2084 last->next_local_store = insn_info->next_local_store;
2085 else
2086 active_local_stores = insn_info->next_local_store;
2087 free_store_info (insn_info);
2089 else
2090 last = insn_info;
2092 insn_info = insn_info->next_local_store;
2097 /* Do all of step 1. */
2099 static void
2100 dse_step1 (void)
2102 basic_block bb;
2104 cselib_init (false);
2105 all_blocks = BITMAP_ALLOC (NULL);
2106 bitmap_set_bit (all_blocks, ENTRY_BLOCK);
2107 bitmap_set_bit (all_blocks, EXIT_BLOCK);
2109 FOR_ALL_BB (bb)
2111 insn_info_t ptr;
2112 bb_info_t bb_info = pool_alloc (bb_info_pool);
2114 memset (bb_info, 0, sizeof (struct bb_info));
2115 bitmap_set_bit (all_blocks, bb->index);
2117 bb_table[bb->index] = bb_info;
2118 cselib_discard_hook = remove_useless_values;
2120 if (bb->index >= NUM_FIXED_BLOCKS)
2122 rtx insn;
2124 cse_store_info_pool
2125 = create_alloc_pool ("cse_store_info_pool",
2126 sizeof (struct store_info), 100);
2127 active_local_stores = NULL;
2128 cselib_clear_table ();
2130 /* Scan the insns. */
2131 FOR_BB_INSNS (bb, insn)
2133 if (INSN_P (insn))
2134 scan_insn (bb_info, insn);
2135 cselib_process_insn (insn);
2138 /* This is something of a hack, because the global algorithm
2139 is supposed to take care of the case where stores go dead
2140 at the end of the function. However, the global
2141 algorithm must take a more conservative view of block
2142 mode reads than the local algorithm does. So to handle the case
2143 where you have a store to the frame followed by a non-
2144 overlapping block mode read, we look at the active local
2145 stores at the end of the function and delete all of the
2146 frame and spill based ones. */
2147 if (stores_off_frame_dead_at_return
2148 && (EDGE_COUNT (bb->succs) == 0
2149 || (single_succ_p (bb)
2150 && single_succ (bb) == EXIT_BLOCK_PTR
2151 && ! current_function_calls_eh_return)))
2153 insn_info_t i_ptr = active_local_stores;
2154 while (i_ptr)
2156 store_info_t store_info = i_ptr->store_rec;
2158 /* Skip the clobbers. */
2159 while (!store_info->is_set)
2160 store_info = store_info->next;
2161 if (store_info->alias_set)
2162 delete_dead_store_insn (i_ptr);
2163 else
2164 if (store_info->group_id >= 0)
2166 group_info_t group
2167 = VEC_index (group_info_t, rtx_group_vec, store_info->group_id);
2168 if (group->frame_related)
2169 delete_dead_store_insn (i_ptr);
2172 i_ptr = i_ptr->next_local_store;
2176 /* Get rid of the loads that were discovered in
2177 replace_read. Cselib is finished with this block. */
2178 while (deferred_change_list)
2180 deferred_change_t next = deferred_change_list->next;
2182 /* There is no reason to validate this change. That was
2183 done earlier. */
2184 *deferred_change_list->loc = deferred_change_list->reg;
2185 pool_free (deferred_change_pool, deferred_change_list);
2186 deferred_change_list = next;
2189 /* Get rid of all of the cselib based store_infos in this
2190 block and mark the containing insns as not being
2191 deletable. */
2192 ptr = bb_info->last_insn;
2193 while (ptr)
2195 if (ptr->contains_cselib_groups)
2196 free_store_info (ptr);
2197 ptr = ptr->prev_insn;
2200 free_alloc_pool (cse_store_info_pool);
2204 cselib_finish ();
2205 htab_empty (rtx_group_table);
2209 /*----------------------------------------------------------------------------
2210 Second step.
2212 Assign each byte position in the stores that we are going to
2213 analyze globally to a position in the bitmaps. The dse_step2_*
2214 functions return true if any bit positions were assigned.
2215 ----------------------------------------------------------------------------*/
2217 static void
2218 dse_step2_init (void)
2220 unsigned int i;
2221 group_info_t group;
2223 for (i = 0; VEC_iterate (group_info_t, rtx_group_vec, i, group); i++)
2225 /* For all non stack related bases, we only consider a store to
2226 be deletable if there are two or more stores for that
2227 position. This is because it takes one store to make the
2228 other store redundant. However, for the stores that are
2229 stack related, we consider them if there is only one store
2230 for the position. We do this because the stack related
2231 stores can be deleted if there is no read between them and
2232 the end of the function.
2234 To make this work in the current framework, we take the stack
2235 related bases and add all of the bits from store1 into store2.
2236 This has the effect of making them eligible even if there is
2237 only one store. */
2239 if (stores_off_frame_dead_at_return && group->frame_related)
2241 bitmap_ior_into (group->store2_n, group->store1_n);
2242 bitmap_ior_into (group->store2_p, group->store1_p);
2243 if (dump_file)
2244 fprintf (dump_file, "group %d is frame related ", i);
2247 group->offset_map_size_n++;
2248 group->offset_map_n = XNEWVEC (int, group->offset_map_size_n);
2249 group->offset_map_size_p++;
2250 group->offset_map_p = XNEWVEC (int, group->offset_map_size_p);
2251 group->process_globally = false;
2252 if (dump_file)
2254 fprintf (dump_file, "group %d(%d+%d): ", i,
2255 (int)bitmap_count_bits (group->store2_n),
2256 (int)bitmap_count_bits (group->store2_p));
2257 bitmap_print (dump_file, group->store2_n, "n ", " ");
2258 bitmap_print (dump_file, group->store2_p, "p ", "\n");
2264 /* Init the offset tables for the normal case. */
2266 static bool
2267 dse_step2_nospill (void)
2269 unsigned int i;
2270 group_info_t group;
2271 /* Position 0 is never assigned; in the maps, a 0 means that the
2272 offset has no bitmap position. */
2273 current_position = 1;
2275 for (i = 0; VEC_iterate (group_info_t, rtx_group_vec, i, group); i++)
2277 bitmap_iterator bi;
2278 unsigned int j;
2280 if (group == clear_alias_group)
2281 continue;
2283 memset (group->offset_map_n, 0, sizeof(int) * group->offset_map_size_n);
2284 memset (group->offset_map_p, 0, sizeof(int) * group->offset_map_size_p);
2285 bitmap_clear (group->group_kill);
2287 EXECUTE_IF_SET_IN_BITMAP (group->store2_n, 0, j, bi)
2289 bitmap_set_bit (group->group_kill, current_position);
2290 group->offset_map_n[j] = current_position++;
2291 group->process_globally = true;
2293 EXECUTE_IF_SET_IN_BITMAP (group->store2_p, 0, j, bi)
2295 bitmap_set_bit (group->group_kill, current_position);
2296 group->offset_map_p[j] = current_position++;
2297 group->process_globally = true;
2300 return current_position != 1;
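/* Hedged sketch with hypothetical numbers: if the first group
   processed has store2_n bit {8} and store2_p bits {0, 4}, the
   loops above assign

     offset_map_n[8] = 1,  offset_map_p[0] = 2,  offset_map_p[4] = 3

   leaving group_kill == {1, 2, 3} and current_position == 4, so
   the function returns true.  */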
2304 /* Init the offset tables for the spill case. */
2306 static bool
2307 dse_step2_spill (void)
2309 unsigned int j;
2310 group_info_t group = clear_alias_group;
2311 bitmap_iterator bi;
2313 /* Position 0 is never assigned; in the maps, a 0 means that the
2314 offset has no bitmap position. */
2315 current_position = 1;
2317 if (dump_file)
2319 bitmap_print (dump_file, clear_alias_sets,
2320 "clear alias sets ", "\n");
2321 bitmap_print (dump_file, disqualified_clear_alias_sets,
2322 "disqualified clear alias sets ", "\n");
2325 memset (group->offset_map_n, 0, sizeof(int) * group->offset_map_size_n);
2326 memset (group->offset_map_p, 0, sizeof(int) * group->offset_map_size_p);
2327 bitmap_clear (group->group_kill);
2329 /* Remove the disqualified positions from the store2_p set. */
2330 bitmap_and_compl_into (group->store2_p, disqualified_clear_alias_sets);
2332 /* We do not need to process the store2_n set because
2333 alias_sets are always positive. */
2334 EXECUTE_IF_SET_IN_BITMAP (group->store2_p, 0, j, bi)
2336 bitmap_set_bit (group->group_kill, current_position);
2337 group->offset_map_p[j] = current_position++;
2338 group->process_globally = true;
2341 return current_position != 1;
2346 /*----------------------------------------------------------------------------
2347 Third step.
2349 Build the bit vectors for the transfer functions.
2350 ----------------------------------------------------------------------------*/
2353 /* Note that this is NOT a general purpose function. Any mem that has
2354 an alias set registered here is expected to be COMPLETELY unaliased:
2355 i.e. its addresses are not and need not be examined.
2357 It is known that all references to this address will have this
2358 alias set and there are NO other references to this address in the
2359 function.
2361 Currently the only place that is known to be clean enough to use
2362 this interface is the code that assigns the spill locations.
2364 All of the mems that have alias_sets registered are subjected to a
2365 very powerful form of dse where function calls, volatile reads and
2366 writes, and reads from random locations are not taken into account.
2368 It is also assumed that these locations go dead when the function
2369 returns. This assumption could be relaxed if places were found
2370 where it does not hold.
2372 The MODE is passed in and saved. The mode of each load or store to
2373 a mem with ALIAS_SET is checked against MODE. If the size of that
2374 load or store is different from MODE, processing is halted on this
2375 alias set. For the vast majority of alias sets, all of the loads
2376 and stores will use the same mode. But vectors are treated
2377 differently: the alias set is established for the entire vector,
2378 but reload will insert loads and stores for individual elements and
2379 we do not necessarily have the information to track those separate
2380 elements. So when we see a mode mismatch, we just bail. */
2383 void
2384 dse_record_singleton_alias_set (alias_set_type alias_set,
2385 enum machine_mode mode)
2387 struct clear_alias_mode_holder tmp_holder;
2388 struct clear_alias_mode_holder *entry;
2389 void **slot;
2391 /* If we are not going to run dse, we need to return now or there
2392 will be problems with allocating the bitmaps. */
2393 if ((!gate_dse()) || !alias_set)
2394 return;
2396 if (!clear_alias_sets)
2398 clear_alias_sets = BITMAP_ALLOC (NULL);
2399 disqualified_clear_alias_sets = BITMAP_ALLOC (NULL);
2400 clear_alias_mode_table = htab_create (11, clear_alias_mode_hash,
2401 clear_alias_mode_eq, NULL);
2402 clear_alias_mode_pool = create_alloc_pool ("clear_alias_mode_pool",
2403 sizeof (struct clear_alias_mode_holder), 100);
2406 bitmap_set_bit (clear_alias_sets, alias_set);
2408 tmp_holder.alias_set = alias_set;
2410 slot = htab_find_slot (clear_alias_mode_table, &tmp_holder, INSERT);
2411 gcc_assert (*slot == NULL);
2413 *slot = entry = pool_alloc (clear_alias_mode_pool);
2414 entry->alias_set = alias_set;
2415 entry->mode = mode;
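/* Hedged usage sketch; per the comment above, the real caller is
   the code that assigns spill locations, and SPILL_MEM here is a
   hypothetical MEM for a spill slot that is never aliased:

     alias_set_type set = new_alias_set ();
     set_mem_alias_set (spill_mem, set);
     dse_record_singleton_alias_set (set, GET_MODE (spill_mem));

   After this, every reference to the slot is assumed to carry SET
   and to alias nothing else.  */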
2419 /* Remove ALIAS_SET from the sets of stack slots being considered. */
2421 void
2422 dse_invalidate_singleton_alias_set (alias_set_type alias_set)
2424 if ((!gate_dse()) || !alias_set)
2425 return;
2427 bitmap_clear_bit (clear_alias_sets, alias_set);
2431 /* Look up the bitmap index for OFFSET in GROUP_INFO. If it is not
2432 there, return 0. */
2434 static int
2435 get_bitmap_index (group_info_t group_info, HOST_WIDE_INT offset)
2437 if (offset < 0)
2439 HOST_WIDE_INT offset_p = -offset;
2440 if (offset_p >= group_info->offset_map_size_n)
2441 return 0;
2442 return group_info->offset_map_n[offset_p];
2444 else
2446 if (offset >= group_info->offset_map_size_p)
2447 return 0;
2448 return group_info->offset_map_p[offset];
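/* Hedged lookup sketch, reusing the hypothetical numbers from the
   dse_step2_nospill comment above: get_bitmap_index (group, -8)
   reads offset_map_n[8] and returns 1; get_bitmap_index (group, 4)
   reads offset_map_p[4] and returns 3; and get_bitmap_index
   (group, 2) returns 0 because offset 2 was never assigned a
   position.  */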
2453 /* Process the STORE_INFOs into the bitmaps GEN and KILL. KILL
2454 may be NULL. */
2456 static void
2457 scan_stores_nospill (store_info_t store_info, bitmap gen, bitmap kill)
2459 while (store_info)
2461 HOST_WIDE_INT i;
2462 group_info_t group_info
2463 = VEC_index (group_info_t, rtx_group_vec, store_info->group_id);
2464 if (group_info->process_globally)
2465 for (i = store_info->begin; i < store_info->end; i++)
2467 int index = get_bitmap_index (group_info, i);
2468 if (index != 0)
2470 bitmap_set_bit (gen, index);
2471 if (kill)
2472 bitmap_clear_bit (kill, index);
2475 store_info = store_info->next;
2480 /* Process the STORE_INFOs into the bitmaps GEN and KILL. KILL
2481 may be NULL. */
2483 static void
2484 scan_stores_spill (store_info_t store_info, bitmap gen, bitmap kill)
2486 while (store_info)
2488 if (store_info->alias_set)
2490 int index = get_bitmap_index (clear_alias_group,
2491 store_info->alias_set);
2492 if (index != 0)
2494 bitmap_set_bit (gen, index);
2495 if (kill)
2496 bitmap_clear_bit (kill, index);
2499 store_info = store_info->next;
2504 /* Process the READ_INFOs into the bitmaps GEN and KILL. KILL
2505 may be NULL. */
2507 static void
2508 scan_reads_nospill (insn_info_t insn_info, bitmap gen, bitmap kill)
2510 read_info_t read_info = insn_info->read_rec;
2511 int i;
2512 group_info_t group;
2514 /* If this insn reads the frame, kill all the frame related stores. */
2515 if (insn_info->frame_read)
2517 for (i = 0; VEC_iterate (group_info_t, rtx_group_vec, i, group); i++)
2518 if (group->process_globally && group->frame_related)
2520 if (kill)
2521 bitmap_ior_into (kill, group->group_kill);
2522 bitmap_and_compl_into (gen, group->group_kill);
2526 while (read_info)
2528 for (i = 0; VEC_iterate (group_info_t, rtx_group_vec, i, group); i++)
2530 if (group->process_globally)
2532 if (i == read_info->group_id)
2534 if (read_info->begin > read_info->end)
2536 /* Begin > end for block mode reads. */
2537 if (kill)
2538 bitmap_ior_into (kill, group->group_kill);
2539 bitmap_and_compl_into (gen, group->group_kill);
2541 else
2543 /* The groups are the same, just process the
2544 offsets. */
2545 HOST_WIDE_INT j;
2546 for (j = read_info->begin; j < read_info->end; j++)
2548 int index = get_bitmap_index (group, j);
2549 if (index != 0)
2551 if (kill)
2552 bitmap_set_bit (kill, index);
2553 bitmap_clear_bit (gen, index);
2558 else
2560 /* The groups are different; if the alias sets
2561 conflict, clear the entire group. We only need
2562 to apply this test if the read_info is a cselib
2563 read. Anything with a constant base cannot alias
2564 something else with a different constant
2565 base. */
2566 if ((read_info->group_id < 0)
2567 && canon_true_dependence (group->base_mem,
2568 QImode,
2569 group->canon_base_mem,
2570 read_info->mem, rtx_varies_p))
2572 if (kill)
2573 bitmap_ior_into (kill, group->group_kill);
2574 bitmap_and_compl_into (gen, group->group_kill);
2580 read_info = read_info->next;
2584 /* Process the READ_INFOs into the bitmaps GEN and KILL. KILL
2585 may be NULL. */
2587 static void
2588 scan_reads_spill (read_info_t read_info, bitmap gen, bitmap kill)
2590 while (read_info)
2592 if (read_info->alias_set)
2594 int index = get_bitmap_index (clear_alias_group,
2595 read_info->alias_set);
2596 if (index != 0)
2598 if (kill)
2599 bitmap_set_bit (kill, index);
2600 bitmap_clear_bit (gen, index);
2604 read_info = read_info->next;
2609 /* Return the insn in BB_INFO before the first wild read or, if
2610 there are no wild reads in the block, the last insn. */
2612 static insn_info_t
2613 find_insn_before_first_wild_read (bb_info_t bb_info)
2615 insn_info_t insn_info = bb_info->last_insn;
2616 insn_info_t last_wild_read = NULL;
2618 while (insn_info)
2620 if (insn_info->wild_read)
2622 last_wild_read = insn_info->prev_insn;
2623 /* Block starts with wild read. */
2624 if (!last_wild_read)
2625 return NULL;
2628 insn_info = insn_info->prev_insn;
2631 if (last_wild_read)
2632 return last_wild_read;
2633 else
2634 return bb_info->last_insn;
2638 /* Scan the insns in BB_INFO starting at PTR and going to the top of
2639 the block in order to build the gen and kill sets for the block.
2640 We start at PTR, which may be the last insn in the block or may be
2641 the first insn with a wild read. In the latter case we are able to
2642 skip the rest of the block because it just does not matter:
2643 anything that happens is hidden by the wild read. */
2645 static void
2646 dse_step3_scan (bool for_spills, basic_block bb)
2648 bb_info_t bb_info = bb_table[bb->index];
2649 insn_info_t insn_info;
2651 if (for_spills)
2652 /* There are no wild reads in the spill case. */
2653 insn_info = bb_info->last_insn;
2654 else
2655 insn_info = find_insn_before_first_wild_read (bb_info);
2657 /* In the spill case, or in the no_spill case when there is no
2658 wild read in the block, we will need a kill set. */
2659 if (insn_info == bb_info->last_insn)
2661 if (bb_info->kill)
2662 bitmap_clear (bb_info->kill);
2663 else
2664 bb_info->kill = BITMAP_ALLOC (NULL);
2666 else
2667 if (bb_info->kill)
2668 BITMAP_FREE (bb_info->kill);
2670 while (insn_info)
2672 /* There may have been code deleted by the dce pass run before
2673 this phase. */
2674 if (insn_info->insn && INSN_P (insn_info->insn))
2676 /* Process the read(s) last. */
2677 if (for_spills)
2679 scan_stores_spill (insn_info->store_rec, bb_info->gen, bb_info->kill);
2680 scan_reads_spill (insn_info->read_rec, bb_info->gen, bb_info->kill);
2682 else
2684 scan_stores_nospill (insn_info->store_rec, bb_info->gen, bb_info->kill);
2685 scan_reads_nospill (insn_info, bb_info->gen, bb_info->kill);
2689 insn_info = insn_info->prev_insn;
2694 /* Set the gen set of the exit block, and also any block with no
2695 successors that does not have a wild read. */
2697 static void
2698 dse_step3_exit_block_scan (bb_info_t bb_info)
2700 /* The gen set is all 0's for the exit block except for the
2701 frame_pointer_group. */
2703 if (stores_off_frame_dead_at_return)
2705 unsigned int i;
2706 group_info_t group;
2708 for (i = 0; VEC_iterate (group_info_t, rtx_group_vec, i, group); i++)
2710 if (group->process_globally && group->frame_related)
2711 bitmap_ior_into (bb_info->gen, group->group_kill);
2717 /* Find all of the blocks that are not backwards reachable from the
2718 exit block or any block with no successors (BB). These are the
2719 infinite loops or infinite self loops. These blocks will still
2720 have their bits set in UNREACHABLE_BLOCKS. */
2722 static void
2723 mark_reachable_blocks (sbitmap unreachable_blocks, basic_block bb)
2725 edge e;
2726 edge_iterator ei;
2728 if (TEST_BIT (unreachable_blocks, bb->index))
2730 RESET_BIT (unreachable_blocks, bb->index);
2731 FOR_EACH_EDGE (e, ei, bb->preds)
2733 mark_reachable_blocks (unreachable_blocks, e->src);
2738 /* Build the transfer functions for the function. */
2740 static void
2741 dse_step3 (bool for_spills)
2743 basic_block bb;
2744 sbitmap unreachable_blocks = sbitmap_alloc (last_basic_block);
2745 sbitmap_iterator sbi;
2746 bitmap all_ones = NULL;
2747 unsigned int i;
2749 sbitmap_ones (unreachable_blocks);
2751 FOR_ALL_BB (bb)
2753 bb_info_t bb_info = bb_table[bb->index];
2754 if (bb_info->gen)
2755 bitmap_clear (bb_info->gen);
2756 else
2757 bb_info->gen = BITMAP_ALLOC (NULL);
2759 if (bb->index == ENTRY_BLOCK)
2761 else if (bb->index == EXIT_BLOCK)
2762 dse_step3_exit_block_scan (bb_info);
2763 else
2764 dse_step3_scan (for_spills, bb);
2765 if (EDGE_COUNT (bb->succs) == 0)
2766 mark_reachable_blocks (unreachable_blocks, bb);
2768 /* If this is the second time dataflow is run, delete the old
2769 sets. */
2770 if (bb_info->in)
2771 BITMAP_FREE (bb_info->in);
2772 if (bb_info->out)
2773 BITMAP_FREE (bb_info->out);
2776 /* For any block in an infinite loop, we must initialize the out set
2777 to all ones. This could be expensive, but almost never occurs in
2778 practice. However, it is common in regression tests. */
2779 EXECUTE_IF_SET_IN_SBITMAP (unreachable_blocks, 0, i, sbi)
2781 if (bitmap_bit_p (all_blocks, i))
2783 bb_info_t bb_info = bb_table[i];
2784 if (!all_ones)
2786 unsigned int j;
2787 group_info_t group;
2789 all_ones = BITMAP_ALLOC (NULL);
2790 for (j = 0; VEC_iterate (group_info_t, rtx_group_vec, j, group); j++)
2791 bitmap_ior_into (all_ones, group->group_kill);
2793 if (!bb_info->out)
2795 bb_info->out = BITMAP_ALLOC (NULL);
2796 bitmap_copy (bb_info->out, all_ones);
2801 if (all_ones)
2802 BITMAP_FREE (all_ones);
2803 sbitmap_free (unreachable_blocks);
2808 /*----------------------------------------------------------------------------
2809 Fourth step.
2811 Solve the bitvector equations.
2812 ----------------------------------------------------------------------------*/
2815 /* Confluence function for blocks with no successors. Create an out
2816 set from the gen set of the exit block. This block logically has
2817 the exit block as a successor. */
2821 static void
2822 dse_confluence_0 (basic_block bb)
2824 bb_info_t bb_info = bb_table[bb->index];
2826 if (bb->index == EXIT_BLOCK)
2827 return;
2829 if (!bb_info->out)
2831 bb_info->out = BITMAP_ALLOC (NULL);
2832 bitmap_copy (bb_info->out, bb_table[EXIT_BLOCK]->gen);
2836 /* Propagate the information from the in set of the dest of E to the
2837 out set of the src of E. If the various in or out sets are not
2838 there, that means they are all ones. */
2840 static void
2841 dse_confluence_n (edge e)
2843 bb_info_t src_info = bb_table[e->src->index];
2844 bb_info_t dest_info = bb_table[e->dest->index];
2846 if (dest_info->in)
2848 if (src_info->out)
2849 bitmap_and_into (src_info->out, dest_info->in);
2850 else
2852 src_info->out = BITMAP_ALLOC (NULL);
2853 bitmap_copy (src_info->out, dest_info->in);
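/* In other words, out (src) is the intersection of in (dest) over
   all of src's successors; a missing set stands for all ones, which
   is why nothing needs to be done when dest_info->in is absent.  */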
2859 /* Propagate the info from the out to the in set of BB_INDEX's basic
2860 block. There are three cases:
2862 1) The block has no kill set. In this case the kill set is all
2863 ones. It does not matter what the out set of the block is, none of
2864 the info can reach the top. The only thing that reaches the top is
2865 the gen set and we just copy the set.
2867 2) There is a kill set but no out set and bb has successors. In
2868 this case we just return. Eventually an out set will be created and
2869 it is better to wait than to create a set of ones.
2871 3) There is both a kill and out set. We apply the obvious transfer
2872 function.
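   Restated as a bitvector equation, case 3 is

     in = gen | (out & ~kill)

   which is exactly what bitmap_ior_and_compl computes below.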
2875 static bool
2876 dse_transfer_function (int bb_index)
2878 bb_info_t bb_info = bb_table[bb_index];
2880 if (bb_info->kill)
2882 if (bb_info->out)
2884 /* Case 3 above. */
2885 if (bb_info->in)
2886 return bitmap_ior_and_compl (bb_info->in, bb_info->gen,
2887 bb_info->out, bb_info->kill);
2888 else
2890 bb_info->in = BITMAP_ALLOC (NULL);
2891 bitmap_ior_and_compl (bb_info->in, bb_info->gen,
2892 bb_info->out, bb_info->kill);
2893 return true;
2896 else
2897 /* Case 2 above. */
2898 return false;
2900 else
2902 /* Case 1 above. If there is already an in set, nothing
2903 happens. */
2904 if (bb_info->in)
2905 return false;
2906 else
2908 bb_info->in = BITMAP_ALLOC (NULL);
2909 bitmap_copy (bb_info->in, bb_info->gen);
2910 return true;
2915 /* Solve the dataflow equations. */
2917 static void
2918 dse_step4 (void)
2920 df_simple_dataflow (DF_BACKWARD, NULL, dse_confluence_0,
2921 dse_confluence_n, dse_transfer_function,
2922 all_blocks, df_get_postorder (DF_BACKWARD),
2923 df_get_n_blocks (DF_BACKWARD));
2924 if (dump_file)
2926 basic_block bb;
2928 fprintf (dump_file, "\n\n*** Global dataflow info after analysis.\n");
2929 FOR_ALL_BB (bb)
2931 bb_info_t bb_info = bb_table[bb->index];
2933 df_print_bb_index (bb, dump_file);
2934 if (bb_info->in)
2935 bitmap_print (dump_file, bb_info->in, " in: ", "\n");
2936 else
2937 fprintf (dump_file, " in: *MISSING*\n");
2938 if (bb_info->gen)
2939 bitmap_print (dump_file, bb_info->gen, " gen: ", "\n");
2940 else
2941 fprintf (dump_file, " gen: *MISSING*\n");
2942 if (bb_info->kill)
2943 bitmap_print (dump_file, bb_info->kill, " kill: ", "\n");
2944 else
2945 fprintf (dump_file, " kill: *MISSING*\n");
2946 if (bb_info->out)
2947 bitmap_print (dump_file, bb_info->out, " out: ", "\n");
2948 else
2949 fprintf (dump_file, " out: *MISSING*\n\n");
2956 /*----------------------------------------------------------------------------
2957 Fifth step.
2959 Delete the stores that can only be deleted using the global information.
2960 ----------------------------------------------------------------------------*/
2963 static void
2964 dse_step5_nospill (void)
2966 basic_block bb;
2967 FOR_EACH_BB (bb)
2969 bb_info_t bb_info = bb_table[bb->index];
2970 insn_info_t insn_info = bb_info->last_insn;
2971 bitmap v = bb_info->out;
2973 while (insn_info)
2975 bool deleted = false;
2976 if (dump_file && insn_info->insn)
2978 fprintf (dump_file, "starting to process insn %d\n",
2979 INSN_UID (insn_info->insn));
2980 bitmap_print (dump_file, v, " v: ", "\n");
2983 /* There may have been code deleted by the dce pass run before
2984 this phase. */
2985 if (insn_info->insn
2986 && INSN_P (insn_info->insn)
2987 && (!insn_info->cannot_delete)
2988 && (!bitmap_empty_p (v)))
2990 store_info_t store_info = insn_info->store_rec;
2992 /* Try to delete the current insn. */
2993 deleted = true;
2995 /* Skip the clobbers. */
2996 while (!store_info->is_set)
2997 store_info = store_info->next;
2999 if (store_info->alias_set)
3000 deleted = false;
3001 else
3003 HOST_WIDE_INT i;
3004 group_info_t group_info
3005 = VEC_index (group_info_t, rtx_group_vec, store_info->group_id);
3007 for (i = store_info->begin; i < store_info->end; i++)
3009 int index = get_bitmap_index (group_info, i);
3011 if (dump_file)
3012 fprintf (dump_file, "i = %d, index = %d\n", (int)i, index);
3013 if (index == 0 || !bitmap_bit_p (v, index))
3015 if (dump_file)
3016 fprintf (dump_file, "failing at i = %d\n", (int)i);
3017 deleted = false;
3018 break;
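/* Hedged sketch (hypothetical numbers): for a store covering
   [begin, end) == [0, 4), the loop above requires the positions
   for offsets 0 through 3 to all be present in v; if any byte of
   the store may still be read, deleted is cleared and the insn
   survives.  */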
3022 if (deleted)
3024 if (dbg_cnt (dse))
3026 check_for_inc_dec (insn_info->insn);
3027 delete_insn (insn_info->insn);
3028 insn_info->insn = NULL;
3029 globally_deleted++;
3033 /* We do not want to process the local info if the insn was
3034 deleted. For instance, if the insn did a wild read, we
3035 no longer need to trash the info. */
3036 if (insn_info->insn
3037 && INSN_P (insn_info->insn)
3038 && (!deleted))
3040 scan_stores_nospill (insn_info->store_rec, v, NULL);
3041 if (insn_info->wild_read)
3043 if (dump_file)
3044 fprintf (dump_file, "wild read\n");
3045 bitmap_clear (v);
3047 else if (insn_info->read_rec)
3049 if (dump_file)
3050 fprintf (dump_file, "regular read\n");
3051 scan_reads_nospill (insn_info, v, NULL);
3055 insn_info = insn_info->prev_insn;
3061 static void
3062 dse_step5_spill (void)
3064 basic_block bb;
3065 FOR_EACH_BB (bb)
3067 bb_info_t bb_info = bb_table[bb->index];
3068 insn_info_t insn_info = bb_info->last_insn;
3069 bitmap v = bb_info->out;
3071 while (insn_info)
3073 bool deleted = false;
3074 /* There may have been code deleted by the dce pass run before
3075 this phase. */
3076 if (insn_info->insn
3077 && INSN_P (insn_info->insn)
3078 && (!insn_info->cannot_delete)
3079 && (!bitmap_empty_p (v)))
3081 /* Try to delete the current insn. */
3082 store_info_t store_info = insn_info->store_rec;
3083 deleted = true;
3085 while (store_info)
3087 if (store_info->alias_set)
3089 int index = get_bitmap_index (clear_alias_group,
3090 store_info->alias_set);
3091 if (index == 0 || !bitmap_bit_p (v, index))
3093 deleted = false;
3094 break;
3097 else
3098 deleted = false;
3099 store_info = store_info->next;
3101 if (deleted && dbg_cnt (dse))
3103 if (dump_file)
3104 fprintf (dump_file, "Spill deleting insn %d\n",
3105 INSN_UID (insn_info->insn));
3106 check_for_inc_dec (insn_info->insn);
3107 delete_insn (insn_info->insn);
3108 spill_deleted++;
3109 insn_info->insn = NULL;
3113 if (insn_info->insn
3114 && INSN_P (insn_info->insn)
3115 && (!deleted))
3117 scan_stores_spill (insn_info->store_rec, v, NULL);
3118 scan_reads_spill (insn_info->read_rec, v, NULL);
3121 insn_info = insn_info->prev_insn;
3128 /*----------------------------------------------------------------------------
3129 Sixth step.
3131 Destroy everything left standing.
3132 ----------------------------------------------------------------------------*/
3134 static void
3135 dse_step6 (bool global_done)
3137 unsigned int i;
3138 group_info_t group;
3139 basic_block bb;
3141 if (global_done)
3143 for (i = 0; VEC_iterate (group_info_t, rtx_group_vec, i, group); i++)
3145 free (group->offset_map_n);
3146 free (group->offset_map_p);
3147 BITMAP_FREE (group->store1_n);
3148 BITMAP_FREE (group->store1_p);
3149 BITMAP_FREE (group->store2_n);
3150 BITMAP_FREE (group->store2_p);
3151 BITMAP_FREE (group->group_kill);
3154 FOR_ALL_BB (bb)
3156 bb_info_t bb_info = bb_table[bb->index];
3157 BITMAP_FREE (bb_info->gen);
3158 if (bb_info->kill)
3159 BITMAP_FREE (bb_info->kill);
3160 if (bb_info->in)
3161 BITMAP_FREE (bb_info->in);
3162 if (bb_info->out)
3163 BITMAP_FREE (bb_info->out);
3166 else
3168 for (i = 0; VEC_iterate (group_info_t, rtx_group_vec, i, group); i++)
3170 BITMAP_FREE (group->store1_n);
3171 BITMAP_FREE (group->store1_p);
3172 BITMAP_FREE (group->store2_n);
3173 BITMAP_FREE (group->store2_p);
3174 BITMAP_FREE (group->group_kill);
3178 if (clear_alias_sets)
3180 BITMAP_FREE (clear_alias_sets);
3181 BITMAP_FREE (disqualified_clear_alias_sets);
3182 free_alloc_pool (clear_alias_mode_pool);
3183 htab_delete (clear_alias_mode_table);
3186 end_alias_analysis ();
3187 free (bb_table);
3188 htab_delete (rtx_group_table);
3189 VEC_free (group_info_t, heap, rtx_group_vec);
3190 BITMAP_FREE (all_blocks);
3191 BITMAP_FREE (scratch);
3193 free_alloc_pool (rtx_store_info_pool);
3194 free_alloc_pool (read_info_pool);
3195 free_alloc_pool (insn_info_pool);
3196 free_alloc_pool (bb_info_pool);
3197 free_alloc_pool (rtx_group_info_pool);
3198 free_alloc_pool (deferred_change_pool);
3203 /* -------------------------------------------------------------------------
3205 ------------------------------------------------------------------------- */
3207 /* Callback for running pass_rtl_dse. */
3209 static unsigned int
3210 rest_of_handle_dse (void)
3212 bool did_global = false;
3214 df_set_flags (DF_DEFER_INSN_RESCAN);
3216 dse_step0 ();
3217 dse_step1 ();
3218 dse_step2_init ();
3219 if (dse_step2_nospill ())
3221 df_set_flags (DF_LR_RUN_DCE);
3222 df_analyze ();
3223 did_global = true;
3224 if (dump_file)
3225 fprintf (dump_file, "doing global processing\n");
3226 dse_step3 (false);
3227 dse_step4 ();
3228 dse_step5_nospill ();
3231 /* For the instance of dse that runs after reload, we make a special
3232 pass to process the spills. These are special in that they are
3233 totally transparent, i.e. there are no aliasing issues that need
3234 to be considered. This means that the wild reads that kill
3235 everything else do not apply here. */
3236 if (clear_alias_sets && dse_step2_spill ())
3238 if (!did_global)
3240 df_set_flags (DF_LR_RUN_DCE);
3241 df_analyze ();
3243 did_global = true;
3244 if (dump_file)
3245 fprintf (dump_file, "doing global spill processing\n");
3246 dse_step3 (true);
3247 dse_step4 ();
3248 dse_step5_spill ();
3251 dse_step6 (did_global);
3253 if (dump_file)
3254 fprintf (dump_file, "dse: local deletions = %d, global deletions = %d, spill deletions = %d\n",
3255 locally_deleted, globally_deleted, spill_deleted);
3256 return 0;
3259 static bool
3260 gate_dse (void)
3262 return gate_dse1 () || gate_dse2 ();
3265 static bool
3266 gate_dse1 (void)
3268 return optimize > 0 && flag_dse
3269 && dbg_cnt (dse1);
3272 static bool
3273 gate_dse2 (void)
3275 return optimize > 0 && flag_dse
3276 && dbg_cnt (dse2);
3279 struct rtl_opt_pass pass_rtl_dse1 =
3282 RTL_PASS,
3283 "dse1", /* name */
3284 gate_dse1, /* gate */
3285 rest_of_handle_dse, /* execute */
3286 NULL, /* sub */
3287 NULL, /* next */
3288 0, /* static_pass_number */
3289 TV_DSE1, /* tv_id */
3290 0, /* properties_required */
3291 0, /* properties_provided */
3292 0, /* properties_destroyed */
3293 0, /* todo_flags_start */
3294 TODO_dump_func |
3295 TODO_df_finish | TODO_verify_rtl_sharing |
3296 TODO_ggc_collect /* todo_flags_finish */
3300 struct rtl_opt_pass pass_rtl_dse2 =
3303 RTL_PASS,
3304 "dse2", /* name */
3305 gate_dse2, /* gate */
3306 rest_of_handle_dse, /* execute */
3307 NULL, /* sub */
3308 NULL, /* next */
3309 0, /* static_pass_number */
3310 TV_DSE2, /* tv_id */
3311 0, /* properties_required */
3312 0, /* properties_provided */
3313 0, /* properties_destroyed */
3314 0, /* todo_flags_start */
3315 TODO_dump_func |
3316 TODO_df_finish | TODO_verify_rtl_sharing |
3317 TODO_ggc_collect /* todo_flags_finish */