/* RTL dead store elimination.
   Copyright (C) 2005, 2006, 2007, 2008, 2009 Free Software Foundation, Inc.

   Contributed by Richard Sandiford <rsandifor@codesourcery.com>
   and Kenneth Zadeck <zadeck@naturalbridge.com>

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#undef BASELINE

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "hashtab.h"
#include "tm.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "df.h"
#include "cselib.h"
#include "timevar.h"
#include "tree-pass.h"
#include "alloc-pool.h"
#include "alias.h"
#include "insn-config.h"
#include "expr.h"
#include "recog.h"
#include "dse.h"
#include "optabs.h"
#include "dbgcnt.h"
#include "target.h"
/* This file contains three techniques for performing Dead Store
   Elimination (dse).

   * The first technique performs dse locally on any base address.  It
   is based on cselib, which is a local value numbering technique.
   This technique is local to a basic block but deals with fairly
   general addresses.

   * The second technique performs dse globally but is restricted to
   base addresses that are either constant or are relative to the
   frame_pointer.

   * The third technique (which is only done after register allocation)
   processes the spill slots.  This differs from the second
   technique because it takes advantage of the fact that spilling is
   completely free from the effects of aliasing.

   Logically, dse is a backwards dataflow problem.  A store can be
   deleted if it cannot be reached in the backward direction by any
   use of the value being stored.  However, the local technique uses a
   forwards scan of the basic block because cselib requires that the
   block be processed in that order.
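
   As a small illustration (ours, not from the sources), in

     x[0] = 1;
     x[0] = 2;
     r = x[0];

   no use of the value 1 can be reached backwards from the first
   store, so that store is dead and can be deleted.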

   The pass is logically broken into 7 steps:

   0) Initialization.

   1) The local algorithm, as well as scanning the insns for the two
   global algorithms.

   2) Analysis to see if the global algs are necessary.  In the case
   of stores based on a constant address, there must be at least two
   stores to that address, to make it possible to delete some of the
   stores.  In the case of stores off of the frame or spill related
   stores, only one store to an address is necessary because those
   stores die at the end of the function.

   3) Set up the global dataflow equations based on processing the
   info parsed in the first step.

   4) Solve the dataflow equations.

   5) Delete the insns that the global analysis has indicated are
   unnecessary.

   6) Delete insns that store the same value as a preceding store
   where the earlier store couldn't be eliminated.

   7) Cleanup.

   The first step uses cselib and canon_rtx to build the largest
   expression possible for each address.  It is a forwards pass through
   each basic block.  From the point of view of the global technique,
   this step could examine a block in either direction.  The
   forwards ordering is to accommodate cselib.

   We make a simplifying assumption: addresses fall into four broad
   categories:

   1) base has rtx_varies_p == false, offset is constant.
   2) base has rtx_varies_p == false, offset variable.
   3) base has rtx_varies_p == true, offset constant.
   4) base has rtx_varies_p == true, offset variable.

   The local passes are able to process all 4 kinds of addresses.  The
   global pass only handles (1).
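
   For instance (illustrative RTL), an address in category 1) looks
   like

     (plus (symbol_ref "a") (const_int 4))

   while (plus (reg 100) (const_int 4)) falls into category 3),
   because the value of the pseudo register can vary.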

   The global problem is formulated as follows:

   A store, S1, to address A, where A is not relative to the stack
   frame, can be eliminated if all paths from S1 to the end of the
   function contain another store to A before a read of A.

   If the address A is relative to the stack frame, a store S2 to A
   can be eliminated if there are no paths from S2 that reach the
   end of the function that read A before another store to A.  In
   addition, S2 can be deleted if there are paths from S2 to the
   end of the function that have no reads or writes to A.  This
   second case allows stores to the stack frame to be deleted that
   would otherwise die when the function returns.  This cannot be
   done if stores_off_frame_dead_at_return is not true.  See the
   comment on that variable for when it is false.

   The global problem is formulated as a backwards set union
   dataflow problem where the stores are the gens and reads are the
   kills.  Set union problems are rare and require some special
   handling given our representation of bitmaps.  A straightforward
   implementation requires a lot of bitmaps filled with 1s.
   These are expensive and cumbersome in our bitmap formulation so
   care has been taken to avoid large vectors filled with 1s.  See
   the comments in bb_info and in the dataflow confluence functions
   for details.

   There are two places for further enhancements to this algorithm:

   1) The original dse which was embedded in a pass called flow also
   did local address forwarding.  For example in

   A <- r100
   ... <- A

   flow would replace the right hand side of the second insn with a
   reference to r100.  Most of the information is available to add this
   to this pass.  It has not been done because it is a lot of work in
   the case that either r100 is assigned to between the first and
   second insn and/or the second insn is a load of part of the value
   stored by the first insn.

   insn 5 in gcc.c-torture/compile/990203-1.c simple case.
   insn 15 in gcc.c-torture/execute/20001017-2.c simple case.
   insn 25 in gcc.c-torture/execute/20001026-1.c simple case.
   insn 44 in gcc.c-torture/execute/20010910-1.c simple case.

   2) The cleaning up of spill code is quite profitable.  It currently
   depends on reading tea leaves and chicken entrails left by reload.
   This pass depends on reload creating a singleton alias set for each
   spill slot and telling the next dse pass which of these alias sets
   are the singletons.  Rather than analyze the addresses of the
   spills, dse's spill processing just does analysis of the loads and
   stores that use those alias sets.  There are three cases where this
   falls short:

   a) Reload sometimes creates the slot for one mode of access, and
   then inserts loads and/or stores for a smaller mode.  In this
   case, the current code just punts on the slot.  The proper thing
   to do is to back out and use one bit vector position for each
   byte of the entity associated with the slot.  This depends on
   KNOWING that reload always generates the accesses for each of the
   bytes in some canonical (read that easy to understand several
   passes after reload happens) way.

   b) Reload sometimes decides that the spill slot it allocated was not
   large enough for the mode and goes back and allocates more slots
   with the same mode and alias set.  The backout in this case is a
   little more graceful than (a).  In this case the slot is unmarked
   as being a spill slot and if the final address comes out to be based
   off the frame pointer, the global algorithm handles this slot.

   c) For any pass that may prespill, there is currently no
   mechanism to tell the dse pass that the slot being used has the
   special properties that reload uses.  It may be that all that is
   required is to have those passes make the same calls that reload
   does, assuming that the alias sets can be manipulated in the same
   way.  */

/* There are limits to the size of constant offsets we model for the
   global problem.  There are certainly test cases that exceed this
   limit; however, it is unlikely that there are important programs
   that really have constant offsets this size.  */
#define MAX_OFFSET (64 * 1024)


static bitmap scratch = NULL;
struct insn_info;

/* This structure holds information about a candidate store.  */
struct store_info
{

  /* False means this is a clobber.  */
  bool is_set;

  /* False if a single HOST_WIDE_INT bitmap is used for positions_needed.  */
  bool is_large;

  /* The id of the mem group of the base address.  If rtx_varies_p is
     true, this is -1.  Otherwise, it is the index into the group
     table.  */
  int group_id;

  /* This is the cselib value.  */
  cselib_val *cse_base;

  /* The canonized mem.  */
  rtx mem;

  /* The result of get_addr on mem.  */
  rtx mem_addr;

  /* If this is non-zero, it is the alias set of a spill location.  */
  alias_set_type alias_set;

  /* The offset of the first byte and the byte after the last byte
     associated with the operation.  */
  HOST_WIDE_INT begin, end;

  union
    {
      /* A bitmask as wide as the number of bytes in the word that
         contains a 1 if the byte may be needed.  The store is unused if
         all of the bits are 0.  This is used if IS_LARGE is false.  */
      unsigned HOST_WIDE_INT small_bitmask;

      struct
        {
          /* A bitmap with one bit per byte.  Cleared bit means the position
             is needed.  Used if IS_LARGE is true.  */
          bitmap bitmap;

          /* Number of set bits (i.e. unneeded bytes) in BITMAP.  If it is
             equal to END - BEGIN, the whole store is unused.  */
          int count;
        } large;
    } positions_needed;
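
  /* As an illustration (ours): a 4-byte store recorded with BEGIN == 0
     and END == 4 starts with small_bitmask == lowpart_bitmask (4) == 0xf.
     If a later store covers bytes 2 and 3, those bits are cleared,
     leaving 0x3; the store becomes dead once the mask reaches 0.  */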

  /* The next store info for this insn.  */
  struct store_info *next;

  /* The right hand side of the store.  This is used if there is a
     subsequent reload of the mem's address somewhere later in the
     basic block.  */
  rtx rhs;

  /* If rhs is or holds a constant, this contains that constant,
     otherwise NULL.  */
  rtx const_rhs;

  /* Set if this store stores the same constant value as the insn
     REDUNDANT_REASON stored.  These aren't eliminated early, because
     doing that might prevent the earlier larger store from being
     eliminated.  */
  struct insn_info *redundant_reason;
};

/* Return a bitmask with the first N low bits set.  */

static unsigned HOST_WIDE_INT
lowpart_bitmask (int n)
{
  unsigned HOST_WIDE_INT mask = ~(unsigned HOST_WIDE_INT) 0;
  return mask >> (HOST_BITS_PER_WIDE_INT - n);
}
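
/* For instance, lowpart_bitmask (4) yields 0xf.  Callers are expected
   to pass 1 <= N <= HOST_BITS_PER_WIDE_INT; N == 0 would shift by the
   full word width, which is undefined behavior in C.  */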

typedef struct store_info *store_info_t;
static alloc_pool cse_store_info_pool;
static alloc_pool rtx_store_info_pool;

/* This structure holds information about a load.  These are only
   built for rtx bases.  */
struct read_info
{
  /* The id of the mem group of the base address.  */
  int group_id;

  /* If this is non-zero, it is the alias set of a spill location.  */
  alias_set_type alias_set;

  /* The offset of the first byte and the byte after the last byte
     associated with the operation.  If begin == end == 0, the read did
     not have a constant offset.  */
  int begin, end;

  /* The mem being read.  */
  rtx mem;

  /* The next read_info for this insn.  */
  struct read_info *next;
};

typedef struct read_info *read_info_t;
static alloc_pool read_info_pool;

/* One of these records is created for each insn.  */

struct insn_info
{
  /* Set true if the insn contains a store but the insn itself cannot
     be deleted.  This is set if the insn is a parallel and there is
     more than one non-dead output or if the insn is in some way
     volatile.  */
  bool cannot_delete;

  /* This field is only used by the global algorithm.  It is set true
     if the insn contains any read of mem except for a (1).  This is
     also set if the insn is a call or has a clobber mem.  If the insn
     contains a wild read, the read_rec will be null.  */
  bool wild_read;

  /* This field is only used for the processing of const functions.
     These functions cannot read memory, but they can read the stack
     because that is where they may get their parms.  We need to be
     this conservative because, like the store motion pass, we don't
     consider CALL_INSN_FUNCTION_USAGE when processing call insns.
     Moreover, we need to distinguish two cases:
     1. Before reload (register elimination), the stores related to
        outgoing arguments are stack pointer based and thus deemed
        of non-constant base in this pass.  This requires special
        handling but also means that the frame pointer based stores
        need not be killed upon encountering a const function call.
     2. After reload, the stores related to outgoing arguments can be
        either stack pointer or hard frame pointer based.  This means
        that we have no other choice than also killing all the frame
        pointer based stores upon encountering a const function call.
     This field is set after reload for const function calls.  Having
     this set is less severe than a wild read; it just means that all
     the frame related stores are killed rather than all the stores.  */
  bool frame_read;

  /* This field is only used for the processing of const functions.
     It is set if the insn may contain a stack pointer based store.  */
  bool stack_pointer_based;

  /* This is true if any of the sets within the store contains a
     cselib base.  Such stores can only be deleted by the local
     algorithm.  */
  bool contains_cselib_groups;

  /* The insn.  */
  rtx insn;

  /* The list of mem sets or mem clobbers that are contained in this
     insn.  If the insn is deletable, it contains only one mem set.
     But it could also contain clobbers.  Insns that contain more than
     one mem set are not deletable, but each of those mems is here in
     order to provide info to delete other insns.  */
  store_info_t store_rec;

  /* The linked list of mem uses in this insn.  Only the reads from
     rtx bases are listed here.  The reads to cselib bases are
     completely processed during the first scan and so are never
     created.  */
  read_info_t read_rec;

  /* The prev insn in the basic block.  */
  struct insn_info * prev_insn;

  /* The linked list of insns that are in consideration for removal in
     the forwards pass through the basic block.  This pointer may be
     trash as it is not cleared when a wild read occurs.  The only
     time it is guaranteed to be correct is when the traversal starts
     at active_local_stores.  */
  struct insn_info * next_local_store;
};

typedef struct insn_info *insn_info_t;
static alloc_pool insn_info_pool;

/* The linked list of stores that are under consideration in this
   basic block.  */
static insn_info_t active_local_stores;

struct bb_info
{

  /* Pointer to the insn info for the last insn in the block.  These
     are linked so this is how all of the insns are reached.  During
     scanning this is the current insn being scanned.  */
  insn_info_t last_insn;

  /* The info for the global dataflow problem.  */


  /* This is set if the transfer function should AND in the wild_read
     bitmap before applying the kill and gen sets.  That vector knocks
     out most of the bits in the bitmap and thus speeds up the
     operations.  */
  bool apply_wild_read;

  /* The following 4 bitvectors hold information about which positions
     of which stores are live or dead.  They are indexed by
     get_bitmap_index.  */

  /* The set of store positions that exist in this block before a wild read.  */
  bitmap gen;

  /* The set of load positions that exist in this block above the
     same position of a store.  */
  bitmap kill;

  /* The set of stores that reach the top of the block without being
     killed by a read.

     Do not represent the in if it is all ones.  Note that this is
     what the bitvector should logically be initialized to for a set
     intersection problem.  However, like the kill set, this is too
     expensive.  So initially, the in set will only be created for the
     exit block and any block that contains a wild read.  */
  bitmap in;

  /* The set of stores that reach the bottom of the block from its
     successors.

     Do not represent the out if it is all ones.  Note that this is
     what the bitvector should logically be initialized to for a set
     intersection problem.  However, like the kill and in sets, this is
     too expensive.  So what is done is that the confluence operator
     just initializes the vector from one of the out sets of the
     successors of the block.  */
  bitmap out;

  /* The following bitvector is indexed by the reg number.  It
     contains the set of regs that are live at the current instruction
     being processed.  While it contains info for all of the
     registers, only the pseudos are actually examined.  It is used to
     assure that shift sequences that are inserted do not accidentally
     clobber live hard regs.  */
  bitmap regs_live;
};

typedef struct bb_info *bb_info_t;
static alloc_pool bb_info_pool;

/* Table to hold all bb_infos.  */
static bb_info_t *bb_table;

/* There is a group_info for each rtx base that is used to reference
   memory.  There are not many of the rtx bases because they are
   very limited in scope.  */

struct group_info
{
  /* The actual base of the address.  */
  rtx rtx_base;

  /* The sequential id of the base.  This allows us to have a
     canonical ordering of these that is not based on addresses.  */
  int id;

  /* True if there are any positions that are to be processed
     globally.  */
  bool process_globally;

  /* True if the base of this group is either the frame_pointer or
     hard_frame_pointer.  */
  bool frame_related;

  /* A mem wrapped around the base pointer for the group in order to
     do read dependency.  */
  rtx base_mem;

  /* Canonized version of base_mem, most likely the same thing.  */
  rtx canon_base_mem;

  /* These two sets of two bitmaps are used to keep track of how many
     stores are actually referencing that position from this base.  We
     only do this for rtx bases as this will be used to assign
     positions in the bitmaps for the global problem.  Bit N is set in
     store1 on the first store for offset N.  Bit N is set in store2
     for the second store to offset N.  This is all we need since we
     only care about offsets that have two or more stores for them.

     The "_n" suffix is for offsets less than 0 and the "_p" suffix is
     for 0 and greater offsets.

     There is one special case here: for stores into the stack frame,
     we will OR store1 into store2 before deciding which stores to look
     at globally.  This is because stores to the stack frame that have
     no other reads before the end of the function can also be
     deleted.  */
  bitmap store1_n, store1_p, store2_n, store2_p;

  /* The positions in this bitmap have the same assignments as the in,
     out, gen and kill bitmaps.  This bitmap is all zeros except for
     the positions that are occupied by stores for this group.  */
  bitmap group_kill;

  /* The offset_map is used to map the offsets from this base into
     positions in the global bitmaps.  It is only created after all of
     the stores have been scanned and we know which ones we
     care about.  */
  int *offset_map_n, *offset_map_p;
  int offset_map_size_n, offset_map_size_p;
};

typedef struct group_info *group_info_t;
typedef const struct group_info *const_group_info_t;
static alloc_pool rtx_group_info_pool;

/* Tables of group_info structures, hashed by base value.  */
static htab_t rtx_group_table;

/* Index into the rtx_group_vec.  */
static int rtx_group_next_id;

DEF_VEC_P(group_info_t);
DEF_VEC_ALLOC_P(group_info_t,heap);

static VEC(group_info_t,heap) *rtx_group_vec;

/* This structure holds the set of changes that are being deferred
   when removing a read operation.  See replace_read.  */
struct deferred_change
{

  /* The mem that is being replaced.  */
  rtx *loc;

  /* The reg it is being replaced with.  */
  rtx reg;

  struct deferred_change *next;
};

typedef struct deferred_change *deferred_change_t;
static alloc_pool deferred_change_pool;

static deferred_change_t deferred_change_list = NULL;

/* These are used to hold the alias sets of spill variables.  Since
   these are never aliased and there may be a lot of them, it makes
   sense to treat them specially.  This bitvector is only allocated in
   calls from dse_record_singleton_alias_set which currently is only
   made during reload1.  So when dse is called before reload this
   mechanism does nothing.  */

static bitmap clear_alias_sets = NULL;

/* The set of clear_alias_sets that have been disqualified because
   there are loads or stores using a different mode than the alias set
   was registered with.  */
static bitmap disqualified_clear_alias_sets = NULL;

/* The group that holds all of the clear_alias_sets.  */
static group_info_t clear_alias_group;

/* The modes of the clear_alias_sets.  */
static htab_t clear_alias_mode_table;

/* Hash table element to look up the mode for an alias set.  */
struct clear_alias_mode_holder
{
  alias_set_type alias_set;
  enum machine_mode mode;
};

static alloc_pool clear_alias_mode_pool;

/* This is true except if cfun->stdarg -- i.e. we cannot do
   this for vararg functions because they play games with the frame.  */
static bool stores_off_frame_dead_at_return;

/* Counter for stats.  */
static int globally_deleted;
static int locally_deleted;
static int spill_deleted;

static bitmap all_blocks;

/* The number of bits used in the global bitmaps.  */
static unsigned int current_position;

static bool gate_dse (void);
static bool gate_dse1 (void);
static bool gate_dse2 (void);


/*----------------------------------------------------------------------------
   Zeroth step.

   Initialization.
----------------------------------------------------------------------------*/

/* Hashtable callbacks for the clear_alias_mode_table, which maps
   alias sets to the mode their spill slot was registered with.  */

static int
clear_alias_mode_eq (const void *p1, const void *p2)
{
  const struct clear_alias_mode_holder * h1
    = (const struct clear_alias_mode_holder *) p1;
  const struct clear_alias_mode_holder * h2
    = (const struct clear_alias_mode_holder *) p2;
  return h1->alias_set == h2->alias_set;
}


static hashval_t
clear_alias_mode_hash (const void *p)
{
  const struct clear_alias_mode_holder *holder
    = (const struct clear_alias_mode_holder *) p;
  return holder->alias_set;
}


/* Find the entry associated with ALIAS_SET.  */

static struct clear_alias_mode_holder *
clear_alias_set_lookup (alias_set_type alias_set)
{
  struct clear_alias_mode_holder tmp_holder;
  void **slot;

  tmp_holder.alias_set = alias_set;
  slot = htab_find_slot (clear_alias_mode_table, &tmp_holder, NO_INSERT);
  gcc_assert (*slot);

  return (struct clear_alias_mode_holder *) *slot;
}

/* Hashtable callbacks for maintaining the "bases" field of
   store_group_info, given that the addresses are function invariants.  */

static int
invariant_group_base_eq (const void *p1, const void *p2)
{
  const_group_info_t gi1 = (const_group_info_t) p1;
  const_group_info_t gi2 = (const_group_info_t) p2;
  return rtx_equal_p (gi1->rtx_base, gi2->rtx_base);
}


static hashval_t
invariant_group_base_hash (const void *p)
{
  const_group_info_t gi = (const_group_info_t) p;
  int do_not_record;
  return hash_rtx (gi->rtx_base, Pmode, &do_not_record, NULL, false);
}

/* Get the GROUP for BASE.  Add a new group if it is not there.  */

static group_info_t
get_group_info (rtx base)
{
  struct group_info tmp_gi;
  group_info_t gi;
  void **slot;

  if (base)
    {
      /* Find the store_base_info structure for BASE, creating a new one
         if necessary.  */
      tmp_gi.rtx_base = base;
      slot = htab_find_slot (rtx_group_table, &tmp_gi, INSERT);
      gi = (group_info_t) *slot;
    }
  else
    {
      if (!clear_alias_group)
        {
          clear_alias_group = gi =
            (group_info_t) pool_alloc (rtx_group_info_pool);
          memset (gi, 0, sizeof (struct group_info));
          gi->id = rtx_group_next_id++;
          gi->store1_n = BITMAP_ALLOC (NULL);
          gi->store1_p = BITMAP_ALLOC (NULL);
          gi->store2_n = BITMAP_ALLOC (NULL);
          gi->store2_p = BITMAP_ALLOC (NULL);
          gi->group_kill = BITMAP_ALLOC (NULL);
          gi->process_globally = false;
          gi->offset_map_size_n = 0;
          gi->offset_map_size_p = 0;
          gi->offset_map_n = NULL;
          gi->offset_map_p = NULL;
          VEC_safe_push (group_info_t, heap, rtx_group_vec, gi);
        }
      return clear_alias_group;
    }

  if (gi == NULL)
    {
      *slot = gi = (group_info_t) pool_alloc (rtx_group_info_pool);
      gi->rtx_base = base;
      gi->id = rtx_group_next_id++;
      gi->base_mem = gen_rtx_MEM (QImode, base);
      gi->canon_base_mem = canon_rtx (gi->base_mem);
      gi->store1_n = BITMAP_ALLOC (NULL);
      gi->store1_p = BITMAP_ALLOC (NULL);
      gi->store2_n = BITMAP_ALLOC (NULL);
      gi->store2_p = BITMAP_ALLOC (NULL);
      gi->group_kill = BITMAP_ALLOC (NULL);
      gi->process_globally = false;
      gi->frame_related =
        (base == frame_pointer_rtx) || (base == hard_frame_pointer_rtx);
      gi->offset_map_size_n = 0;
      gi->offset_map_size_p = 0;
      gi->offset_map_n = NULL;
      gi->offset_map_p = NULL;
      VEC_safe_push (group_info_t, heap, rtx_group_vec, gi);
    }

  return gi;
}

/* Initialization of data structures.  */

static void
dse_step0 (void)
{
  locally_deleted = 0;
  globally_deleted = 0;
  spill_deleted = 0;

  scratch = BITMAP_ALLOC (NULL);

  rtx_store_info_pool
    = create_alloc_pool ("rtx_store_info_pool",
                         sizeof (struct store_info), 100);
  read_info_pool
    = create_alloc_pool ("read_info_pool",
                         sizeof (struct read_info), 100);
  insn_info_pool
    = create_alloc_pool ("insn_info_pool",
                         sizeof (struct insn_info), 100);
  bb_info_pool
    = create_alloc_pool ("bb_info_pool",
                         sizeof (struct bb_info), 100);
  rtx_group_info_pool
    = create_alloc_pool ("rtx_group_info_pool",
                         sizeof (struct group_info), 100);
  deferred_change_pool
    = create_alloc_pool ("deferred_change_pool",
                         sizeof (struct deferred_change), 10);

  rtx_group_table = htab_create (11, invariant_group_base_hash,
                                 invariant_group_base_eq, NULL);

  bb_table = XCNEWVEC (bb_info_t, last_basic_block);
  rtx_group_next_id = 0;

  stores_off_frame_dead_at_return = !cfun->stdarg;

  init_alias_analysis ();

  if (clear_alias_sets)
    clear_alias_group = get_group_info (NULL);
  else
    clear_alias_group = NULL;
}


/*----------------------------------------------------------------------------
   First step.

   Scan all of the insns.  Any random ordering of the blocks is fine.
   Each block is scanned in forward order to accommodate cselib which
   is used to remove stores with non-constant bases.
----------------------------------------------------------------------------*/

/* Delete all of the store_info recs from INSN_INFO.  */

static void
free_store_info (insn_info_t insn_info)
{
  store_info_t store_info = insn_info->store_rec;
  while (store_info)
    {
      store_info_t next = store_info->next;
      if (store_info->is_large)
        BITMAP_FREE (store_info->positions_needed.large.bitmap);
      if (store_info->cse_base)
        pool_free (cse_store_info_pool, store_info);
      else
        pool_free (rtx_store_info_pool, store_info);
      store_info = next;
    }

  insn_info->cannot_delete = true;
  insn_info->contains_cselib_groups = false;
  insn_info->store_rec = NULL;
}

struct insn_size {
  int size;
  rtx insn;
};


/* Add an insn to do the add inside X if it is a
   PRE/POST-INC/DEC/MODIFY.  D is a structure containing the insn and
   the size of the mode of the MEM that this is inside of.  */

static int
replace_inc_dec (rtx *r, void *d)
{
  rtx x = *r;
  struct insn_size *data = (struct insn_size *)d;
  switch (GET_CODE (x))
    {
    case PRE_INC:
    case POST_INC:
      {
        rtx r1 = XEXP (x, 0);
        /* Note the argument order: gen_int_mode takes the constant
           first and the mode second.  */
        rtx c = gen_int_mode (data->size, Pmode);
        emit_insn_before (gen_rtx_SET (Pmode, r1,
                                       gen_rtx_PLUS (Pmode, r1, c)),
                          data->insn);
        return -1;
      }

    case PRE_DEC:
    case POST_DEC:
      {
        rtx r1 = XEXP (x, 0);
        rtx c = gen_int_mode (-data->size, Pmode);
        emit_insn_before (gen_rtx_SET (Pmode, r1,
                                       gen_rtx_PLUS (Pmode, r1, c)),
                          data->insn);
        return -1;
      }

    case PRE_MODIFY:
    case POST_MODIFY:
      {
        /* We can reuse the add because we are about to delete the
           insn that contained it.  Operand 1 of a {PRE,POST}_MODIFY
           is the add expression; operand 0 is the register.  */
        rtx add = XEXP (x, 1);
        rtx r1 = XEXP (add, 0);
        emit_insn_before (gen_rtx_SET (Pmode, r1, add), data->insn);
        return -1;
      }

    default:
      return 0;
    }
}

/* If X is a MEM, check the address to see if it is PRE/POST-INC/DEC/MODIFY
   and generate an add to replace that.  */

static int
replace_inc_dec_mem (rtx *r, void *d)
{
  rtx x = *r;
  if (x != NULL_RTX && MEM_P (x))
    {
      struct insn_size data;

      data.size = GET_MODE_SIZE (GET_MODE (x));
      data.insn = (rtx) d;

      for_each_rtx (&XEXP (x, 0), replace_inc_dec, &data);

      return -1;
    }
  return 0;
}

/* Before we delete INSN, make sure that the auto inc/dec, if it is
   there, is split into a separate insn.  */

static void
check_for_inc_dec (rtx insn)
{
  rtx note = find_reg_note (insn, REG_INC, NULL_RTX);
  if (note)
    for_each_rtx (&insn, replace_inc_dec_mem, insn);
}

/* Delete the insn and free all of the fields inside INSN_INFO.  */

static void
delete_dead_store_insn (insn_info_t insn_info)
{
  read_info_t read_info;

  if (!dbg_cnt (dse))
    return;

  check_for_inc_dec (insn_info->insn);
  if (dump_file)
    {
      fprintf (dump_file, "Locally deleting insn %d ",
               INSN_UID (insn_info->insn));
      if (insn_info->store_rec->alias_set)
        fprintf (dump_file, "alias set %d\n",
                 (int) insn_info->store_rec->alias_set);
      else
        fprintf (dump_file, "\n");
    }

  free_store_info (insn_info);
  read_info = insn_info->read_rec;

  while (read_info)
    {
      read_info_t next = read_info->next;
      pool_free (read_info_pool, read_info);
      read_info = next;
    }
  insn_info->read_rec = NULL;

  delete_insn (insn_info->insn);
  locally_deleted++;
  insn_info->insn = NULL;

  insn_info->wild_read = false;
}

/* Set the store* bitmaps and the offset_map_size* fields in GROUP
   based on OFFSET and WIDTH.  */

static void
set_usage_bits (group_info_t group, HOST_WIDE_INT offset, HOST_WIDE_INT width)
{
  HOST_WIDE_INT i;

  if (offset > -MAX_OFFSET && offset + width < MAX_OFFSET)
    for (i = offset; i < offset + width; i++)
      {
        bitmap store1;
        bitmap store2;
        int ai;
        if (i < 0)
          {
            store1 = group->store1_n;
            store2 = group->store2_n;
            ai = -i;
          }
        else
          {
            store1 = group->store1_p;
            store2 = group->store2_p;
            ai = i;
          }

        if (bitmap_bit_p (store1, ai))
          bitmap_set_bit (store2, ai);
        else
          {
            bitmap_set_bit (store1, ai);
            if (i < 0)
              {
                if (group->offset_map_size_n < ai)
                  group->offset_map_size_n = ai;
              }
            else
              {
                if (group->offset_map_size_p < ai)
                  group->offset_map_size_p = ai;
              }
          }
      }
}
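
/* As an illustration (ours): a 4-byte store at OFFSET == -2 sets bits
   1 and 2 of store1_n (bytes -1 and -2) and bits 0 and 1 of store1_p
   (bytes 0 and 1).  A second store touching any of those bytes moves
   the corresponding bit into store2_n/store2_p, which is what marks
   the position as worth considering for the global problem.  */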

/* Set the BB_INFO so that the last insn is marked as a wild read.  */

static void
add_wild_read (bb_info_t bb_info)
{
  insn_info_t insn_info = bb_info->last_insn;
  read_info_t *ptr = &insn_info->read_rec;

  while (*ptr)
    {
      read_info_t next = (*ptr)->next;
      if ((*ptr)->alias_set == 0)
        {
          pool_free (read_info_pool, *ptr);
          *ptr = next;
        }
      else
        ptr = &(*ptr)->next;
    }
  insn_info->wild_read = true;
  active_local_stores = NULL;
}

/* Return true if X is a constant or one of the registers that behave
   as a constant over the life of a function.  This is equivalent to
   !rtx_varies_p for memory addresses.  */

static bool
const_or_frame_p (rtx x)
{
  switch (GET_CODE (x))
    {
    case MEM:
      return MEM_READONLY_P (x);

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return true;

    case REG:
      /* Note that we have to test for the actual rtx used for the frame
         and arg pointers and not just the register number in case we have
         eliminated the frame and/or arg pointer and are using it
         for pseudos.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
          || x == pic_offset_table_rtx)
        return true;
      return false;

    default:
      return false;
    }
}
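
/* For instance (illustrative), (symbol_ref "a") and frame_pointer_rtx
   are accepted here, while an arbitrary pseudo such as (reg 100) is
   not: its value can change during the function, so addresses based
   on it can only be handled by the local (cselib) technique.  */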

/* Take all reasonable action to put the address of MEM into the form
   that we can do analysis on.

   The gold standard is to get the address into the form:
   address + OFFSET where address is something that rtx_varies_p
   considers a constant.  When we can get the address in this form, we
   can do global analysis on it.  Note that for constant bases, address
   is not actually returned, only the group_id.  The address can be
   obtained from that.

   If that fails, we try cselib to get a value we can at least use
   locally.  If that fails we return false.

   The GROUP_ID is set to -1 for cselib bases and the index of the
   group for non_varying bases.  */

static bool
canon_address (rtx mem,
               alias_set_type *alias_set_out,
               int *group_id,
               HOST_WIDE_INT *offset,
               cselib_val **base)
{
  rtx mem_address = XEXP (mem, 0);
  rtx expanded_address, address;
  /* Make sure that cselib has initialized all of the operands of
     the address before asking it to do the subst.  */

  if (clear_alias_sets)
    {
      /* If this is a spill, do not do any further processing.  */
      alias_set_type alias_set = MEM_ALIAS_SET (mem);
      if (dump_file)
        fprintf (dump_file, "found alias set %d\n", (int) alias_set);
      if (bitmap_bit_p (clear_alias_sets, alias_set))
        {
          struct clear_alias_mode_holder *entry
            = clear_alias_set_lookup (alias_set);

          /* If the modes do not match, we cannot process this set.  */
          if (entry->mode != GET_MODE (mem))
            {
              if (dump_file)
                fprintf (dump_file,
                         "disqualifying alias set %d, (%s) != (%s)\n",
                         (int) alias_set, GET_MODE_NAME (entry->mode),
                         GET_MODE_NAME (GET_MODE (mem)));

              bitmap_set_bit (disqualified_clear_alias_sets, alias_set);
              return false;
            }

          *alias_set_out = alias_set;
          *group_id = clear_alias_group->id;
          return true;
        }
    }

  *alias_set_out = 0;

  cselib_lookup (mem_address, Pmode, 1);

  if (dump_file)
    {
      fprintf (dump_file, "  mem: ");
      print_inline_rtx (dump_file, mem_address, 0);
      fprintf (dump_file, "\n");
    }

  /* Use cselib to replace all of the reg references with the full
     expression.  This will take care of the case where we have

     r_x = base + offset;
     val = *r_x;

     by making it into

     val = *(base + offset);  */

  expanded_address = cselib_expand_value_rtx (mem_address, scratch, 5);

  /* If this fails, just go with the mem_address.  */
  if (!expanded_address)
    expanded_address = mem_address;

  /* Split the address into canonical BASE + OFFSET terms.  */
  address = canon_rtx (expanded_address);

  *offset = 0;

  if (dump_file)
    {
      fprintf (dump_file, "\n   after cselib_expand address: ");
      print_inline_rtx (dump_file, expanded_address, 0);
      fprintf (dump_file, "\n");

      fprintf (dump_file, "\n   after canon_rtx address: ");
      print_inline_rtx (dump_file, address, 0);
      fprintf (dump_file, "\n");
    }

  if (GET_CODE (address) == CONST)
    address = XEXP (address, 0);

  if (GET_CODE (address) == PLUS && GET_CODE (XEXP (address, 1)) == CONST_INT)
    {
      *offset = INTVAL (XEXP (address, 1));
      address = XEXP (address, 0);
    }

  if (const_or_frame_p (address))
    {
      group_info_t group = get_group_info (address);

      if (dump_file)
        fprintf (dump_file, "  gid=%d offset=%d \n", group->id, (int)*offset);
      *base = NULL;
      *group_id = group->id;
    }
  else
    {
      *base = cselib_lookup (address, Pmode, true);
      *group_id = -1;

      if (*base == NULL)
        {
          if (dump_file)
            fprintf (dump_file, " no cselib val - should be a wild read.\n");
          return false;
        }
      if (dump_file)
        fprintf (dump_file, "  varying cselib base=%d offset = %d\n",
                 (*base)->value, (int)*offset);
    }
  return true;
}
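
/* As an illustration (ours): for a MEM whose address is
   (plus (reg/f 6) (const_int -4)), i.e. frame pointer - 4,
   canon_address sets *group_id to the frame pointer group and
   *offset to -4, enabling the global analysis.  For
   (plus (reg 100) (const_int 8)) it instead returns a cselib value
   in *base and sets *group_id to -1, so only the local pass can use
   it.  */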

/* Clear the rhs field from the active_local_stores array.  */

static void
clear_rhs_from_active_local_stores (void)
{
  insn_info_t ptr = active_local_stores;

  while (ptr)
    {
      store_info_t store_info = ptr->store_rec;
      /* Skip the clobbers.  */
      while (!store_info->is_set)
        store_info = store_info->next;

      store_info->rhs = NULL;
      store_info->const_rhs = NULL;

      ptr = ptr->next_local_store;
    }
}

/* Mark the byte at offset POS from the beginning of store S_INFO as
   unneeded.  */

static inline void
set_position_unneeded (store_info_t s_info, int pos)
{
  if (__builtin_expect (s_info->is_large, false))
    {
      if (!bitmap_bit_p (s_info->positions_needed.large.bitmap, pos))
        {
          s_info->positions_needed.large.count++;
          bitmap_set_bit (s_info->positions_needed.large.bitmap, pos);
        }
    }
  else
    s_info->positions_needed.small_bitmask
      &= ~(((unsigned HOST_WIDE_INT) 1) << pos);
}

/* Mark the whole store S_INFO as unneeded.  */

static inline void
set_all_positions_unneeded (store_info_t s_info)
{
  if (__builtin_expect (s_info->is_large, false))
    {
      int pos, end = s_info->end - s_info->begin;
      for (pos = 0; pos < end; pos++)
        bitmap_set_bit (s_info->positions_needed.large.bitmap, pos);
      s_info->positions_needed.large.count = end;
    }
  else
    s_info->positions_needed.small_bitmask = (unsigned HOST_WIDE_INT) 0;
}

/* Return TRUE if any bytes from S_INFO store are needed.  */

static inline bool
any_positions_needed_p (store_info_t s_info)
{
  if (__builtin_expect (s_info->is_large, false))
    return (s_info->positions_needed.large.count
            < s_info->end - s_info->begin);
  else
    return (s_info->positions_needed.small_bitmask
            != (unsigned HOST_WIDE_INT) 0);
}

/* Return TRUE if all bytes START through START+WIDTH-1 from S_INFO
   store are needed.  */

static inline bool
all_positions_needed_p (store_info_t s_info, int start, int width)
{
  if (__builtin_expect (s_info->is_large, false))
    {
      int end = start + width;
      while (start < end)
        if (bitmap_bit_p (s_info->positions_needed.large.bitmap, start++))
          return false;
      return true;
    }
  else
    {
      unsigned HOST_WIDE_INT mask = lowpart_bitmask (width) << start;
      return (s_info->positions_needed.small_bitmask & mask) == mask;
    }
}
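
/* As an illustration (ours): with small_bitmask == 0xf0 (bytes 4-7
   still needed), all_positions_needed_p (s_info, 4, 4) computes
   mask == 0xf0 and returns true, while asking about bytes 2-5 computes
   mask == 0x3c and returns false since bits 2 and 3 are clear.  */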

static rtx get_stored_val (store_info_t, enum machine_mode, HOST_WIDE_INT,
                           HOST_WIDE_INT, basic_block, bool);


/* BODY is an instruction pattern that belongs to INSN.  Return 1 if
   there is a candidate store, after adding it to the appropriate
   local store group if so.  */

static int
record_store (rtx body, bb_info_t bb_info)
{
  rtx mem, rhs, const_rhs;
  HOST_WIDE_INT offset = 0;
  HOST_WIDE_INT width = 0;
  alias_set_type spill_alias_set;
  insn_info_t insn_info = bb_info->last_insn;
  store_info_t store_info = NULL;
  int group_id;
  cselib_val *base = NULL;
  insn_info_t ptr, last, redundant_reason;
  bool store_is_unused;

  if (GET_CODE (body) != SET && GET_CODE (body) != CLOBBER)
    return 0;

  mem = SET_DEST (body);

  /* If this is not used, then this cannot be used to keep the insn
     from being deleted.  On the other hand, it does provide something
     that can be used to prove that another store is dead.  */
  store_is_unused
    = (find_reg_note (insn_info->insn, REG_UNUSED, mem) != NULL);

  /* Check whether that value is a suitable memory location.  */
  if (!MEM_P (mem))
    {
      /* If the set or clobber is unused, then it does not affect our
         ability to get rid of the entire insn.  */
      if (!store_is_unused)
        insn_info->cannot_delete = true;
      return 0;
    }

  /* At this point we know mem is a mem.  */
  if (GET_MODE (mem) == BLKmode)
    {
      if (GET_CODE (XEXP (mem, 0)) == SCRATCH)
        {
          if (dump_file)
            fprintf (dump_file, " adding wild read for (clobber (mem:BLK (scratch)))\n");
          add_wild_read (bb_info);
          insn_info->cannot_delete = true;
          return 0;
        }
      /* Handle (set (mem:BLK (addr) [... S36 ...]) (const_int 0))
         as memset (addr, 0, 36);  */
      else if (!MEM_SIZE (mem)
               || !CONST_INT_P (MEM_SIZE (mem))
               || GET_CODE (body) != SET
               || INTVAL (MEM_SIZE (mem)) <= 0
               || INTVAL (MEM_SIZE (mem)) > MAX_OFFSET
               || !CONST_INT_P (SET_SRC (body)))
        {
          if (!store_is_unused)
            {
              /* If the set or clobber is unused, then it does not affect our
                 ability to get rid of the entire insn.  */
              insn_info->cannot_delete = true;
              clear_rhs_from_active_local_stores ();
            }
          return 0;
        }
    }

  /* We can still process a volatile mem; we just cannot delete it.  */
  if (MEM_VOLATILE_P (mem))
    insn_info->cannot_delete = true;

  if (!canon_address (mem, &spill_alias_set, &group_id, &offset, &base))
    {
      clear_rhs_from_active_local_stores ();
      return 0;
    }

  if (GET_MODE (mem) == BLKmode)
    width = INTVAL (MEM_SIZE (mem));
  else
    {
      width = GET_MODE_SIZE (GET_MODE (mem));
      gcc_assert ((unsigned) width <= HOST_BITS_PER_WIDE_INT);
    }

  if (spill_alias_set)
    {
      bitmap store1 = clear_alias_group->store1_p;
      bitmap store2 = clear_alias_group->store2_p;

      gcc_assert (GET_MODE (mem) != BLKmode);

      if (bitmap_bit_p (store1, spill_alias_set))
        bitmap_set_bit (store2, spill_alias_set);
      else
        bitmap_set_bit (store1, spill_alias_set);

      if (clear_alias_group->offset_map_size_p < spill_alias_set)
        clear_alias_group->offset_map_size_p = spill_alias_set;

      store_info = (store_info_t) pool_alloc (rtx_store_info_pool);

      if (dump_file)
        fprintf (dump_file, " processing spill store %d(%s)\n",
                 (int) spill_alias_set, GET_MODE_NAME (GET_MODE (mem)));
    }
  else if (group_id >= 0)
    {
      /* In the restrictive case where the base is a constant or the
         frame pointer we can do global analysis.  */

      group_info_t group
        = VEC_index (group_info_t, rtx_group_vec, group_id);

      store_info = (store_info_t) pool_alloc (rtx_store_info_pool);
      set_usage_bits (group, offset, width);

      if (dump_file)
        fprintf (dump_file, " processing const base store gid=%d[%d..%d)\n",
                 group_id, (int)offset, (int)(offset+width));
    }
  else
    {
      rtx base_term = find_base_term (XEXP (mem, 0));
      if (!base_term
          || (GET_CODE (base_term) == ADDRESS
              && GET_MODE (base_term) == Pmode
              && XEXP (base_term, 0) == stack_pointer_rtx))
        insn_info->stack_pointer_based = true;
      insn_info->contains_cselib_groups = true;

      store_info = (store_info_t) pool_alloc (cse_store_info_pool);
      group_id = -1;

      if (dump_file)
        fprintf (dump_file, " processing cselib store [%d..%d)\n",
                 (int)offset, (int)(offset+width));
    }

  const_rhs = rhs = NULL_RTX;
  if (GET_CODE (body) == SET
      /* No place to keep the value after ra.  */
      && !reload_completed
      && (REG_P (SET_SRC (body))
          || GET_CODE (SET_SRC (body)) == SUBREG
          || CONSTANT_P (SET_SRC (body)))
      && !MEM_VOLATILE_P (mem)
      /* Sometimes the store and reload is used for truncation and
         rounding.  */
      && !(FLOAT_MODE_P (GET_MODE (mem)) && (flag_float_store)))
    {
      rhs = SET_SRC (body);
      if (CONSTANT_P (rhs))
        const_rhs = rhs;
      else if (body == PATTERN (insn_info->insn))
        {
          rtx tem = find_reg_note (insn_info->insn, REG_EQUAL, NULL_RTX);
          if (tem && CONSTANT_P (XEXP (tem, 0)))
            const_rhs = XEXP (tem, 0);
        }
      if (const_rhs == NULL_RTX && REG_P (rhs))
        {
          rtx tem = cselib_expand_value_rtx (rhs, scratch, 5);

          if (tem && CONSTANT_P (tem))
            const_rhs = tem;
        }
    }

  /* Check to see if this store causes some other stores to be
     dead.  */
  ptr = active_local_stores;
  last = NULL;
  redundant_reason = NULL;

  while (ptr)
    {
      insn_info_t next = ptr->next_local_store;
      store_info_t s_info = ptr->store_rec;
      bool del = true;

      /* Skip the clobbers.  We delete the active insn if this insn
         shadows the set.  To have been put on the active list, it
         has exactly one set.  */
      while (!s_info->is_set)
        s_info = s_info->next;

      if (s_info->alias_set != spill_alias_set)
        del = false;
      else if (s_info->alias_set)
        {
          struct clear_alias_mode_holder *entry
            = clear_alias_set_lookup (s_info->alias_set);
          /* Generally, spills cannot be processed if any of the
             references to the slot have a different mode.  But if
             we are in the same block and mode is exactly the same
             between this store and one before in the same block,
             we can still delete it.  */
          if ((GET_MODE (mem) == GET_MODE (s_info->mem))
              && (GET_MODE (mem) == entry->mode))
            {
              del = true;
              set_all_positions_unneeded (s_info);
            }
          if (dump_file)
            fprintf (dump_file, "    trying spill store in insn=%d alias_set=%d\n",
                     INSN_UID (ptr->insn), (int) s_info->alias_set);
        }
      else if ((s_info->group_id == group_id)
               && (s_info->cse_base == base))
        {
          HOST_WIDE_INT i;
          if (dump_file)
            fprintf (dump_file, "    trying store in insn=%d gid=%d[%d..%d)\n",
                     INSN_UID (ptr->insn), s_info->group_id,
                     (int)s_info->begin, (int)s_info->end);

          /* Even if PTR won't be eliminated as unneeded, if both
             PTR and this insn store the same constant value, we might
             eliminate this insn instead.  */
          if (s_info->const_rhs
              && const_rhs
              && offset >= s_info->begin
              && offset + width <= s_info->end
              && all_positions_needed_p (s_info, offset - s_info->begin,
                                         width))
            {
              if (GET_MODE (mem) == BLKmode)
                {
                  if (GET_MODE (s_info->mem) == BLKmode
                      && s_info->const_rhs == const_rhs)
                    redundant_reason = ptr;
                }
              else if (s_info->const_rhs == const0_rtx
                       && const_rhs == const0_rtx)
                redundant_reason = ptr;
              else
                {
                  rtx val;
                  start_sequence ();
                  val = get_stored_val (s_info, GET_MODE (mem),
                                        offset, offset + width,
                                        BLOCK_FOR_INSN (insn_info->insn),
                                        true);
                  if (get_insns () != NULL)
                    val = NULL_RTX;
                  end_sequence ();
                  if (val && rtx_equal_p (val, const_rhs))
                    redundant_reason = ptr;
                }
            }

          for (i = MAX (offset, s_info->begin);
               i < offset + width && i < s_info->end;
               i++)
            set_position_unneeded (s_info, i - s_info->begin);
        }
      else if (s_info->rhs)
        /* Need to see if it is possible for this store to overwrite
           the value of store_info.  If it is, set the rhs to NULL to
           keep it from being used to remove a load.  */
        {
          if (canon_true_dependence (s_info->mem,
                                     GET_MODE (s_info->mem),
                                     s_info->mem_addr,
                                     mem, rtx_varies_p))
            {
              s_info->rhs = NULL;
              s_info->const_rhs = NULL;
            }
        }

      /* An insn can be deleted if every position of every one of
         its s_infos is zero.  */
      if (any_positions_needed_p (s_info)
          || ptr->cannot_delete)
        del = false;

      if (del)
        {
          insn_info_t insn_to_delete = ptr;

          if (last)
            last->next_local_store = ptr->next_local_store;
          else
            active_local_stores = ptr->next_local_store;

          delete_dead_store_insn (insn_to_delete);
        }
      else
        last = ptr;

      ptr = next;
    }

  /* Finish filling in the store_info.  */
  store_info->next = insn_info->store_rec;
  insn_info->store_rec = store_info;
  store_info->mem = canon_rtx (mem);
  store_info->alias_set = spill_alias_set;
  store_info->mem_addr = get_addr (XEXP (mem, 0));
  store_info->cse_base = base;
  if (width > HOST_BITS_PER_WIDE_INT)
    {
      store_info->is_large = true;
      store_info->positions_needed.large.count = 0;
      store_info->positions_needed.large.bitmap = BITMAP_ALLOC (NULL);
    }
  else
    {
      store_info->is_large = false;
      store_info->positions_needed.small_bitmask = lowpart_bitmask (width);
    }
  store_info->group_id = group_id;
  store_info->begin = offset;
  store_info->end = offset + width;
  store_info->is_set = GET_CODE (body) == SET;
  store_info->rhs = rhs;
  store_info->const_rhs = const_rhs;
  store_info->redundant_reason = redundant_reason;

  /* If this is a clobber, we return 0.  We will only be able to
     delete this insn if there is only one used store, but we
     can use the clobber to delete other stores earlier.  */
  return store_info->is_set ? 1 : 0;
}
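
/* As an illustration (ours): in

     mem[fp - 4] = r100;
     mem[fp - 4] = r101;

   processing the second store clears every positions_needed bit of
   the first store's store_info, so record_store unlinks the first
   insn from active_local_stores and deletes it.  */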

/* Dump a one-line description of INSN_INFO, prefixed by START.  */

static void
dump_insn_info (const char * start, insn_info_t insn_info)
{
  fprintf (dump_file, "%s insn=%d %s\n", start,
           INSN_UID (insn_info->insn),
           insn_info->store_rec ? "has store" : "naked");
}

/* If the modes are different and the value's source and target do not
   line up, we need to extract the value from the lower part of the rhs
   of the store, shift it, and then put it into a form that can be shoved
   into the read_insn.  This function generates a right SHIFT of a
   value that is at least ACCESS_SIZE bytes wide of READ_MODE.  The
   shift sequence is returned or NULL if we failed to find a
   shift.  */

static rtx
find_shift_sequence (int access_size,
                     store_info_t store_info,
                     enum machine_mode read_mode,
                     int shift, bool speed, bool require_cst)
{
  enum machine_mode store_mode = GET_MODE (store_info->mem);
  enum machine_mode new_mode;
  rtx read_reg = NULL;

  /* Some machines like the x86 have shift insns for each size of
     operand.  Other machines like the ppc or the ia-64 may only have
     shift insns that shift values within 32 or 64 bit registers.
     This loop tries to find the smallest shift insn that will right
     justify the value we want to read but is available in one insn on
     the machine.  */

  for (new_mode = smallest_mode_for_size (access_size * BITS_PER_UNIT,
                                          MODE_INT);
       GET_MODE_BITSIZE (new_mode) <= BITS_PER_WORD;
       new_mode = GET_MODE_WIDER_MODE (new_mode))
    {
      rtx target, new_reg, shift_seq, insn, new_lhs;
      int cost;

      /* If a constant was stored into memory, try to simplify it here,
         otherwise the cost of the shift might preclude this optimization
         e.g. at -Os, even when no actual shift will be needed.  */
      if (store_info->const_rhs)
        {
          unsigned int byte = subreg_lowpart_offset (new_mode, store_mode);
          rtx ret = simplify_subreg (new_mode, store_info->const_rhs,
                                     store_mode, byte);
          if (ret && CONSTANT_P (ret))
            {
              ret = simplify_const_binary_operation (LSHIFTRT, new_mode,
                                                     ret, GEN_INT (shift));
              if (ret && CONSTANT_P (ret))
                {
                  byte = subreg_lowpart_offset (read_mode, new_mode);
                  ret = simplify_subreg (read_mode, ret, new_mode, byte);
                  if (ret && CONSTANT_P (ret)
                      && rtx_cost (ret, SET, speed) <= COSTS_N_INSNS (1))
                    return ret;
                }
            }
        }

      if (require_cst)
        return NULL_RTX;

      /* Try a wider mode if truncating the store mode to NEW_MODE
         requires a real instruction.  */
      if (GET_MODE_BITSIZE (new_mode) < GET_MODE_BITSIZE (store_mode)
          && !TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (new_mode),
                                     GET_MODE_BITSIZE (store_mode)))
        continue;

      /* Also try a wider mode if the necessary punning is either not
         desirable or not possible.  */
      if (!CONSTANT_P (store_info->rhs)
          && !MODES_TIEABLE_P (new_mode, store_mode))
        continue;

      new_reg = gen_reg_rtx (new_mode);

      start_sequence ();

      /* In theory we could also check for an ashr.  Ian Taylor knows
         of one dsp where the cost of these two was not the same.  But
         this really is a rare case anyway.  */
      target = expand_binop (new_mode, lshr_optab, new_reg,
                             GEN_INT (shift), new_reg, 1, OPTAB_DIRECT);

      shift_seq = get_insns ();
      end_sequence ();

      if (target != new_reg || shift_seq == NULL)
        continue;

      cost = 0;
      for (insn = shift_seq; insn != NULL_RTX; insn = NEXT_INSN (insn))
        if (INSN_P (insn))
          cost += insn_rtx_cost (PATTERN (insn), speed);

      /* The computation up to here is essentially independent
         of the arguments and could be precomputed.  It may
         not be worth doing so.  We could precompute if
         worthwhile or at least cache the results.  The result
         technically depends on both SHIFT and ACCESS_SIZE,
         but in practice the answer will depend only on ACCESS_SIZE.  */

      if (cost > COSTS_N_INSNS (1))
        continue;

      new_lhs = extract_low_bits (new_mode, store_mode,
                                  copy_rtx (store_info->rhs));
      if (new_lhs == NULL_RTX)
        continue;

      /* We found an acceptable shift.  Generate a move to
         take the value from the store and put it into the
         shift pseudo, then shift it, then generate another
         move to put it into the target of the read.  */
      emit_move_insn (new_reg, new_lhs);
      emit_insn (shift_seq);
      read_reg = extract_low_bits (read_mode, new_mode, new_reg);
      break;
    }

  return read_reg;
}
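
/* As an illustration (ours, little endian): if an SImode store wrote
   bytes 0..3 and a HImode load reads bytes 2..3, SHIFT is 16 and
   ACCESS_SIZE is 4, so the loop starts with SImode (the smallest
   integer mode covering the four accessed bytes), tries value >> 16
   there, and extracts the low HImode part; wider modes are tried only
   if the SImode shift is unavailable or too expensive.  */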

/* Call back for note_stores to find the hard regs set or clobbered by
   insn.  Data is a bitmap of the hardregs set so far.  */

static void
look_for_hardregs (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
{
  bitmap regs_set = (bitmap) data;

  if (REG_P (x)
      && REGNO (x) < FIRST_PSEUDO_REGISTER)
    {
      int regno = REGNO (x);
      int n = hard_regno_nregs[regno][GET_MODE (x)];
      while (--n >= 0)
        bitmap_set_bit (regs_set, regno + n);
    }
}

/* Helper function for replace_read and record_store.
   Attempt to return a value stored in STORE_INFO, from READ_BEGIN
   to one before READ_END bytes read in READ_MODE.  Return NULL
   if not successful.  If REQUIRE_CST is true, only return a
   constant.  */

static rtx
get_stored_val (store_info_t store_info, enum machine_mode read_mode,
                HOST_WIDE_INT read_begin, HOST_WIDE_INT read_end,
                basic_block bb, bool require_cst)
{
  enum machine_mode store_mode = GET_MODE (store_info->mem);
  int shift;
  int access_size; /* In bytes.  */
  rtx read_reg;

  /* To get here the read is within the boundaries of the write so
     shift will never be negative.  Start out with the shift being in
     bytes.  */
  if (store_mode == BLKmode)
    shift = 0;
  else if (BYTES_BIG_ENDIAN)
    shift = store_info->end - read_end;
  else
    shift = read_begin - store_info->begin;

  access_size = shift + GET_MODE_SIZE (read_mode);

  /* From now on it is bits.  */
  shift *= BITS_PER_UNIT;

  if (shift)
    read_reg = find_shift_sequence (access_size, store_info, read_mode, shift,
                                    optimize_bb_for_speed_p (bb),
                                    require_cst);
  else if (store_mode == BLKmode)
    {
      /* The store is a memset (addr, const_val, const_size).  */
      gcc_assert (CONST_INT_P (store_info->rhs));
      store_mode = int_mode_for_mode (read_mode);
      if (store_mode == BLKmode)
        read_reg = NULL_RTX;
      else if (store_info->rhs == const0_rtx)
        read_reg = extract_low_bits (read_mode, store_mode, const0_rtx);
      else if (GET_MODE_BITSIZE (store_mode) > HOST_BITS_PER_WIDE_INT
               || BITS_PER_UNIT >= HOST_BITS_PER_WIDE_INT)
        read_reg = NULL_RTX;
      else
        {
          unsigned HOST_WIDE_INT c
            = INTVAL (store_info->rhs)
              & (((HOST_WIDE_INT) 1 << BITS_PER_UNIT) - 1);
          int shift = BITS_PER_UNIT;
          while (shift < HOST_BITS_PER_WIDE_INT)
            {
              c |= (c << shift);
              shift <<= 1;
            }
          read_reg = GEN_INT (trunc_int_for_mode (c, store_mode));
          read_reg = extract_low_bits (read_mode, store_mode, read_reg);
        }
    }
  else if (store_info->const_rhs
           && (require_cst
               || GET_MODE_CLASS (read_mode) != GET_MODE_CLASS (store_mode)))
    read_reg = extract_low_bits (read_mode, store_mode,
                                 copy_rtx (store_info->const_rhs));
  else
    read_reg = extract_low_bits (read_mode, store_mode,
                                 copy_rtx (store_info->rhs));
  if (require_cst && read_reg && !CONSTANT_P (read_reg))
    read_reg = NULL_RTX;
  return read_reg;
}
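
/* As an illustration (ours): reading an SImode value back from a
   memset (addr, 0xab, 36) store takes the BLKmode branch above; the
   doubling loop replicates the byte, giving c == 0xabababab in the
   low 32 bits, and the low SImode part of that constant is
   returned.  */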

/* Take a sequence of:
     A <- r1
     ...
     ... <- A

   and change it into
   r2 <- r1
   A <- r1
   ...
   ... <- r2

   or

   r3 <- extract (r1)
   r3 <- r3 >> shift
   r2 <- extract (r3)
   ... <- r2

   or

   r2 <- extract (r1)
   ... <- r2

   depending on the alignment and the mode of the store and
   subsequent load.


   The STORE_INFO and STORE_INSN are for the store and READ_INFO
   and READ_INSN are for the read.  Return true if the replacement
   went ok.  */
1867 static bool
1868 replace_read (store_info_t store_info, insn_info_t store_insn,
1869 read_info_t read_info, insn_info_t read_insn, rtx *loc,
1870 bitmap regs_live)
1872 enum machine_mode store_mode = GET_MODE (store_info->mem);
1873 enum machine_mode read_mode = GET_MODE (read_info->mem);
1874 rtx insns, this_insn, read_reg;
1875 basic_block bb;
1877 if (!dbg_cnt (dse))
1878 return false;
1880 /* Create a sequence of instructions to set up the read register.
1881 This sequence goes immediately before the store and its result
1882 is read by the load.
1884 We need to keep this in perspective. We are replacing a read
1885 with a sequence of insns, but the read will almost certainly be
1886 in cache, so it is not going to be an expensive one. Thus, we
1887 are not willing to do a multi insn shift or worse a subroutine
1888 call to get rid of the read. */
1889 if (dump_file)
1890 fprintf (dump_file, "trying to replace %smode load in insn %d"
1891 " from %smode store in insn %d\n",
1892 GET_MODE_NAME (read_mode), INSN_UID (read_insn->insn),
1893 GET_MODE_NAME (store_mode), INSN_UID (store_insn->insn));
1894 start_sequence ();
1895 bb = BLOCK_FOR_INSN (read_insn->insn);
1896 read_reg = get_stored_val (store_info,
1897 read_mode, read_info->begin, read_info->end,
1898 bb, false);
1899 if (read_reg == NULL_RTX)
1901 end_sequence ();
1902 if (dump_file)
1903 fprintf (dump_file, " -- could not extract bits of stored value\n");
1904 return false;
1906 /* Force the value into a new register so that it won't be clobbered
1907 between the store and the load. */
1908 read_reg = copy_to_mode_reg (read_mode, read_reg);
1909 insns = get_insns ();
1910 end_sequence ();
1912 if (insns != NULL_RTX)
1914 /* Now we have to scan the set of new instructions to see if the
1915 sequence sets any of the hardregs that happened to be
1916 live at this point. For instance, this can happen if one of
1917 the insns sets the CC and the CC happened to be live at that
1918 point. This does occasionally happen, see PR 37922. */
1919 bitmap regs_set = BITMAP_ALLOC (NULL);
1921 for (this_insn = insns; this_insn != NULL_RTX; this_insn = NEXT_INSN (this_insn))
1922 note_stores (PATTERN (this_insn), look_for_hardregs, regs_set);
1924 bitmap_and_into (regs_set, regs_live);
1925 if (!bitmap_empty_p (regs_set))
1927 if (dump_file)
1929 fprintf (dump_file,
1930 "abandoning replacement because sequence clobbers live hardregs:");
1931 df_print_regset (dump_file, regs_set);
1934 BITMAP_FREE (regs_set);
1935 return false;
1937 BITMAP_FREE (regs_set);
1940 if (validate_change (read_insn->insn, loc, read_reg, 0))
1942 deferred_change_t deferred_change =
1943 (deferred_change_t) pool_alloc (deferred_change_pool);
1945 /* Insert this right before the store insn where it will be safe
1946 from later insns that might change it before the read. */
1947 emit_insn_before (insns, store_insn->insn);
1949 /* And now for the kludge part: cselib croaks if you just
1950 return at this point. There are two reasons for this:
1952 1) Cselib has an idea of how many pseudos there are and
1953 that does not include the new ones we just added.
1955 2) Cselib does not know about the move insn we added
1956 above the store_info, and there is no way to tell it
1957 about it, because it has "moved on".
1959 Problem (1) is fixable with a certain amount of engineering.
1960 Problem (2) requires starting the bb from scratch. This
1961 could be expensive.
1963 So we are just going to have to lie. The move/extraction
1964 insns are not really an issue, cselib did not see them. But
1965 the use of the new pseudo in read_insn is a real problem because
1966 cselib has not scanned this insn. The way that we solve this
1967 problem is that we are just going to put the mem back for now
1968 and when we are finished with the block, we undo this. We
1969 keep a table of mems to get rid of. At the end of the basic
1970 block we can put them back. */
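/* In outline, the deferred change recorded below is applied by
   dse_step1 once cselib is finished with the block:

     *deferred_change_list->loc = deferred_change_list->reg;

   so the MEM is only put back temporarily.  */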
1972 *loc = read_info->mem;
1973 deferred_change->next = deferred_change_list;
1974 deferred_change_list = deferred_change;
1975 deferred_change->loc = loc;
1976 deferred_change->reg = read_reg;
1978 /* Get rid of the read_info, from the point of view of the
1979 rest of dse, pretend this read never happened. */
1980 read_insn->read_rec = read_info->next;
1981 pool_free (read_info_pool, read_info);
1982 if (dump_file)
1984 fprintf (dump_file, " -- replaced the loaded MEM with ");
1985 print_simple_rtl (dump_file, read_reg);
1986 fprintf (dump_file, "\n");
1988 return true;
1990 else
1992 if (dump_file)
1994 fprintf (dump_file, " -- replacing the loaded MEM with ");
1995 print_simple_rtl (dump_file, read_reg);
1996 fprintf (dump_file, " led to an invalid instruction\n");
1998 return false;
2002 /* A for_each_rtx callback in which DATA is the bb_info. Check to see
2003 if LOC is a mem and, if it is, look at the address and kill any
2004 appropriate stores that may be active. */
2006 static int
2007 check_mem_read_rtx (rtx *loc, void *data)
2009 rtx mem = *loc;
2010 bb_info_t bb_info;
2011 insn_info_t insn_info;
2012 HOST_WIDE_INT offset = 0;
2013 HOST_WIDE_INT width = 0;
2014 alias_set_type spill_alias_set = 0;
2015 cselib_val *base = NULL;
2016 int group_id;
2017 read_info_t read_info;
2019 if (!mem || !MEM_P (mem))
2020 return 0;
2022 bb_info = (bb_info_t) data;
2023 insn_info = bb_info->last_insn;
2025 if ((MEM_ALIAS_SET (mem) == ALIAS_SET_MEMORY_BARRIER)
2026 || (MEM_VOLATILE_P (mem)))
2028 if (dump_file)
2029 fprintf (dump_file, " adding wild read, volatile or barrier.\n");
2030 add_wild_read (bb_info);
2031 insn_info->cannot_delete = true;
2032 return 0;
2035 /* If it is reading readonly mem, then there can be no conflict with
2036 another write. */
2037 if (MEM_READONLY_P (mem))
2038 return 0;
2040 if (!canon_address (mem, &spill_alias_set, &group_id, &offset, &base))
2042 if (dump_file)
2043 fprintf (dump_file, " adding wild read, canon_address failure.\n");
2044 add_wild_read (bb_info);
2045 return 0;
2048 if (GET_MODE (mem) == BLKmode)
2049 width = -1;
2050 else
2051 width = GET_MODE_SIZE (GET_MODE (mem));
2053 read_info = (read_info_t) pool_alloc (read_info_pool);
2054 read_info->group_id = group_id;
2055 read_info->mem = mem;
2056 read_info->alias_set = spill_alias_set;
2057 read_info->begin = offset;
2058 read_info->end = offset + width;
2059 read_info->next = insn_info->read_rec;
2060 insn_info->read_rec = read_info;
2062 /* We ignore the clobbers in store_info. This is mildly aggressive,
2063 but there really should not be a clobber followed by a read. */
2065 if (spill_alias_set)
2067 insn_info_t i_ptr = active_local_stores;
2068 insn_info_t last = NULL;
2070 if (dump_file)
2071 fprintf (dump_file, " processing spill load %d\n",
2072 (int) spill_alias_set);
2074 while (i_ptr)
2076 store_info_t store_info = i_ptr->store_rec;
2078 /* Skip the clobbers. */
2079 while (!store_info->is_set)
2080 store_info = store_info->next;
2082 if (store_info->alias_set == spill_alias_set)
2084 if (dump_file)
2085 dump_insn_info ("removing from active", i_ptr);
2087 if (last)
2088 last->next_local_store = i_ptr->next_local_store;
2089 else
2090 active_local_stores = i_ptr->next_local_store;
2092 else
2093 last = i_ptr;
2094 i_ptr = i_ptr->next_local_store;
2097 else if (group_id >= 0)
2099 /* This is the restricted case where the base is a constant or
2100 the frame pointer and offset is a constant. */
2101 insn_info_t i_ptr = active_local_stores;
2102 insn_info_t last = NULL;
2104 if (dump_file)
2106 if (width == -1)
2107 fprintf (dump_file, " processing const load gid=%d[BLK]\n",
2108 group_id);
2109 else
2110 fprintf (dump_file, " processing const load gid=%d[%d..%d)\n",
2111 group_id, (int)offset, (int)(offset+width));
2114 while (i_ptr)
2116 bool remove = false;
2117 store_info_t store_info = i_ptr->store_rec;
2119 /* Skip the clobbers. */
2120 while (!store_info->is_set)
2121 store_info = store_info->next;
2123 /* There are three cases here. */
2124 if (store_info->group_id < 0)
2125 /* We have a cselib store followed by a read from a
2126 const base. */
2127 remove
2128 = canon_true_dependence (store_info->mem,
2129 GET_MODE (store_info->mem),
2130 store_info->mem_addr,
2131 mem, rtx_varies_p);
2133 else if (group_id == store_info->group_id)
2135 /* This is a block mode load. We may get lucky and
2136 canon_true_dependence may save the day. */
2137 if (width == -1)
2138 remove
2139 = canon_true_dependence (store_info->mem,
2140 GET_MODE (store_info->mem),
2141 store_info->mem_addr,
2142 mem, rtx_varies_p);
2144 /* If this read is just reading back something that we just
2145 stored, rewrite the read. */
2146 else
2148 if (store_info->rhs
2149 && offset >= store_info->begin
2150 && offset + width <= store_info->end
2151 && all_positions_needed_p (store_info,
2152 offset - store_info->begin,
2153 width)
2154 && replace_read (store_info, i_ptr, read_info,
2155 insn_info, loc, bb_info->regs_live))
2156 return 0;
2158 /* The bases are the same, just see if the offsets
2159 overlap. */
2160 if ((offset < store_info->end)
2161 && (offset + width > store_info->begin))
2162 remove = true;
2166 /* else
2167 The else case that is missing here is that the
2168 bases are constant but different. There is nothing
2169 to do here because there is no overlap. */
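/* For example, a store whose group is based on the frame pointer
   and a read whose group is based on some other constant address
   (say the symbol of a static variable) can never touch the same
   bytes, so the store stays in active_local_stores.  */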
2171 if (remove)
2173 if (dump_file)
2174 dump_insn_info ("removing from active", i_ptr);
2176 if (last)
2177 last->next_local_store = i_ptr->next_local_store;
2178 else
2179 active_local_stores = i_ptr->next_local_store;
2181 else
2182 last = i_ptr;
2183 i_ptr = i_ptr->next_local_store;
2186 else
2188 insn_info_t i_ptr = active_local_stores;
2189 insn_info_t last = NULL;
2190 if (dump_file)
2192 fprintf (dump_file, " processing cselib load mem:");
2193 print_inline_rtx (dump_file, mem, 0);
2194 fprintf (dump_file, "\n");
2197 while (i_ptr)
2199 bool remove = false;
2200 store_info_t store_info = i_ptr->store_rec;
2202 if (dump_file)
2203 fprintf (dump_file, " processing cselib load against insn %d\n",
2204 INSN_UID (i_ptr->insn));
2206 /* Skip the clobbers. */
2207 while (!store_info->is_set)
2208 store_info = store_info->next;
2210 /* If this read is just reading back something that we just
2211 stored, rewrite the read. */
2212 if (store_info->rhs
2213 && store_info->group_id == -1
2214 && store_info->cse_base == base
2215 && offset >= store_info->begin
2216 && offset + width <= store_info->end
2217 && all_positions_needed_p (store_info,
2218 offset - store_info->begin, width)
2219 && replace_read (store_info, i_ptr, read_info, insn_info, loc,
2220 bb_info->regs_live))
2221 return 0;
2223 if (!store_info->alias_set)
2224 remove = canon_true_dependence (store_info->mem,
2225 GET_MODE (store_info->mem),
2226 store_info->mem_addr,
2227 mem, rtx_varies_p);
2229 if (remove)
2231 if (dump_file)
2232 dump_insn_info ("removing from active", i_ptr);
2234 if (last)
2235 last->next_local_store = i_ptr->next_local_store;
2236 else
2237 active_local_stores = i_ptr->next_local_store;
2239 else
2240 last = i_ptr;
2241 i_ptr = i_ptr->next_local_store;
2244 return 0;
2247 /* A note_uses callback in which DATA is the bb_info, as in
2248 check_mem_read_rtx. Walk *LOC with for_each_rtx so that
2249 check_mem_read_rtx sees every MEM read by the insn. */
2251 static void
2252 check_mem_read_use (rtx *loc, void *data)
2254 for_each_rtx (loc, check_mem_read_rtx, data);
2258 /* Get arguments passed to CALL_INSN. Return TRUE if successful.
2259 So far it only handles arguments passed in registers. */
2261 static bool
2262 get_call_args (rtx call_insn, tree fn, rtx *args, int nargs)
2264 CUMULATIVE_ARGS args_so_far;
2265 tree arg;
2266 int idx;
2268 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0, 3);
2270 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
2271 for (idx = 0;
2272 arg != void_list_node && idx < nargs;
2273 arg = TREE_CHAIN (arg), idx++)
2275 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
2276 rtx reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1), link, tmp;
2277 if (!reg || !REG_P (reg) || GET_MODE (reg) != mode
2278 || GET_MODE_CLASS (mode) != MODE_INT)
2279 return false;
2281 for (link = CALL_INSN_FUNCTION_USAGE (call_insn);
2282 link;
2283 link = XEXP (link, 1))
2284 if (GET_CODE (XEXP (link, 0)) == USE)
2286 args[idx] = XEXP (XEXP (link, 0), 0);
2287 if (REG_P (args[idx])
2288 && REGNO (args[idx]) == REGNO (reg)
2289 && (GET_MODE (args[idx]) == mode
2290 || (GET_MODE_CLASS (GET_MODE (args[idx])) == MODE_INT
2291 && (GET_MODE_SIZE (GET_MODE (args[idx]))
2292 <= UNITS_PER_WORD)
2293 && (GET_MODE_SIZE (GET_MODE (args[idx]))
2294 > GET_MODE_SIZE (mode)))))
2295 break;
2297 if (!link)
2298 return false;
2300 tmp = cselib_expand_value_rtx (args[idx], scratch, 5);
2301 if (GET_MODE (args[idx]) != mode)
2303 if (!tmp || !CONST_INT_P (tmp))
2304 return false;
2305 tmp = GEN_INT (trunc_int_for_mode (INTVAL (tmp), mode));
2307 if (tmp)
2308 args[idx] = tmp;
2310 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
2312 if (arg != void_list_node || idx != nargs)
2313 return false;
2314 return true;
2318 /* Apply record_store to all candidate stores in INSN. Mark INSN
2319 as not deletable if some part of it is not a candidate store and
2320 assigns to a non-register target. */
2322 static void
2323 scan_insn (bb_info_t bb_info, rtx insn)
2325 rtx body;
2326 insn_info_t insn_info = (insn_info_t) pool_alloc (insn_info_pool);
2327 int mems_found = 0;
2328 memset (insn_info, 0, sizeof (struct insn_info));
2330 if (dump_file)
2331 fprintf (dump_file, "\n**scanning insn=%d\n",
2332 INSN_UID (insn));
2334 insn_info->prev_insn = bb_info->last_insn;
2335 insn_info->insn = insn;
2336 bb_info->last_insn = insn_info;
2339 /* Cselib clears the table for this case, so we have to essentially
2340 do the same. */
2341 if (NONJUMP_INSN_P (insn)
2342 && GET_CODE (PATTERN (insn)) == ASM_OPERANDS
2343 && MEM_VOLATILE_P (PATTERN (insn)))
2345 add_wild_read (bb_info);
2346 insn_info->cannot_delete = true;
2347 return;
2350 /* Look at all of the uses in the insn. */
2351 note_uses (&PATTERN (insn), check_mem_read_use, bb_info);
2353 if (CALL_P (insn))
2355 bool const_call;
2356 tree memset_call = NULL_TREE;
2358 insn_info->cannot_delete = true;
2360 /* Const functions cannot do anything bad, i.e. read memory;
2361 however, they can read their parameters, which may have
2362 been pushed onto the stack.
2363 memset and bzero don't read memory either. */
2364 const_call = RTL_CONST_CALL_P (insn);
2365 if (!const_call)
2367 rtx call = PATTERN (insn);
2368 if (GET_CODE (call) == PARALLEL)
2369 call = XVECEXP (call, 0, 0);
2370 if (GET_CODE (call) == SET)
2371 call = SET_SRC (call);
2372 if (GET_CODE (call) == CALL
2373 && MEM_P (XEXP (call, 0))
2374 && GET_CODE (XEXP (XEXP (call, 0), 0)) == SYMBOL_REF)
2376 rtx symbol = XEXP (XEXP (call, 0), 0);
2377 if (SYMBOL_REF_DECL (symbol)
2378 && TREE_CODE (SYMBOL_REF_DECL (symbol)) == FUNCTION_DECL)
2380 if ((DECL_BUILT_IN_CLASS (SYMBOL_REF_DECL (symbol))
2381 == BUILT_IN_NORMAL
2382 && (DECL_FUNCTION_CODE (SYMBOL_REF_DECL (symbol))
2383 == BUILT_IN_MEMSET))
2384 || SYMBOL_REF_DECL (symbol) == block_clear_fn)
2385 memset_call = SYMBOL_REF_DECL (symbol);
2389 if (const_call || memset_call)
2391 insn_info_t i_ptr = active_local_stores;
2392 insn_info_t last = NULL;
2394 if (dump_file)
2395 fprintf (dump_file, "%s call %d\n",
2396 const_call ? "const" : "memset", INSN_UID (insn));
2398 /* See the head comment of the frame_read field. */
2399 if (reload_completed)
2400 insn_info->frame_read = true;
2402 /* Loop over the active stores and remove those which are
2403 killed by the const function call. */
2404 while (i_ptr)
2406 bool remove_store = false;
2408 /* The stack pointer based stores are always killed. */
2409 if (i_ptr->stack_pointer_based)
2410 remove_store = true;
2412 /* If the frame is read, the frame related stores are killed. */
2413 else if (insn_info->frame_read)
2415 store_info_t store_info = i_ptr->store_rec;
2417 /* Skip the clobbers. */
2418 while (!store_info->is_set)
2419 store_info = store_info->next;
2421 if (store_info->group_id >= 0
2422 && VEC_index (group_info_t, rtx_group_vec,
2423 store_info->group_id)->frame_related)
2424 remove_store = true;
2427 if (remove_store)
2429 if (dump_file)
2430 dump_insn_info ("removing from active", i_ptr);
2432 if (last)
2433 last->next_local_store = i_ptr->next_local_store;
2434 else
2435 active_local_stores = i_ptr->next_local_store;
2437 else
2438 last = i_ptr;
2440 i_ptr = i_ptr->next_local_store;
2443 if (memset_call)
2445 rtx args[3];
2446 if (get_call_args (insn, memset_call, args, 3)
2447 && CONST_INT_P (args[1])
2448 && CONST_INT_P (args[2])
2449 && INTVAL (args[2]) > 0)
2451 rtx mem = gen_rtx_MEM (BLKmode, args[0]);
2452 set_mem_size (mem, args[2]);
2453 body = gen_rtx_SET (VOIDmode, mem, args[1]);
2454 mems_found += record_store (body, bb_info);
2455 if (dump_file)
2456 fprintf (dump_file, "handling memset as BLKmode store\n");
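/* As an illustration (hypothetical source-level call):
   memset (p, 0xab, 16) is modeled here as

     (set (mem:BLK (p)) (const_int 0xab))

   with MEM_SIZE of 16, a form that record_store and
   get_stored_val already know how to handle.  */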
2457 if (mems_found == 1)
2459 insn_info->next_local_store = active_local_stores;
2460 active_local_stores = insn_info;
2466 else
2467 /* Every other call, including pure functions, may read memory. */
2468 add_wild_read (bb_info);
2470 return;
2473 /* Assuming that there are sets in these insns, we cannot delete
2474 them. */
2475 if ((GET_CODE (PATTERN (insn)) == CLOBBER)
2476 || volatile_refs_p (PATTERN (insn))
2477 || (flag_non_call_exceptions && may_trap_p (PATTERN (insn)))
2478 || (RTX_FRAME_RELATED_P (insn))
2479 || find_reg_note (insn, REG_FRAME_RELATED_EXPR, NULL_RTX))
2480 insn_info->cannot_delete = true;
2482 body = PATTERN (insn);
2483 if (GET_CODE (body) == PARALLEL)
2485 int i;
2486 for (i = 0; i < XVECLEN (body, 0); i++)
2487 mems_found += record_store (XVECEXP (body, 0, i), bb_info);
2489 else
2490 mems_found += record_store (body, bb_info);
2492 if (dump_file)
2493 fprintf (dump_file, "mems_found = %d, cannot_delete = %s\n",
2494 mems_found, insn_info->cannot_delete ? "true" : "false");
2496 /* If we found some sets of mems, add the insn into active_local_stores
2497 so that it can be locally deleted if found dead, or used for
2498 replace_read and redundant constant store elimination. Otherwise mark
2499 it as cannot_delete. This simplifies the processing later. */
2500 if (mems_found == 1)
2502 insn_info->next_local_store = active_local_stores;
2503 active_local_stores = insn_info;
2505 else
2506 insn_info->cannot_delete = true;
2510 /* Remove BASE from the set of active_local_stores. This is a
2511 callback from cselib that is used to get rid of the stores in
2512 active_local_stores. */
2514 static void
2515 remove_useless_values (cselib_val *base)
2517 insn_info_t insn_info = active_local_stores;
2518 insn_info_t last = NULL;
2520 while (insn_info)
2522 store_info_t store_info = insn_info->store_rec;
2523 bool del = false;
2525 /* If ANY of the store_infos match the cselib group that is
2526 being deleted, then the insn cannot be deleted. */
2527 while (store_info)
2529 if ((store_info->group_id == -1)
2530 && (store_info->cse_base == base))
2532 del = true;
2533 break;
2535 store_info = store_info->next;
2538 if (del)
2540 if (last)
2541 last->next_local_store = insn_info->next_local_store;
2542 else
2543 active_local_stores = insn_info->next_local_store;
2544 free_store_info (insn_info);
2546 else
2547 last = insn_info;
2549 insn_info = insn_info->next_local_store;
2554 /* Do all of step 1. */
2556 static void
2557 dse_step1 (void)
2559 basic_block bb;
2560 bitmap regs_live = BITMAP_ALLOC (NULL);
2562 cselib_init (false);
2563 all_blocks = BITMAP_ALLOC (NULL);
2564 bitmap_set_bit (all_blocks, ENTRY_BLOCK);
2565 bitmap_set_bit (all_blocks, EXIT_BLOCK);
2567 FOR_ALL_BB (bb)
2569 insn_info_t ptr;
2570 bb_info_t bb_info = (bb_info_t) pool_alloc (bb_info_pool);
2572 memset (bb_info, 0, sizeof (struct bb_info));
2573 bitmap_set_bit (all_blocks, bb->index);
2574 bb_info->regs_live = regs_live;
2576 bitmap_copy (regs_live, DF_LR_IN (bb));
2577 df_simulate_initialize_forwards (bb, regs_live);
2579 bb_table[bb->index] = bb_info;
2580 cselib_discard_hook = remove_useless_values;
2582 if (bb->index >= NUM_FIXED_BLOCKS)
2584 rtx insn;
2586 cse_store_info_pool
2587 = create_alloc_pool ("cse_store_info_pool",
2588 sizeof (struct store_info), 100);
2589 active_local_stores = NULL;
2590 cselib_clear_table ();
2592 /* Scan the insns. */
2593 FOR_BB_INSNS (bb, insn)
2595 if (INSN_P (insn))
2596 scan_insn (bb_info, insn);
2597 cselib_process_insn (insn);
2598 if (INSN_P (insn))
2599 df_simulate_one_insn_forwards (bb, insn, regs_live);
2602 /* This is something of a hack, because the global algorithm
2603 is supposed to take care of the case where stores go dead
2604 at the end of the function. However, the global
2605 algorithm must take a more conservative view of block
2606 mode reads than the local alg does. So to get the case
2607 where you have a store to the frame followed by a non
2608 overlapping block mode read, we look at the active local
2609 stores at the end of the function and delete all of the
2610 frame and spill based ones. */
2611 if (stores_off_frame_dead_at_return
2612 && (EDGE_COUNT (bb->succs) == 0
2613 || (single_succ_p (bb)
2614 && single_succ (bb) == EXIT_BLOCK_PTR
2615 && ! crtl->calls_eh_return)))
2617 insn_info_t i_ptr = active_local_stores;
2618 while (i_ptr)
2620 store_info_t store_info = i_ptr->store_rec;
2622 /* Skip the clobbers. */
2623 while (!store_info->is_set)
2624 store_info = store_info->next;
2625 if (store_info->alias_set && !i_ptr->cannot_delete)
2626 delete_dead_store_insn (i_ptr);
2627 else
2628 if (store_info->group_id >= 0)
2630 group_info_t group
2631 = VEC_index (group_info_t, rtx_group_vec, store_info->group_id);
2632 if (group->frame_related && !i_ptr->cannot_delete)
2633 delete_dead_store_insn (i_ptr);
2636 i_ptr = i_ptr->next_local_store;
2640 /* Get rid of the loads that were discovered in
2641 replace_read. Cselib is finished with this block. */
2642 while (deferred_change_list)
2644 deferred_change_t next = deferred_change_list->next;
2646 /* There is no reason to validate this change. That was
2647 done earlier. */
2648 *deferred_change_list->loc = deferred_change_list->reg;
2649 pool_free (deferred_change_pool, deferred_change_list);
2650 deferred_change_list = next;
2653 /* Get rid of all of the cselib based store_infos in this
2654 block and mark the containing insns as not being
2655 deletable. */
2656 ptr = bb_info->last_insn;
2657 while (ptr)
2659 if (ptr->contains_cselib_groups)
2661 store_info_t s_info = ptr->store_rec;
2662 while (s_info && !s_info->is_set)
2663 s_info = s_info->next;
2664 if (s_info
2665 && s_info->redundant_reason
2666 && s_info->redundant_reason->insn
2667 && !ptr->cannot_delete)
2669 if (dump_file)
2670 fprintf (dump_file, "Locally deleting insn %d "
2671 "because insn %d stores the "
2672 "same value and couldn't be "
2673 "eliminated\n",
2674 INSN_UID (ptr->insn),
2675 INSN_UID (s_info->redundant_reason->insn));
2676 delete_dead_store_insn (ptr);
2678 if (s_info)
2679 s_info->redundant_reason = NULL;
2680 free_store_info (ptr);
2682 else
2684 store_info_t s_info;
2686 /* Free at least positions_needed bitmaps. */
2687 for (s_info = ptr->store_rec; s_info; s_info = s_info->next)
2688 if (s_info->is_large)
2690 BITMAP_FREE (s_info->positions_needed.large.bitmap);
2691 s_info->is_large = false;
2694 ptr = ptr->prev_insn;
2697 free_alloc_pool (cse_store_info_pool);
2699 bb_info->regs_live = NULL;
2702 BITMAP_FREE (regs_live);
2703 cselib_finish ();
2704 htab_empty (rtx_group_table);
2708 /*----------------------------------------------------------------------------
2709 Second step.
2711 Assign each byte position in the stores that we are going to
2712 analyze globally to a position in the bitmaps. Returns true if
2713 there are any bit positions assigned.
2714 ----------------------------------------------------------------------------*/
2716 static void
2717 dse_step2_init (void)
2719 unsigned int i;
2720 group_info_t group;
2722 for (i = 0; VEC_iterate (group_info_t, rtx_group_vec, i, group); i++)
2724 /* For all non stack related bases, we only consider a store to
2725 be deletable if there are two or more stores for that
2726 position. This is because it takes one store to make the
2727 other store redundant. However, for the stores that are
2728 stack related, we consider them if there is only one store
2729 for the position. We do this because the stack related
2730 stores can be deleted if there is no read between them and
2731 the end of the function.
2733 To make this work in the current framework, we take the stack
2734 related bases and add all of the bits from store1 into store2.
2735 This has the effect of making them eligible even if there is
2736 only one store. */
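/* Concretely: a frame-related position stored only once has its
   bit set in store1_n/store1_p but not in store2_n/store2_p; the
   two bitmap_ior_into calls below copy the bit over, so
   dse_step2_nospill still assigns that position a global bit
   number.  */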
2738 if (stores_off_frame_dead_at_return && group->frame_related)
2740 bitmap_ior_into (group->store2_n, group->store1_n);
2741 bitmap_ior_into (group->store2_p, group->store1_p);
2742 if (dump_file)
2743 fprintf (dump_file, "group %d is frame related ", i);
2746 group->offset_map_size_n++;
2747 group->offset_map_n = XNEWVEC (int, group->offset_map_size_n);
2748 group->offset_map_size_p++;
2749 group->offset_map_p = XNEWVEC (int, group->offset_map_size_p);
2750 group->process_globally = false;
2751 if (dump_file)
2753 fprintf (dump_file, "group %d(%d+%d): ", i,
2754 (int)bitmap_count_bits (group->store2_n),
2755 (int)bitmap_count_bits (group->store2_p));
2756 bitmap_print (dump_file, group->store2_n, "n ", " ");
2757 bitmap_print (dump_file, group->store2_p, "p ", "\n");
2763 /* Init the offset tables for the normal case. */
2765 static bool
2766 dse_step2_nospill (void)
2768 unsigned int i;
2769 group_info_t group;
2770 /* Position 0 is never assigned; in the maps, 0 means
2771 "unused". */
2772 current_position = 1;
2774 for (i = 0; VEC_iterate (group_info_t, rtx_group_vec, i, group); i++)
2776 bitmap_iterator bi;
2777 unsigned int j;
2779 if (group == clear_alias_group)
2780 continue;
2782 memset (group->offset_map_n, 0, sizeof(int) * group->offset_map_size_n);
2783 memset (group->offset_map_p, 0, sizeof(int) * group->offset_map_size_p);
2784 bitmap_clear (group->group_kill);
2786 EXECUTE_IF_SET_IN_BITMAP (group->store2_n, 0, j, bi)
2788 bitmap_set_bit (group->group_kill, current_position);
2789 group->offset_map_n[j] = current_position++;
2790 group->process_globally = true;
2792 EXECUTE_IF_SET_IN_BITMAP (group->store2_p, 0, j, bi)
2794 bitmap_set_bit (group->group_kill, current_position);
2795 group->offset_map_p[j] = current_position++;
2796 group->process_globally = true;
2799 return current_position != 1;
2803 /* Init the offset tables for the spill case. */
2805 static bool
2806 dse_step2_spill (void)
2808 unsigned int j;
2809 group_info_t group = clear_alias_group;
2810 bitmap_iterator bi;
2812 /* Position 0 is never assigned; in the maps, 0 means
2813 "unused". */
2814 current_position = 1;
2816 if (dump_file)
2818 bitmap_print (dump_file, clear_alias_sets,
2819 "clear alias sets ", "\n");
2820 bitmap_print (dump_file, disqualified_clear_alias_sets,
2821 "disqualified clear alias sets ", "\n");
2824 memset (group->offset_map_n, 0, sizeof(int) * group->offset_map_size_n);
2825 memset (group->offset_map_p, 0, sizeof(int) * group->offset_map_size_p);
2826 bitmap_clear (group->group_kill);
2828 /* Remove the disqualified positions from the store2_p set. */
2829 bitmap_and_compl_into (group->store2_p, disqualified_clear_alias_sets);
2831 /* We do not need to process the store2_n set because
2832 alias_sets are always positive. */
2833 EXECUTE_IF_SET_IN_BITMAP (group->store2_p, 0, j, bi)
2835 bitmap_set_bit (group->group_kill, current_position);
2836 group->offset_map_p[j] = current_position++;
2837 group->process_globally = true;
2840 return current_position != 1;
2845 /*----------------------------------------------------------------------------
2846 Third step.
2848 Build the bit vectors for the transfer functions.
2849 ----------------------------------------------------------------------------*/
2852 /* Note that this is NOT a general purpose function. Any mem that has
2853 an alias set registered here is expected to be COMPLETELY unaliased:
2854 i.e. its addresses are not and need not be examined.
2856 It is known that all references to this address will have this
2857 alias set and there are NO other references to this address in the
2858 function.
2860 Currently the only place that is known to be clean enough to use
2861 this interface is the code that assigns the spill locations.
2863 All of the mems that have alias_sets registered are subjected to a
2864 very powerful form of dse where function calls, volatile reads and
2865 writes, and reads from random locations are not taken into account.
2867 It is also assumed that these locations go dead when the function
2868 returns. This assumption could be relaxed if there were found to
2869 be places that this assumption was not correct.
2871 The MODE is passed in and saved. The mode of each load or store to
2872 a mem with ALIAS_SET is checked against MODE. If the size of that
2873 load or store is different from MODE, processing is halted on this
2874 alias set. For the vast majority of alias sets, all of the loads
2875 and stores will use the same mode. But vectors are treated
2876 differently: the alias set is established for the entire vector,
2877 but reload will insert loads and stores for individual elements and
2878 we do not necessarily have the information to track those separate
2879 elements. So when we see a mode mismatch, we just bail. */
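/* Illustrative usage, a sketch of what the spill-slot code is
   expected to do (SLOT here is assumed to be the MEM for a newly
   assigned stack slot):

     alias_set_type set = new_alias_set ();
     set_mem_alias_set (slot, set);
     dse_record_singleton_alias_set (set, GET_MODE (slot));

   Every later reference to the slot must then carry the same
   alias set for the "completely unaliased" promise to hold.  */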
2882 void
2883 dse_record_singleton_alias_set (alias_set_type alias_set,
2884 enum machine_mode mode)
2886 struct clear_alias_mode_holder tmp_holder;
2887 struct clear_alias_mode_holder *entry;
2888 void **slot;
2890 /* If we are not going to run dse, we need to return now or there
2891 will be problems with allocating the bitmaps. */
2892 if ((!gate_dse()) || !alias_set)
2893 return;
2895 if (!clear_alias_sets)
2897 clear_alias_sets = BITMAP_ALLOC (NULL);
2898 disqualified_clear_alias_sets = BITMAP_ALLOC (NULL);
2899 clear_alias_mode_table = htab_create (11, clear_alias_mode_hash,
2900 clear_alias_mode_eq, NULL);
2901 clear_alias_mode_pool = create_alloc_pool ("clear_alias_mode_pool",
2902 sizeof (struct clear_alias_mode_holder), 100);
2905 bitmap_set_bit (clear_alias_sets, alias_set);
2907 tmp_holder.alias_set = alias_set;
2909 slot = htab_find_slot (clear_alias_mode_table, &tmp_holder, INSERT);
2910 gcc_assert (*slot == NULL);
2912 *slot = entry =
2913 (struct clear_alias_mode_holder *) pool_alloc (clear_alias_mode_pool);
2914 entry->alias_set = alias_set;
2915 entry->mode = mode;
2919 /* Remove ALIAS_SET from the sets of stack slots being considered. */
2921 void
2922 dse_invalidate_singleton_alias_set (alias_set_type alias_set)
2924 if ((!gate_dse()) || !alias_set)
2925 return;
2927 bitmap_clear_bit (clear_alias_sets, alias_set);
2931 /* Look up the bitmap index for OFFSET in GROUP_INFO. If it is not
2932 there, return 0. */
2934 static int
2935 get_bitmap_index (group_info_t group_info, HOST_WIDE_INT offset)
2937 if (offset < 0)
2939 HOST_WIDE_INT offset_p = -offset;
2940 if (offset_p >= group_info->offset_map_size_n)
2941 return 0;
2942 return group_info->offset_map_n[offset_p];
2944 else
2946 if (offset >= group_info->offset_map_size_p)
2947 return 0;
2948 return group_info->offset_map_p[offset];
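/* So, for example, OFFSET == -3 maps through offset_map_n[3] and
   OFFSET == 5 through offset_map_p[5]; both return 0 if the
   position was never assigned a bit by dse_step2.  */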
2953 /* Process the STORE_INFOs into the bitmaps GEN and KILL. KILL
2954 may be NULL. */
2956 static void
2957 scan_stores_nospill (store_info_t store_info, bitmap gen, bitmap kill)
2959 while (store_info)
2961 HOST_WIDE_INT i;
2962 group_info_t group_info
2963 = VEC_index (group_info_t, rtx_group_vec, store_info->group_id);
2964 if (group_info->process_globally)
2965 for (i = store_info->begin; i < store_info->end; i++)
2967 int index = get_bitmap_index (group_info, i);
2968 if (index != 0)
2970 bitmap_set_bit (gen, index);
2971 if (kill)
2972 bitmap_clear_bit (kill, index);
2975 store_info = store_info->next;
2980 /* Process the STORE_INFOs into the bitmaps GEN and KILL. KILL
2981 may be NULL. */
2983 static void
2984 scan_stores_spill (store_info_t store_info, bitmap gen, bitmap kill)
2986 while (store_info)
2988 if (store_info->alias_set)
2990 int index = get_bitmap_index (clear_alias_group,
2991 store_info->alias_set);
2992 if (index != 0)
2994 bitmap_set_bit (gen, index);
2995 if (kill)
2996 bitmap_clear_bit (kill, index);
2999 store_info = store_info->next;
3004 /* Process the READ_INFOs into the bitmaps GEN and KILL. KILL
3005 may be NULL. */
3007 static void
3008 scan_reads_nospill (insn_info_t insn_info, bitmap gen, bitmap kill)
3010 read_info_t read_info = insn_info->read_rec;
3011 int i;
3012 group_info_t group;
3014 /* If this insn reads the frame, kill all the frame related stores. */
3015 if (insn_info->frame_read)
3017 for (i = 0; VEC_iterate (group_info_t, rtx_group_vec, i, group); i++)
3018 if (group->process_globally && group->frame_related)
3020 if (kill)
3021 bitmap_ior_into (kill, group->group_kill);
3022 bitmap_and_compl_into (gen, group->group_kill);
3026 while (read_info)
3028 for (i = 0; VEC_iterate (group_info_t, rtx_group_vec, i, group); i++)
3030 if (group->process_globally)
3032 if (i == read_info->group_id)
3034 if (read_info->begin > read_info->end)
3036 /* Begin > end for block mode reads. */
3037 if (kill)
3038 bitmap_ior_into (kill, group->group_kill);
3039 bitmap_and_compl_into (gen, group->group_kill);
3041 else
3043 /* The groups are the same, just process the
3044 offsets. */
3045 HOST_WIDE_INT j;
3046 for (j = read_info->begin; j < read_info->end; j++)
3048 int index = get_bitmap_index (group, j);
3049 if (index != 0)
3051 if (kill)
3052 bitmap_set_bit (kill, index);
3053 bitmap_clear_bit (gen, index);
3058 else
3060 /* The groups are different; if the alias sets
3061 conflict, clear the entire group. We only need
3062 to apply this test if the read_info is a cselib
3063 read. Anything with a constant base cannot alias
3064 something else with a different constant
3065 base. */
3066 if ((read_info->group_id < 0)
3067 && canon_true_dependence (group->base_mem,
3068 QImode,
3069 group->canon_base_mem,
3070 read_info->mem, rtx_varies_p))
3072 if (kill)
3073 bitmap_ior_into (kill, group->group_kill);
3074 bitmap_and_compl_into (gen, group->group_kill);
3080 read_info = read_info->next;
3084 /* Process the READ_INFOs into the bitmaps GEN and KILL. KILL
3085 may be NULL. */
3087 static void
3088 scan_reads_spill (read_info_t read_info, bitmap gen, bitmap kill)
3090 while (read_info)
3092 if (read_info->alias_set)
3094 int index = get_bitmap_index (clear_alias_group,
3095 read_info->alias_set);
3096 if (index != 0)
3098 if (kill)
3099 bitmap_set_bit (kill, index);
3100 bitmap_clear_bit (gen, index);
3104 read_info = read_info->next;
3109 /* Return the insn in BB_INFO before the first wild read or, if there
3110 are no wild reads in the block, the last insn. */
3112 static insn_info_t
3113 find_insn_before_first_wild_read (bb_info_t bb_info)
3115 insn_info_t insn_info = bb_info->last_insn;
3116 insn_info_t last_wild_read = NULL;
3118 while (insn_info)
3120 if (insn_info->wild_read)
3122 last_wild_read = insn_info->prev_insn;
3123 /* Block starts with wild read. */
3124 if (!last_wild_read)
3125 return NULL;
3128 insn_info = insn_info->prev_insn;
3131 if (last_wild_read)
3132 return last_wild_read;
3133 else
3134 return bb_info->last_insn;
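/* For example, if a block contains i1, i2 and i3 in program order
   and i2 is a wild read, the backwards walk above returns i1, the
   insn just before the first wild read.  */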
3138 /* Scan the insns in BB_INFO starting at PTR and going to the top of
3139 the block in order to build the gen and kill sets for the block.
3140 We start at ptr which may be the last insn in the block or may be
3141 the first insn with a wild read. In the latter case we are able to
3142 skip the rest of the block because it just does not matter:
3143 anything that happens is hidden by the wild read. */
3145 static void
3146 dse_step3_scan (bool for_spills, basic_block bb)
3148 bb_info_t bb_info = bb_table[bb->index];
3149 insn_info_t insn_info;
3151 if (for_spills)
3152 /* There are no wild reads in the spill case. */
3153 insn_info = bb_info->last_insn;
3154 else
3155 insn_info = find_insn_before_first_wild_read (bb_info);
3157 /* In the spill case, or in the no_spill case if there is no wild
3158 read in the block, we will need a kill set. */
3159 if (insn_info == bb_info->last_insn)
3161 if (bb_info->kill)
3162 bitmap_clear (bb_info->kill);
3163 else
3164 bb_info->kill = BITMAP_ALLOC (NULL);
3166 else
3167 if (bb_info->kill)
3168 BITMAP_FREE (bb_info->kill);
3170 while (insn_info)
3172 /* There may have been code deleted by the dce pass run before
3173 this phase. */
3174 if (insn_info->insn && INSN_P (insn_info->insn))
3176 /* Process the read(s) last. */
3177 if (for_spills)
3179 scan_stores_spill (insn_info->store_rec, bb_info->gen, bb_info->kill);
3180 scan_reads_spill (insn_info->read_rec, bb_info->gen, bb_info->kill);
3182 else
3184 scan_stores_nospill (insn_info->store_rec, bb_info->gen, bb_info->kill);
3185 scan_reads_nospill (insn_info, bb_info->gen, bb_info->kill);
3189 insn_info = insn_info->prev_insn;
3194 /* Set the gen set of the exit block, and also any block with no
3195 successors that does not have a wild read. */
3197 static void
3198 dse_step3_exit_block_scan (bb_info_t bb_info)
3200 /* The gen set is all 0's for the exit block except for the
3201 frame_pointer_group. */
3203 if (stores_off_frame_dead_at_return)
3205 unsigned int i;
3206 group_info_t group;
3208 for (i = 0; VEC_iterate (group_info_t, rtx_group_vec, i, group); i++)
3210 if (group->process_globally && group->frame_related)
3211 bitmap_ior_into (bb_info->gen, group->group_kill);
3217 /* Find all of the blocks that are not backwards reachable from the
3218 exit block or any block with no successors (BB). These are the
3219 infinite loops or infinite self loops. These blocks will still
3220 have their bits set in UNREACHABLE_BLOCKS. */
3222 static void
3223 mark_reachable_blocks (sbitmap unreachable_blocks, basic_block bb)
3225 edge e;
3226 edge_iterator ei;
3228 if (TEST_BIT (unreachable_blocks, bb->index))
3230 RESET_BIT (unreachable_blocks, bb->index);
3231 FOR_EACH_EDGE (e, ei, bb->preds)
3233 mark_reachable_blocks (unreachable_blocks, e->src);
3238 /* Build the transfer functions for the function. */
3240 static void
3241 dse_step3 (bool for_spills)
3243 basic_block bb;
3244 sbitmap unreachable_blocks = sbitmap_alloc (last_basic_block);
3245 sbitmap_iterator sbi;
3246 bitmap all_ones = NULL;
3247 unsigned int i;
3249 sbitmap_ones (unreachable_blocks);
3251 FOR_ALL_BB (bb)
3253 bb_info_t bb_info = bb_table[bb->index];
3254 if (bb_info->gen)
3255 bitmap_clear (bb_info->gen);
3256 else
3257 bb_info->gen = BITMAP_ALLOC (NULL);
3259 if (bb->index == ENTRY_BLOCK)
3261 else if (bb->index == EXIT_BLOCK)
3262 dse_step3_exit_block_scan (bb_info);
3263 else
3264 dse_step3_scan (for_spills, bb);
3265 if (EDGE_COUNT (bb->succs) == 0)
3266 mark_reachable_blocks (unreachable_blocks, bb);
3268 /* If this is the second time dataflow is run, delete the old
3269 sets. */
3270 if (bb_info->in)
3271 BITMAP_FREE (bb_info->in);
3272 if (bb_info->out)
3273 BITMAP_FREE (bb_info->out);
3276 /* For any block in an infinite loop, we must initialize the out set
3277 to all ones. This could be expensive, but almost never occurs in
3278 practice. However, it is common in regression tests. */
3279 EXECUTE_IF_SET_IN_SBITMAP (unreachable_blocks, 0, i, sbi)
3281 if (bitmap_bit_p (all_blocks, i))
3283 bb_info_t bb_info = bb_table[i];
3284 if (!all_ones)
3286 unsigned int j;
3287 group_info_t group;
3289 all_ones = BITMAP_ALLOC (NULL);
3290 for (j = 0; VEC_iterate (group_info_t, rtx_group_vec, j, group); j++)
3291 bitmap_ior_into (all_ones, group->group_kill);
3293 if (!bb_info->out)
3295 bb_info->out = BITMAP_ALLOC (NULL);
3296 bitmap_copy (bb_info->out, all_ones);
3301 if (all_ones)
3302 BITMAP_FREE (all_ones);
3303 sbitmap_free (unreachable_blocks);
3308 /*----------------------------------------------------------------------------
3309 Fourth step.
3311 Solve the bitvector equations.
3312 ----------------------------------------------------------------------------*/
3315 /* Confluence function for blocks with no successors. Create an out
3316 set from the gen set of the exit block. This block logically has
3317 the exit block as a successor. */
3321 static void
3322 dse_confluence_0 (basic_block bb)
3324 bb_info_t bb_info = bb_table[bb->index];
3326 if (bb->index == EXIT_BLOCK)
3327 return;
3329 if (!bb_info->out)
3331 bb_info->out = BITMAP_ALLOC (NULL);
3332 bitmap_copy (bb_info->out, bb_table[EXIT_BLOCK]->gen);
3336 /* Propagate the information from the in set of the dest of E to the
3337 out set of the src of E. If the various in or out sets are not
3338 there, that means they are all ones. */
3340 static void
3341 dse_confluence_n (edge e)
3343 bb_info_t src_info = bb_table[e->src->index];
3344 bb_info_t dest_info = bb_table[e->dest->index];
3346 if (dest_info->in)
3348 if (src_info->out)
3349 bitmap_and_into (src_info->out, dest_info->in);
3350 else
3352 src_info->out = BITMAP_ALLOC (NULL);
3353 bitmap_copy (src_info->out, dest_info->in);
3359 /* Propagate the info from the out to the in set of BB_INDEX's basic
3360 block. There are three cases:
3362 1) The block has no kill set. In this case the kill set is all
3363 ones. It does not matter what the out set of the block is, none of
3364 the info can reach the top. The only thing that reaches the top is
3365 the gen set and we just copy the set.
3367 2) There is a kill set but no out set and bb has successors. In
3368 this case we just return. Eventually an out set will be created and
3369 it is better to wait than to create a set of ones.
3371 3) There is both a kill and out set. We apply the obvious transfer
3372 function. */
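/* In symbols, case 3 computes IN = GEN | (OUT & ~KILL), which is
   exactly what bitmap_ior_and_compl does below.  */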
3375 static bool
3376 dse_transfer_function (int bb_index)
3378 bb_info_t bb_info = bb_table[bb_index];
3380 if (bb_info->kill)
3382 if (bb_info->out)
3384 /* Case 3 above. */
3385 if (bb_info->in)
3386 return bitmap_ior_and_compl (bb_info->in, bb_info->gen,
3387 bb_info->out, bb_info->kill);
3388 else
3390 bb_info->in = BITMAP_ALLOC (NULL);
3391 bitmap_ior_and_compl (bb_info->in, bb_info->gen,
3392 bb_info->out, bb_info->kill);
3393 return true;
3396 else
3397 /* Case 2 above. */
3398 return false;
3400 else
3402 /* Case 1 above. If there is already an in set, nothing
3403 happens. */
3404 if (bb_info->in)
3405 return false;
3406 else
3408 bb_info->in = BITMAP_ALLOC (NULL);
3409 bitmap_copy (bb_info->in, bb_info->gen);
3410 return true;
3415 /* Solve the dataflow equations. */
3417 static void
3418 dse_step4 (void)
3420 df_simple_dataflow (DF_BACKWARD, NULL, dse_confluence_0,
3421 dse_confluence_n, dse_transfer_function,
3422 all_blocks, df_get_postorder (DF_BACKWARD),
3423 df_get_n_blocks (DF_BACKWARD));
3424 if (dump_file)
3426 basic_block bb;
3428 fprintf (dump_file, "\n\n*** Global dataflow info after analysis.\n");
3429 FOR_ALL_BB (bb)
3431 bb_info_t bb_info = bb_table[bb->index];
3433 df_print_bb_index (bb, dump_file);
3434 if (bb_info->in)
3435 bitmap_print (dump_file, bb_info->in, " in: ", "\n");
3436 else
3437 fprintf (dump_file, " in: *MISSING*\n");
3438 if (bb_info->gen)
3439 bitmap_print (dump_file, bb_info->gen, " gen: ", "\n");
3440 else
3441 fprintf (dump_file, " gen: *MISSING*\n");
3442 if (bb_info->kill)
3443 bitmap_print (dump_file, bb_info->kill, " kill: ", "\n");
3444 else
3445 fprintf (dump_file, " kill: *MISSING*\n");
3446 if (bb_info->out)
3447 bitmap_print (dump_file, bb_info->out, " out: ", "\n");
3448 else
3449 fprintf (dump_file, " out: *MISSING*\n\n");
3456 /*----------------------------------------------------------------------------
3457 Fifth step.
3459 Delete the stores that can only be deleted using the global information.
3460 ----------------------------------------------------------------------------*/
3463 static void
3464 dse_step5_nospill (void)
3466 basic_block bb;
3467 FOR_EACH_BB (bb)
3469 bb_info_t bb_info = bb_table[bb->index];
3470 insn_info_t insn_info = bb_info->last_insn;
3471 bitmap v = bb_info->out;
3473 while (insn_info)
3475 bool deleted = false;
3476 if (dump_file && insn_info->insn)
3478 fprintf (dump_file, "starting to process insn %d\n",
3479 INSN_UID (insn_info->insn));
3480 bitmap_print (dump_file, v, " v: ", "\n");
3483 /* There may have been code deleted by the dce pass run before
3484 this phase. */
3485 if (insn_info->insn
3486 && INSN_P (insn_info->insn)
3487 && (!insn_info->cannot_delete)
3488 && (!bitmap_empty_p (v)))
3490 store_info_t store_info = insn_info->store_rec;
3492 /* Try to delete the current insn. */
3493 deleted = true;
3495 /* Skip the clobbers. */
3496 while (!store_info->is_set)
3497 store_info = store_info->next;
3499 if (store_info->alias_set)
3500 deleted = false;
3501 else
3503 HOST_WIDE_INT i;
3504 group_info_t group_info
3505 = VEC_index (group_info_t, rtx_group_vec, store_info->group_id);
3507 for (i = store_info->begin; i < store_info->end; i++)
3509 int index = get_bitmap_index (group_info, i);
3511 if (dump_file)
3512 fprintf (dump_file, "i = %d, index = %d\n", (int)i, index);
3513 if (index == 0 || !bitmap_bit_p (v, index))
3515 if (dump_file)
3516 fprintf (dump_file, "failing at i = %d\n", (int)i);
3517 deleted = false;
3518 break;
3522 if (deleted)
3524 if (dbg_cnt (dse))
3526 check_for_inc_dec (insn_info->insn);
3527 delete_insn (insn_info->insn);
3528 insn_info->insn = NULL;
3529 globally_deleted++;
3533 /* We only want to process the local info if the insn was not
3534 deleted. For instance, if the insn did a wild read, we
3535 no longer need to trash the info. */
3536 if (insn_info->insn
3537 && INSN_P (insn_info->insn)
3538 && (!deleted))
3540 scan_stores_nospill (insn_info->store_rec, v, NULL);
3541 if (insn_info->wild_read)
3543 if (dump_file)
3544 fprintf (dump_file, "wild read\n");
3545 bitmap_clear (v);
3547 else if (insn_info->read_rec)
3549 if (dump_file)
3550 fprintf (dump_file, "regular read\n");
3551 scan_reads_nospill (insn_info, v, NULL);
3555 insn_info = insn_info->prev_insn;
3561 static void
3562 dse_step5_spill (void)
3564 basic_block bb;
3565 FOR_EACH_BB (bb)
3567 bb_info_t bb_info = bb_table[bb->index];
3568 insn_info_t insn_info = bb_info->last_insn;
3569 bitmap v = bb_info->out;
3571 while (insn_info)
3573 bool deleted = false;
3574 /* There may have been code deleted by the dce pass run before
3575 this phase. */
3576 if (insn_info->insn
3577 && INSN_P (insn_info->insn)
3578 && (!insn_info->cannot_delete)
3579 && (!bitmap_empty_p (v)))
3581 /* Try to delete the current insn. */
3582 store_info_t store_info = insn_info->store_rec;
3583 deleted = true;
3585 while (store_info)
3587 if (store_info->alias_set)
3589 int index = get_bitmap_index (clear_alias_group,
3590 store_info->alias_set);
3591 if (index == 0 || !bitmap_bit_p (v, index))
3593 deleted = false;
3594 break;
3597 else
3598 deleted = false;
3599 store_info = store_info->next;
3601 if (deleted && dbg_cnt (dse))
3603 if (dump_file)
3604 fprintf (dump_file, "Spill deleting insn %d\n",
3605 INSN_UID (insn_info->insn));
3606 check_for_inc_dec (insn_info->insn);
3607 delete_insn (insn_info->insn);
3608 spill_deleted++;
3609 insn_info->insn = NULL;
3613 if (insn_info->insn
3614 && INSN_P (insn_info->insn)
3615 && (!deleted))
3617 scan_stores_spill (insn_info->store_rec, v, NULL);
3618 scan_reads_spill (insn_info->read_rec, v, NULL);
3621 insn_info = insn_info->prev_insn;
3628 /*----------------------------------------------------------------------------
3629 Sixth step.
3631 Delete stores made redundant by earlier stores (which store the same
3632 value) that couldn't be eliminated.
3633 ----------------------------------------------------------------------------*/
3635 static void
3636 dse_step6 (void)
3638 basic_block bb;
3640 FOR_ALL_BB (bb)
3642 bb_info_t bb_info = bb_table[bb->index];
3643 insn_info_t insn_info = bb_info->last_insn;
3645 while (insn_info)
3647 /* There may have been code deleted by the dce pass run before
3648 this phase. */
3649 if (insn_info->insn
3650 && INSN_P (insn_info->insn)
3651 && !insn_info->cannot_delete)
3653 store_info_t s_info = insn_info->store_rec;
3655 while (s_info && !s_info->is_set)
3656 s_info = s_info->next;
3657 if (s_info
3658 && s_info->redundant_reason
3659 && s_info->redundant_reason->insn
3660 && INSN_P (s_info->redundant_reason->insn))
3662 rtx rinsn = s_info->redundant_reason->insn;
3663 if (dump_file)
3664 fprintf (dump_file, "Locally deleting insn %d "
3665 "because insn %d stores the "
3666 "same value and couldn't be "
3667 "eliminated\n",
3668 INSN_UID (insn_info->insn),
3669 INSN_UID (rinsn));
3670 delete_dead_store_insn (insn_info);
3673 insn_info = insn_info->prev_insn;
3678 /*----------------------------------------------------------------------------
3679 Seventh step.
3681 Destroy everything left standing.
3682 ----------------------------------------------------------------------------*/
3684 static void
3685 dse_step7 (bool global_done)
3687 unsigned int i;
3688 group_info_t group;
3689 basic_block bb;
3691 for (i = 0; VEC_iterate (group_info_t, rtx_group_vec, i, group); i++)
3693 free (group->offset_map_n);
3694 free (group->offset_map_p);
3695 BITMAP_FREE (group->store1_n);
3696 BITMAP_FREE (group->store1_p);
3697 BITMAP_FREE (group->store2_n);
3698 BITMAP_FREE (group->store2_p);
3699 BITMAP_FREE (group->group_kill);
3702 if (global_done)
3703 FOR_ALL_BB (bb)
3705 bb_info_t bb_info = bb_table[bb->index];
3706 BITMAP_FREE (bb_info->gen);
3707 if (bb_info->kill)
3708 BITMAP_FREE (bb_info->kill);
3709 if (bb_info->in)
3710 BITMAP_FREE (bb_info->in);
3711 if (bb_info->out)
3712 BITMAP_FREE (bb_info->out);
3715 if (clear_alias_sets)
3717 BITMAP_FREE (clear_alias_sets);
3718 BITMAP_FREE (disqualified_clear_alias_sets);
3719 free_alloc_pool (clear_alias_mode_pool);
3720 htab_delete (clear_alias_mode_table);
3723 end_alias_analysis ();
3724 free (bb_table);
3725 htab_delete (rtx_group_table);
3726 VEC_free (group_info_t, heap, rtx_group_vec);
3727 BITMAP_FREE (all_blocks);
3728 BITMAP_FREE (scratch);
3730 free_alloc_pool (rtx_store_info_pool);
3731 free_alloc_pool (read_info_pool);
3732 free_alloc_pool (insn_info_pool);
3733 free_alloc_pool (bb_info_pool);
3734 free_alloc_pool (rtx_group_info_pool);
3735 free_alloc_pool (deferred_change_pool);
3739 /* -------------------------------------------------------------------------
3741 ------------------------------------------------------------------------- */
3743 /* Callback for running pass_rtl_dse. */
3745 static unsigned int
3746 rest_of_handle_dse (void)
3748 bool did_global = false;
3750 df_set_flags (DF_DEFER_INSN_RESCAN);
3752 /* Need the notes since we must track live hardregs in the forwards
3753 direction. */
3754 df_note_add_problem ();
3755 df_analyze ();
3757 dse_step0 ();
3758 dse_step1 ();
3759 dse_step2_init ();
3760 if (dse_step2_nospill ())
3762 df_set_flags (DF_LR_RUN_DCE);
3763 df_analyze ();
3764 did_global = true;
3765 if (dump_file)
3766 fprintf (dump_file, "doing global processing\n");
3767 dse_step3 (false);
3768 dse_step4 ();
3769 dse_step5_nospill ();
3772 /* For the instance of dse that runs after reload, we make a special
3773 pass to process the spills. These are special in that they are
3774 totally transparent, i.e. there are no aliasing issues that need
3775 to be considered. This means that the wild reads that kill
3776 everything else do not apply here. */
3777 if (clear_alias_sets && dse_step2_spill ())
3779 if (!did_global)
3781 df_set_flags (DF_LR_RUN_DCE);
3782 df_analyze ();
3784 did_global = true;
3785 if (dump_file)
3786 fprintf (dump_file, "doing global spill processing\n");
3787 dse_step3 (true);
3788 dse_step4 ();
3789 dse_step5_spill ();
3792 dse_step6 ();
3793 dse_step7 (did_global);
3795 if (dump_file)
3796 fprintf (dump_file, "dse: local deletions = %d, global deletions = %d, spill deletions = %d\n",
3797 locally_deleted, globally_deleted, spill_deleted);
3798 return 0;
3801 static bool
3802 gate_dse (void)
3804 return gate_dse1 () || gate_dse2 ();
3807 static bool
3808 gate_dse1 (void)
3810 return optimize > 0 && flag_dse
3811 && dbg_cnt (dse1);
3814 static bool
3815 gate_dse2 (void)
3817 return optimize > 0 && flag_dse
3818 && dbg_cnt (dse2);
3821 struct rtl_opt_pass pass_rtl_dse1 =
3824 RTL_PASS,
3825 "dse1", /* name */
3826 gate_dse1, /* gate */
3827 rest_of_handle_dse, /* execute */
3828 NULL, /* sub */
3829 NULL, /* next */
3830 0, /* static_pass_number */
3831 TV_DSE1, /* tv_id */
3832 0, /* properties_required */
3833 0, /* properties_provided */
3834 0, /* properties_destroyed */
3835 0, /* todo_flags_start */
3836 TODO_dump_func |
3837 TODO_df_finish | TODO_verify_rtl_sharing |
3838 TODO_ggc_collect /* todo_flags_finish */
3842 struct rtl_opt_pass pass_rtl_dse2 =
3845 RTL_PASS,
3846 "dse2", /* name */
3847 gate_dse2, /* gate */
3848 rest_of_handle_dse, /* execute */
3849 NULL, /* sub */
3850 NULL, /* next */
3851 0, /* static_pass_number */
3852 TV_DSE2, /* tv_id */
3853 0, /* properties_required */
3854 0, /* properties_provided */
3855 0, /* properties_destroyed */
3856 0, /* todo_flags_start */
3857 TODO_dump_func |
3858 TODO_df_finish | TODO_verify_rtl_sharing |
3859 TODO_ggc_collect /* todo_flags_finish */