/* Dead and redundant store elimination
   Copyright (C) 2004-2022 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-cfgcleanup.h"
#include "alias.h"
#include "tree-ssa-loop.h"
#include "tree-ssa-dse.h"
#include "builtins.h"
#include "gimple-fold.h"
#include "gimplify.h"
#include "tree-eh.h"
#include "cfganal.h"
#include "cgraph.h"
#include "ipa-modref-tree.h"
#include "ipa-modref.h"
#include "target.h"
#include "tree-ssa-loop-niter.h"

/* This file implements dead store elimination.

   A dead store is a store into a memory location which will later be
   overwritten by another store without any intervening loads.  In this
   case the earlier store can be deleted or trimmed if the store
   was partially dead.

   A redundant store is a store into a memory location which stores
   the exact same value as a prior store to the same memory location.
   While this can often be handled by dead store elimination, removing
   the redundant store is often better than removing or trimming the
   dead store.

   In our SSA + virtual operand world we use immediate uses of virtual
   operands to detect these cases.  If a store's virtual definition
   is used precisely once by a later store to the same location which
   post dominates the first store, then the first store is dead.  If
   the data stored is the same, then the second store is redundant.

   The single use of the store's virtual definition ensures that
   there are no intervening aliased loads and the requirement that
   the second store post dominates the first ensures that if the earlier
   store executes, then the later stores will execute before the function
   exits.

   It may help to think of this as first moving the earlier store to
   the point immediately before the later store.  Again, the single
   use of the virtual definition and the post-dominance relationship
   ensure that such movement would be safe.  Clearly if there are
   back to back stores, then the second makes the first dead.  If
   the second store stores the same value, then the second store is
   redundant.

   Reviewing section 10.7.2 in Morgan's "Building an Optimizing Compiler"
   may also help in understanding this code since it discusses the
   relationship between dead store and redundant load elimination.  In
   fact, they are the same transformation applied to different views of
   the CFG.  */
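
/* A concrete sketch of the two cases (illustrative source, not a
   particular testcase):

     a.x = 1;   // dead: overwritten below with no intervening read
     a.x = 2;
     a.y = 0;
     a.y = 0;   // redundant: stores the same value as the prior store

   DSE removes the first store to a.x and the second store to a.y.  */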

static void delete_dead_or_redundant_call (gimple_stmt_iterator *, const char *);

/* Bitmap of blocks that have had EH statements cleaned.  We should
   remove their dead edges eventually.  */
static bitmap need_eh_cleanup;
static bitmap need_ab_cleanup;

/* STMT is a statement that may write into memory.  Analyze it and
   initialize WRITE to describe how STMT affects memory.

   Return TRUE if the statement was analyzed, FALSE otherwise.

   It is always safe to return FALSE.  But typically better optimization
   can be achieved by analyzing more statements.  */

static bool
initialize_ao_ref_for_dse (gimple *stmt, ao_ref *write)
{
  /* It's advantageous to handle certain mem* functions.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      switch (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)))
	{
	case BUILT_IN_MEMCPY:
	case BUILT_IN_MEMMOVE:
	case BUILT_IN_MEMSET:
	case BUILT_IN_MEMCPY_CHK:
	case BUILT_IN_MEMMOVE_CHK:
	case BUILT_IN_MEMSET_CHK:
	case BUILT_IN_STRNCPY:
	case BUILT_IN_STRNCPY_CHK:
	  {
	    tree size = gimple_call_arg (stmt, 2);
	    tree ptr = gimple_call_arg (stmt, 0);
	    ao_ref_init_from_ptr_and_size (write, ptr, size);
	    return true;
	  }

	/* A calloc call can never be dead, but it can make
	   subsequent stores redundant if they store 0 into
	   the same memory locations.  */
	case BUILT_IN_CALLOC:
	  {
	    tree nelem = gimple_call_arg (stmt, 0);
	    tree selem = gimple_call_arg (stmt, 1);
	    tree lhs;
	    if (TREE_CODE (nelem) == INTEGER_CST
		&& TREE_CODE (selem) == INTEGER_CST
		&& (lhs = gimple_call_lhs (stmt)) != NULL_TREE)
	      {
		tree size = fold_build2 (MULT_EXPR, TREE_TYPE (nelem),
					 nelem, selem);
		ao_ref_init_from_ptr_and_size (write, lhs, size);
		return true;
	      }
	  }

	default:
	  break;
	}
    }
  else if (tree lhs = gimple_get_lhs (stmt))
    {
      if (TREE_CODE (lhs) != SSA_NAME)
	{
	  ao_ref_init (write, lhs);
	  return true;
	}
    }
  return false;
}
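
/* Illustrative examples of what the above analyzes (a sketch; the exact
   IL depends on the input): memset (p, 0, 16) yields an ao_ref covering
   bytes [0, 16) at *p; p = calloc (4, 8) with constant arguments and an
   SSA lhs yields an ao_ref covering the 32 allocated bytes; an ordinary
   assignment such as a.f = x_1 is described directly by its LHS.  */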

/* Given REF from the alias oracle, return TRUE if it is a valid
   kill memory reference for dead store elimination, false otherwise.

   In particular, the reference must have a known base, known maximum
   size, start at a byte offset and have a size that is one or more
   bytes.  */

static bool
valid_ao_ref_kill_for_dse (ao_ref *ref)
{
  return (ao_ref_base (ref)
	  && known_size_p (ref->max_size)
	  && maybe_ne (ref->size, 0)
	  && known_eq (ref->max_size, ref->size)
	  && known_ge (ref->offset, 0));
}

/* Given REF from the alias oracle, return TRUE if it is a valid
   load or store memory reference for dead store elimination, false otherwise.

   Unlike valid_ao_ref_kill_for_dse we can accept writes where max_size
   is not the same as size, since we can conservatively handle the larger
   range.  */

static bool
valid_ao_ref_for_dse (ao_ref *ref)
{
  return (ao_ref_base (ref)
	  && known_size_p (ref->max_size)
	  && known_ge (ref->offset, 0));
}

/* Initialize OFFSET and SIZE to a range known to contain REF
   where the boundaries are divisible by BITS_PER_UNIT (but still in bits).
   Return false if this is impossible.  */

static bool
get_byte_aligned_range_containing_ref (ao_ref *ref, poly_int64 *offset,
				       HOST_WIDE_INT *size)
{
  if (!known_size_p (ref->max_size))
    return false;
  *offset = aligned_lower_bound (ref->offset, BITS_PER_UNIT);
  poly_int64 end = aligned_upper_bound (ref->offset + ref->max_size,
					BITS_PER_UNIT);
  return (end - *offset).is_constant (size);
}

/* Initialize OFFSET and SIZE to a range known to be contained in REF
   where the boundaries are divisible by BITS_PER_UNIT (but still in bits).
   Return false if this is impossible.  */

static bool
get_byte_aligned_range_contained_in_ref (ao_ref *ref, poly_int64 *offset,
					 HOST_WIDE_INT *size)
{
  if (!known_size_p (ref->size)
      || !known_eq (ref->size, ref->max_size))
    return false;
  *offset = aligned_upper_bound (ref->offset, BITS_PER_UNIT);
  poly_int64 end = aligned_lower_bound (ref->offset + ref->max_size,
					BITS_PER_UNIT);
  /* For bit accesses we can get -1 here, but a zero-sized kill is not
     useful either.  */
  if (!known_gt (end, *offset))
    return false;
  return (end - *offset).is_constant (size);
}
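
/* A worked example of the two rounding directions (hypothetical bit
   ranges): for an access covering bits [3, 29) of its base, the
   contained (kill) variant rounds inward to bits [8, 24), i.e. 2 bytes,
   while the containing (use) variant rounds outward to bits [0, 32),
   i.e. 4 bytes.  A sub-byte range such as bits [3, 7) has no contained
   byte range, so the kill variant fails for it.  */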

/* Compute the byte range (returned in RET_OFFSET and RET_SIZE) for access
   COPY inside REF.  If KILL is true, then COPY represents a kill and the
   byte range needs to be fully contained in the bit range given by COPY.
   If KILL is false then the byte range returned must contain the range of
   COPY.  */

static bool
get_byte_range (ao_ref *copy, ao_ref *ref, bool kill,
		HOST_WIDE_INT *ret_offset, HOST_WIDE_INT *ret_size)
{
  HOST_WIDE_INT copy_size, ref_size;
  poly_int64 copy_offset, ref_offset;
  HOST_WIDE_INT diff;

  /* First translate from bits to bytes, rounding to bigger or smaller ranges
     as needed.  Kills always need to be rounded to smaller ranges, while
     uses and stores round to larger ranges.  */
  if (kill)
    {
      if (!get_byte_aligned_range_contained_in_ref (copy, &copy_offset,
						    &copy_size))
	return false;
    }
  else
    {
      if (!get_byte_aligned_range_containing_ref (copy, &copy_offset,
						  &copy_size))
	return false;
    }

  if (!get_byte_aligned_range_containing_ref (ref, &ref_offset, &ref_size)
      || !ordered_p (copy_offset, ref_offset))
    return false;

  /* Switch sizes from bits to bytes so we do not need to care about
     overflows.  Offset calculation needs to stay in bits until we compute
     the difference and can switch to HOST_WIDE_INT.  */
  copy_size /= BITS_PER_UNIT;
  ref_size /= BITS_PER_UNIT;

  /* If COPY starts before REF, then reset the beginning of
     COPY to match REF and decrease the size of COPY by the
     number of bytes removed from COPY.  */
  if (maybe_lt (copy_offset, ref_offset))
    {
      if (!(ref_offset - copy_offset).is_constant (&diff)
	  || copy_size < diff / BITS_PER_UNIT)
	return false;
      copy_size -= diff / BITS_PER_UNIT;
      copy_offset = ref_offset;
    }

  if (!(copy_offset - ref_offset).is_constant (&diff)
      || ref_size <= diff / BITS_PER_UNIT)
    return false;

  /* If COPY extends beyond REF, chop off its size appropriately.  */
  HOST_WIDE_INT limit = ref_size - diff / BITS_PER_UNIT;

  if (copy_size > limit)
    copy_size = limit;
  *ret_size = copy_size;
  if (!(copy_offset - ref_offset).is_constant (ret_offset))
    return false;
  *ret_offset /= BITS_PER_UNIT;
  return true;
}
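
/* For instance (hypothetical numbers): with REF covering bytes [0, 16)
   of its base and COPY covering bytes [12, 24), the byte range returned
   relative to REF is offset 12 with size 4; the 8 bytes of COPY that
   extend past REF are chopped off.  */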

/* Update LIVE_BYTES tracking REF for write to WRITE:
   Verify we have the same base memory address, the write
   has a known size and overlaps with REF.  */
static void
clear_live_bytes_for_ref (sbitmap live_bytes, ao_ref *ref, ao_ref *write)
{
  HOST_WIDE_INT start, size;

  if (valid_ao_ref_kill_for_dse (write)
      && operand_equal_p (write->base, ref->base, OEP_ADDRESS_OF)
      && get_byte_range (write, ref, true, &start, &size))
    bitmap_clear_range (live_bytes, start, size);
}

/* Clear any bytes written by STMT from the bitmap LIVE_BYTES.  The base
   address written by STMT must match the one found in REF, which must
   have its base address previously initialized.

   This routine must be conservative.  If we don't know the offset or
   actual size written, assume nothing was written.  */

static void
clear_bytes_written_by (sbitmap live_bytes, gimple *stmt, ao_ref *ref)
{
  ao_ref write;

  if (gcall *call = dyn_cast <gcall *> (stmt))
    {
      bool interposed;
      modref_summary *summary = get_modref_function_summary (call, &interposed);

      if (summary && !interposed)
	for (auto kill : summary->kills)
	  if (kill.get_ao_ref (as_a <gcall *> (stmt), &write))
	    clear_live_bytes_for_ref (live_bytes, ref, &write);
    }
  if (!initialize_ao_ref_for_dse (stmt, &write))
    return;

  clear_live_bytes_for_ref (live_bytes, ref, &write);
}

/* REF is a memory write.  Extract relevant information from it and
   initialize the LIVE_BYTES bitmap.  If successful, return TRUE.
   Otherwise return FALSE.  */

static bool
setup_live_bytes_from_ref (ao_ref *ref, sbitmap live_bytes)
{
  HOST_WIDE_INT const_size;
  if (valid_ao_ref_for_dse (ref)
      && ((aligned_upper_bound (ref->offset + ref->max_size, BITS_PER_UNIT)
	   - aligned_lower_bound (ref->offset,
				  BITS_PER_UNIT)).is_constant (&const_size))
      && (const_size / BITS_PER_UNIT <= param_dse_max_object_size)
      && const_size > 1)
    {
      bitmap_clear (live_bytes);
      bitmap_set_range (live_bytes, 0, const_size / BITS_PER_UNIT);
      return true;
    }
  return false;
}
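
/* E.g. for a 16-byte store this sets bits 0..15, with bit zero biased
   to the first byte of REF.  Subsequent kills clear bits until either
   no bits remain live (the store is dead) or the walk gives up with
   some bytes still live (the store is live or only partially dead).  */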

/* Compute the number of elements that we can trim from the head and
   tail of ORIG resulting in a bitmap that is a superset of LIVE.

   Store the number of elements trimmed from the head and tail in
   TRIM_HEAD and TRIM_TAIL.

   STMT is the statement being trimmed and is used for debugging dump
   output only.  */

static void
compute_trims (ao_ref *ref, sbitmap live, int *trim_head, int *trim_tail,
	       gimple *stmt)
{
  /* We use sbitmaps biased such that ref->offset is bit zero and the bitmap
     extends through ref->size.  So we know that in the original bitmap
     bits 0..ref->size were true.  We don't actually need the bitmap, just
     the REF to compute the trims.  */

  /* Now identify how much, if any of the tail we can chop off.  */
  HOST_WIDE_INT const_size;
  int last_live = bitmap_last_set_bit (live);
  if (ref->size.is_constant (&const_size))
    {
      int last_orig = (const_size / BITS_PER_UNIT) - 1;
      /* We can leave inconvenient amounts on the tail as
	 residual handling in mem* and str* functions is usually
	 reasonably efficient.  */
      *trim_tail = last_orig - last_live;

      /* But don't trim away out of bounds accesses, as this defeats
	 proper warnings.

	 We could have a type with no TYPE_SIZE_UNIT or we could have a VLA
	 where TYPE_SIZE_UNIT is not a constant.  */
      if (*trim_tail
	  && TYPE_SIZE_UNIT (TREE_TYPE (ref->base))
	  && TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (ref->base))) == INTEGER_CST
	  && compare_tree_int (TYPE_SIZE_UNIT (TREE_TYPE (ref->base)),
			       last_orig) <= 0)
	*trim_tail = 0;
    }
  else
    *trim_tail = 0;

  /* Identify how much, if any of the head we can chop off.  */
  int first_orig = 0;
  int first_live = bitmap_first_set_bit (live);
  *trim_head = first_live - first_orig;

  /* If REF is aligned, try to maintain this alignment if it reduces
     the number of (power-of-two sized aligned) writes to memory.  */
  unsigned int align_bits;
  unsigned HOST_WIDE_INT bitpos;
  if ((*trim_head || *trim_tail)
      && last_live - first_live >= 2
      && ao_ref_alignment (ref, &align_bits, &bitpos)
      && align_bits >= 32
      && bitpos == 0
      && align_bits % BITS_PER_UNIT == 0)
    {
      unsigned int align_units = align_bits / BITS_PER_UNIT;
      if (align_units > 16)
	align_units = 16;
      while ((first_live | (align_units - 1)) > (unsigned int) last_live)
	align_units >>= 1;

      if (*trim_head)
	{
	  unsigned int pos = first_live & (align_units - 1);
	  for (unsigned int i = 1; i <= align_units; i <<= 1)
	    {
	      unsigned int mask = ~(i - 1);
	      unsigned int bytes = align_units - (pos & mask);
	      if (wi::popcount (bytes) <= 1)
		{
		  *trim_head &= mask;
		  break;
		}
	    }
	}

      if (*trim_tail)
	{
	  unsigned int pos = last_live & (align_units - 1);
	  for (unsigned int i = 1; i <= align_units; i <<= 1)
	    {
	      int mask = i - 1;
	      unsigned int bytes = (pos | mask) + 1;
	      if ((last_live | mask) > (last_live + *trim_tail))
		break;
	      if (wi::popcount (bytes) <= 1)
		{
		  unsigned int extra = (last_live | mask) - last_live;
		  *trim_tail -= extra;
		  break;
		}
	    }
	}
    }

  if ((*trim_head || *trim_tail)
      && dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "  Trimming statement (head = %d, tail = %d): ",
	       *trim_head, *trim_tail);
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
      fprintf (dump_file, "\n");
    }
}
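
/* A worked example (hypothetical numbers): for a 16-byte store where
   only bytes 5..9 remain live, the raw trims are head = 5 and
   tail = 6.  If the destination is 8-byte aligned the head trim is
   then rounded down to 4, so the residual write starts at an offset
   reachable with a single power-of-two sized access.  */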

/* STMT initializes an object from COMPLEX_CST where one or more of the
   bytes written may be dead stores.  REF is a representation of the
   memory written.  LIVE is the bitmap of stores that are actually live.

   Attempt to rewrite STMT so that only the real or imaginary part of
   the object is actually stored.  */

static void
maybe_trim_complex_store (ao_ref *ref, sbitmap live, gimple *stmt)
{
  int trim_head, trim_tail;
  compute_trims (ref, live, &trim_head, &trim_tail, stmt);

  /* The amount of data trimmed from the head or tail must be at
     least half the size of the object to ensure we're trimming
     the entire real or imaginary half.  By writing things this
     way we avoid more O(n) bitmap operations.  */
  if (known_ge (trim_tail * 2 * BITS_PER_UNIT, ref->size))
    {
      /* TREE_REALPART is live.  */
      tree x = TREE_REALPART (gimple_assign_rhs1 (stmt));
      tree y = gimple_assign_lhs (stmt);
      y = build1 (REALPART_EXPR, TREE_TYPE (x), y);
      gimple_assign_set_lhs (stmt, y);
      gimple_assign_set_rhs1 (stmt, x);
    }
  else if (known_ge (trim_head * 2 * BITS_PER_UNIT, ref->size))
    {
      /* TREE_IMAGPART is live.  */
      tree x = TREE_IMAGPART (gimple_assign_rhs1 (stmt));
      tree y = gimple_assign_lhs (stmt);
      y = build1 (IMAGPART_EXPR, TREE_TYPE (x), y);
      gimple_assign_set_lhs (stmt, y);
      gimple_assign_set_rhs1 (stmt, x);
    }

  /* Other cases indicate parts of both the real and imag subobjects
     are live.  We do not try to optimize those cases.  */
}
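
/* For example (a sketch): given _Complex double c; c = CST; where the
   imaginary half is overwritten before any read, the store is rewritten
   as REALPART_EXPR <c> = TREE_REALPART (CST), halving the bytes
   written.  */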

/* STMT initializes an object using a CONSTRUCTOR where one or more of the
   bytes written are dead stores.  ORIG is the bitmap of bytes stored by
   STMT.  LIVE is the bitmap of stores that are actually live.

   Attempt to rewrite STMT so that it writes fewer memory locations.

   The most common case for getting here is a CONSTRUCTOR with no elements
   being used to zero initialize an object.  We do not try to handle other
   cases as those would force us to fully cover the object with the
   CONSTRUCTOR node except for the components that are dead.  */

static void
maybe_trim_constructor_store (ao_ref *ref, sbitmap live, gimple *stmt)
{
  tree ctor = gimple_assign_rhs1 (stmt);

  /* This is the only case we currently handle.  It actually seems to
     catch most cases of actual interest.  */
  gcc_assert (CONSTRUCTOR_NELTS (ctor) == 0);

  int head_trim = 0;
  int tail_trim = 0;
  compute_trims (ref, live, &head_trim, &tail_trim, stmt);

  /* Now we want to replace the constructor initializer
     with memset (object + head_trim, 0, size - head_trim - tail_trim).  */
  if (head_trim || tail_trim)
    {
      /* We want &lhs for the MEM_REF expression.  */
      tree lhs_addr = build_fold_addr_expr (gimple_assign_lhs (stmt));

      if (! is_gimple_min_invariant (lhs_addr))
	return;

      /* The number of bytes for the new constructor.  */
      poly_int64 ref_bytes = exact_div (ref->size, BITS_PER_UNIT);
      poly_int64 count = ref_bytes - head_trim - tail_trim;

      /* And the new type for the CONSTRUCTOR.  Essentially it's just
	 a char array large enough to cover the non-trimmed parts of
	 the original CONSTRUCTOR.  Note we want explicit bounds here
	 so that we know how many bytes to clear when expanding the
	 CONSTRUCTOR.  */
      tree type = build_array_type_nelts (char_type_node, count);

      /* Build a suitable alias type rather than using alias set zero
	 to avoid pessimizing.  */
      tree alias_type = reference_alias_ptr_type (gimple_assign_lhs (stmt));

      /* Build a MEM_REF representing the whole accessed area, starting
	 at the first byte not trimmed.  */
      tree exp = fold_build2 (MEM_REF, type, lhs_addr,
			      build_int_cst (alias_type, head_trim));

      /* Now update STMT with a new RHS and LHS.  */
      gimple_assign_set_lhs (stmt, exp);
      gimple_assign_set_rhs1 (stmt, build_constructor (type, NULL));
    }
}
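
/* E.g. (a sketch): for struct { char buf[16]; } s = {}; where bytes
   0..7 of S are overwritten before any read, HEAD_TRIM is 8 and the
   empty-CONSTRUCTOR store is narrowed to MEM <char[8]> [(&s) + 8] = {},
   zeroing only the still-live tail.  */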

/* STMT is a memcpy, memmove or memset.  Decrement the number of bytes
   copied/set by DECREMENT.  */
static void
decrement_count (gimple *stmt, int decrement)
{
  tree *countp = gimple_call_arg_ptr (stmt, 2);
  gcc_assert (TREE_CODE (*countp) == INTEGER_CST);
  *countp = wide_int_to_tree (TREE_TYPE (*countp), (TREE_INT_CST_LOW (*countp)
						    - decrement));
}

static void
increment_start_addr (gimple *stmt, tree *where, int increment)
{
  if (tree lhs = gimple_call_lhs (stmt))
    if (where == gimple_call_arg_ptr (stmt, 0))
      {
	gassign *newop = gimple_build_assign (lhs, unshare_expr (*where));
	gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	gsi_insert_after (&gsi, newop, GSI_SAME_STMT);
	gimple_call_set_lhs (stmt, NULL_TREE);
	update_stmt (stmt);
      }

  if (TREE_CODE (*where) == SSA_NAME)
    {
      tree tem = make_ssa_name (TREE_TYPE (*where));
      gassign *newop
	= gimple_build_assign (tem, POINTER_PLUS_EXPR, *where,
			       build_int_cst (sizetype, increment));
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      gsi_insert_before (&gsi, newop, GSI_SAME_STMT);
      *where = tem;
      update_stmt (stmt);
      return;
    }

  *where = build_fold_addr_expr (fold_build2 (MEM_REF, char_type_node,
					      *where,
					      build_int_cst (ptr_type_node,
							     increment)));
}

/* STMT is a builtin call that writes bytes in bitmap ORIG, some bytes are
   dead (ORIG & ~NEW) and need not be stored.  Try to rewrite STMT to reduce
   the amount of data it actually writes.

   Right now we only support trimming from the head or the tail of the
   memory region.  In theory we could split the mem* call, but it's
   likely of marginal value.  */

static void
maybe_trim_memstar_call (ao_ref *ref, sbitmap live, gimple *stmt)
{
  int head_trim, tail_trim;
  switch (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)))
    {
    case BUILT_IN_STRNCPY:
    case BUILT_IN_STRNCPY_CHK:
      compute_trims (ref, live, &head_trim, &tail_trim, stmt);
      if (head_trim)
	{
	  /* Head trimming of strncpy is only possible if we can
	     prove all bytes we would trim are non-zero (or we could
	     turn the strncpy into memset if there must be zero
	     among the head trimmed bytes).  If we don't know anything
	     about those bytes, the presence or absence of '\0' bytes
	     in there will affect whether it acts for the non-trimmed
	     bytes as memset or memcpy/strncpy.  */
	  c_strlen_data lendata = { };
	  int orig_head_trim = head_trim;
	  tree srcstr = gimple_call_arg (stmt, 1);
	  if (!get_range_strlen (srcstr, &lendata, /*eltsize=*/1)
	      || !tree_fits_uhwi_p (lendata.minlen))
	    head_trim = 0;
	  else if (tree_to_uhwi (lendata.minlen) < (unsigned) head_trim)
	    {
	      head_trim = tree_to_uhwi (lendata.minlen);
	      if ((orig_head_trim & (UNITS_PER_WORD - 1)) == 0)
		head_trim &= ~(UNITS_PER_WORD - 1);
	    }
	  if (orig_head_trim != head_trim
	      && dump_file
	      && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "  Adjusting strncpy trimming to (head = %d,"
		     " tail = %d)\n", head_trim, tail_trim);
	}
      goto do_memcpy;

    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
      compute_trims (ref, live, &head_trim, &tail_trim, stmt);

    do_memcpy:
      /* Tail trimming is easy, we can just reduce the count.  */
      if (tail_trim)
	decrement_count (stmt, tail_trim);

      /* Head trimming requires adjusting all the arguments.  */
      if (head_trim)
	{
	  /* For __*_chk need to adjust also the last argument.  */
	  if (gimple_call_num_args (stmt) == 4)
	    {
	      tree size = gimple_call_arg (stmt, 3);
	      if (!tree_fits_uhwi_p (size))
		break;
	      if (!integer_all_onesp (size))
		{
		  unsigned HOST_WIDE_INT sz = tree_to_uhwi (size);
		  if (sz < (unsigned) head_trim)
		    break;
		  tree arg = wide_int_to_tree (TREE_TYPE (size),
					       sz - head_trim);
		  gimple_call_set_arg (stmt, 3, arg);
		}
	    }
	  tree *dst = gimple_call_arg_ptr (stmt, 0);
	  increment_start_addr (stmt, dst, head_trim);
	  tree *src = gimple_call_arg_ptr (stmt, 1);
	  increment_start_addr (stmt, src, head_trim);
	  decrement_count (stmt, head_trim);
	}
      break;

    case BUILT_IN_MEMSET:
    case BUILT_IN_MEMSET_CHK:
      compute_trims (ref, live, &head_trim, &tail_trim, stmt);

      /* Tail trimming is easy, we can just reduce the count.  */
      if (tail_trim)
	decrement_count (stmt, tail_trim);

      /* Head trimming requires adjusting all the arguments.  */
      if (head_trim)
	{
	  /* For __*_chk need to adjust also the last argument.  */
	  if (gimple_call_num_args (stmt) == 4)
	    {
	      tree size = gimple_call_arg (stmt, 3);
	      if (!tree_fits_uhwi_p (size))
		break;
	      if (!integer_all_onesp (size))
		{
		  unsigned HOST_WIDE_INT sz = tree_to_uhwi (size);
		  if (sz < (unsigned) head_trim)
		    break;
		  tree arg = wide_int_to_tree (TREE_TYPE (size),
					       sz - head_trim);
		  gimple_call_set_arg (stmt, 3, arg);
		}
	    }
	  tree *dst = gimple_call_arg_ptr (stmt, 0);
	  increment_start_addr (stmt, dst, head_trim);
	  decrement_count (stmt, head_trim);
	}
      break;

    default:
      break;
    }
}
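
/* Typical rewrites this produces (illustrative): if the last 8 bytes
   of memset (p, 0, 32) are overwritten before being read, the call
   becomes memset (p, 0, 24); if instead the first 8 bytes are dead it
   becomes memset (p + 8, 0, 24), with the destination advanced and the
   count reduced.  The same applies to the arguments of memcpy and
   friends.  */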

/* STMT is a memory write where one or more bytes written are dead
   stores.  ORIG is the bitmap of bytes stored by STMT.  LIVE is the
   bitmap of stores that are actually live.

   Attempt to rewrite STMT so that it writes fewer memory locations.  Right
   now we only support trimming at the start or end of the memory region.
   It's not clear how much there is to be gained by trimming from the middle
   of the region.  */

static void
maybe_trim_partially_dead_store (ao_ref *ref, sbitmap live, gimple *stmt)
{
  if (is_gimple_assign (stmt)
      && TREE_CODE (gimple_assign_lhs (stmt)) != TARGET_MEM_REF)
    {
      switch (gimple_assign_rhs_code (stmt))
	{
	case CONSTRUCTOR:
	  maybe_trim_constructor_store (ref, live, stmt);
	  break;
	case COMPLEX_CST:
	  maybe_trim_complex_store (ref, live, stmt);
	  break;
	default:
	  break;
	}
    }
}

/* Return TRUE if USE_REF reads bytes from LIVE where live is
   derived from REF, a write reference.

   While this routine may modify USE_REF, it's passed by value, not
   location.  So callers do not see those modifications.  */

static bool
live_bytes_read (ao_ref *use_ref, ao_ref *ref, sbitmap live)
{
  /* We have already verified that USE_REF and REF hit the same object.
     Now verify that there's actually an overlap between USE_REF and REF.  */
  HOST_WIDE_INT start, size;
  if (get_byte_range (use_ref, ref, false, &start, &size))
    {
      /* If USE_REF covers all of REF, then it will hit one or more
	 live bytes.  This avoids useless iteration over the bitmap
	 below.  */
      if (start == 0 && known_eq (size * 8, ref->size))
	return true;

      /* Now check if any of the remaining bits in USE_REF are set in
	 LIVE.  */
      return bitmap_bit_in_range_p (live, start, (start + size - 1));
    }
  return true;
}

/* Callback for dse_classify_store calling for_each_index.  Verify that
   indices are invariant in the loop with backedge PHI in basic-block DATA.  */

static bool
check_name (tree, tree *idx, void *data)
{
  basic_block phi_bb = (basic_block) data;
  if (TREE_CODE (*idx) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (*idx)
      && dominated_by_p (CDI_DOMINATORS, gimple_bb (SSA_NAME_DEF_STMT (*idx)),
			 phi_bb))
    return false;
  return true;
}

/* STMT stores the value 0 into one or more memory locations
   (via memset, empty constructor, calloc call, etc).

   See if there is a subsequent store of the value 0 to one
   or more of the same memory location(s).  If so, the subsequent
   store is redundant and can be removed.

   The subsequent stores could be via memset, empty constructors,
   simple MEM stores, etc.  */

static void
dse_optimize_redundant_stores (gimple *stmt)
{
  int cnt = 0;

  /* TBAA state of STMT, if it is a call it is effectively alias-set zero.  */
  alias_set_type earlier_set = 0;
  alias_set_type earlier_base_set = 0;
  if (is_gimple_assign (stmt))
    {
      ao_ref lhs_ref;
      ao_ref_init (&lhs_ref, gimple_assign_lhs (stmt));
      earlier_set = ao_ref_alias_set (&lhs_ref);
      earlier_base_set = ao_ref_base_alias_set (&lhs_ref);
    }

  /* We could do something fairly complex and look through PHIs
     like DSE_CLASSIFY_STORE, but it doesn't seem to be worth
     the effort.

     Look at all the immediate uses of the VDEF (which are obviously
     dominated by STMT).  See if one or more stores 0 into the same
     memory locations as STMT; if so, remove the immediate use
     statements.  */
  tree defvar = gimple_vdef (stmt);
  imm_use_iterator ui;
  gimple *use_stmt;
  FOR_EACH_IMM_USE_STMT (use_stmt, ui, defvar)
    {
      /* Limit stmt walking.  */
      if (++cnt > param_dse_max_alias_queries_per_store)
	break;

      /* If USE_STMT stores 0 into one or more of the same locations
	 as STMT and STMT would kill USE_STMT, then we can just remove
	 USE_STMT.  */
      tree fndecl;
      if ((is_gimple_assign (use_stmt)
	   && gimple_vdef (use_stmt)
	   && (gimple_assign_single_p (use_stmt)
	       && initializer_zerop (gimple_assign_rhs1 (use_stmt))))
	  || (gimple_call_builtin_p (use_stmt, BUILT_IN_NORMAL)
	      && (fndecl = gimple_call_fndecl (use_stmt)) != NULL
	      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET_CHK)
	      && integer_zerop (gimple_call_arg (use_stmt, 1))))
	{
	  ao_ref write;

	  if (!initialize_ao_ref_for_dse (use_stmt, &write))
	    break;

	  if (valid_ao_ref_for_dse (&write)
	      && stmt_kills_ref_p (stmt, &write))
	    {
	      gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
	      if (is_gimple_assign (use_stmt))
		{
		  ao_ref lhs_ref;
		  ao_ref_init (&lhs_ref, gimple_assign_lhs (use_stmt));
		  if ((earlier_set == ao_ref_alias_set (&lhs_ref)
		       || alias_set_subset_of (ao_ref_alias_set (&lhs_ref),
					       earlier_set))
		      && (earlier_base_set == ao_ref_base_alias_set (&lhs_ref)
			  || alias_set_subset_of
			       (ao_ref_base_alias_set (&lhs_ref),
				earlier_base_set)))
		    delete_dead_or_redundant_assignment (&gsi, "redundant",
							 need_eh_cleanup,
							 need_ab_cleanup);
		}
	      else if (is_gimple_call (use_stmt))
		{
		  if ((earlier_set == 0
		       || alias_set_subset_of (0, earlier_set))
		      && (earlier_base_set == 0
			  || alias_set_subset_of (0, earlier_base_set)))
		    delete_dead_or_redundant_call (&gsi, "redundant");
		}
	      else
		gcc_unreachable ();
	    }
	}
    }
}
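
/* An example of the redundancy this catches (a sketch):

     p = calloc (16, 4);   // memory is already zero
     memset (p, 0, 64);    // redundant, removed

   or an empty CONSTRUCTOR zeroing a whole struct followed by a store
   of zero into one of its fields.  */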

/* A helper of dse_optimize_stmt.
   Given a GIMPLE_ASSIGN in STMT that writes to REF, classify it
   according to downstream uses and defs.  Sets *BY_CLOBBER_P to true
   if only clobber statements influenced the classification result.
   Returns the classification.  */

dse_store_status
dse_classify_store (ao_ref *ref, gimple *stmt,
		    bool byte_tracking_enabled, sbitmap live_bytes,
		    bool *by_clobber_p, tree stop_at_vuse)
{
  gimple *temp;
  int cnt = 0;
  auto_bitmap visited;

  if (by_clobber_p)
    *by_clobber_p = true;

  /* Find the first dominated statement that clobbers (part of) the
     memory stmt stores to with no intermediate statement that may use
     part of the memory stmt stores.  That is, find a store that may
     prove stmt to be a dead store.  */
  temp = stmt;
  do
    {
      gimple *use_stmt;
      imm_use_iterator ui;
      bool fail = false;
      tree defvar;

      if (gimple_code (temp) == GIMPLE_PHI)
	{
	  /* If we visit this PHI by following a backedge then we have to
	     make sure ref->ref only refers to SSA names that are invariant
	     with respect to the loop represented by this PHI node.  */
	  if (dominated_by_p (CDI_DOMINATORS, gimple_bb (stmt),
			      gimple_bb (temp))
	      && !for_each_index (ref->ref ? &ref->ref : &ref->base,
				  check_name, gimple_bb (temp)))
	    return DSE_STORE_LIVE;
	  defvar = PHI_RESULT (temp);
	  bitmap_set_bit (visited, SSA_NAME_VERSION (defvar));
	}
      else
	defvar = gimple_vdef (temp);

      /* If we're instructed to stop walking at region boundary, do so.  */
      if (defvar == stop_at_vuse)
	return DSE_STORE_LIVE;

      auto_vec<gimple *, 10> defs;
      gimple *first_phi_def = NULL;
      gimple *last_phi_def = NULL;
      FOR_EACH_IMM_USE_STMT (use_stmt, ui, defvar)
	{
	  /* Limit stmt walking.  */
	  if (++cnt > param_dse_max_alias_queries_per_store)
	    {
	      fail = true;
	      break;
	    }

	  /* In simple cases we can look through PHI nodes, but we
	     have to be careful with loops and with memory references
	     containing operands that are also operands of PHI nodes.
	     See gcc.c-torture/execute/20051110-*.c.  */
	  if (gimple_code (use_stmt) == GIMPLE_PHI)
	    {
	      /* If we already visited this PHI ignore it for further
		 processing.  */
	      if (!bitmap_bit_p (visited,
				 SSA_NAME_VERSION (PHI_RESULT (use_stmt))))
		{
		  defs.safe_push (use_stmt);
		  if (!first_phi_def)
		    first_phi_def = use_stmt;
		  last_phi_def = use_stmt;
		}
	    }
	  /* If the statement is a use the store is not dead.  */
	  else if (ref_maybe_used_by_stmt_p (use_stmt, ref))
	    {
	      /* Handle common cases where we can easily build an ao_ref
		 structure for USE_STMT and in doing so we find that the
		 references hit non-live bytes and thus can be ignored.

		 TODO: We can also use modref summary to handle calls.  */
	      if (byte_tracking_enabled
		  && is_gimple_assign (use_stmt))
		{
		  ao_ref use_ref;
		  ao_ref_init (&use_ref, gimple_assign_rhs1 (use_stmt));
		  if (valid_ao_ref_for_dse (&use_ref)
		      && operand_equal_p (use_ref.base, ref->base,
					  OEP_ADDRESS_OF)
		      && !live_bytes_read (&use_ref, ref, live_bytes))
		    {
		      /* If this is a store, remember it as we possibly
			 need to walk the defs uses.  */
		      if (gimple_vdef (use_stmt))
			defs.safe_push (use_stmt);
		      continue;
		    }
		}

	      fail = true;
	      break;
	    }
	  /* We have visited ourselves already so ignore STMT for the
	     purpose of chaining.  */
	  else if (use_stmt == stmt)
	    ;
	  /* If this is a store, remember it as we possibly need to walk the
	     defs uses.  */
	  else if (gimple_vdef (use_stmt))
	    defs.safe_push (use_stmt);
	}

      if (fail)
	{
	  /* STMT might be partially dead and we may be able to reduce
	     how many memory locations it stores into.  */
	  if (byte_tracking_enabled && !gimple_clobber_p (stmt))
	    return DSE_STORE_MAYBE_PARTIAL_DEAD;
	  return DSE_STORE_LIVE;
	}

      /* If we didn't find any definition this means the store is dead
	 if it isn't a store to global reachable memory.  In this case
	 just pretend the stmt makes itself dead.  Otherwise fail.  */
      if (defs.is_empty ())
	{
	  if (ref_may_alias_global_p (ref))
	    return DSE_STORE_LIVE;

	  if (by_clobber_p)
	    *by_clobber_p = false;
	  return DSE_STORE_DEAD;
	}

      /* Process defs and remove those we need not process further.  */
      for (unsigned i = 0; i < defs.length ();)
	{
	  gimple *def = defs[i];
	  gimple *use_stmt;
	  use_operand_p use_p;
	  tree vdef = (gimple_code (def) == GIMPLE_PHI
		       ? gimple_phi_result (def) : gimple_vdef (def));
	  /* If the path to check starts with a kill we do not need to
	     process it further.
	     ???  With byte tracking we need only kill the bytes currently
	     live.  */
	  if (stmt_kills_ref_p (def, ref))
	    {
	      if (by_clobber_p && !gimple_clobber_p (def))
		*by_clobber_p = false;
	      defs.unordered_remove (i);
	    }
	  /* If the path ends here we do not need to process it further.
	     This for example happens with calls to noreturn functions.  */
	  else if (has_zero_uses (vdef))
	    {
	      /* But if the store is to global memory it is definitely
		 not dead.  */
	      if (ref_may_alias_global_p (ref))
		return DSE_STORE_LIVE;
	      defs.unordered_remove (i);
	    }
	  /* In addition to kills we can remove defs whose only use
	     is another def in defs.  That can only ever be PHIs of which
	     we track two for simplicity reasons, the first and last in
	     {first,last}_phi_def (we fail for multiple PHIs anyways).
	     We can also ignore defs that feed only into
	     already visited PHIs.  */
	  else if (single_imm_use (vdef, &use_p, &use_stmt)
		   && (use_stmt == first_phi_def
		       || use_stmt == last_phi_def
		       || (gimple_code (use_stmt) == GIMPLE_PHI
			   && bitmap_bit_p (visited,
					    SSA_NAME_VERSION
					      (PHI_RESULT (use_stmt))))))
	    defs.unordered_remove (i);
	  else
	    ++i;
	}

      /* If all defs kill the ref we are done.  */
      if (defs.is_empty ())
	return DSE_STORE_DEAD;
      /* If more than one def survives fail.  */
      if (defs.length () > 1)
	{
	  /* STMT might be partially dead and we may be able to reduce
	     how many memory locations it stores into.  */
	  if (byte_tracking_enabled && !gimple_clobber_p (stmt))
	    return DSE_STORE_MAYBE_PARTIAL_DEAD;
	  return DSE_STORE_LIVE;
	}
      temp = defs[0];

      /* Track partial kills.  */
      if (byte_tracking_enabled)
	{
	  clear_bytes_written_by (live_bytes, temp, ref);
	  if (bitmap_empty_p (live_bytes))
	    {
	      if (by_clobber_p && !gimple_clobber_p (temp))
		*by_clobber_p = false;
	      return DSE_STORE_DEAD;
	    }
	}
    }
  /* Continue walking until there are no more live bytes.  */
  while (1);
}
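
/* A simplified picture of the walk (illustrative):

     *p_1 = 1;    // candidate store
     if (cond)
       *q_2 = 2;  // non-aliased store on one path
     *p_1 = 3;    // kills the candidate on every path

   The candidate's virtual definition leads to the conditional store and
   to the virtual PHI at the join; the conditional store feeds only that
   PHI and is dropped from the worklist, and the PHI's single use is the
   final store, which kills the reference, so the candidate is
   classified DSE_STORE_DEAD.  */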

/* Delete a dead call at GSI, which is a mem* call of some kind.  */
static void
delete_dead_or_redundant_call (gimple_stmt_iterator *gsi, const char *type)
{
  gimple *stmt = gsi_stmt (*gsi);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "  Deleted %s call: ", type);
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
      fprintf (dump_file, "\n");
    }

  basic_block bb = gimple_bb (stmt);
  tree lhs = gimple_call_lhs (stmt);
  if (lhs)
    {
      tree ptr = gimple_call_arg (stmt, 0);
      gimple *new_stmt = gimple_build_assign (lhs, ptr);
      unlink_stmt_vdef (stmt);
      if (gsi_replace (gsi, new_stmt, true))
	bitmap_set_bit (need_eh_cleanup, bb->index);
    }
  else
    {
      /* Then we need to fix the operand of the consuming stmt.  */
      unlink_stmt_vdef (stmt);

      /* Remove the dead store.  */
      if (gsi_remove (gsi, true))
	bitmap_set_bit (need_eh_cleanup, bb->index);
      release_defs (stmt);
    }
}

/* Delete a dead store at GSI, which is a gimple assignment.  */

void
delete_dead_or_redundant_assignment (gimple_stmt_iterator *gsi,
				     const char *type,
				     bitmap need_eh_cleanup,
				     bitmap need_ab_cleanup)
{
  gimple *stmt = gsi_stmt (*gsi);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "  Deleted %s store: ", type);
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
      fprintf (dump_file, "\n");
    }

  /* Then we need to fix the operand of the consuming stmt.  */
  unlink_stmt_vdef (stmt);

  /* Remove the dead store.  */
  basic_block bb = gimple_bb (stmt);
  if (need_ab_cleanup && stmt_can_make_abnormal_goto (stmt))
    bitmap_set_bit (need_ab_cleanup, bb->index);
  if (gsi_remove (gsi, true) && need_eh_cleanup)
    bitmap_set_bit (need_eh_cleanup, bb->index);

  /* And release any SSA_NAMEs set in this statement back to the
     SSA_NAME manager.  */
  release_defs (stmt);
}

/* Try to prove, using modref summary, that all memory written to by a call is
   dead and remove it.  Assume that if return value is written to memory
   it is already proved to be dead.  */

static bool
dse_optimize_call (gimple_stmt_iterator *gsi, sbitmap live_bytes)
{
  gcall *stmt = dyn_cast <gcall *> (gsi_stmt (*gsi));

  if (!stmt)
    return false;

  tree callee = gimple_call_fndecl (stmt);

  if (!callee)
    return false;

  /* Pure/const functions are optimized by normal DCE
     or handled as store above.  */
  int flags = gimple_call_flags (stmt);
  if ((flags & (ECF_PURE|ECF_CONST|ECF_NOVOPS))
      && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
    return false;

  cgraph_node *node = cgraph_node::get (callee);
  if (!node)
    return false;

  if (stmt_could_throw_p (cfun, stmt)
      && !cfun->can_delete_dead_exceptions)
    return false;

  /* If return value is used the call is not dead.  */
  tree lhs = gimple_call_lhs (stmt);
  if (lhs && TREE_CODE (lhs) == SSA_NAME)
    {
      imm_use_iterator ui;
      gimple *use_stmt;
      FOR_EACH_IMM_USE_STMT (use_stmt, ui, lhs)
	if (!is_gimple_debug (use_stmt))
	  return false;
    }

  /* Verify that there are no side-effects except for return value
     and memory writes tracked by modref.  */
  modref_summary *summary = get_modref_function_summary (node);
  if (!summary || !summary->try_dse)
    return false;

  bool by_clobber_p = false;

  /* Walk all memory writes and verify that they are dead.  */
  for (auto base_node : summary->stores->bases)
    for (auto ref_node : base_node->refs)
      for (auto access_node : ref_node->accesses)
	{
	  tree arg = access_node.get_call_arg (stmt);

	  if (!arg || !POINTER_TYPE_P (TREE_TYPE (arg)))
	    return false;

	  if (integer_zerop (arg)
	      && !targetm.addr_space.zero_address_valid
		    (TYPE_ADDR_SPACE (TREE_TYPE (arg))))
	    continue;

	  ao_ref ref;

	  if (!access_node.get_ao_ref (stmt, &ref))
	    return false;
	  ref.ref_alias_set = ref_node->ref;
	  ref.base_alias_set = base_node->base;

	  bool byte_tracking_enabled
	    = setup_live_bytes_from_ref (&ref, live_bytes);
	  enum dse_store_status store_status;

	  store_status = dse_classify_store (&ref, stmt,
					     byte_tracking_enabled,
					     live_bytes, &by_clobber_p);
	  if (store_status != DSE_STORE_DEAD)
	    return false;
	}
  delete_dead_or_redundant_assignment (gsi, "dead", need_eh_cleanup,
				       need_ab_cleanup);
  return true;
}
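
/* An illustrative case (assuming modref analysis of the callee
   succeeded and recorded a summary with try_dse set):

     static void set (int *p) { *p = 42; }
     ...
     int x;
     set (&x);   // x is never read afterwards

   Every write in the summary maps to the dead location X, so the whole
   call is removed.  */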

/* Attempt to eliminate dead stores in the statement referenced by GSI.

   A dead store is a store into a memory location which will later be
   overwritten by another store without any intervening loads.  In this
   case the earlier store can be deleted.

   In our SSA + virtual operand world we use immediate uses of virtual
   operands to detect dead stores.  If a store's virtual definition
   is used precisely once by a later store to the same location which
   post dominates the first store, then the first store is dead.  */

static void
dse_optimize_stmt (function *fun, gimple_stmt_iterator *gsi, sbitmap live_bytes)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* Don't return early on *this_2(D) ={v} {CLOBBER}.  */
  if (gimple_has_volatile_ops (stmt)
      && (!gimple_clobber_p (stmt)
	  || TREE_CODE (gimple_assign_lhs (stmt)) != MEM_REF))
    return;

  ao_ref ref;
  /* If this is not a store we can still remove a dead call using the
     modref summary.  */
  if (!initialize_ao_ref_for_dse (stmt, &ref))
    {
      dse_optimize_call (gsi, live_bytes);
      return;
    }

  /* We know we have virtual definitions.  We can handle assignments and
     some builtin calls.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      tree fndecl = gimple_call_fndecl (stmt);
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_MEMCPY:
	case BUILT_IN_MEMMOVE:
	case BUILT_IN_STRNCPY:
	case BUILT_IN_MEMSET:
	case BUILT_IN_MEMCPY_CHK:
	case BUILT_IN_MEMMOVE_CHK:
	case BUILT_IN_STRNCPY_CHK:
	case BUILT_IN_MEMSET_CHK:
	  {
	    /* Occasionally calls with an explicit length of zero
	       show up in the IL.  It's pointless to do analysis
	       on them, they're trivially dead.  */
	    tree size = gimple_call_arg (stmt, 2);
	    if (integer_zerop (size))
	      {
		delete_dead_or_redundant_call (gsi, "dead");
		return;
	      }

	    /* If this is a memset call that initializes an object
	       to zero, it may be redundant with an earlier memset
	       or empty CONSTRUCTOR of a larger object.  */
	    if ((DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
		 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET_CHK)
		&& integer_zerop (gimple_call_arg (stmt, 1)))
	      dse_optimize_redundant_stores (stmt);

	    enum dse_store_status store_status;
	    bool byte_tracking_enabled
	      = setup_live_bytes_from_ref (&ref, live_bytes);
	    store_status = dse_classify_store (&ref, stmt,
					       byte_tracking_enabled,
					       live_bytes);
	    if (store_status == DSE_STORE_LIVE)
	      return;

	    if (store_status == DSE_STORE_MAYBE_PARTIAL_DEAD)
	      {
		maybe_trim_memstar_call (&ref, live_bytes, stmt);
		return;
	      }

	    if (store_status == DSE_STORE_DEAD)
	      delete_dead_or_redundant_call (gsi, "dead");
	    return;
	  }

	case BUILT_IN_CALLOC:
	  /* We already know the arguments are integer constants.  */
	  dse_optimize_redundant_stores (stmt);
	  return;

	default:
	  return;
	}
    }

  bool by_clobber_p = false;

  /* Check if this statement stores zero to a memory location,
     and if there is a subsequent store of zero to the same
     memory location.  If so, remove the subsequent store.  */
  if (gimple_assign_single_p (stmt)
      && initializer_zerop (gimple_assign_rhs1 (stmt)))
    dse_optimize_redundant_stores (stmt);

  /* Self-assignments are zombies.  */
  if (is_gimple_assign (stmt)
      && operand_equal_p (gimple_assign_rhs1 (stmt),
			  gimple_assign_lhs (stmt), 0))
    ;
  else
    {
      bool byte_tracking_enabled
	= setup_live_bytes_from_ref (&ref, live_bytes);
      enum dse_store_status store_status;
      store_status = dse_classify_store (&ref, stmt,
					 byte_tracking_enabled,
					 live_bytes, &by_clobber_p);
      if (store_status == DSE_STORE_LIVE)
	return;

      if (store_status == DSE_STORE_MAYBE_PARTIAL_DEAD)
	{
	  maybe_trim_partially_dead_store (&ref, live_bytes, stmt);
	  return;
	}
    }

  /* Now we know that use_stmt kills the LHS of stmt.  */

  /* But only remove *this_2(D) ={v} {CLOBBER} if killed by
     another clobber stmt.  */
  if (gimple_clobber_p (stmt)
      && !by_clobber_p)
    return;

  if (is_gimple_call (stmt)
      && (gimple_has_side_effects (stmt)
	  || (stmt_could_throw_p (fun, stmt)
	      && !fun->can_delete_dead_exceptions)))
    {
      /* See if we can remove the complete call.  */
      if (dse_optimize_call (gsi, live_bytes))
	return;
      /* Make sure we do not remove a return slot we cannot reconstruct
	 later.  */
      if (gimple_call_return_slot_opt_p (as_a <gcall *> (stmt))
	  && (TREE_ADDRESSABLE (TREE_TYPE (gimple_call_fntype (stmt)))
	      || !poly_int_tree_p
		    (TYPE_SIZE (TREE_TYPE (gimple_call_fntype (stmt))))))
	return;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "  Deleted dead store in call LHS: ");
	  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	  fprintf (dump_file, "\n");
	}
      gimple_call_set_lhs (stmt, NULL_TREE);
      update_stmt (stmt);
    }
  else
    delete_dead_or_redundant_assignment (gsi, "dead", need_eh_cleanup,
					 need_ab_cleanup);
}

namespace {

const pass_data pass_data_dse =
{
  GIMPLE_PASS, /* type */
  "dse", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_DSE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_dse : public gimple_opt_pass
{
public:
  pass_dse (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_dse, ctxt)
  {}

  /* opt_pass methods: */
  opt_pass * clone () { return new pass_dse (m_ctxt); }
  virtual bool gate (function *) { return flag_tree_dse != 0; }
  virtual unsigned int execute (function *);

}; // class pass_dse

unsigned int
pass_dse::execute (function *fun)
{
  unsigned todo = 0;
  bool released_def = false;

  need_eh_cleanup = BITMAP_ALLOC (NULL);
  need_ab_cleanup = BITMAP_ALLOC (NULL);
  auto_sbitmap live_bytes (param_dse_max_object_size);

  renumber_gimple_stmt_uids (fun);

  calculate_dominance_info (CDI_DOMINATORS);

  /* Dead store elimination is fundamentally a reverse program order walk.  */
  int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fun) - NUM_FIXED_BLOCKS);
  int n = pre_and_rev_post_order_compute_fn (fun, NULL, rpo, false);
  for (int i = n; i != 0; --i)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fun, rpo[i-1]);
      gimple_stmt_iterator gsi;

      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
	{
	  gimple *stmt = gsi_stmt (gsi);

	  if (gimple_vdef (stmt))
	    dse_optimize_stmt (fun, &gsi, live_bytes);
	  else if (def_operand_p
		     def_p = single_ssa_def_operand (stmt, SSA_OP_DEF))
	    {
	      /* When we remove dead stores make sure to also delete trivially
		 dead SSA defs.  */
	      if (has_zero_uses (DEF_FROM_PTR (def_p))
		  && !gimple_has_side_effects (stmt)
		  && !is_ctrl_altering_stmt (stmt)
		  && (!stmt_could_throw_p (fun, stmt)
		      || fun->can_delete_dead_exceptions))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "  Deleted trivially dead stmt: ");
		      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		      fprintf (dump_file, "\n");
		    }
		  if (gsi_remove (&gsi, true) && need_eh_cleanup)
		    bitmap_set_bit (need_eh_cleanup, bb->index);
		  release_defs (stmt);
		  released_def = true;
		}
	    }
	  if (gsi_end_p (gsi))
	    gsi = gsi_last_bb (bb);
	  else
	    gsi_prev (&gsi);
	}
      bool removed_phi = false;
      for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);)
	{
	  gphi *phi = si.phi ();
	  if (has_zero_uses (gimple_phi_result (phi)))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "  Deleted trivially dead PHI: ");
		  print_gimple_stmt (dump_file, phi, 0, dump_flags);
		  fprintf (dump_file, "\n");
		}
	      remove_phi_node (&si, true);
	      removed_phi = true;
	      released_def = true;
	    }
	  else
	    gsi_next (&si);
	}
      if (removed_phi && gimple_seq_empty_p (phi_nodes (bb)))
	todo |= TODO_cleanup_cfg;
    }
  free (rpo);

  /* Removal of stores may make some EH edges dead.  Purge such edges from
     the CFG as needed.  */
  if (!bitmap_empty_p (need_eh_cleanup))
    {
      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
      todo |= TODO_cleanup_cfg;
    }
  if (!bitmap_empty_p (need_ab_cleanup))
    {
      gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
      todo |= TODO_cleanup_cfg;
    }

  BITMAP_FREE (need_eh_cleanup);
  BITMAP_FREE (need_ab_cleanup);

  if (released_def)
    free_numbers_of_iterations_estimates (fun);

  return todo;
}

} // anon namespace

gimple_opt_pass *
make_pass_dse (gcc::context *ctxt)
{
  return new pass_dse (ctxt);
}