gcc/tree-ssa-dse.cc
1 /* Dead and redundant store elimination
2 Copyright (C) 2004-2024 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #define INCLUDE_MEMORY
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "gimple.h"
28 #include "tree-pass.h"
29 #include "ssa.h"
30 #include "gimple-pretty-print.h"
31 #include "fold-const.h"
32 #include "gimple-iterator.h"
33 #include "tree-cfg.h"
34 #include "tree-dfa.h"
35 #include "tree-cfgcleanup.h"
36 #include "alias.h"
37 #include "tree-ssa-loop.h"
38 #include "tree-ssa-dse.h"
39 #include "builtins.h"
40 #include "gimple-fold.h"
41 #include "gimplify.h"
42 #include "tree-eh.h"
43 #include "cfganal.h"
44 #include "cgraph.h"
45 #include "ipa-modref-tree.h"
46 #include "ipa-modref.h"
47 #include "target.h"
48 #include "tree-ssa-loop-niter.h"
49 #include "cfgloop.h"
50 #include "tree-data-ref.h"
51 #include "internal-fn.h"
52 #include "tree-ssa.h"
54 /* This file implements dead store elimination.
56 A dead store is a store into a memory location which will later be
57 overwritten by another store without any intervening loads. In this
58 case the earlier store can be deleted or trimmed if the store
59 was partially dead.
61 A redundant store is a store into a memory location which stores
62 the exact same value as a prior store to the same memory location.
63 While this can often be handled by dead store elimination, removing
64 the redundant store is often better than removing or trimming the
65 dead store.
67 In our SSA + virtual operand world we use immediate uses of virtual
68 operands to detect these cases. If a store's virtual definition
69 is used precisely once by a later store to the same location which
70 post dominates the first store, then the first store is dead. If
71 the data stored is the same, then the second store is redundant.
73 The single use of the store's virtual definition ensures that
74 there are no intervening aliased loads and the requirement that
75 the second store post dominate the first ensures that if the earlier
76 store executes, then the later stores will execute before the function
77 exits.
79 It may help to think of this as first moving the earlier store to
80 the point immediately before the later store. Again, the single
81 use of the virtual definition and the post-dominance relationship
82 ensure that such movement would be safe. Clearly if there are
83 back to back stores, then the second makes the first dead. If
84 the second store stores the same value, then the second store is
85 redundant.
87 Reviewing section 10.7.2 in Morgan's "Building an Optimizing Compiler"
88 may also help in understanding this code since it discusses the
89 relationship between dead store and redundant load elimination. In
90 fact, they are the same transformation applied to different views of
91 the CFG. */
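/* As an illustrative sketch (a hypothetical fragment):

     a.x = 1;     // dead: overwritten below with no intervening load
     a.x = 2;

     b.y = 3;
     b.y = 3;     // redundant: stores the same value again

   DSE removes the first store of the first pair and the second
   store of the second pair.  */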
93 static void delete_dead_or_redundant_call (gimple_stmt_iterator *, const char *);
95 /* Bitmap of blocks that have had EH statements cleaned. We should
96 remove their dead edges eventually. */
97 static bitmap need_eh_cleanup;
98 static bitmap need_ab_cleanup;
100 /* STMT is a statement that may write into memory. Analyze it and
101 initialize WRITE to describe how STMT affects memory. When
102 MAY_DEF_OK is true then the function initializes WRITE to what
103 the stmt may define.
105 Return TRUE if the statement was analyzed, FALSE otherwise.
107 It is always safe to return FALSE. But typically better optimization
108 can be achieved by analyzing more statements. */
110 static bool
111 initialize_ao_ref_for_dse (gimple *stmt, ao_ref *write, bool may_def_ok = false)
113 /* It's advantageous to handle certain mem* functions. */
114 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
116 switch (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)))
118 case BUILT_IN_MEMCPY:
119 case BUILT_IN_MEMMOVE:
120 case BUILT_IN_MEMSET:
121 case BUILT_IN_MEMCPY_CHK:
122 case BUILT_IN_MEMMOVE_CHK:
123 case BUILT_IN_MEMSET_CHK:
124 case BUILT_IN_STRNCPY:
125 case BUILT_IN_STRNCPY_CHK:
127 tree size = gimple_call_arg (stmt, 2);
128 tree ptr = gimple_call_arg (stmt, 0);
129 ao_ref_init_from_ptr_and_size (write, ptr, size);
130 return true;
133 /* A calloc call can never be dead, but it can make
134 subsequent stores redundant if they store 0 into
135 the same memory locations. */
136 case BUILT_IN_CALLOC:
138 tree nelem = gimple_call_arg (stmt, 0);
139 tree selem = gimple_call_arg (stmt, 1);
140 tree lhs;
141 if (TREE_CODE (nelem) == INTEGER_CST
142 && TREE_CODE (selem) == INTEGER_CST
143 && (lhs = gimple_call_lhs (stmt)) != NULL_TREE)
145 tree size = fold_build2 (MULT_EXPR, TREE_TYPE (nelem),
146 nelem, selem);
147 ao_ref_init_from_ptr_and_size (write, lhs, size);
148 return true;
152 default:
153 break;
156 else if (is_gimple_call (stmt)
157 && gimple_call_internal_p (stmt))
159 switch (gimple_call_internal_fn (stmt))
161 case IFN_LEN_STORE:
162 case IFN_MASK_STORE:
163 case IFN_MASK_LEN_STORE:
165 internal_fn ifn = gimple_call_internal_fn (stmt);
166 int stored_value_index = internal_fn_stored_value_index (ifn);
167 int len_index = internal_fn_len_index (ifn);
168 if (ifn == IFN_LEN_STORE)
170 tree len = gimple_call_arg (stmt, len_index);
171 tree bias = gimple_call_arg (stmt, len_index + 1);
172 if (tree_fits_uhwi_p (len))
174 ao_ref_init_from_ptr_and_size (write,
175 gimple_call_arg (stmt, 0),
176 int_const_binop (MINUS_EXPR,
177 len, bias));
178 return true;
181 /* We cannot initialize a must-def ao_ref (in all cases) but we
182 can provide a may-def variant. */
183 if (may_def_ok)
185 ao_ref_init_from_ptr_and_size (
186 write, gimple_call_arg (stmt, 0),
187 TYPE_SIZE_UNIT (
188 TREE_TYPE (gimple_call_arg (stmt, stored_value_index))));
189 return true;
191 break;
193 default:;
196 if (tree lhs = gimple_get_lhs (stmt))
198 if (TREE_CODE (lhs) != SSA_NAME
199 && (may_def_ok || !stmt_could_throw_p (cfun, stmt)))
201 ao_ref_init (write, lhs);
202 return true;
205 return false;
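/* As an illustrative sketch of the above (hypothetical calls): for

     memcpy (dst, src, 32);

   WRITE is initialized to describe the 32 bytes starting at *dst,
   and for

     p = calloc (n, m);    // with n and m INTEGER_CSTs

   WRITE describes the n * m bytes starting at *p.  */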
208 /* Given REF from the alias oracle, return TRUE if it is a valid
209 kill memory reference for dead store elimination, false otherwise.
211 In particular, the reference must have a known base, known maximum
212 size, start at a byte offset and have a size that is one or more
213 bytes. */
215 static bool
216 valid_ao_ref_kill_for_dse (ao_ref *ref)
218 return (ao_ref_base (ref)
219 && known_size_p (ref->max_size)
220 && maybe_ne (ref->size, 0)
221 && known_eq (ref->max_size, ref->size)
222 && known_ge (ref->offset, 0));
225 /* Given REF from the alias oracle, return TRUE if it is a valid
226 load or store memory reference for dead store elimination, false otherwise.
228 Unlike for valid_ao_ref_kill_for_dse we can accept writes where max_size
229 is not the same as size, since we can conservatively handle the larger range. */
231 static bool
232 valid_ao_ref_for_dse (ao_ref *ref)
234 return (ao_ref_base (ref)
235 && known_size_p (ref->max_size)
236 && known_ge (ref->offset, 0));
239 /* Initialize OFFSET and SIZE to a range known to contain REF
240 where the boundaries are divisible by BITS_PER_UNIT (but still in bits).
241 Return false if this is impossible. */
243 static bool
244 get_byte_aligned_range_containing_ref (ao_ref *ref, poly_int64 *offset,
245 HOST_WIDE_INT *size)
247 if (!known_size_p (ref->max_size))
248 return false;
249 *offset = aligned_lower_bound (ref->offset, BITS_PER_UNIT);
250 poly_int64 end = aligned_upper_bound (ref->offset + ref->max_size,
251 BITS_PER_UNIT);
252 return (end - *offset).is_constant (size);
255 /* Initialize OFFSET and SIZE to a range known to be contained in REF
256 where the boundaries are divisible by BITS_PER_UNIT (but still in bits).
257 Return false if this is impossible. */
259 static bool
260 get_byte_aligned_range_contained_in_ref (ao_ref *ref, poly_int64 *offset,
261 HOST_WIDE_INT *size)
263 if (!known_size_p (ref->size)
264 || !known_eq (ref->size, ref->max_size))
265 return false;
266 *offset = aligned_upper_bound (ref->offset, BITS_PER_UNIT);
267 poly_int64 end = aligned_lower_bound (ref->offset + ref->max_size,
268 BITS_PER_UNIT);
269 /* For bit accesses we can get -1 here, but a zero-sized kill is also
270 not useful. */
271 if (!known_gt (end, *offset))
272 return false;
273 return (end - *offset).is_constant (size);
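/* A worked example for the two helpers above: take a hypothetical
   access with bit offset 3 and size 10, i.e. bits 3..12. The
   byte-aligned range CONTAINING it is bits 0..15 (offset 0, size 16),
   rounding outwards. The byte-aligned range CONTAINED IN it would
   start and end at bit 8, i.e. it is empty, so the function above
   returns false.  */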
276 /* Compute the byte range (returned in RET_OFFSET and RET_SIZE) for access COPY
277 inside REF. If KILL is true, then COPY represents a kill and the byte range
278 needs to be fully contained in the bit range given by COPY. If KILL is false
279 then the byte range returned must contain the range of COPY. */
281 static bool
282 get_byte_range (ao_ref *copy, ao_ref *ref, bool kill,
283 HOST_WIDE_INT *ret_offset, HOST_WIDE_INT *ret_size)
285 HOST_WIDE_INT copy_size, ref_size;
286 poly_int64 copy_offset, ref_offset;
287 HOST_WIDE_INT diff;
289 /* First translate from bits to bytes, rounding to bigger or smaller ranges
290 as needed. Kills always need to be rounded to smaller ranges, while
291 uses and stores round to larger ranges. */
292 if (kill)
294 if (!get_byte_aligned_range_contained_in_ref (copy, &copy_offset,
295 &copy_size))
296 return false;
298 else
300 if (!get_byte_aligned_range_containing_ref (copy, &copy_offset,
301 &copy_size))
302 return false;
305 if (!get_byte_aligned_range_containing_ref (ref, &ref_offset, &ref_size)
306 || !ordered_p (copy_offset, ref_offset))
307 return false;
309 /* Switch sizes from bits to bytes so we do not need to care about
310 overflows. Offset calculation needs to stay in bits until we compute
311 the difference and can switch to HOST_WIDE_INT. */
312 copy_size /= BITS_PER_UNIT;
313 ref_size /= BITS_PER_UNIT;
315 /* If COPY starts before REF, then reset the beginning of
316 COPY to match REF and decrease the size of COPY by the
317 number of bytes removed from COPY. */
318 if (maybe_lt (copy_offset, ref_offset))
320 if (!(ref_offset - copy_offset).is_constant (&diff)
321 || copy_size < diff / BITS_PER_UNIT)
322 return false;
323 copy_size -= diff / BITS_PER_UNIT;
324 copy_offset = ref_offset;
327 if (!(copy_offset - ref_offset).is_constant (&diff)
328 || ref_size <= diff / BITS_PER_UNIT)
329 return false;
331 /* If COPY extends beyond REF, chop off its size appropriately. */
332 HOST_WIDE_INT limit = ref_size - diff / BITS_PER_UNIT;
334 if (copy_size > limit)
335 copy_size = limit;
336 *ret_size = copy_size;
337 if (!(copy_offset - ref_offset).is_constant (ret_offset))
338 return false;
339 *ret_offset /= BITS_PER_UNIT;
340 return true;
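/* For example (hypothetical ranges): if REF covers bytes 4..11 of
   the base and COPY covers bytes 0..7, the head of COPY is clipped
   and the function returns offset 0 and size 4, i.e. the first four
   bytes of REF.  */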
343 /* Update LIVE_BYTES, which tracks REF, for the write WRITE:
344 verify we have the same base memory address and that the write
345 has a known size and overlaps with REF. */
346 static void
347 clear_live_bytes_for_ref (sbitmap live_bytes, ao_ref *ref, ao_ref *write)
349 HOST_WIDE_INT start, size;
351 if (valid_ao_ref_kill_for_dse (write)
352 && operand_equal_p (write->base, ref->base, OEP_ADDRESS_OF)
353 && get_byte_range (write, ref, true, &start, &size))
354 bitmap_clear_range (live_bytes, start, size);
357 /* Clear any bytes written by STMT from the bitmap LIVE_BYTES. The base
358 address written by STMT must match the one found in REF, which must
359 have its base address previously initialized.
361 This routine must be conservative. If we don't know the offset or
362 actual size written, assume nothing was written. */
364 static void
365 clear_bytes_written_by (sbitmap live_bytes, gimple *stmt, ao_ref *ref)
367 ao_ref write;
369 if (gcall *call = dyn_cast <gcall *> (stmt))
371 bool interposed;
372 modref_summary *summary = get_modref_function_summary (call, &interposed);
374 if (summary && !interposed)
375 for (auto kill : summary->kills)
376 if (kill.get_ao_ref (as_a <gcall *> (stmt), &write))
377 clear_live_bytes_for_ref (live_bytes, ref, &write);
379 if (!initialize_ao_ref_for_dse (stmt, &write))
380 return;
382 clear_live_bytes_for_ref (live_bytes, ref, &write);
385 /* REF is a memory write. Extract relevant information from it and
386 initialize the LIVE_BYTES bitmap. If successful, return TRUE.
387 Otherwise return FALSE. */
389 static bool
390 setup_live_bytes_from_ref (ao_ref *ref, sbitmap live_bytes)
392 HOST_WIDE_INT const_size;
393 if (valid_ao_ref_for_dse (ref)
394 && ((aligned_upper_bound (ref->offset + ref->max_size, BITS_PER_UNIT)
395 - aligned_lower_bound (ref->offset,
396 BITS_PER_UNIT)).is_constant (&const_size))
397 && (const_size / BITS_PER_UNIT <= param_dse_max_object_size)
398 && const_size > 1)
400 bitmap_clear (live_bytes);
401 bitmap_set_range (live_bytes, 0, const_size / BITS_PER_UNIT);
402 return true;
404 return false;
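/* For example, for a hypothetical 8-byte store this sets bits 0..7
   of LIVE_BYTES, one bit per stored byte; subsequent kills clear
   them via clear_bytes_written_by.  */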
407 /* Compute the number of stored bytes that we can trim from the head and
408 tail of REF. LIVE is the bitmap of stores to REF that are still live.
410 Store the number of bytes trimmed from the head and tail in TRIM_HEAD
411 and TRIM_TAIL respectively.
413 STMT is the statement being trimmed and is used for debugging dump
414 output only. */
416 static void
417 compute_trims (ao_ref *ref, sbitmap live, int *trim_head, int *trim_tail,
418 gimple *stmt)
420 *trim_head = 0;
421 *trim_tail = 0;
423 /* We use bitmaps biased such that ref->offset is contained in bit zero and
424 the bitmap extends through ref->max_size, so we know that in the original
425 bitmap bits 0 .. ref->max_size were true. But we need to check that this
426 covers the bytes of REF exactly. */
427 const unsigned int align = known_alignment (ref->offset);
428 if ((align > 0 && align < BITS_PER_UNIT)
429 || !known_eq (ref->size, ref->max_size))
430 return;
432 /* Now identify how much, if any, of the tail we can chop off. */
433 HOST_WIDE_INT const_size;
434 int last_live = bitmap_last_set_bit (live);
435 if (ref->size.is_constant (&const_size))
437 int last_orig = (const_size / BITS_PER_UNIT) - 1;
438 /* We can leave inconvenient amounts on the tail as
439 residual handling in mem* and str* functions is usually
440 reasonably efficient. */
441 *trim_tail = last_orig - last_live;
443 /* But don't trim away out of bounds accesses, as this defeats
444 proper warnings.
446 We could have a type with no TYPE_SIZE_UNIT or we could have a VLA
447 where TYPE_SIZE_UNIT is not a constant. */
448 if (*trim_tail
449 && TYPE_SIZE_UNIT (TREE_TYPE (ref->base))
450 && TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (ref->base))) == INTEGER_CST
451 && compare_tree_int (TYPE_SIZE_UNIT (TREE_TYPE (ref->base)),
452 last_orig) <= 0)
453 *trim_tail = 0;
456 /* Identify how much, if any, of the head we can chop off. */
457 int first_orig = 0;
458 int first_live = bitmap_first_set_bit (live);
459 *trim_head = first_live - first_orig;
461 /* If REF is aligned, try to maintain this alignment if it reduces
462 the number of (power-of-two sized aligned) writes to memory. */
463 unsigned int align_bits;
464 unsigned HOST_WIDE_INT bitpos;
465 if ((*trim_head || *trim_tail)
466 && last_live - first_live >= 2
467 && ao_ref_alignment (ref, &align_bits, &bitpos)
468 && align_bits >= 32
469 && bitpos == 0
470 && align_bits % BITS_PER_UNIT == 0)
472 unsigned int align_units = align_bits / BITS_PER_UNIT;
473 if (align_units > 16)
474 align_units = 16;
475 while ((first_live | (align_units - 1)) > (unsigned int)last_live)
476 align_units >>= 1;
478 if (*trim_head)
480 unsigned int pos = first_live & (align_units - 1);
481 for (unsigned int i = 1; i <= align_units; i <<= 1)
483 unsigned int mask = ~(i - 1);
484 unsigned int bytes = align_units - (pos & mask);
485 if (wi::popcount (bytes) <= 1)
487 *trim_head &= mask;
488 break;
493 if (*trim_tail)
495 unsigned int pos = last_live & (align_units - 1);
496 for (unsigned int i = 1; i <= align_units; i <<= 1)
498 int mask = i - 1;
499 unsigned int bytes = (pos | mask) + 1;
500 if ((last_live | mask) > (last_live + *trim_tail))
501 break;
502 if (wi::popcount (bytes) <= 1)
504 unsigned int extra = (last_live | mask) - last_live;
505 *trim_tail -= extra;
506 break;
512 if ((*trim_head || *trim_tail) && dump_file && (dump_flags & TDF_DETAILS))
514 fprintf (dump_file, " Trimming statement (head = %d, tail = %d): ",
515 *trim_head, *trim_tail);
516 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
517 fprintf (dump_file, "\n");
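/* As an illustrative sketch: for a hypothetical 16-byte store where
   only bytes 4..11 remain live, this computes *TRIM_HEAD == 4 and
   *TRIM_TAIL == 4, modulo the alignment adjustments above.  */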
521 /* STMT initializes an object from COMPLEX_CST where one or more of the bytes
522 written may be dead stores. REF is a representation of the memory written.
523 LIVE is the bitmap of stores to REF that are still live.
525 Attempt to rewrite STMT so that only the real or the imaginary part of the
526 object is actually stored. */
528 static void
529 maybe_trim_complex_store (ao_ref *ref, sbitmap live, gimple *stmt)
531 int trim_head, trim_tail;
532 compute_trims (ref, live, &trim_head, &trim_tail, stmt);
534 /* The amount of data trimmed from the head or tail must be at
535 least half the size of the object to ensure we're trimming
536 the entire real or imaginary half. By writing things this
537 way we avoid more O(n) bitmap operations. */
538 if (known_ge (trim_tail * 2 * BITS_PER_UNIT, ref->size))
540 /* TREE_REALPART is live */
541 tree x = TREE_REALPART (gimple_assign_rhs1 (stmt));
542 tree y = gimple_assign_lhs (stmt);
543 y = build1 (REALPART_EXPR, TREE_TYPE (x), y);
544 gimple_assign_set_lhs (stmt, y);
545 gimple_assign_set_rhs1 (stmt, x);
547 else if (known_ge (trim_head * 2 * BITS_PER_UNIT, ref->size))
549 /* TREE_IMAGPART is live */
550 tree x = TREE_IMAGPART (gimple_assign_rhs1 (stmt));
551 tree y = gimple_assign_lhs (stmt);
552 y = build1 (IMAGPART_EXPR, TREE_TYPE (x), y);
553 gimple_assign_set_lhs (stmt, y);
554 gimple_assign_set_rhs1 (stmt, x);
557 /* Other cases indicate parts of both the real and imag subobjects
558 are live. We do not try to optimize those cases. */
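/* As an illustrative sketch (hypothetical): given

     c = 1.0 + 2.0i;     // COMPLEX_CST store
     __imag c = 3.0;     // kills the imaginary half

   the first store is rewritten to write only the real part:

     REALPART_EXPR <c> = 1.0;  */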
561 /* STMT initializes an object using a CONSTRUCTOR where one or more of the
562 bytes written are dead stores. REF is a representation of the memory
563 written. LIVE is the bitmap of stores to REF that are still live.
565 Attempt to rewrite STMT so that it writes fewer memory locations.
567 The most common case for getting here is a CONSTRUCTOR with no elements
568 being used to zero initialize an object. We do not try to handle other
569 cases as those would force us to fully cover the object with the
570 CONSTRUCTOR node except for the components that are dead. */
572 static void
573 maybe_trim_constructor_store (ao_ref *ref, sbitmap live, gimple *stmt)
575 tree ctor = gimple_assign_rhs1 (stmt);
577 /* This is the only case we currently handle. It actually seems to
578 catch most cases of actual interest. */
579 gcc_assert (CONSTRUCTOR_NELTS (ctor) == 0);
581 int head_trim = 0;
582 int tail_trim = 0;
583 compute_trims (ref, live, &head_trim, &tail_trim, stmt);
585 /* Now we want to replace the constructor initializer
586 with memset (object + head_trim, 0, size - head_trim - tail_trim). */
587 if (head_trim || tail_trim)
589 /* We want &lhs for the MEM_REF expression. */
590 tree lhs_addr = build_fold_addr_expr (gimple_assign_lhs (stmt));
592 if (! is_gimple_min_invariant (lhs_addr))
593 return;
595 /* The number of bytes for the new constructor. */
596 poly_int64 ref_bytes = exact_div (ref->size, BITS_PER_UNIT);
597 poly_int64 count = ref_bytes - head_trim - tail_trim;
599 /* And the new type for the CONSTRUCTOR. Essentially it's just
600 a char array large enough to cover the non-trimmed parts of
601 the original CONSTRUCTOR. Note we want explicit bounds here
602 so that we know how many bytes to clear when expanding the
603 CONSTRUCTOR. */
604 tree type = build_array_type_nelts (char_type_node, count);
606 /* Build a suitable alias type rather than using alias set zero
607 to avoid pessimizing. */
608 tree alias_type = reference_alias_ptr_type (gimple_assign_lhs (stmt));
610 /* Build a MEM_REF representing the whole accessed area, starting
611 at the first byte not trimmed. */
612 tree exp = fold_build2 (MEM_REF, type, lhs_addr,
613 build_int_cst (alias_type, head_trim));
615 /* Now update STMT with a new RHS and LHS. */
616 gimple_assign_set_lhs (stmt, exp);
617 gimple_assign_set_rhs1 (stmt, build_constructor (type, NULL));
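/* As an illustrative sketch (hypothetical): given

     s = {};      // zero-initializes a 16-byte struct
     s.a = 1;     // later kills bytes 0..7

   the empty-CONSTRUCTOR store is narrowed to roughly

     MEM <char[8]> [(struct S *)&s + 8B] = {};  */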
621 /* STMT is a memcpy, memmove or memset. Decrement the number of bytes
622 copied/set by DECREMENT. */
623 static void
624 decrement_count (gimple *stmt, int decrement)
626 tree *countp = gimple_call_arg_ptr (stmt, 2);
627 gcc_assert (TREE_CODE (*countp) == INTEGER_CST);
628 *countp = wide_int_to_tree (TREE_TYPE (*countp), (TREE_INT_CST_LOW (*countp)
629 - decrement));
632 static void
633 increment_start_addr (gimple *stmt, tree *where, int increment)
635 if (tree lhs = gimple_call_lhs (stmt))
636 if (where == gimple_call_arg_ptr (stmt, 0))
638 gassign *newop = gimple_build_assign (lhs, unshare_expr (*where));
639 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
640 gsi_insert_after (&gsi, newop, GSI_SAME_STMT);
641 gimple_call_set_lhs (stmt, NULL_TREE);
642 update_stmt (stmt);
645 if (TREE_CODE (*where) == SSA_NAME)
647 tree tem = make_ssa_name (TREE_TYPE (*where));
648 gassign *newop
649 = gimple_build_assign (tem, POINTER_PLUS_EXPR, *where,
650 build_int_cst (sizetype, increment));
651 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
652 gsi_insert_before (&gsi, newop, GSI_SAME_STMT);
653 *where = tem;
654 update_stmt (stmt);
655 return;
658 *where = build_fold_addr_expr (fold_build2 (MEM_REF, char_type_node,
659 *where,
660 build_int_cst (ptr_type_node,
661 increment)));
662 STRIP_USELESS_TYPE_CONVERSION (*where);
665 /* STMT is a builtin call that writes bytes in bitmap ORIG; some bytes are dead
666 (ORIG & ~NEW) and need not be stored. Try to rewrite STMT to reduce
667 the amount of data it actually writes.
669 Right now we only support trimming from the head or the tail of the
670 memory region. In theory we could split the mem* call, but it's
671 likely of marginal value. */
673 static void
674 maybe_trim_memstar_call (ao_ref *ref, sbitmap live, gimple *stmt)
676 int head_trim, tail_trim;
677 switch (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)))
679 case BUILT_IN_STRNCPY:
680 case BUILT_IN_STRNCPY_CHK:
681 compute_trims (ref, live, &head_trim, &tail_trim, stmt);
682 if (head_trim)
684 /* Head trimming of strncpy is only possible if we can
685 prove all bytes we would trim are non-zero (or we could
686 turn the strncpy into memset if there must be zero
687 among the head trimmed bytes). If we don't know anything
688 about those bytes, the presence or absence of '\0' bytes
689 in there will affect whether it acts for the non-trimmed
690 bytes as memset or memcpy/strncpy. */
691 c_strlen_data lendata = { };
692 int orig_head_trim = head_trim;
693 tree srcstr = gimple_call_arg (stmt, 1);
694 if (!get_range_strlen (srcstr, &lendata, /*eltsize=*/1)
695 || !tree_fits_uhwi_p (lendata.minlen))
696 head_trim = 0;
697 else if (tree_to_uhwi (lendata.minlen) < (unsigned) head_trim)
699 head_trim = tree_to_uhwi (lendata.minlen);
700 if ((orig_head_trim & (UNITS_PER_WORD - 1)) == 0)
701 head_trim &= ~(UNITS_PER_WORD - 1);
703 if (orig_head_trim != head_trim
704 && dump_file
705 && (dump_flags & TDF_DETAILS))
706 fprintf (dump_file,
707 " Adjusting strncpy trimming to (head = %d,"
708 " tail = %d)\n", head_trim, tail_trim);
710 goto do_memcpy;
712 case BUILT_IN_MEMCPY:
713 case BUILT_IN_MEMMOVE:
714 case BUILT_IN_MEMCPY_CHK:
715 case BUILT_IN_MEMMOVE_CHK:
716 compute_trims (ref, live, &head_trim, &tail_trim, stmt);
718 do_memcpy:
719 /* Tail trimming is easy, we can just reduce the count. */
720 if (tail_trim)
721 decrement_count (stmt, tail_trim);
723 /* Head trimming requires adjusting all the arguments. */
724 if (head_trim)
726 /* For __*_chk need to adjust also the last argument. */
727 if (gimple_call_num_args (stmt) == 4)
729 tree size = gimple_call_arg (stmt, 3);
730 if (!tree_fits_uhwi_p (size))
731 break;
732 if (!integer_all_onesp (size))
734 unsigned HOST_WIDE_INT sz = tree_to_uhwi (size);
735 if (sz < (unsigned) head_trim)
736 break;
737 tree arg = wide_int_to_tree (TREE_TYPE (size),
738 sz - head_trim);
739 gimple_call_set_arg (stmt, 3, arg);
742 tree *dst = gimple_call_arg_ptr (stmt, 0);
743 increment_start_addr (stmt, dst, head_trim);
744 tree *src = gimple_call_arg_ptr (stmt, 1);
745 increment_start_addr (stmt, src, head_trim);
746 decrement_count (stmt, head_trim);
748 break;
750 case BUILT_IN_MEMSET:
751 case BUILT_IN_MEMSET_CHK:
752 compute_trims (ref, live, &head_trim, &tail_trim, stmt);
754 /* Tail trimming is easy, we can just reduce the count. */
755 if (tail_trim)
756 decrement_count (stmt, tail_trim);
758 /* Head trimming requires adjusting all the arguments. */
759 if (head_trim)
761 /* For __*_chk need to adjust also the last argument. */
762 if (gimple_call_num_args (stmt) == 4)
764 tree size = gimple_call_arg (stmt, 3);
765 if (!tree_fits_uhwi_p (size))
766 break;
767 if (!integer_all_onesp (size))
769 unsigned HOST_WIDE_INT sz = tree_to_uhwi (size);
770 if (sz < (unsigned) head_trim)
771 break;
772 tree arg = wide_int_to_tree (TREE_TYPE (size),
773 sz - head_trim);
774 gimple_call_set_arg (stmt, 3, arg);
777 tree *dst = gimple_call_arg_ptr (stmt, 0);
778 increment_start_addr (stmt, dst, head_trim);
779 decrement_count (stmt, head_trim);
781 break;
783 default:
784 break;
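/* As an illustrative sketch (hypothetical): if later stores kill
   bytes 0..7 written by

     memset (p, 0, 32);

   the call is rewritten to

     memset (p + 8, 0, 24);  */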
788 /* STMT is a memory write where one or more bytes written are dead stores.
789 REF is a representation of the memory written. LIVE is the bitmap of
790 stores to REF that are still live.
792 Attempt to rewrite STMT so that it writes fewer memory locations. Right
793 now we only support trimming at the start or end of the memory region.
794 It's not clear how much there is to be gained by trimming from the middle
795 of the region. */
797 static void
798 maybe_trim_partially_dead_store (ao_ref *ref, sbitmap live, gimple *stmt)
800 if (is_gimple_assign (stmt)
801 && TREE_CODE (gimple_assign_lhs (stmt)) != TARGET_MEM_REF)
803 switch (gimple_assign_rhs_code (stmt))
805 case CONSTRUCTOR:
806 maybe_trim_constructor_store (ref, live, stmt);
807 break;
808 case COMPLEX_CST:
809 maybe_trim_complex_store (ref, live, stmt);
810 break;
811 default:
812 break;
817 /* Return TRUE if USE_REF reads bytes from LIVE, where LIVE is
818 derived from REF, a write reference.
820 While this routine may modify USE_REF, it's passed by value, not
821 location. So callers do not see those modifications. */
823 static bool
824 live_bytes_read (ao_ref *use_ref, ao_ref *ref, sbitmap live)
826 /* We have already verified that USE_REF and REF hit the same object.
827 Now verify that there's actually an overlap between USE_REF and REF. */
828 HOST_WIDE_INT start, size;
829 if (get_byte_range (use_ref, ref, false, &start, &size))
831 /* If USE_REF covers all of REF, then it will hit one or more
832 live bytes. This avoids useless iteration over the bitmap
833 below. */
834 if (start == 0 && known_eq (size * 8, ref->size))
835 return true;
837 /* Now check if any of the remaining bits in use_ref are set in LIVE. */
838 return bitmap_bit_in_range_p (live, start, (start + size - 1));
840 return true;
843 /* Callback for dse_classify_store calling for_each_index. Verify that
844 indices are invariant in the loop with backedge PHI in basic-block DATA. */
846 static bool
847 check_name (tree, tree *idx, void *data)
849 basic_block phi_bb = (basic_block) data;
850 if (TREE_CODE (*idx) == SSA_NAME
851 && !SSA_NAME_IS_DEFAULT_DEF (*idx)
852 && dominated_by_p (CDI_DOMINATORS, gimple_bb (SSA_NAME_DEF_STMT (*idx)),
853 phi_bb))
854 return false;
855 return true;
858 /* STMT stores the value 0 into one or more memory locations
859 (via memset, empty constructor, calloc call, etc).
861 See if there is a subsequent store of the value 0 to one
862 or more of the same memory location(s). If so, the subsequent
863 store is redundant and can be removed.
865 The subsequent stores could be via memset, empty constructors,
866 simple MEM stores, etc. */
868 static void
869 dse_optimize_redundant_stores (gimple *stmt)
871 int cnt = 0;
873 /* TBAA state of STMT, if it is a call it is effectively alias-set zero. */
874 alias_set_type earlier_set = 0;
875 alias_set_type earlier_base_set = 0;
876 if (is_gimple_assign (stmt))
878 ao_ref lhs_ref;
879 ao_ref_init (&lhs_ref, gimple_assign_lhs (stmt));
880 earlier_set = ao_ref_alias_set (&lhs_ref);
881 earlier_base_set = ao_ref_base_alias_set (&lhs_ref);
884 /* We could do something fairly complex and look through PHIs
885 like DSE_CLASSIFY_STORE, but it doesn't seem to be worth
886 the effort.
888 Look at all the immediate uses of the VDEF (which are obviously
889 dominated by STMT). See if one or more stores 0 into the same
890 memory locations as STMT; if so, remove those immediate use statements.
891 tree defvar = gimple_vdef (stmt);
892 imm_use_iterator ui;
893 gimple *use_stmt;
894 FOR_EACH_IMM_USE_STMT (use_stmt, ui, defvar)
896 /* Limit stmt walking. */
897 if (++cnt > param_dse_max_alias_queries_per_store)
898 break;
900 /* If USE_STMT stores 0 into one or more of the same locations
901 as STMT and STMT would kill USE_STMT, then we can just remove
902 USE_STMT. */
903 tree fndecl;
904 if ((is_gimple_assign (use_stmt)
905 && gimple_vdef (use_stmt)
906 && (gimple_assign_single_p (use_stmt)
907 && initializer_zerop (gimple_assign_rhs1 (use_stmt))))
908 || (gimple_call_builtin_p (use_stmt, BUILT_IN_NORMAL)
909 && (fndecl = gimple_call_fndecl (use_stmt)) != NULL
910 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
911 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET_CHK)
912 && integer_zerop (gimple_call_arg (use_stmt, 1))))
914 ao_ref write;
916 if (!initialize_ao_ref_for_dse (use_stmt, &write))
917 break;
919 if (valid_ao_ref_for_dse (&write)
920 && stmt_kills_ref_p (stmt, &write))
922 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
923 if (is_gimple_assign (use_stmt))
925 ao_ref lhs_ref;
926 ao_ref_init (&lhs_ref, gimple_assign_lhs (use_stmt));
927 if ((earlier_set == ao_ref_alias_set (&lhs_ref)
928 || alias_set_subset_of (ao_ref_alias_set (&lhs_ref),
929 earlier_set))
930 && (earlier_base_set == ao_ref_base_alias_set (&lhs_ref)
931 || alias_set_subset_of
932 (ao_ref_base_alias_set (&lhs_ref),
933 earlier_base_set)))
934 delete_dead_or_redundant_assignment (&gsi, "redundant",
935 need_eh_cleanup,
936 need_ab_cleanup);
938 else if (is_gimple_call (use_stmt))
940 if ((earlier_set == 0
941 || alias_set_subset_of (0, earlier_set))
942 && (earlier_base_set == 0
943 || alias_set_subset_of (0, earlier_base_set)))
944 delete_dead_or_redundant_call (&gsi, "redundant");
946 else
947 gcc_unreachable ();
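/* As an illustrative sketch (hypothetical):

     p = calloc (4, 8);     // 32 bytes, already zeroed
     memset (p, 0, 16);     // redundant with the calloc, removed  */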
953 /* Return whether PHI contains ARG as an argument. */
955 static bool
956 contains_phi_arg (gphi *phi, tree arg)
958 for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
959 if (gimple_phi_arg_def (phi, i) == arg)
960 return true;
961 return false;
964 /* Hash map of the memory use in a GIMPLE assignment to its
965 data reference. If NULL, data-ref analysis isn't used. */
966 static hash_map<gimple *, data_reference_p> *dse_stmt_to_dr_map;
968 /* A helper of dse_optimize_stmt.
969 Given a GIMPLE_ASSIGN in STMT that writes to REF, classify it
970 according to downstream uses and defs. Sets *BY_CLOBBER_P to true
971 if only clobber statements influenced the classification result.
972 Returns the classification. */
974 static dse_store_status
975 dse_classify_store (ao_ref *ref, gimple *stmt,
976 bool byte_tracking_enabled, sbitmap live_bytes,
977 bool *by_clobber_p, tree stop_at_vuse, int &cnt,
978 bitmap visited)
980 gimple *temp;
981 std::unique_ptr<data_reference, void(*)(data_reference_p)>
982 dra (nullptr, free_data_ref);
984 if (by_clobber_p)
985 *by_clobber_p = true;
987 /* Find the first dominated statement that clobbers (part of) the
988 memory stmt stores to, with no intermediate statement that may use
989 part of that memory. That is, find a store that may
990 prove stmt to be a dead store. */
991 temp = stmt;
994 gimple *use_stmt;
995 imm_use_iterator ui;
996 bool fail = false;
997 tree defvar;
999 if (gimple_code (temp) == GIMPLE_PHI)
1001 defvar = PHI_RESULT (temp);
1002 bitmap_set_bit (visited, SSA_NAME_VERSION (defvar));
1004 else
1005 defvar = gimple_vdef (temp);
1007 auto_vec<gimple *, 10> defs;
1008 gphi *first_phi_def = NULL;
1009 gphi *last_phi_def = NULL;
1011 auto_vec<tree, 10> worklist;
1012 worklist.quick_push (defvar);
1016 defvar = worklist.pop ();
1017 /* If we're instructed to stop walking at region boundary, do so. */
1018 if (defvar == stop_at_vuse)
1019 return DSE_STORE_LIVE;
1021 use_operand_p usep;
1022 FOR_EACH_IMM_USE_FAST (usep, ui, defvar)
1024 use_stmt = USE_STMT (usep);
1026 /* Limit stmt walking. */
1027 if (++cnt > param_dse_max_alias_queries_per_store)
1029 fail = true;
1030 break;
1033 /* In simple cases we can look through PHI nodes, but we
1034 have to be careful with loops and with memory references
1035 containing operands that are also operands of PHI nodes.
1036 See gcc.c-torture/execute/20051110-*.c. */
1037 if (gphi *phi = dyn_cast <gphi *> (use_stmt))
1039 /* Look through single-argument PHIs. */
1040 if (gimple_phi_num_args (phi) == 1)
1041 worklist.safe_push (gimple_phi_result (phi));
1042 else
1044 /* If we visit this PHI by following a backedge then we
1045 have to make sure ref->ref only refers to SSA names
1046 that are invariant with respect to the loop
1047 represented by this PHI node. We handle irreducible
1048 regions by relying on backedge marking and identifying
1049 the head of the (sub-)region. */
1050 edge e = gimple_phi_arg_edge
1051 (phi, PHI_ARG_INDEX_FROM_USE (usep));
1052 if (e->flags & EDGE_DFS_BACK)
1054 basic_block rgn_head
1055 = nearest_common_dominator (CDI_DOMINATORS,
1056 gimple_bb (phi),
1057 e->src);
1058 if (!for_each_index (ref->ref
1059 ? &ref->ref : &ref->base,
1060 check_name, rgn_head))
1061 return DSE_STORE_LIVE;
1063 /* If we already visited this PHI ignore it for further
1064 processing. But note we have to check each incoming
1065 edge above. */
1066 if (!bitmap_bit_p (visited,
1067 SSA_NAME_VERSION (PHI_RESULT (phi))))
1069 defs.safe_push (phi);
1070 if (!first_phi_def)
1071 first_phi_def = phi;
1072 last_phi_def = phi;
1076 /* If the statement is a use the store is not dead. */
1077 else if (ref_maybe_used_by_stmt_p (use_stmt, ref))
1079 if (dse_stmt_to_dr_map
1080 && ref->ref
1081 && is_gimple_assign (use_stmt))
1083 if (!dra)
1084 dra.reset (create_data_ref (NULL, NULL, ref->ref, stmt,
1085 false, false));
1086 bool existed_p;
1087 data_reference_p &drb
1088 = dse_stmt_to_dr_map->get_or_insert (use_stmt,
1089 &existed_p);
1090 if (!existed_p)
1091 drb = create_data_ref (NULL, NULL,
1092 gimple_assign_rhs1 (use_stmt),
1093 use_stmt, false, false);
1094 if (!dr_may_alias_p (dra.get (), drb, NULL))
1096 if (gimple_vdef (use_stmt))
1097 defs.safe_push (use_stmt);
1098 continue;
1102 /* Handle common cases where we can easily build an ao_ref
1103 structure for USE_STMT and in doing so we find that the
1104 references hit non-live bytes and thus can be ignored.
1106 TODO: We can also use modref summary to handle calls. */
1107 if (byte_tracking_enabled
1108 && is_gimple_assign (use_stmt))
1110 ao_ref use_ref;
1111 ao_ref_init (&use_ref, gimple_assign_rhs1 (use_stmt));
1112 if (valid_ao_ref_for_dse (&use_ref)
1113 && operand_equal_p (use_ref.base, ref->base,
1114 OEP_ADDRESS_OF)
1115 && !live_bytes_read (&use_ref, ref, live_bytes))
1117 /* If this is a store, remember it as we possibly
1118 need to walk the defs uses. */
1119 if (gimple_vdef (use_stmt))
1120 defs.safe_push (use_stmt);
1121 continue;
1125 fail = true;
1126 break;
1128 /* We have visited ourselves already so ignore STMT for the
1129 purpose of chaining. */
1130 else if (use_stmt == stmt)
1132 /* If this is a store, remember it as we possibly need to walk the
1133 defs uses. */
1134 else if (gimple_vdef (use_stmt))
1135 defs.safe_push (use_stmt);
1138 while (!fail && !worklist.is_empty ());
1140 if (fail)
1142 /* STMT might be partially dead and we may be able to reduce
1143 how many memory locations it stores into. */
1144 if (byte_tracking_enabled && !gimple_clobber_p (stmt))
1145 return DSE_STORE_MAYBE_PARTIAL_DEAD;
1146 return DSE_STORE_LIVE;
1149 /* If we didn't find any definition this means the store is dead
1150 if it isn't a store to globally reachable memory. In this case
1151 just pretend the stmt makes itself dead. Otherwise fail. */
1152 if (defs.is_empty ())
1154 if (ref_may_alias_global_p (ref, false))
1156 basic_block def_bb = gimple_bb (SSA_NAME_DEF_STMT (defvar));
1157 /* Assume that BUILT_IN_UNREACHABLE and BUILT_IN_UNREACHABLE_TRAP
1158 do not need to keep (global) memory side-effects live.
1159 We do not have virtual operands on BUILT_IN_UNREACHABLE
1160 but we can do poor man's reachability when the last
1161 definition we want to elide is in the block that ends
1162 in such a call. */
1163 if (EDGE_COUNT (def_bb->succs) == 0)
1164 if (gcall *last = dyn_cast <gcall *> (*gsi_last_bb (def_bb)))
1165 if (gimple_call_builtin_p (last, BUILT_IN_UNREACHABLE)
1166 || gimple_call_builtin_p (last,
1167 BUILT_IN_UNREACHABLE_TRAP))
1169 if (by_clobber_p)
1170 *by_clobber_p = false;
1171 return DSE_STORE_DEAD;
1173 return DSE_STORE_LIVE;
1176 if (by_clobber_p)
1177 *by_clobber_p = false;
1178 return DSE_STORE_DEAD;
1181 /* Process defs and remove those we need not process further. */
1182 for (unsigned i = 0; i < defs.length ();)
1184 gimple *def = defs[i];
1185 gimple *use_stmt;
1186 use_operand_p use_p;
1187 tree vdef = (gimple_code (def) == GIMPLE_PHI
1188 ? gimple_phi_result (def) : gimple_vdef (def));
1189 gphi *phi_def;
1190 /* If the path to check starts with a kill we do not need to
1191 process it further.
1192 ??? With byte tracking we need only kill the bytes currently
1193 live. */
1194 if (stmt_kills_ref_p (def, ref))
1196 if (by_clobber_p && !gimple_clobber_p (def))
1197 *by_clobber_p = false;
1198 defs.unordered_remove (i);
1200 /* If the path ends here we do not need to process it further.
1201 This for example happens with calls to noreturn functions. */
1202 else if (has_zero_uses (vdef))
1204 /* But if the store is to global memory it is definitely
1205 not dead. */
1206 if (ref_may_alias_global_p (ref, false))
1207 return DSE_STORE_LIVE;
1208 defs.unordered_remove (i);
1210 /* In addition to kills we can remove defs whose only use
1211 is another def in defs. That can only ever be PHIs of which
1212 we track two for simplicity reasons, the first and last in
1213 {first,last}_phi_def (we fail for multiple PHIs anyways).
1214 We can also ignore defs that feed only into
1215 already visited PHIs. */
1216 else if (single_imm_use (vdef, &use_p, &use_stmt)
1217 && (use_stmt == first_phi_def
1218 || use_stmt == last_phi_def
1219 || (gimple_code (use_stmt) == GIMPLE_PHI
1220 && bitmap_bit_p (visited,
1221 SSA_NAME_VERSION
1222 (PHI_RESULT (use_stmt))))))
1224 defs.unordered_remove (i);
1225 if (def == first_phi_def)
1226 first_phi_def = NULL;
1227 else if (def == last_phi_def)
1228 last_phi_def = NULL;
1230 /* If def is a PHI and one of its arguments is another PHI node still
1231 in consideration we can defer processing it. */
1232 else if ((phi_def = dyn_cast <gphi *> (def))
1233 && ((last_phi_def
1234 && phi_def != last_phi_def
1235 && contains_phi_arg (phi_def,
1236 gimple_phi_result (last_phi_def)))
1237 || (first_phi_def
1238 && phi_def != first_phi_def
1239 && contains_phi_arg
1240 (phi_def, gimple_phi_result (first_phi_def)))))
1242 defs.unordered_remove (i);
1243 if (phi_def == first_phi_def)
1244 first_phi_def = NULL;
1245 else if (phi_def == last_phi_def)
1246 last_phi_def = NULL;
1248 else
1249 ++i;
1252 /* If all defs kill the ref we are done. */
1253 if (defs.is_empty ())
1254 return DSE_STORE_DEAD;
1255 /* If more than one def survives we have to analyze multiple
1256 paths. We can handle this by recursing, sharing 'visited'
1257 to avoid redundant work and limiting it by shared 'cnt'.
1258 For now do not bother with byte-tracking in this case. */
1259 while (defs.length () > 1)
1261 if (dse_classify_store (ref, defs.last (), false, NULL,
1262 by_clobber_p, stop_at_vuse, cnt,
1263 visited) != DSE_STORE_DEAD)
1264 break;
1265 byte_tracking_enabled = false;
1266 defs.pop ();
1268 /* If more than one def survives fail. */
1269 if (defs.length () > 1)
1271 /* STMT might be partially dead and we may be able to reduce
1272 how many memory locations it stores into. */
1273 if (byte_tracking_enabled && !gimple_clobber_p (stmt))
1274 return DSE_STORE_MAYBE_PARTIAL_DEAD;
1275 return DSE_STORE_LIVE;
1277 temp = defs[0];
1279 /* Track partial kills. */
1280 if (byte_tracking_enabled)
1282 clear_bytes_written_by (live_bytes, temp, ref);
1283 if (bitmap_empty_p (live_bytes))
1285 if (by_clobber_p && !gimple_clobber_p (temp))
1286 *by_clobber_p = false;
1287 return DSE_STORE_DEAD;
1291 /* Continue walking until there are no more live bytes. */
1292 while (1);
1295 dse_store_status
1296 dse_classify_store (ao_ref *ref, gimple *stmt,
1297 bool byte_tracking_enabled, sbitmap live_bytes,
1298 bool *by_clobber_p, tree stop_at_vuse)
1300 int cnt = 0;
1301 auto_bitmap visited;
1302 return dse_classify_store (ref, stmt, byte_tracking_enabled, live_bytes,
1303 by_clobber_p, stop_at_vuse, cnt, visited);
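/* As an illustrative sketch (hypothetical): in

     *p = 1;
     if (cond)
       *p = 2;
     else
       *p = 3;

   both uses of the first store's virtual definition are stores that
   kill it, so the walk classifies *p = 1 as DSE_STORE_DEAD.  */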
1307 /* Delete a dead call at GSI, which is mem* call of some kind. */
1308 static void
1309 delete_dead_or_redundant_call (gimple_stmt_iterator *gsi, const char *type)
1311 gimple *stmt = gsi_stmt (*gsi);
1312 if (dump_file && (dump_flags & TDF_DETAILS))
1314 fprintf (dump_file, " Deleted %s call: ", type);
1315 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
1316 fprintf (dump_file, "\n");
1319 basic_block bb = gimple_bb (stmt);
1320 tree lhs = gimple_call_lhs (stmt);
1321 if (lhs)
1323 tree ptr = gimple_call_arg (stmt, 0);
1324 gimple *new_stmt = gimple_build_assign (lhs, ptr);
1325 unlink_stmt_vdef (stmt);
1326 if (gsi_replace (gsi, new_stmt, true))
1327 bitmap_set_bit (need_eh_cleanup, bb->index);
1329 else
1331 /* Then we need to fix the operand of the consuming stmt. */
1332 unlink_stmt_vdef (stmt);
1334 /* Remove the dead store. */
1335 if (gsi_remove (gsi, true))
1336 bitmap_set_bit (need_eh_cleanup, bb->index);
1337 release_defs (stmt);
1341 /* Delete a dead store at GSI, which is a gimple assignment. */
1343 void
1344 delete_dead_or_redundant_assignment (gimple_stmt_iterator *gsi,
1345 const char *type,
1346 bitmap need_eh_cleanup,
1347 bitmap need_ab_cleanup)
1349 gimple *stmt = gsi_stmt (*gsi);
1350 if (dump_file && (dump_flags & TDF_DETAILS))
1352 fprintf (dump_file, " Deleted %s store: ", type);
1353 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
1354 fprintf (dump_file, "\n");
1357 /* Then we need to fix the operand of the consuming stmt. */
1358 unlink_stmt_vdef (stmt);
1360 /* Remove the dead store. */
1361 basic_block bb = gimple_bb (stmt);
1362 if (need_ab_cleanup && stmt_can_make_abnormal_goto (stmt))
1363 bitmap_set_bit (need_ab_cleanup, bb->index);
1364 if (gsi_remove (gsi, true) && need_eh_cleanup)
1365 bitmap_set_bit (need_eh_cleanup, bb->index);
1367 /* And release any SSA_NAMEs set in this statement back to the
1368 SSA_NAME manager. */
1369 release_defs (stmt);
1372 /* Try to prove, using modref summary, that all memory written to by a call is
1373 dead and remove it. Assume that if the return value is written to memory,
1374 it has already been proved dead. */
1376 static bool
1377 dse_optimize_call (gimple_stmt_iterator *gsi, sbitmap live_bytes)
1379 gcall *stmt = dyn_cast <gcall *> (gsi_stmt (*gsi));
1381 if (!stmt)
1382 return false;
1384 tree callee = gimple_call_fndecl (stmt);
1386 if (!callee)
1387 return false;
1389 /* Pure/const functions are optimized by normal DCE
1390 or handled as store above. */
1391 int flags = gimple_call_flags (stmt);
1392 if ((flags & (ECF_PURE|ECF_CONST|ECF_NOVOPS))
1393 && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
1394 return false;
1396 cgraph_node *node = cgraph_node::get (callee);
1397 if (!node)
1398 return false;
1400 if (stmt_could_throw_p (cfun, stmt)
1401 && !cfun->can_delete_dead_exceptions)
1402 return false;
1404 /* If the return value is used, the call is not dead. */
1405 tree lhs = gimple_call_lhs (stmt);
1406 if (lhs && TREE_CODE (lhs) == SSA_NAME)
1408 imm_use_iterator ui;
1409 gimple *use_stmt;
1410 FOR_EACH_IMM_USE_STMT (use_stmt, ui, lhs)
1411 if (!is_gimple_debug (use_stmt))
1412 return false;
1415 /* Verify that there are no side-effects except for return value
1416 and memory writes tracked by modref. */
1417 modref_summary *summary = get_modref_function_summary (node);
1418 if (!summary || !summary->try_dse)
1419 return false;
1421 bool by_clobber_p = false;
1423 /* Walk all memory writes and verify that they are dead. */
1424 for (auto base_node : summary->stores->bases)
1425 for (auto ref_node : base_node->refs)
1426 for (auto access_node : ref_node->accesses)
1428 tree arg = access_node.get_call_arg (stmt);
1430 if (!arg || !POINTER_TYPE_P (TREE_TYPE (arg)))
1431 return false;
1433 if (integer_zerop (arg)
1434 && !targetm.addr_space.zero_address_valid
1435 (TYPE_ADDR_SPACE (TREE_TYPE (arg))))
1436 continue;
1438 ao_ref ref;
1440 if (!access_node.get_ao_ref (stmt, &ref))
1441 return false;
1442 ref.ref_alias_set = ref_node->ref;
1443 ref.base_alias_set = base_node->base;
1445 bool byte_tracking_enabled
1446 = setup_live_bytes_from_ref (&ref, live_bytes);
1447 enum dse_store_status store_status;
1449 store_status = dse_classify_store (&ref, stmt,
1450 byte_tracking_enabled,
1451 live_bytes, &by_clobber_p);
1452 if (store_status != DSE_STORE_DEAD)
1453 return false;
1455 delete_dead_or_redundant_assignment (gsi, "dead", need_eh_cleanup,
1456 need_ab_cleanup);
1457 return true;
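/* As an illustrative sketch (hypothetical): for

     void set (int *p) { *p = 1; }   // modref: only writes *p
     ...
     set (&x);
     x = 2;                          // kills everything set() stores

   the call to set() is removed (its result, if any, must be
   unused).  */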
1460 /* Attempt to eliminate dead stores in the statement referenced by BSI.
1462 A dead store is a store into a memory location which will later be
1463 overwritten by another store without any intervening loads. In this
1464 case the earlier store can be deleted.
1466 In our SSA + virtual operand world we use immediate uses of virtual
1467 operands to detect dead stores. If a store's virtual definition
1468 is used precisely once by a later store to the same location which
1469 post dominates the first store, then the first store is dead. */
1471 static void
1472 dse_optimize_stmt (function *fun, gimple_stmt_iterator *gsi, sbitmap live_bytes)
1474 gimple *stmt = gsi_stmt (*gsi);
1476 /* Don't return early on *this_2(D) ={v} {CLOBBER}. */
1477 if (gimple_has_volatile_ops (stmt)
1478 && (!gimple_clobber_p (stmt)
1479 || TREE_CODE (gimple_assign_lhs (stmt)) != MEM_REF))
1480 return;
1482 ao_ref ref;
1483 /* If this is not a store, we can still remove a dead call using the
1484 modref summary. Note we specifically allow ref to be initialized
1485 to a conservative may-def since we are looking for followup stores
1486 to kill all of it. */
1487 if (!initialize_ao_ref_for_dse (stmt, &ref, true))
1489 dse_optimize_call (gsi, live_bytes);
1490 return;
1493 /* We know we have virtual definitions. We can handle assignments and
1494 some builtin calls. */
1495 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
1497 tree fndecl = gimple_call_fndecl (stmt);
1498 switch (DECL_FUNCTION_CODE (fndecl))
1500 case BUILT_IN_MEMCPY:
1501 case BUILT_IN_MEMMOVE:
1502 case BUILT_IN_STRNCPY:
1503 case BUILT_IN_MEMSET:
1504 case BUILT_IN_MEMCPY_CHK:
1505 case BUILT_IN_MEMMOVE_CHK:
1506 case BUILT_IN_STRNCPY_CHK:
1507 case BUILT_IN_MEMSET_CHK:
1509 /* Occasionally calls with an explicit length of zero
1510 show up in the IL. It's pointless to do analysis
1511 on them, they're trivially dead. */
1512 tree size = gimple_call_arg (stmt, 2);
1513 if (integer_zerop (size))
1515 delete_dead_or_redundant_call (gsi, "dead");
1516 return;
1519 /* If this is a memset call that initializes an object
1520 to zero, it may be redundant with an earlier memset
1521 or empty CONSTRUCTOR of a larger object. */
1522 if ((DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
1523 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET_CHK)
1524 && integer_zerop (gimple_call_arg (stmt, 1)))
1525 dse_optimize_redundant_stores (stmt);
1527 enum dse_store_status store_status;
1528 bool byte_tracking_enabled
1529 = setup_live_bytes_from_ref (&ref, live_bytes);
1530 store_status = dse_classify_store (&ref, stmt,
1531 byte_tracking_enabled,
1532 live_bytes);
1533 if (store_status == DSE_STORE_LIVE)
1534 return;
1536 if (store_status == DSE_STORE_MAYBE_PARTIAL_DEAD)
1538 maybe_trim_memstar_call (&ref, live_bytes, stmt);
1539 return;
1542 if (store_status == DSE_STORE_DEAD)
1543 delete_dead_or_redundant_call (gsi, "dead");
1544 return;
1547 case BUILT_IN_CALLOC:
1548 /* We already know the arguments are integer constants. */
1549 dse_optimize_redundant_stores (stmt);
1550 return;
1552 default:
1553 return;
1556 else if (is_gimple_call (stmt)
1557 && gimple_call_internal_p (stmt))
1559 switch (gimple_call_internal_fn (stmt))
1561 case IFN_LEN_STORE:
1562 case IFN_MASK_STORE:
1563 case IFN_MASK_LEN_STORE:
1565 enum dse_store_status store_status;
1566 store_status = dse_classify_store (&ref, stmt, false, live_bytes);
1567 if (store_status == DSE_STORE_DEAD)
1568 delete_dead_or_redundant_call (gsi, "dead");
1569 return;
1571 default:;
1575 bool by_clobber_p = false;
1577 /* Check if this statement stores zero to a memory location,
1578 and if there is a subsequent store of zero to the same
1579 memory location. If so, remove the subsequent store. */
1580 if (gimple_assign_single_p (stmt)
1581 && initializer_zerop (gimple_assign_rhs1 (stmt)))
1582 dse_optimize_redundant_stores (stmt);
1584 /* Self-assignments are zombies. */
1585 if (is_gimple_assign (stmt)
1586 && operand_equal_p (gimple_assign_rhs1 (stmt),
1587 gimple_assign_lhs (stmt), 0))
1589 else
1591 bool byte_tracking_enabled
1592 = setup_live_bytes_from_ref (&ref, live_bytes);
1593 enum dse_store_status store_status;
1594 store_status = dse_classify_store (&ref, stmt,
1595 byte_tracking_enabled,
1596 live_bytes, &by_clobber_p);
1597 if (store_status == DSE_STORE_LIVE)
1598 return;
1600 if (store_status == DSE_STORE_MAYBE_PARTIAL_DEAD)
1602 maybe_trim_partially_dead_store (&ref, live_bytes, stmt);
1603 return;
1607 /* Now we know that use_stmt kills the LHS of stmt. */
1609 /* But only remove *this_2(D) ={v} {CLOBBER} if killed by
1610 another clobber stmt. */
1611 if (gimple_clobber_p (stmt)
1612 && !by_clobber_p)
1613 return;
1615 if (is_gimple_call (stmt)
1616 && (gimple_has_side_effects (stmt)
1617 || (stmt_could_throw_p (fun, stmt)
1618 && !fun->can_delete_dead_exceptions)))
1620 /* See if we can remove the complete call. */
1621 if (dse_optimize_call (gsi, live_bytes))
1622 return;
1623 /* Make sure we do not remove a return slot we cannot reconstruct
1624 later. */
1625 if (gimple_call_return_slot_opt_p (as_a <gcall *>(stmt))
1626 && (TREE_ADDRESSABLE (TREE_TYPE (gimple_call_fntype (stmt)))
1627 || !poly_int_tree_p
1628 (TYPE_SIZE (TREE_TYPE (gimple_call_fntype (stmt))))))
1629 return;
1630 if (dump_file && (dump_flags & TDF_DETAILS))
1632 fprintf (dump_file, " Deleted dead store in call LHS: ");
1633 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
1634 fprintf (dump_file, "\n");
1636 gimple_call_set_lhs (stmt, NULL_TREE);
1637 update_stmt (stmt);
1639 else if (!stmt_could_throw_p (fun, stmt)
1640 || fun->can_delete_dead_exceptions)
1641 delete_dead_or_redundant_assignment (gsi, "dead", need_eh_cleanup,
1642 need_ab_cleanup);
1645 namespace {
1647 const pass_data pass_data_dse =
1649 GIMPLE_PASS, /* type */
1650 "dse", /* name */
1651 OPTGROUP_NONE, /* optinfo_flags */
1652 TV_TREE_DSE, /* tv_id */
1653 ( PROP_cfg | PROP_ssa ), /* properties_required */
1654 0, /* properties_provided */
1655 0, /* properties_destroyed */
1656 0, /* todo_flags_start */
1657 0, /* todo_flags_finish */
1660 class pass_dse : public gimple_opt_pass
1662 public:
1663 pass_dse (gcc::context *ctxt)
1664 : gimple_opt_pass (pass_data_dse, ctxt), use_dr_analysis_p (false)
1667 /* opt_pass methods: */
1668 opt_pass * clone () final override { return new pass_dse (m_ctxt); }
1669 void set_pass_param (unsigned n, bool param) final override
1671 gcc_assert (n == 0);
1672 use_dr_analysis_p = param;
1674 bool gate (function *) final override { return flag_tree_dse != 0; }
1675 unsigned int execute (function *) final override;
1677 private:
1678 bool use_dr_analysis_p;
1679 }; // class pass_dse
1681 unsigned int
1682 pass_dse::execute (function *fun)
1684 unsigned todo = 0;
1685 bool released_def = false;
1687 need_eh_cleanup = BITMAP_ALLOC (NULL);
1688 need_ab_cleanup = BITMAP_ALLOC (NULL);
1689 auto_sbitmap live_bytes (param_dse_max_object_size);
1690 if (flag_expensive_optimizations && use_dr_analysis_p)
1691 dse_stmt_to_dr_map = new hash_map<gimple *, data_reference_p>;
1693 renumber_gimple_stmt_uids (fun);
1695 calculate_dominance_info (CDI_DOMINATORS);
1697 /* Dead store elimination is fundamentally a reverse program order walk. */
1698 int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fun) - NUM_FIXED_BLOCKS);
1699 auto_bitmap exit_bbs;
1700 bitmap_set_bit (exit_bbs, EXIT_BLOCK);
1701 edge entry = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fun));
1702 int n = rev_post_order_and_mark_dfs_back_seme (fun, entry,
1703 exit_bbs, false, rpo, NULL);
1704 for (int i = n; i != 0; --i)
1706 basic_block bb = BASIC_BLOCK_FOR_FN (fun, rpo[i-1]);
1707 gimple_stmt_iterator gsi;
1709 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
1711 gimple *stmt = gsi_stmt (gsi);
1713 if (gimple_vdef (stmt))
1714 dse_optimize_stmt (fun, &gsi, live_bytes);
1715 else if (def_operand_p
1716 def_p = single_ssa_def_operand (stmt, SSA_OP_DEF))
1718 /* When we remove dead stores make sure to also delete trivially
1719 dead SSA defs. */
1720 if (has_zero_uses (DEF_FROM_PTR (def_p))
1721 && !gimple_has_side_effects (stmt)
1722 && !is_ctrl_altering_stmt (stmt)
1723 && (!stmt_could_throw_p (fun, stmt)
1724 || fun->can_delete_dead_exceptions))
1726 if (dump_file && (dump_flags & TDF_DETAILS))
1728 fprintf (dump_file, " Deleted trivially dead stmt: ");
1729 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
1730 fprintf (dump_file, "\n");
1732 if (gsi_remove (&gsi, true) && need_eh_cleanup)
1733 bitmap_set_bit (need_eh_cleanup, bb->index);
1734 release_defs (stmt);
1735 released_def = true;
1738 if (gsi_end_p (gsi))
1739 gsi = gsi_last_bb (bb);
1740 else
1741 gsi_prev (&gsi);
1743 bool removed_phi = false;
1744 for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);)
1746 gphi *phi = si.phi ();
1747 if (has_zero_uses (gimple_phi_result (phi)))
1749 if (dump_file && (dump_flags & TDF_DETAILS))
1751 fprintf (dump_file, " Deleted trivially dead PHI: ");
1752 print_gimple_stmt (dump_file, phi, 0, dump_flags);
1753 fprintf (dump_file, "\n");
1755 remove_phi_node (&si, true);
1756 removed_phi = true;
1757 released_def = true;
1759 else
1760 gsi_next (&si);
1762 if (removed_phi && gimple_seq_empty_p (phi_nodes (bb)))
1763 todo |= TODO_cleanup_cfg;
1765 free (rpo);
1767 /* Removal of stores may make some EH edges dead. Purge such edges from
1768 the CFG as needed. */
1769 if (!bitmap_empty_p (need_eh_cleanup))
1771 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
1772 todo |= TODO_cleanup_cfg;
1774 if (!bitmap_empty_p (need_ab_cleanup))
1776 gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
1777 todo |= TODO_cleanup_cfg;
1780 BITMAP_FREE (need_eh_cleanup);
1781 BITMAP_FREE (need_ab_cleanup);
1783 if (released_def)
1784 free_numbers_of_iterations_estimates (fun);
1786 if (flag_expensive_optimizations && use_dr_analysis_p)
1788 for (auto i = dse_stmt_to_dr_map->begin ();
1789 i != dse_stmt_to_dr_map->end (); ++i)
1790 free_data_ref ((*i).second);
1791 delete dse_stmt_to_dr_map;
1792 dse_stmt_to_dr_map = NULL;
1795 return todo;
1798 } // anon namespace
1800 gimple_opt_pass *
1801 make_pass_dse (gcc::context *ctxt)
1803 return new pass_dse (ctxt);