/* Dead and redundant store elimination
   Copyright (C) 2004-2023 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#define INCLUDE_MEMORY
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "rtl.h"
#include "tree.h"
#include "gimple.h"
#include "tree-pass.h"
#include "ssa.h"
#include "gimple-pretty-print.h"
#include "fold-const.h"
#include "gimple-iterator.h"
#include "tree-cfg.h"
#include "tree-dfa.h"
#include "tree-cfgcleanup.h"
#include "alias.h"
#include "tree-ssa-loop.h"
#include "tree-ssa-dse.h"
#include "builtins.h"
#include "gimple-fold.h"
#include "tree-eh.h"
#include "cfganal.h"
#include "cgraph.h"
#include "ipa-modref-tree.h"
#include "ipa-modref.h"
#include "target.h"
#include "tree-ssa-loop-niter.h"
#include "cfgloop.h"
#include "tree-data-ref.h"
#include "internal-fn.h"
/* This file implements dead store elimination.

   A dead store is a store into a memory location which will later be
   overwritten by another store without any intervening loads.  In this
   case the earlier store can be deleted or trimmed if the store
   was partially dead.

   A redundant store is a store into a memory location which stores
   the exact same value as a prior store to the same memory location.
   While this can often be handled by dead store elimination, removing
   the redundant store is often better than removing or trimming the
   dead store.

   In our SSA + virtual operand world we use immediate uses of virtual
   operands to detect these cases.  If a store's virtual definition
   is used precisely once by a later store to the same location which
   post dominates the first store, then the first store is dead.  If
   the data stored is the same, then the second store is redundant.

   The single use of the store's virtual definition ensures that
   there are no intervening aliased loads and the requirement that
   the second store post dominate the first ensures that if the earlier
   store executes, then the later stores will execute before the function
   exits.

   It may help to think of this as first moving the earlier store to
   the point immediately before the later store.  Again, the single
   use of the virtual definition and the post-dominance relationship
   ensure that such movement would be safe.  Clearly if there are
   back to back stores, then the second makes the first dead.  If
   the second store stores the same value, then the second store is
   redundant.

   Reviewing section 10.7.2 in Morgan's "Building an Optimizing Compiler"
   may also help in understanding this code since it discusses the
   relationship between dead store and redundant load elimination.  In
   fact, they are the same transformation applied to different views of
   the problem.  */
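
/* A small illustrative example (not from any particular testcase):

     p->x = 1;     dead: overwritten below with no intervening read
     p->x = 2;
     p->y = 3;
     p->y = 3;     redundant: stores the value already stored

   The first store to p->x is deleted as dead; the second store to p->y
   is deleted as redundant, which keeps the earlier (possibly already
   value-numbered) store in place.  */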

static void delete_dead_or_redundant_call (gimple_stmt_iterator *, const char *);

/* Bitmap of blocks that have had EH statements cleaned.  We should
   remove their dead edges eventually.  */
static bitmap need_eh_cleanup;
static bitmap need_ab_cleanup;

/* STMT is a statement that may write into memory.  Analyze it and
   initialize WRITE to describe how STMT affects memory.  When
   MAY_DEF_OK is true then the function initializes WRITE to what
   the statement may write to at most.

   Return TRUE if the statement was analyzed, FALSE otherwise.

   It is always safe to return FALSE.  But typically better optimization
   can be achieved by analyzing more statements.  */

static bool
initialize_ao_ref_for_dse (gimple *stmt, ao_ref *write, bool may_def_ok = false)
{
  /* It's advantageous to handle certain mem* functions.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      switch (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)))
	{
	case BUILT_IN_MEMCPY:
	case BUILT_IN_MEMMOVE:
	case BUILT_IN_MEMSET:
	case BUILT_IN_MEMCPY_CHK:
	case BUILT_IN_MEMMOVE_CHK:
	case BUILT_IN_MEMSET_CHK:
	case BUILT_IN_STRNCPY:
	case BUILT_IN_STRNCPY_CHK:
	  {
	    tree size = gimple_call_arg (stmt, 2);
	    tree ptr = gimple_call_arg (stmt, 0);
	    ao_ref_init_from_ptr_and_size (write, ptr, size);
	    return true;
	  }

	/* A calloc call can never be dead, but it can make
	   subsequent stores redundant if they store 0 into
	   the same memory locations.  */
	case BUILT_IN_CALLOC:
	  {
	    tree nelem = gimple_call_arg (stmt, 0);
	    tree selem = gimple_call_arg (stmt, 1);
	    tree lhs;
	    if (TREE_CODE (nelem) == INTEGER_CST
		&& TREE_CODE (selem) == INTEGER_CST
		&& (lhs = gimple_call_lhs (stmt)) != NULL_TREE)
	      {
		tree size = fold_build2 (MULT_EXPR, TREE_TYPE (nelem),
					 nelem, selem);
		ao_ref_init_from_ptr_and_size (write, lhs, size);
		return true;
	      }
	  }
	  break;

	default:
	  break;
	}
    }
  else if (is_gimple_call (stmt)
	   && gimple_call_internal_p (stmt))
    {
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_LEN_STORE:
	  ao_ref_init_from_ptr_and_size
	      (write, gimple_call_arg (stmt, 0),
	       int_const_binop (MINUS_EXPR,
				gimple_call_arg (stmt, 2),
				gimple_call_arg (stmt, 4)));
	  return true;
	case IFN_MASK_STORE:
	  /* We cannot initialize a must-def ao_ref (in all cases) but we
	     can provide a may-def variant.  */
	  if (may_def_ok)
	    {
	      ao_ref_init_from_ptr_and_size
		  (write, gimple_call_arg (stmt, 0),
		   TYPE_SIZE_UNIT (TREE_TYPE (gimple_call_arg (stmt, 3))));
	      return true;
	    }
	  break;
	default:;
	}
    }
  if (tree lhs = gimple_get_lhs (stmt))
    {
      if (TREE_CODE (lhs) != SSA_NAME
	  && (may_def_ok || !stmt_could_throw_p (cfun, stmt)))
	{
	  ao_ref_init (write, lhs);
	  return true;
	}
    }
  return false;
}

/* Given REF from the alias oracle, return TRUE if it is a valid
   kill memory reference for dead store elimination, false otherwise.

   In particular, the reference must have a known base, known maximum
   size, start at a byte offset and have a size that is one or more
   bytes.  */

static bool
valid_ao_ref_kill_for_dse (ao_ref *ref)
{
  return (ao_ref_base (ref)
	  && known_size_p (ref->max_size)
	  && maybe_ne (ref->size, 0)
	  && known_eq (ref->max_size, ref->size)
	  && known_ge (ref->offset, 0));
}

/* Given REF from the alias oracle, return TRUE if it is a valid
   load or store memory reference for dead store elimination, false otherwise.

   Unlike for valid_ao_ref_kill_for_dse we can accept writes where max_size
   is not the same as size since we can handle conservatively the larger range.  */

static bool
valid_ao_ref_for_dse (ao_ref *ref)
{
  return (ao_ref_base (ref)
	  && known_size_p (ref->max_size)
	  && known_ge (ref->offset, 0));
}

/* Initialize OFFSET and SIZE to a range known to contain REF
   where the boundaries are divisible by BITS_PER_UNIT (but still in bits).
   Return false if this is impossible.  */

static bool
get_byte_aligned_range_containing_ref (ao_ref *ref, poly_int64 *offset,
				       HOST_WIDE_INT *size)
{
  if (!known_size_p (ref->max_size))
    return false;
  *offset = aligned_lower_bound (ref->offset, BITS_PER_UNIT);
  poly_int64 end = aligned_upper_bound (ref->offset + ref->max_size,
					BITS_PER_UNIT);
  return (end - *offset).is_constant (size);
}

/* Initialize OFFSET and SIZE to a range known to be contained in REF
   where the boundaries are divisible by BITS_PER_UNIT (but still in bits).
   Return false if this is impossible.  */

static bool
get_byte_aligned_range_contained_in_ref (ao_ref *ref, poly_int64 *offset,
					 HOST_WIDE_INT *size)
{
  if (!known_size_p (ref->size)
      || !known_eq (ref->size, ref->max_size))
    return false;
  *offset = aligned_upper_bound (ref->offset, BITS_PER_UNIT);
  poly_int64 end = aligned_lower_bound (ref->offset + ref->max_size,
					BITS_PER_UNIT);
  /* For bit accesses we can get -1 here, but also 0 sized kill is not
     useful.  */
  if (!known_gt (end, *offset))
    return false;
  return (end - *offset).is_constant (size);
}

/* Compute byte range (returned in RET_OFFSET and RET_SIZE) for access COPY
   inside REF.  If KILL is true, then COPY represents a kill and the byte range
   needs to be fully contained in the bit range given by COPY.  If KILL is false
   then the byte range returned must contain the range of COPY.  */

static bool
get_byte_range (ao_ref *copy, ao_ref *ref, bool kill,
		HOST_WIDE_INT *ret_offset, HOST_WIDE_INT *ret_size)
{
  HOST_WIDE_INT copy_size, ref_size;
  poly_int64 copy_offset, ref_offset;
  HOST_WIDE_INT diff;

  /* First translate from bits to bytes, rounding to bigger or smaller ranges
     as needed.  Kills need to be always rounded to smaller ranges while
     uses and stores to larger ranges.  */
  if (kill)
    {
      if (!get_byte_aligned_range_contained_in_ref (copy, &copy_offset,
						    &copy_size))
	return false;
    }
  else
    {
      if (!get_byte_aligned_range_containing_ref (copy, &copy_offset,
						  &copy_size))
	return false;
    }

  if (!get_byte_aligned_range_containing_ref (ref, &ref_offset, &ref_size)
      || !ordered_p (copy_offset, ref_offset))
    return false;

  /* Switch sizes from bits to bytes so we do not need to care about
     overflows.  Offset calculation needs to stay in bits until we compute
     the difference and can switch to HOST_WIDE_INT.  */
  copy_size /= BITS_PER_UNIT;
  ref_size /= BITS_PER_UNIT;

  /* If COPY starts before REF, then reset the beginning of
     COPY to match REF and decrease the size of COPY by the
     number of bytes removed from COPY.  */
  if (maybe_lt (copy_offset, ref_offset))
    {
      if (!(ref_offset - copy_offset).is_constant (&diff)
	  || copy_size < diff / BITS_PER_UNIT)
	return false;
      copy_size -= diff / BITS_PER_UNIT;
      copy_offset = ref_offset;
    }

  if (!(copy_offset - ref_offset).is_constant (&diff)
      || ref_size <= diff / BITS_PER_UNIT)
    return false;

  /* If COPY extends beyond REF, chop off its size appropriately.  */
  HOST_WIDE_INT limit = ref_size - diff / BITS_PER_UNIT;

  if (copy_size > limit)
    copy_size = limit;
  *ret_size = copy_size;
  if (!(copy_offset - ref_offset).is_constant (ret_offset))
    return false;
  *ret_offset /= BITS_PER_UNIT;
  return true;
}
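
/* Illustrative numbers only: if a store covers bits [8, 40) of its base
   (bytes 1..4) and a kill covers bits [20, 40), the kill is rounded
   inward to the fully covered bytes [3, 5) of the base, i.e. offset 2
   and size 2 relative to the store; a use of the same bits is rounded
   outward instead, giving offset 1 and size 3.  */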

/* Update LIVE_BYTES tracking REF for write to WRITE:
   Verify we have the same base memory address, the write
   has a known size and overlaps with REF.  */

static void
clear_live_bytes_for_ref (sbitmap live_bytes, ao_ref *ref, ao_ref *write)
{
  HOST_WIDE_INT start, size;

  if (valid_ao_ref_kill_for_dse (write)
      && operand_equal_p (write->base, ref->base, OEP_ADDRESS_OF)
      && get_byte_range (write, ref, true, &start, &size))
    bitmap_clear_range (live_bytes, start, size);
}

/* Clear any bytes written by STMT from the bitmap LIVE_BYTES.  The base
   address written by STMT must match the one found in REF, which must
   have its base address previously initialized.

   This routine must be conservative.  If we don't know the offset or
   actual size written, assume nothing was written.  */

static void
clear_bytes_written_by (sbitmap live_bytes, gimple *stmt, ao_ref *ref)
{
  ao_ref write;

  if (gcall *call = dyn_cast <gcall *> (stmt))
    {
      bool interposed;
      modref_summary *summary = get_modref_function_summary (call, &interposed);

      if (summary && !interposed)
	for (auto kill : summary->kills)
	  if (kill.get_ao_ref (as_a <gcall *> (stmt), &write))
	    clear_live_bytes_for_ref (live_bytes, ref, &write);
    }
  if (!initialize_ao_ref_for_dse (stmt, &write))
    return;

  clear_live_bytes_for_ref (live_bytes, ref, &write);
}

/* REF is a memory write.  Extract relevant information from it and
   initialize the LIVE_BYTES bitmap.  If successful, return TRUE.
   Otherwise return FALSE.  */

static bool
setup_live_bytes_from_ref (ao_ref *ref, sbitmap live_bytes)
{
  HOST_WIDE_INT const_size;
  if (valid_ao_ref_for_dse (ref)
      && ((aligned_upper_bound (ref->offset + ref->max_size, BITS_PER_UNIT)
	   - aligned_lower_bound (ref->offset,
				  BITS_PER_UNIT)).is_constant (&const_size))
      && (const_size / BITS_PER_UNIT <= param_dse_max_object_size)
      && const_size > 1)
    {
      bitmap_clear (live_bytes);
      bitmap_set_range (live_bytes, 0, const_size / BITS_PER_UNIT);
      return true;
    }

  return false;
}
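
/* Illustrative example of the byte tracking set up above: for a store
   to a 16 byte object, LIVE_BYTES gets bits 0..15 set.  Subsequent
   kills clear the bytes they overwrite, and once the bitmap is empty
   the original store has been proven dead.  */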

/* Compute the number of elements that we can trim from the head and
   tail of ORIG resulting in a bitmap that is a superset of LIVE.

   Store the number of elements trimmed from the head and tail in
   TRIM_HEAD and TRIM_TAIL.

   STMT is the statement being trimmed and is used for debugging dump
   output only.  */

static void
compute_trims (ao_ref *ref, sbitmap live, int *trim_head, int *trim_tail,
	       gimple *stmt)
{
  /* We use sbitmaps biased such that ref->offset is bit zero and the bitmap
     extends through ref->size.  So we know that in the original bitmap
     bits 0..ref->size were true.  We don't actually need the bitmap, just
     the REF to compute the trims.  */

  /* Now identify how much, if any of the tail we can chop off.  */
  HOST_WIDE_INT const_size;
  int last_live = bitmap_last_set_bit (live);
  if (ref->size.is_constant (&const_size))
    {
      int last_orig = (const_size / BITS_PER_UNIT) - 1;
      /* We can leave inconvenient amounts on the tail as
	 residual handling in mem* and str* functions is usually
	 reasonably efficient.  */
      *trim_tail = last_orig - last_live;

      /* But don't trim away out of bounds accesses, as this defeats
	 proper warnings.

	 We could have a type with no TYPE_SIZE_UNIT or we could have a VLA
	 where TYPE_SIZE_UNIT is not a constant.  */
      if (*trim_tail
	  && TYPE_SIZE_UNIT (TREE_TYPE (ref->base))
	  && TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (ref->base))) == INTEGER_CST
	  && compare_tree_int (TYPE_SIZE_UNIT (TREE_TYPE (ref->base)),
			       last_orig) <= 0)
	*trim_tail = 0;
    }
  else
    *trim_tail = 0;

  /* Identify how much, if any of the head we can chop off.  */
  int first_orig = 0;
  int first_live = bitmap_first_set_bit (live);
  *trim_head = first_live - first_orig;

  /* If REF is aligned, try to maintain this alignment if it reduces
     the number of (power-of-two sized aligned) writes to memory.  */
  unsigned int align_bits;
  unsigned HOST_WIDE_INT bitpos;
  if ((*trim_head || *trim_tail)
      && last_live - first_live >= 2
      && ao_ref_alignment (ref, &align_bits, &bitpos)
      && align_bits % BITS_PER_UNIT == 0)
    {
      unsigned int align_units = align_bits / BITS_PER_UNIT;
      if (align_units > 16)
	align_units = 16;
      while ((first_live | (align_units - 1)) > (unsigned int) last_live)
	align_units >>= 1;

      if (*trim_head)
	{
	  unsigned int pos = first_live & (align_units - 1);
	  for (unsigned int i = 1; i <= align_units; i <<= 1)
	    {
	      unsigned int mask = ~(i - 1);
	      unsigned int bytes = align_units - (pos & mask);
	      if (wi::popcount (bytes) <= 1)
		{
		  *trim_head &= mask;
		  break;
		}
	    }
	}

      if (*trim_tail)
	{
	  unsigned int pos = last_live & (align_units - 1);
	  for (unsigned int i = 1; i <= align_units; i <<= 1)
	    {
	      unsigned int mask = i - 1;
	      unsigned int bytes = (pos | mask) + 1;
	      if ((last_live | mask) > (last_live + *trim_tail))
		break;
	      if (wi::popcount (bytes) <= 1)
		{
		  unsigned int extra = (last_live | mask) - last_live;
		  *trim_tail -= extra;
		  break;
		}
	    }
	}
    }

  if ((*trim_head || *trim_tail)
      && dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "  Trimming statement (head = %d, tail = %d): ",
	       *trim_head, *trim_tail);
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
      fprintf (dump_file, "\n");
    }
}
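
/* Purely illustrative example: for a 16 byte store of which only bytes
   4..11 remain live, compute_trims (ignoring the alignment heuristics
   above) returns trim_head == 4 and trim_tail == 4, allowing callers to
   rewrite the store so it touches only the middle 8 bytes.  */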

/* STMT initializes an object from COMPLEX_CST where one or more of the
   bytes written may be dead stores.  REF is a representation of the
   memory written.  LIVE is the bitmap of stores that are actually live.

   Attempt to rewrite STMT so that only the real or imaginary part of
   the object is actually stored.  */

static void
maybe_trim_complex_store (ao_ref *ref, sbitmap live, gimple *stmt)
{
  int trim_head, trim_tail;
  compute_trims (ref, live, &trim_head, &trim_tail, stmt);

  /* The amount of data trimmed from the head or tail must be at
     least half the size of the object to ensure we're trimming
     the entire real or imaginary half.  By writing things this
     way we avoid more O(n) bitmap operations.  */
  if (known_ge (trim_tail * 2 * BITS_PER_UNIT, ref->size))
    {
      /* TREE_REALPART is live */
      tree x = TREE_REALPART (gimple_assign_rhs1 (stmt));
      tree y = gimple_assign_lhs (stmt);
      y = build1 (REALPART_EXPR, TREE_TYPE (x), y);
      gimple_assign_set_lhs (stmt, y);
      gimple_assign_set_rhs1 (stmt, x);
    }
  else if (known_ge (trim_head * 2 * BITS_PER_UNIT, ref->size))
    {
      /* TREE_IMAGPART is live */
      tree x = TREE_IMAGPART (gimple_assign_rhs1 (stmt));
      tree y = gimple_assign_lhs (stmt);
      y = build1 (IMAGPART_EXPR, TREE_TYPE (x), y);
      gimple_assign_set_lhs (stmt, y);
      gimple_assign_set_rhs1 (stmt, x);
    }

  /* Other cases indicate parts of both the real and imag subobjects
     are live.  We do not try to optimize those cases.  */
}
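
/* Illustrative example: if only the real half of

     c = __complex__ (1.0, 2.0);

   is live, the store is rewritten as

     REALPART_EXPR <c> = 1.0;

   so the dead imaginary half is no longer written.  */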

/* STMT initializes an object using a CONSTRUCTOR where one or more of the
   bytes written are dead stores.  ORIG is the bitmap of bytes stored by
   STMT.  LIVE is the bitmap of stores that are actually live.

   Attempt to rewrite STMT so that it writes fewer memory locations.

   The most common case for getting here is a CONSTRUCTOR with no elements
   being used to zero initialize an object.  We do not try to handle other
   cases as those would force us to fully cover the object with the
   CONSTRUCTOR node except for the components that are dead.  */

static void
maybe_trim_constructor_store (ao_ref *ref, sbitmap live, gimple *stmt)
{
  tree ctor = gimple_assign_rhs1 (stmt);

  /* This is the only case we currently handle.  It actually seems to
     catch most cases of actual interest.  */
  gcc_assert (CONSTRUCTOR_NELTS (ctor) == 0);

  int head_trim = 0;
  int tail_trim = 0;
  compute_trims (ref, live, &head_trim, &tail_trim, stmt);

  /* Now we want to replace the constructor initializer
     with memset (object + head_trim, 0, size - head_trim - tail_trim).  */
  if (head_trim || tail_trim)
    {
      /* We want &lhs for the MEM_REF expression.  */
      tree lhs_addr = build_fold_addr_expr (gimple_assign_lhs (stmt));

      if (! is_gimple_min_invariant (lhs_addr))
	return;

      /* The number of bytes for the new constructor.  */
      poly_int64 ref_bytes = exact_div (ref->size, BITS_PER_UNIT);
      poly_int64 count = ref_bytes - head_trim - tail_trim;

      /* And the new type for the CONSTRUCTOR.  Essentially it's just
	 a char array large enough to cover the non-trimmed parts of
	 the original CONSTRUCTOR.  Note we want explicit bounds here
	 so that we know how many bytes to clear when expanding the
	 CONSTRUCTOR.  */
      tree type = build_array_type_nelts (char_type_node, count);

      /* Build a suitable alias type rather than using alias set zero
	 to avoid pessimizing.  */
      tree alias_type = reference_alias_ptr_type (gimple_assign_lhs (stmt));

      /* Build a MEM_REF representing the whole accessed area, starting
	 at the first byte not trimmed.  */
      tree exp = fold_build2 (MEM_REF, type, lhs_addr,
			      build_int_cst (alias_type, head_trim));

      /* Now update STMT with a new RHS and LHS.  */
      gimple_assign_set_lhs (stmt, exp);
      gimple_assign_set_rhs1 (stmt, build_constructor (type, NULL));
    }
}
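
/* Illustrative example: for a zero-initializing store

     x = {};   empty CONSTRUCTOR clearing a 16 byte object

   where only bytes 4..15 are live, the statement is rewritten into an
   empty CONSTRUCTOR store to a 12 element char array at &x + 4,
   expressed as a MEM_REF as described above.  */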

/* STMT is a memcpy, memmove or memset.  Decrement the number of bytes
   copied/set by DECREMENT.  */

static void
decrement_count (gimple *stmt, int decrement)
{
  tree *countp = gimple_call_arg_ptr (stmt, 2);
  gcc_assert (TREE_CODE (*countp) == INTEGER_CST);
  *countp = wide_int_to_tree (TREE_TYPE (*countp), (TREE_INT_CST_LOW (*countp)
						    - decrement));
}

/* STMT is a call to a mem*/str* builtin.  Adjust the start address
   pointed to by WHERE by INCREMENT bytes, inserting any new statements
   this requires before or after STMT.  */

static void
increment_start_addr (gimple *stmt, tree *where, int increment)
{
  if (tree lhs = gimple_call_lhs (stmt))
    if (where == gimple_call_arg_ptr (stmt, 0))
      {
	gassign *newop = gimple_build_assign (lhs, unshare_expr (*where));
	gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
	gsi_insert_after (&gsi, newop, GSI_SAME_STMT);
	gimple_call_set_lhs (stmt, NULL_TREE);
	update_stmt (stmt);
      }

  if (TREE_CODE (*where) == SSA_NAME)
    {
      tree tem = make_ssa_name (TREE_TYPE (*where));
      gassign *newop
	= gimple_build_assign (tem, POINTER_PLUS_EXPR, *where,
			       build_int_cst (sizetype, increment));
      gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
      gsi_insert_before (&gsi, newop, GSI_SAME_STMT);
      *where = tem;
      update_stmt (stmt);
      return;
    }

  *where = build_fold_addr_expr (fold_build2 (MEM_REF, char_type_node,
					      *where,
					      build_int_cst (ptr_type_node,
							     increment)));
}

/* STMT is builtin call that writes bytes in bitmap ORIG, some bytes are dead
   (ORIG & ~NEW) and need not be stored.  Try to rewrite STMT to reduce
   the amount of data it actually writes.

   Right now we only support trimming from the head or the tail of the
   memory region.  In theory we could split the mem* call, but it's
   likely of marginal value.  */

static void
maybe_trim_memstar_call (ao_ref *ref, sbitmap live, gimple *stmt)
{
  int head_trim, tail_trim;
  switch (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)))
    {
    case BUILT_IN_STRNCPY:
    case BUILT_IN_STRNCPY_CHK:
      compute_trims (ref, live, &head_trim, &tail_trim, stmt);
      if (head_trim)
	{
	  /* Head trimming of strncpy is only possible if we can
	     prove all bytes we would trim are non-zero (or we could
	     turn the strncpy into memset if there must be zero
	     among the head trimmed bytes).  If we don't know anything
	     about those bytes, the presence or absence of '\0' bytes
	     in there will affect whether it acts for the non-trimmed
	     bytes as memset or memcpy/strncpy.  */
	  c_strlen_data lendata = { };
	  int orig_head_trim = head_trim;
	  tree srcstr = gimple_call_arg (stmt, 1);
	  if (!get_range_strlen (srcstr, &lendata, /*eltsize=*/1)
	      || !tree_fits_uhwi_p (lendata.minlen))
	    head_trim = 0;
	  else if (tree_to_uhwi (lendata.minlen) < (unsigned) head_trim)
	    {
	      head_trim = tree_to_uhwi (lendata.minlen);
	      if ((orig_head_trim & (UNITS_PER_WORD - 1)) == 0)
		head_trim &= ~(UNITS_PER_WORD - 1);
	    }
	  if (orig_head_trim != head_trim
	      && dump_file
	      && (dump_flags & TDF_DETAILS))
	    fprintf (dump_file,
		     "  Adjusting strncpy trimming to (head = %d,"
		     " tail = %d)\n", head_trim, tail_trim);
	}
      goto do_memcpy;

    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
      compute_trims (ref, live, &head_trim, &tail_trim, stmt);

    do_memcpy:
      /* Tail trimming is easy, we can just reduce the count.  */
      if (tail_trim)
	decrement_count (stmt, tail_trim);

      /* Head trimming requires adjusting all the arguments.  */
      if (head_trim)
	{
	  /* For __*_chk need to adjust also the last argument.  */
	  if (gimple_call_num_args (stmt) == 4)
	    {
	      tree size = gimple_call_arg (stmt, 3);
	      if (!tree_fits_uhwi_p (size))
		break;
	      if (!integer_all_onesp (size))
		{
		  unsigned HOST_WIDE_INT sz = tree_to_uhwi (size);
		  if (sz < (unsigned) head_trim)
		    break;
		  tree arg = wide_int_to_tree (TREE_TYPE (size),
					       sz - head_trim);
		  gimple_call_set_arg (stmt, 3, arg);
		}
	    }
	  tree *dst = gimple_call_arg_ptr (stmt, 0);
	  increment_start_addr (stmt, dst, head_trim);
	  tree *src = gimple_call_arg_ptr (stmt, 1);
	  increment_start_addr (stmt, src, head_trim);
	  decrement_count (stmt, head_trim);
	}
      break;

    case BUILT_IN_MEMSET:
    case BUILT_IN_MEMSET_CHK:
      compute_trims (ref, live, &head_trim, &tail_trim, stmt);

      /* Tail trimming is easy, we can just reduce the count.  */
      if (tail_trim)
	decrement_count (stmt, tail_trim);

      /* Head trimming requires adjusting all the arguments.  */
      if (head_trim)
	{
	  /* For __*_chk need to adjust also the last argument.  */
	  if (gimple_call_num_args (stmt) == 4)
	    {
	      tree size = gimple_call_arg (stmt, 3);
	      if (!tree_fits_uhwi_p (size))
		break;
	      if (!integer_all_onesp (size))
		{
		  unsigned HOST_WIDE_INT sz = tree_to_uhwi (size);
		  if (sz < (unsigned) head_trim)
		    break;
		  tree arg = wide_int_to_tree (TREE_TYPE (size),
					       sz - head_trim);
		  gimple_call_set_arg (stmt, 3, arg);
		}
	    }
	  tree *dst = gimple_call_arg_ptr (stmt, 0);
	  increment_start_addr (stmt, dst, head_trim);
	  decrement_count (stmt, head_trim);
	}
      break;

    default:
      break;
    }
}
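
/* Illustrative example: if the last 8 bytes written by

     memset (p, 0, 32);

   are overwritten before any read, tail trimming rewrites the call to
   memset (p, 0, 24); if instead the first 8 bytes are dead, head
   trimming produces memset (p + 8, 0, 24).  */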

/* STMT is a memory write where one or more bytes written are dead
   stores.  ORIG is the bitmap of bytes stored by STMT.  LIVE is the
   bitmap of stores that are actually live.

   Attempt to rewrite STMT so that it writes fewer memory locations.  Right
   now we only support trimming at the start or end of the memory region.
   It's not clear how much there is to be gained by trimming from the middle
   of the region.  */

static void
maybe_trim_partially_dead_store (ao_ref *ref, sbitmap live, gimple *stmt)
{
  if (is_gimple_assign (stmt)
      && TREE_CODE (gimple_assign_lhs (stmt)) != TARGET_MEM_REF)
    {
      switch (gimple_assign_rhs_code (stmt))
	{
	case CONSTRUCTOR:
	  maybe_trim_constructor_store (ref, live, stmt);
	  break;
	case COMPLEX_CST:
	  maybe_trim_complex_store (ref, live, stmt);
	  break;
	default:
	  break;
	}
    }
}

/* Return TRUE if USE_REF reads bytes from LIVE where LIVE is
   derived from REF, a write reference.

   While this routine may modify USE_REF, it's passed by value, not
   location.  So callers do not see those modifications.  */

static bool
live_bytes_read (ao_ref *use_ref, ao_ref *ref, sbitmap live)
{
  /* We have already verified that USE_REF and REF hit the same object.
     Now verify that there's actually an overlap between USE_REF and REF.  */
  HOST_WIDE_INT start, size;
  if (get_byte_range (use_ref, ref, false, &start, &size))
    {
      /* If USE_REF covers all of REF, then it will hit one or more
	 live bytes.   This avoids useless iteration over the bitmap
	 below.  */
      if (start == 0 && known_eq (size * 8, ref->size))
	return true;

      /* Now check if any of the remaining bits in use_ref are set in LIVE.  */
      return bitmap_bit_in_range_p (live, start, (start + size - 1));
    }
  return true;
}

/* Callback for dse_classify_store calling for_each_index.  Verify that
   indices are invariant in the loop with backedge PHI in basic-block DATA.  */

static bool
check_name (tree, tree *idx, void *data)
{
  basic_block phi_bb = (basic_block) data;
  if (TREE_CODE (*idx) == SSA_NAME
      && !SSA_NAME_IS_DEFAULT_DEF (*idx)
      && dominated_by_p (CDI_DOMINATORS, gimple_bb (SSA_NAME_DEF_STMT (*idx)),
			 phi_bb))
    return false;
  return true;
}

/* STMT stores the value 0 into one or more memory locations
   (via memset, empty constructor, calloc call, etc).

   See if there is a subsequent store of the value 0 to one
   or more of the same memory location(s).  If so, the subsequent
   store is redundant and can be removed.

   The subsequent stores could be via memset, empty constructors,
   simple MEM stores, etc.  */

static void
dse_optimize_redundant_stores (gimple *stmt)
{
  int cnt = 0;

  /* TBAA state of STMT, if it is a call it is effectively alias-set zero.  */
  alias_set_type earlier_set = 0;
  alias_set_type earlier_base_set = 0;
  if (is_gimple_assign (stmt))
    {
      ao_ref lhs_ref;
      ao_ref_init (&lhs_ref, gimple_assign_lhs (stmt));
      earlier_set = ao_ref_alias_set (&lhs_ref);
      earlier_base_set = ao_ref_base_alias_set (&lhs_ref);
    }

  /* We could do something fairly complex and look through PHIs
     like DSE_CLASSIFY_STORE, but it doesn't seem to be worth
     the effort.

     Look at all the immediate uses of the VDEF (which are obviously
     dominated by STMT).   See if one or more stores 0 into the same
     memory locations as STMT, if so remove the immediate use statements.  */
  tree defvar = gimple_vdef (stmt);
  imm_use_iterator ui;
  gimple *use_stmt;
  FOR_EACH_IMM_USE_STMT (use_stmt, ui, defvar)
    {
      /* Limit stmt walking.  */
      if (++cnt > param_dse_max_alias_queries_per_store)
	break;

      /* If USE_STMT stores 0 into one or more of the same locations
	 as STMT and STMT would kill USE_STMT, then we can just remove
	 USE_STMT.  */
      tree fndecl;
      if ((is_gimple_assign (use_stmt)
	   && gimple_vdef (use_stmt)
	   && (gimple_assign_single_p (use_stmt)
	       && initializer_zerop (gimple_assign_rhs1 (use_stmt))))
	  || (gimple_call_builtin_p (use_stmt, BUILT_IN_NORMAL)
	      && (fndecl = gimple_call_fndecl (use_stmt)) != NULL
	      && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
		  || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET_CHK)
	      && integer_zerop (gimple_call_arg (use_stmt, 1))))
	{
	  ao_ref write;

	  if (!initialize_ao_ref_for_dse (use_stmt, &write))
	    break;

	  if (valid_ao_ref_for_dse (&write)
	      && stmt_kills_ref_p (stmt, &write))
	    {
	      gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
	      if (is_gimple_assign (use_stmt))
		{
		  ao_ref lhs_ref;
		  ao_ref_init (&lhs_ref, gimple_assign_lhs (use_stmt));
		  if ((earlier_set == ao_ref_alias_set (&lhs_ref)
		       || alias_set_subset_of (ao_ref_alias_set (&lhs_ref),
					       earlier_set))
		      && (earlier_base_set == ao_ref_base_alias_set (&lhs_ref)
			  || alias_set_subset_of
			       (ao_ref_base_alias_set (&lhs_ref),
				earlier_base_set)))
		    delete_dead_or_redundant_assignment (&gsi, "redundant",
							 need_eh_cleanup,
							 need_ab_cleanup);
		}
	      else if (is_gimple_call (use_stmt))
		{
		  if ((earlier_set == 0
		       || alias_set_subset_of (0, earlier_set))
		      && (earlier_base_set == 0
			  || alias_set_subset_of (0, earlier_base_set)))
		    delete_dead_or_redundant_call (&gsi, "redundant");
		}
	      else
		gcc_unreachable ();
	    }
	}
    }
}
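
/* Illustrative example of a redundant store handled above:

     p = calloc (n, sizeof (int));
     memset (p, 0, n * sizeof (int));   redundant, calloc already zeroed

   The memset stores nothing new over the calloc'ed region and is
   deleted as a redundant store.  */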

/* Return whether PHI contains ARG as an argument.  */

static bool
contains_phi_arg (gphi *phi, tree arg)
{
  for (unsigned i = 0; i < gimple_phi_num_args (phi); ++i)
    if (gimple_phi_arg_def (phi, i) == arg)
      return true;
  return false;
}

/* Hash map of the memory use in a GIMPLE assignment to its
   data reference.  If NULL data-ref analysis isn't used.  */
static hash_map<gimple *, data_reference_p> *dse_stmt_to_dr_map;

/* A helper of dse_optimize_stmt.
   Given a GIMPLE_ASSIGN in STMT that writes to REF, classify it
   according to downstream uses and defs.  Sets *BY_CLOBBER_P to true
   if only clobber statements influenced the classification result.
   Returns the classification.  */

dse_store_status
dse_classify_store (ao_ref *ref, gimple *stmt,
		    bool byte_tracking_enabled, sbitmap live_bytes,
		    bool *by_clobber_p, tree stop_at_vuse)
{
  gimple *temp;
  int cnt = 0;
  auto_bitmap visited;
  std::unique_ptr<data_reference, void(*)(data_reference_p)>
    dra (nullptr, free_data_ref);

  if (by_clobber_p)
    *by_clobber_p = true;

  /* Find the first dominated statement that clobbers (part of) the
     memory stmt stores to with no intermediate statement that may use
     part of the memory stmt stores.  That is, find a store that may
     prove stmt to be a dead store.  */
  temp = stmt;
  do
    {
      gimple *use_stmt;
      imm_use_iterator ui;
      bool fail = false;
      tree defvar;

      if (gimple_code (temp) == GIMPLE_PHI)
	{
	  defvar = PHI_RESULT (temp);
	  bitmap_set_bit (visited, SSA_NAME_VERSION (defvar));
	}
      else
	defvar = gimple_vdef (temp);

      auto_vec<gimple *, 10> defs;
      gphi *first_phi_def = NULL;
      gphi *last_phi_def = NULL;

      auto_vec<tree, 10> worklist;
      worklist.quick_push (defvar);

      do
	{
	  defvar = worklist.pop ();
	  /* If we're instructed to stop walking at region boundary, do so.  */
	  if (defvar == stop_at_vuse)
	    return DSE_STORE_LIVE;

	  FOR_EACH_IMM_USE_STMT (use_stmt, ui, defvar)
	    {
	      /* Limit stmt walking.  */
	      if (++cnt > param_dse_max_alias_queries_per_store)
		{
		  fail = true;
		  break;
		}

	      /* In simple cases we can look through PHI nodes, but we
		 have to be careful with loops and with memory references
		 containing operands that are also operands of PHI nodes.
		 See gcc.c-torture/execute/20051110-*.c.  */
	      if (gimple_code (use_stmt) == GIMPLE_PHI)
		{
		  /* Look through single-argument PHIs.  */
		  if (gimple_phi_num_args (use_stmt) == 1)
		    worklist.safe_push (gimple_phi_result (use_stmt));

		  /* If we already visited this PHI ignore it for further
		     processing.  */
		  else if (!bitmap_bit_p (visited,
					  SSA_NAME_VERSION
					    (PHI_RESULT (use_stmt))))
		    {
		      /* If we visit this PHI by following a backedge then we
			 have to make sure ref->ref only refers to SSA names
			 that are invariant with respect to the loop
			 represented by this PHI node.  */
		      if (dominated_by_p (CDI_DOMINATORS, gimple_bb (stmt),
					  gimple_bb (use_stmt))
			  && !for_each_index (ref->ref ? &ref->ref : &ref->base,
					      check_name, gimple_bb (use_stmt)))
			return DSE_STORE_LIVE;
		      defs.safe_push (use_stmt);
		      if (!first_phi_def)
			first_phi_def = as_a <gphi *> (use_stmt);
		      last_phi_def = as_a <gphi *> (use_stmt);
		    }
		}
	      /* If the statement is a use the store is not dead.  */
	      else if (ref_maybe_used_by_stmt_p (use_stmt, ref))
		{
		  if (dse_stmt_to_dr_map
		      && ref->ref
		      && is_gimple_assign (use_stmt))
		    {
		      if (!dra)
			dra.reset (create_data_ref (NULL, NULL, ref->ref, stmt,
						    false, false));
		      bool existed_p;
		      data_reference_p &drb
			= dse_stmt_to_dr_map->get_or_insert (use_stmt,
							     &existed_p);
		      if (!existed_p)
			drb = create_data_ref (NULL, NULL,
					       gimple_assign_rhs1 (use_stmt),
					       use_stmt, false, false);
		      if (!dr_may_alias_p (dra.get (), drb, NULL))
			{
			  if (gimple_vdef (use_stmt))
			    defs.safe_push (use_stmt);
			  continue;
			}
		    }

		  /* Handle common cases where we can easily build an ao_ref
		     structure for USE_STMT and in doing so we find that the
		     references hit non-live bytes and thus can be ignored.

		     TODO: We can also use modref summary to handle calls.  */
		  if (byte_tracking_enabled
		      && is_gimple_assign (use_stmt))
		    {
		      ao_ref use_ref;
		      ao_ref_init (&use_ref, gimple_assign_rhs1 (use_stmt));
		      if (valid_ao_ref_for_dse (&use_ref)
			  && operand_equal_p (use_ref.base, ref->base,
					      OEP_ADDRESS_OF)
			  && !live_bytes_read (&use_ref, ref, live_bytes))
			{
			  /* If this is a store, remember it as we possibly
			     need to walk the defs uses.  */
			  if (gimple_vdef (use_stmt))
			    defs.safe_push (use_stmt);
			  continue;
			}
		    }

		  fail = true;
		  break;
		}
	      /* We have visited ourselves already so ignore STMT for the
		 purpose of chaining.  */
	      else if (use_stmt == stmt)
		;
	      /* If this is a store, remember it as we possibly need to walk the
		 defs uses.  */
	      else if (gimple_vdef (use_stmt))
		defs.safe_push (use_stmt);
	    }
	}
      while (!fail && !worklist.is_empty ());

      if (fail)
	{
	  /* STMT might be partially dead and we may be able to reduce
	     how many memory locations it stores into.  */
	  if (byte_tracking_enabled && !gimple_clobber_p (stmt))
	    return DSE_STORE_MAYBE_PARTIAL_DEAD;
	  return DSE_STORE_LIVE;
	}

      /* If we didn't find any definition this means the store is dead
	 if it isn't a store to global reachable memory.  In this case
	 just pretend the stmt makes itself dead.  Otherwise fail.  */
      if (defs.is_empty ())
	{
	  if (ref_may_alias_global_p (ref, false))
	    return DSE_STORE_LIVE;

	  if (by_clobber_p)
	    *by_clobber_p = false;
	  return DSE_STORE_DEAD;
	}

      /* Process defs and remove those we need not process further.  */
      for (unsigned i = 0; i < defs.length ();)
	{
	  gimple *def = defs[i];
	  gimple *use_stmt;
	  use_operand_p use_p;
	  tree vdef = (gimple_code (def) == GIMPLE_PHI
		       ? gimple_phi_result (def) : gimple_vdef (def));
	  gphi *phi_def;
	  /* If the path to check starts with a kill we do not need to
	     process it further.
	     ???  With byte tracking we need only kill the bytes currently
	     live.  */
	  if (stmt_kills_ref_p (def, ref))
	    {
	      if (by_clobber_p && !gimple_clobber_p (def))
		*by_clobber_p = false;
	      defs.unordered_remove (i);
	    }
	  /* If the path ends here we do not need to process it further.
	     This for example happens with calls to noreturn functions.  */
	  else if (has_zero_uses (vdef))
	    {
	      /* But if the store is to global memory it is definitely
		 not dead.  */
	      if (ref_may_alias_global_p (ref, false))
		return DSE_STORE_LIVE;
	      defs.unordered_remove (i);
	    }
	  /* In addition to kills we can remove defs whose only use
	     is another def in defs.  That can only ever be PHIs of which
	     we track two for simplicity reasons, the first and last in
	     {first,last}_phi_def (we fail for multiple PHIs anyways).
	     We can also ignore defs that feed only into
	     already visited PHIs.  */
	  else if (single_imm_use (vdef, &use_p, &use_stmt)
		   && (use_stmt == first_phi_def
		       || use_stmt == last_phi_def
		       || (gimple_code (use_stmt) == GIMPLE_PHI
			   && bitmap_bit_p (visited,
					    SSA_NAME_VERSION
					      (PHI_RESULT (use_stmt))))))
	    {
	      defs.unordered_remove (i);
	      if (def == first_phi_def)
		first_phi_def = NULL;
	      else if (def == last_phi_def)
		last_phi_def = NULL;
	    }
	  /* If def is a PHI and one of its arguments is another PHI node still
	     in consideration we can defer processing it.  */
	  else if ((phi_def = dyn_cast <gphi *> (def))
		   && ((last_phi_def
			&& phi_def != last_phi_def
			&& contains_phi_arg (phi_def,
					     gimple_phi_result (last_phi_def)))
		       || (first_phi_def
			   && phi_def != first_phi_def
			   && contains_phi_arg
				(phi_def, gimple_phi_result (first_phi_def)))))
	    {
	      defs.unordered_remove (i);
	      if (phi_def == first_phi_def)
		first_phi_def = NULL;
	      else if (phi_def == last_phi_def)
		last_phi_def = NULL;
	    }
	  else
	    ++i;
	}

      /* If all defs kill the ref we are done.  */
      if (defs.is_empty ())
	return DSE_STORE_DEAD;
      /* If more than one def survives fail.  */
      if (defs.length () > 1)
	{
	  /* STMT might be partially dead and we may be able to reduce
	     how many memory locations it stores into.  */
	  if (byte_tracking_enabled && !gimple_clobber_p (stmt))
	    return DSE_STORE_MAYBE_PARTIAL_DEAD;
	  return DSE_STORE_LIVE;
	}
      temp = defs[0];

      /* Track partial kills.  */
      if (byte_tracking_enabled)
	{
	  clear_bytes_written_by (live_bytes, temp, ref);
	  if (bitmap_empty_p (live_bytes))
	    {
	      if (by_clobber_p && !gimple_clobber_p (temp))
		*by_clobber_p = false;
	      return DSE_STORE_DEAD;
	    }
	}
    }
  /* Continue walking until there are no more live bytes.  */
  while (1);
}

/* Delete a dead call at GSI, which is mem* call of some kind.  */
static void
delete_dead_or_redundant_call (gimple_stmt_iterator *gsi, const char *type)
{
  gimple *stmt = gsi_stmt (*gsi);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "  Deleted %s call: ", type);
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
      fprintf (dump_file, "\n");
    }

  basic_block bb = gimple_bb (stmt);
  tree lhs = gimple_call_lhs (stmt);
  if (lhs)
    {
      tree ptr = gimple_call_arg (stmt, 0);
      gimple *new_stmt = gimple_build_assign (lhs, ptr);
      unlink_stmt_vdef (stmt);
      if (gsi_replace (gsi, new_stmt, true))
	bitmap_set_bit (need_eh_cleanup, bb->index);
    }
  else
    {
      /* Then we need to fix the operand of the consuming stmt.  */
      unlink_stmt_vdef (stmt);

      /* Remove the dead store.  */
      if (gsi_remove (gsi, true))
	bitmap_set_bit (need_eh_cleanup, bb->index);
      release_defs (stmt);
    }
}

/* Delete a dead store at GSI, which is a gimple assignment.  */

void
delete_dead_or_redundant_assignment (gimple_stmt_iterator *gsi,
				     const char *type,
				     bitmap need_eh_cleanup,
				     bitmap need_ab_cleanup)
{
  gimple *stmt = gsi_stmt (*gsi);
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "  Deleted %s store: ", type);
      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
      fprintf (dump_file, "\n");
    }

  /* Then we need to fix the operand of the consuming stmt.  */
  unlink_stmt_vdef (stmt);

  /* Remove the dead store.  */
  basic_block bb = gimple_bb (stmt);
  if (need_ab_cleanup && stmt_can_make_abnormal_goto (stmt))
    bitmap_set_bit (need_ab_cleanup, bb->index);
  if (gsi_remove (gsi, true) && need_eh_cleanup)
    bitmap_set_bit (need_eh_cleanup, bb->index);

  /* And release any SSA_NAMEs set in this statement back to the
     SSA_NAME manager.  */
  release_defs (stmt);
}

/* Try to prove, using modref summary, that all memory written to by a call is
   dead and remove it.  Assume that if return value is written to memory
   it is already proved to be dead.  */

static bool
dse_optimize_call (gimple_stmt_iterator *gsi, sbitmap live_bytes)
{
  gcall *stmt = dyn_cast <gcall *> (gsi_stmt (*gsi));

  if (!stmt)
    return false;

  tree callee = gimple_call_fndecl (stmt);

  if (!callee)
    return false;

  /* Pure/const functions are optimized by normal DCE
     or handled as store above.  */
  int flags = gimple_call_flags (stmt);
  if ((flags & (ECF_PURE|ECF_CONST|ECF_NOVOPS))
      && !(flags & (ECF_LOOPING_CONST_OR_PURE)))
    return false;

  cgraph_node *node = cgraph_node::get (callee);
  if (!node)
    return false;

  if (stmt_could_throw_p (cfun, stmt)
      && !cfun->can_delete_dead_exceptions)
    return false;

  /* If return value is used the call is not dead.  */
  tree lhs = gimple_call_lhs (stmt);
  if (lhs && TREE_CODE (lhs) == SSA_NAME)
    {
      imm_use_iterator ui;
      gimple *use_stmt;
      FOR_EACH_IMM_USE_STMT (use_stmt, ui, lhs)
	if (!is_gimple_debug (use_stmt))
	  return false;
    }

  /* Verify that there are no side-effects except for return value
     and memory writes tracked by modref.  */
  modref_summary *summary = get_modref_function_summary (node);
  if (!summary || !summary->try_dse)
    return false;

  bool by_clobber_p = false;

  /* Walk all memory writes and verify that they are dead.  */
  for (auto base_node : summary->stores->bases)
    for (auto ref_node : base_node->refs)
      for (auto access_node : ref_node->accesses)
	{
	  tree arg = access_node.get_call_arg (stmt);

	  if (!arg || !POINTER_TYPE_P (TREE_TYPE (arg)))
	    return false;

	  if (integer_zerop (arg)
	      && !targetm.addr_space.zero_address_valid
		    (TYPE_ADDR_SPACE (TREE_TYPE (arg))))
	    continue;

	  ao_ref ref;

	  if (!access_node.get_ao_ref (stmt, &ref))
	    return false;
	  ref.ref_alias_set = ref_node->ref;
	  ref.base_alias_set = base_node->base;

	  bool byte_tracking_enabled
	      = setup_live_bytes_from_ref (&ref, live_bytes);
	  enum dse_store_status store_status;

	  store_status = dse_classify_store (&ref, stmt,
					     byte_tracking_enabled,
					     live_bytes, &by_clobber_p);
	  if (store_status != DSE_STORE_DEAD)
	    return false;
	}
  delete_dead_or_redundant_assignment (gsi, "dead", need_eh_cleanup,
				       need_ab_cleanup);
  return true;
}

/* Attempt to eliminate dead stores in the statement referenced by BSI.

   A dead store is a store into a memory location which will later be
   overwritten by another store without any intervening loads.  In this
   case the earlier store can be deleted.

   In our SSA + virtual operand world we use immediate uses of virtual
   operands to detect dead stores.  If a store's virtual definition
   is used precisely once by a later store to the same location which
   post dominates the first store, then the first store is dead.  */

static void
dse_optimize_stmt (function *fun, gimple_stmt_iterator *gsi, sbitmap live_bytes)
{
  gimple *stmt = gsi_stmt (*gsi);

  /* Don't return early on *this_2(D) ={v} {CLOBBER}.  */
  if (gimple_has_volatile_ops (stmt)
      && (!gimple_clobber_p (stmt)
	  || TREE_CODE (gimple_assign_lhs (stmt)) != MEM_REF))
    return;

  ao_ref ref;
  /* If this is not a store we can still remove dead call using
     modref summary.  Note we specifically allow ref to be initialized
     to a conservative may-def since we are looking for followup stores
     to kill all of it.  */
  if (!initialize_ao_ref_for_dse (stmt, &ref, true))
    {
      dse_optimize_call (gsi, live_bytes);
      return;
    }

  /* We know we have virtual definitions.  We can handle assignments and
     some builtin calls.  */
  if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      tree fndecl = gimple_call_fndecl (stmt);
      switch (DECL_FUNCTION_CODE (fndecl))
	{
	case BUILT_IN_MEMCPY:
	case BUILT_IN_MEMMOVE:
	case BUILT_IN_STRNCPY:
	case BUILT_IN_MEMSET:
	case BUILT_IN_MEMCPY_CHK:
	case BUILT_IN_MEMMOVE_CHK:
	case BUILT_IN_STRNCPY_CHK:
	case BUILT_IN_MEMSET_CHK:
	  {
	    /* Occasionally calls with an explicit length of zero
	       show up in the IL.  It's pointless to do analysis
	       on them, they're trivially dead.  */
	    tree size = gimple_call_arg (stmt, 2);
	    if (integer_zerop (size))
	      {
		delete_dead_or_redundant_call (gsi, "dead");
		return;
	      }

	    /* If this is a memset call that initializes an object
	       to zero, it may be redundant with an earlier memset
	       or empty CONSTRUCTOR of a larger object.  */
	    if ((DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
		 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET_CHK)
		&& integer_zerop (gimple_call_arg (stmt, 1)))
	      dse_optimize_redundant_stores (stmt);

	    enum dse_store_status store_status;
	    bool byte_tracking_enabled
	      = setup_live_bytes_from_ref (&ref, live_bytes);
	    store_status = dse_classify_store (&ref, stmt,
					       byte_tracking_enabled,
					       live_bytes);
	    if (store_status == DSE_STORE_LIVE)
	      return;

	    if (store_status == DSE_STORE_MAYBE_PARTIAL_DEAD)
	      {
		maybe_trim_memstar_call (&ref, live_bytes, stmt);
		return;
	      }

	    if (store_status == DSE_STORE_DEAD)
	      delete_dead_or_redundant_call (gsi, "dead");
	    return;
	  }

	case BUILT_IN_CALLOC:
	  /* We already know the arguments are integer constants.  */
	  dse_optimize_redundant_stores (stmt);
	  return;

	default:
	  return;
	}
    }
  else if (is_gimple_call (stmt)
	   && gimple_call_internal_p (stmt))
    {
      switch (gimple_call_internal_fn (stmt))
	{
	case IFN_LEN_STORE:
	case IFN_MASK_STORE:
	  {
	    enum dse_store_status store_status;
	    store_status = dse_classify_store (&ref, stmt, false, live_bytes);
	    if (store_status == DSE_STORE_DEAD)
	      delete_dead_or_redundant_call (gsi, "dead");
	    return;
	  }
	default:;
	}
    }

  bool by_clobber_p = false;

  /* Check if this statement stores zero to a memory location,
     and if there is a subsequent store of zero to the same
     memory location.  If so, remove the subsequent store.  */
  if (gimple_assign_single_p (stmt)
      && initializer_zerop (gimple_assign_rhs1 (stmt)))
    dse_optimize_redundant_stores (stmt);

  /* Self-assignments are zombies.  */
  if (is_gimple_assign (stmt)
      && operand_equal_p (gimple_assign_rhs1 (stmt),
			  gimple_assign_lhs (stmt), 0))
    ;
  else
    {
      bool byte_tracking_enabled
	  = setup_live_bytes_from_ref (&ref, live_bytes);
      enum dse_store_status store_status;
      store_status = dse_classify_store (&ref, stmt,
					 byte_tracking_enabled,
					 live_bytes, &by_clobber_p);
      if (store_status == DSE_STORE_LIVE)
	return;

      if (store_status == DSE_STORE_MAYBE_PARTIAL_DEAD)
	{
	  maybe_trim_partially_dead_store (&ref, live_bytes, stmt);
	  return;
	}
    }

  /* Now we know that use_stmt kills the LHS of stmt.  */

  /* But only remove *this_2(D) ={v} {CLOBBER} if killed by
     another clobber stmt.  */
  if (gimple_clobber_p (stmt)
      && !by_clobber_p)
    return;

  if (is_gimple_call (stmt)
      && (gimple_has_side_effects (stmt)
	  || (stmt_could_throw_p (fun, stmt)
	      && !fun->can_delete_dead_exceptions)))
    {
      /* See if we can remove complete call.  */
      if (dse_optimize_call (gsi, live_bytes))
	return;
      /* Make sure we do not remove a return slot we cannot reconstruct
	 later.  */
      if (gimple_call_return_slot_opt_p (as_a <gcall *>(stmt))
	  && (TREE_ADDRESSABLE (TREE_TYPE (gimple_call_fntype (stmt)))
	      || !poly_int_tree_p
		    (TYPE_SIZE (TREE_TYPE (gimple_call_fntype (stmt))))))
	return;
      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "  Deleted dead store in call LHS: ");
	  print_gimple_stmt (dump_file, stmt, 0, dump_flags);
	  fprintf (dump_file, "\n");
	}
      gimple_call_set_lhs (stmt, NULL_TREE);
      update_stmt (stmt);
    }
  else if (!stmt_could_throw_p (fun, stmt)
	   || fun->can_delete_dead_exceptions)
    delete_dead_or_redundant_assignment (gsi, "dead", need_eh_cleanup,
					 need_ab_cleanup);
}

namespace {

const pass_data pass_data_dse =
{
  GIMPLE_PASS, /* type */
  "dse", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_TREE_DSE, /* tv_id */
  ( PROP_cfg | PROP_ssa ), /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_dse : public gimple_opt_pass
{
public:
  pass_dse (gcc::context *ctxt)
    : gimple_opt_pass (pass_data_dse, ctxt), use_dr_analysis_p (false)
  {}

  /* opt_pass methods: */
  opt_pass * clone () final override { return new pass_dse (m_ctxt); }
  void set_pass_param (unsigned n, bool param) final override
    {
      gcc_assert (n == 0);
      use_dr_analysis_p = param;
    }
  bool gate (function *) final override { return flag_tree_dse != 0; }
  unsigned int execute (function *) final override;

private:
  bool use_dr_analysis_p;
}; // class pass_dse

unsigned int
pass_dse::execute (function *fun)
{
  unsigned todo = 0;
  bool released_def = false;

  need_eh_cleanup = BITMAP_ALLOC (NULL);
  need_ab_cleanup = BITMAP_ALLOC (NULL);
  auto_sbitmap live_bytes (param_dse_max_object_size);
  if (flag_expensive_optimizations && use_dr_analysis_p)
    dse_stmt_to_dr_map = new hash_map<gimple *, data_reference_p>;

  renumber_gimple_stmt_uids (fun);

  calculate_dominance_info (CDI_DOMINATORS);

  /* Dead store elimination is fundamentally a reverse program order walk.  */
  int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fun) - NUM_FIXED_BLOCKS);
  int n = pre_and_rev_post_order_compute_fn (fun, NULL, rpo, false);
  for (int i = n; i != 0; --i)
    {
      basic_block bb = BASIC_BLOCK_FOR_FN (fun, rpo[i-1]);
      gimple_stmt_iterator gsi;

      for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
	{
	  gimple *stmt = gsi_stmt (gsi);

	  if (gimple_vdef (stmt))
	    dse_optimize_stmt (fun, &gsi, live_bytes);
	  else if (def_operand_p
		     def_p = single_ssa_def_operand (stmt, SSA_OP_DEF))
	    {
	      /* When we remove dead stores make sure to also delete trivially
		 dead SSA defs.  */
	      if (has_zero_uses (DEF_FROM_PTR (def_p))
		  && !gimple_has_side_effects (stmt)
		  && !is_ctrl_altering_stmt (stmt)
		  && (!stmt_could_throw_p (fun, stmt)
		      || fun->can_delete_dead_exceptions))
		{
		  if (dump_file && (dump_flags & TDF_DETAILS))
		    {
		      fprintf (dump_file, "  Deleted trivially dead stmt: ");
		      print_gimple_stmt (dump_file, stmt, 0, dump_flags);
		      fprintf (dump_file, "\n");
		    }
		  if (gsi_remove (&gsi, true) && need_eh_cleanup)
		    bitmap_set_bit (need_eh_cleanup, bb->index);
		  release_defs (stmt);
		  released_def = true;
		}
	    }
	  if (gsi_end_p (gsi))
	    gsi = gsi_last_bb (bb);
	  else
	    gsi_prev (&gsi);
	}
      bool removed_phi = false;
      for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);)
	{
	  gphi *phi = si.phi ();
	  if (has_zero_uses (gimple_phi_result (phi)))
	    {
	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  fprintf (dump_file, "  Deleted trivially dead PHI: ");
		  print_gimple_stmt (dump_file, phi, 0, dump_flags);
		  fprintf (dump_file, "\n");
		}
	      remove_phi_node (&si, true);
	      removed_phi = true;
	      released_def = true;
	    }
	  else
	    gsi_next (&si);
	}
      if (removed_phi && gimple_seq_empty_p (phi_nodes (bb)))
	todo |= TODO_cleanup_cfg;
    }
  free (rpo);

  /* Removal of stores may make some EH edges dead.  Purge such edges from
     the CFG as needed.  */
  if (!bitmap_empty_p (need_eh_cleanup))
    {
      gimple_purge_all_dead_eh_edges (need_eh_cleanup);
      todo |= TODO_cleanup_cfg;
    }
  if (!bitmap_empty_p (need_ab_cleanup))
    {
      gimple_purge_all_dead_abnormal_call_edges (need_ab_cleanup);
      todo |= TODO_cleanup_cfg;
    }

  BITMAP_FREE (need_eh_cleanup);
  BITMAP_FREE (need_ab_cleanup);

  if (released_def)
    free_numbers_of_iterations_estimates (fun);

  if (flag_expensive_optimizations && use_dr_analysis_p)
    {
      for (auto i = dse_stmt_to_dr_map->begin ();
	   i != dse_stmt_to_dr_map->end (); ++i)
	free_data_ref ((*i).second);
      delete dse_stmt_to_dr_map;
      dse_stmt_to_dr_map = NULL;
    }

  return todo;
}

} // anon namespace

gimple_opt_pass *
make_pass_dse (gcc::context *ctxt)
{
  return new pass_dse (ctxt);
}