gcc/tree-ssa-dse.c
1 /* Dead and redundant store elimination
2 Copyright (C) 2004-2021 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
9 any later version.
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "tree-pass.h"
28 #include "ssa.h"
29 #include "gimple-pretty-print.h"
30 #include "fold-const.h"
31 #include "gimple-iterator.h"
32 #include "tree-cfg.h"
33 #include "tree-dfa.h"
34 #include "tree-cfgcleanup.h"
35 #include "alias.h"
36 #include "tree-ssa-loop.h"
37 #include "tree-ssa-dse.h"
38 #include "builtins.h"
39 #include "gimple-fold.h"
40 #include "gimplify.h"
41 #include "tree-eh.h"
42 #include "cfganal.h"
44 /* This file implements dead store elimination.
46 A dead store is a store into a memory location which will later be
47 overwritten by another store without any intervening loads. In this
48 case the earlier store can be deleted, or trimmed if the store
49 was only partially dead.
51 A redundant store is a store into a memory location which stores
52 the exact same value as a prior store to the same memory location.
53 While this can often be handled by dead store elimination, removing
54 the redundant store is often better than removing or trimming the
55 dead store.
57 In our SSA + virtual operand world we use immediate uses of virtual
58 operands to detect these cases. If a store's virtual definition
59 is used precisely once by a later store to the same location which
60 post dominates the first store, then the first store is dead. If
61 the data stored is the same, then the second store is redundant.
63 The single use of the store's virtual definition ensures that
64 there are no intervening aliased loads, and the requirement that
65 the second store post dominate the first ensures that if the earlier
66 store executes, then the later stores will execute before the function
67 exits.
69 It may help to think of this as first moving the earlier store to
70 the point immediately before the later store. Again, the single
71 use of the virtual definition and the post-dominance relationship
72 ensure that such movement would be safe. Clearly if there are
73 back to back stores, then the second makes the first dead. If
74 the second store stores the same value, then the second store is
75 redundant.
77 Reviewing section 10.7.2 in Morgan's "Building an Optimizing Compiler"
78 may also help in understanding this code since it discusses the
79 relationship between dead store and redundant load elimination. In
80 fact, they are the same transformation applied to different views of
81 the CFG. */
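/* Illustrative example (added commentary, not from the original sources):
   in the C fragment

     struct S { int a; int b; } g;

     void
     f (void)
     {
       g.a = 1;
       g.a = 2;
       g.b = 3;
       g.b = 3;
     }

   the store g.a = 1 is dead because g.a = 2 overwrites it with no
   intervening read, and the second g.b = 3 is redundant because it stores
   the value that is already present.  DSE deletes the dead store, and it
   prefers to delete the redundant store rather than treat its
   predecessor as dead.  */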
83 static void delete_dead_or_redundant_call (gimple_stmt_iterator *, const char *);
85 /* Bitmap of blocks that have had EH statements cleaned. We should
86 remove their dead edges eventually. */
87 static bitmap need_eh_cleanup;
89 /* STMT is a statement that may write into memory. Analyze it and
90 initialize WRITE to describe how STMT affects memory.
92 Return TRUE if the statement was analyzed, FALSE otherwise.
94 It is always safe to return FALSE. But typically better optimization
95 can be achieved by analyzing more statements. */
97 static bool
98 initialize_ao_ref_for_dse (gimple *stmt, ao_ref *write)
100 /* It's advantageous to handle certain mem* functions. */
101 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
103 switch (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)))
105 case BUILT_IN_MEMCPY:
106 case BUILT_IN_MEMMOVE:
107 case BUILT_IN_MEMSET:
108 case BUILT_IN_MEMCPY_CHK:
109 case BUILT_IN_MEMMOVE_CHK:
110 case BUILT_IN_MEMSET_CHK:
111 case BUILT_IN_STRNCPY:
112 case BUILT_IN_STRNCPY_CHK:
114 tree size = gimple_call_arg (stmt, 2);
115 tree ptr = gimple_call_arg (stmt, 0);
116 ao_ref_init_from_ptr_and_size (write, ptr, size);
117 return true;
120 /* A calloc call can never be dead, but it can make
121 subsequent stores redundant if they store 0 into
122 the same memory locations. */
123 case BUILT_IN_CALLOC:
125 tree nelem = gimple_call_arg (stmt, 0);
126 tree selem = gimple_call_arg (stmt, 1);
127 tree lhs;
128 if (TREE_CODE (nelem) == INTEGER_CST
129 && TREE_CODE (selem) == INTEGER_CST
130 && (lhs = gimple_call_lhs (stmt)) != NULL_TREE)
132 tree size = fold_build2 (MULT_EXPR, TREE_TYPE (nelem),
133 nelem, selem);
134 ao_ref_init_from_ptr_and_size (write, lhs, size);
135 return true;
139 default:
140 break;
143 else if (tree lhs = gimple_get_lhs (stmt))
145 if (TREE_CODE (lhs) != SSA_NAME)
147 ao_ref_init (write, lhs);
148 return true;
151 return false;
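/* Illustrative note (added commentary, not from the original sources):
   for a call such as

     memset (p, 0, 32);

   the routine above records a 32-byte write starting at *p, and for an
   ordinary assignment to memory it records a write covering the
   left-hand side.  A statement whose left-hand side is an SSA name stores
   into a register rather than memory, so no write is recorded and FALSE
   is returned.  */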
154 /* Given REF from the alias oracle, return TRUE if it is a valid
155 memory reference for dead store elimination, false otherwise.
157 In particular, the reference must have a known base, a known maximum
158 size equal to its size, start on a byte boundary at a non-negative
159 offset, and have a nonzero size in whole bytes. */
161 static bool
162 valid_ao_ref_for_dse (ao_ref *ref)
164 return (ao_ref_base (ref)
165 && known_size_p (ref->max_size)
166 && maybe_ne (ref->size, 0)
167 && known_eq (ref->max_size, ref->size)
168 && known_ge (ref->offset, 0)
169 && multiple_p (ref->offset, BITS_PER_UNIT)
170 && multiple_p (ref->size, BITS_PER_UNIT));
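/* Illustrative note (added commentary): a store to a field at a constant
   position, say g.x = 1, has equal known size and max_size and passes the
   checks above.  A store through a variable index such as a[i] = 1 has a
   max_size covering the whole array while its size covers one element, so
   known_eq (ref->max_size, ref->size) fails and the reference is
   rejected.  */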
173 /* Try to normalize COPY (an ao_ref) relative to REF. Essentially when we are
174 done COPY will only refer to bytes found within REF. Return true if COPY
175 is known to intersect at least one byte of REF. */
177 static bool
178 normalize_ref (ao_ref *copy, ao_ref *ref)
180 if (!ordered_p (copy->offset, ref->offset))
181 return false;
183 /* If COPY starts before REF, then reset the beginning of
184 COPY to match REF and decrease the size of COPY by the
185 number of bytes removed from COPY. */
186 if (maybe_lt (copy->offset, ref->offset))
188 poly_int64 diff = ref->offset - copy->offset;
189 if (maybe_le (copy->size, diff))
190 return false;
191 copy->size -= diff;
192 copy->offset = ref->offset;
195 poly_int64 diff = copy->offset - ref->offset;
196 if (maybe_le (ref->size, diff))
197 return false;
199 /* If COPY extends beyond REF, chop off its size appropriately. */
200 poly_int64 limit = ref->size - diff;
201 if (!ordered_p (limit, copy->size))
202 return false;
204 if (maybe_gt (copy->size, limit))
205 copy->size = limit;
206 return true;
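/* A worked example (illustrative; byte positions are used for clarity even
   though the ao_ref fields are expressed in bits): if REF covers bytes
   [8, 24) of an object and COPY covers bytes [4, 16), the code above moves
   COPY's start up to byte 8 and shrinks it to cover [8, 16), all of which
   lies inside REF, and returns true.  Had COPY covered [0, 8) there would
   be no intersection and false would be returned.  */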
209 /* Clear any bytes written by STMT from the bitmap LIVE_BYTES. The base
210 address written by STMT must match the one found in REF, which must
211 have its base address previously initialized.
213 This routine must be conservative. If we don't know the offset or
214 actual size written, assume nothing was written. */
216 static void
217 clear_bytes_written_by (sbitmap live_bytes, gimple *stmt, ao_ref *ref)
219 ao_ref write;
220 if (!initialize_ao_ref_for_dse (stmt, &write))
221 return;
223 /* Verify we have the same base memory address, the write
224 has a known size and overlaps with REF. */
225 HOST_WIDE_INT start, size;
226 if (valid_ao_ref_for_dse (&write)
227 && operand_equal_p (write.base, ref->base, OEP_ADDRESS_OF)
228 && known_eq (write.size, write.max_size)
229 && normalize_ref (&write, ref)
230 && (write.offset - ref->offset).is_constant (&start)
231 && write.size.is_constant (&size))
232 bitmap_clear_range (live_bytes, start / BITS_PER_UNIT,
233 size / BITS_PER_UNIT);
236 /* REF is a memory write. Extract relevant information from it and
237 initialize the LIVE_BYTES bitmap. If successful, return TRUE.
238 Otherwise return FALSE. */
240 static bool
241 setup_live_bytes_from_ref (ao_ref *ref, sbitmap live_bytes)
243 HOST_WIDE_INT const_size;
244 if (valid_ao_ref_for_dse (ref)
245 && ref->size.is_constant (&const_size)
246 && (const_size / BITS_PER_UNIT
247 <= param_dse_max_object_size))
249 bitmap_clear (live_bytes);
250 bitmap_set_range (live_bytes, 0, const_size / BITS_PER_UNIT);
251 return true;
253 return false;
256 /* Compute the number of elements that we can trim from the head and
257 tail of ORIG resulting in a bitmap that is a superset of LIVE.
259 Store the number of elements trimmed from the head and tail in
260 TRIM_HEAD and TRIM_TAIL.
262 STMT is the statement being trimmed and is used for debugging dump
263 output only. */
265 static void
266 compute_trims (ao_ref *ref, sbitmap live, int *trim_head, int *trim_tail,
267 gimple *stmt)
269 /* We use sbitmaps biased such that ref->offset is bit zero and the bitmap
270 extends through ref->size. So we know that in the original bitmap
271 bits 0..ref->size were true. We don't actually need the bitmap, just
272 the REF to compute the trims. */
274 /* Now identify how much, if any of the tail we can chop off. */
275 HOST_WIDE_INT const_size;
276 int last_live = bitmap_last_set_bit (live);
277 if (ref->size.is_constant (&const_size))
279 int last_orig = (const_size / BITS_PER_UNIT) - 1;
280 /* We can leave inconvenient amounts on the tail as
281 residual handling in mem* and str* functions is usually
282 reasonably efficient. */
283 *trim_tail = last_orig - last_live;
285 /* But don't trim away out of bounds accesses, as this defeats
286 proper warnings.
288 We could have a type with no TYPE_SIZE_UNIT or we could have a VLA
289 where TYPE_SIZE_UNIT is not a constant. */
290 if (*trim_tail
291 && TYPE_SIZE_UNIT (TREE_TYPE (ref->base))
292 && TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (ref->base))) == INTEGER_CST
293 && compare_tree_int (TYPE_SIZE_UNIT (TREE_TYPE (ref->base)),
294 last_orig) <= 0)
295 *trim_tail = 0;
297 else
298 *trim_tail = 0;
300 /* Identify how much, if any of the head we can chop off. */
301 int first_orig = 0;
302 int first_live = bitmap_first_set_bit (live);
303 *trim_head = first_live - first_orig;
305 /* If more than a word remains, then make sure to keep the
306 starting point at least word aligned. */
307 if (last_live - first_live > UNITS_PER_WORD)
308 *trim_head &= ~(UNITS_PER_WORD - 1);
310 if ((*trim_head || *trim_tail)
311 && dump_file && (dump_flags & TDF_DETAILS))
313 fprintf (dump_file, " Trimming statement (head = %d, tail = %d): ",
314 *trim_head, *trim_tail);
315 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
316 fprintf (dump_file, "\n");
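/* A worked example (illustrative): for a 16-byte store of which only bytes
   4..11 remain live, last_orig is 15 and last_live is 11, so *trim_tail
   becomes 4; first_live is 4, so *trim_head becomes 4.  Only when the live
   range spans more than UNITS_PER_WORD bytes is *trim_head rounded down to
   a multiple of UNITS_PER_WORD, keeping the trimmed store word aligned.  */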
320 /* STMT initializes an object from COMPLEX_CST where one or more of the
321 bytes written may be dead stores. REF is a representation of the
322 memory written. LIVE is the bitmap of bytes that are actually live.
324 Attempt to rewrite STMT so that only the real or imaginary part of
325 the object is actually stored. */
327 static void
328 maybe_trim_complex_store (ao_ref *ref, sbitmap live, gimple *stmt)
330 int trim_head, trim_tail;
331 compute_trims (ref, live, &trim_head, &trim_tail, stmt);
333 /* The amount of data trimmed from the head or tail must be at
334 least half the size of the object to ensure we're trimming
335 the entire real or imaginary half. By writing things this
336 way we avoid more O(n) bitmap operations. */
337 if (known_ge (trim_tail * 2 * BITS_PER_UNIT, ref->size))
339 /* TREE_REALPART is live */
340 tree x = TREE_REALPART (gimple_assign_rhs1 (stmt));
341 tree y = gimple_assign_lhs (stmt);
342 y = build1 (REALPART_EXPR, TREE_TYPE (x), y);
343 gimple_assign_set_lhs (stmt, y);
344 gimple_assign_set_rhs1 (stmt, x);
346 else if (known_ge (trim_head * 2 * BITS_PER_UNIT, ref->size))
348 /* TREE_IMAGPART is live */
349 tree x = TREE_IMAGPART (gimple_assign_rhs1 (stmt));
350 tree y = gimple_assign_lhs (stmt);
351 y = build1 (IMAGPART_EXPR, TREE_TYPE (x), y);
352 gimple_assign_set_lhs (stmt, y);
353 gimple_assign_set_rhs1 (stmt, x);
356 /* Other cases indicate parts of both the real and imag subobjects
357 are live. We do not try to optimize those cases. */
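/* Illustrative example (added commentary): given a complex variable c in
   memory and a store of a complex constant

     c = 1.0 + 2.0i;

   in which byte tracking shows only the real half to be live, the
   assignment is rewritten so that effectively only

     REALPART_EXPR <c> = 1.0;

   remains and the dead imaginary half is no longer written.  */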
360 /* STMT initializes an object using a CONSTRUCTOR where one or more of the
361 bytes written are dead stores. REF describes the memory written by
362 STMT. LIVE is the bitmap of bytes that are actually live.
364 Attempt to rewrite STMT so that only the live portion of the object
365 is actually stored.
367 The most common case for getting here is a CONSTRUCTOR with no elements
368 being used to zero initialize an object. We do not try to handle other
369 cases as those would force us to fully cover the object with the
370 CONSTRUCTOR node except for the components that are dead. */
372 static void
373 maybe_trim_constructor_store (ao_ref *ref, sbitmap live, gimple *stmt)
375 tree ctor = gimple_assign_rhs1 (stmt);
377 /* This is the only case we currently handle. It actually seems to
378 catch most cases of practical interest. */
379 gcc_assert (CONSTRUCTOR_NELTS (ctor) == 0);
381 int head_trim = 0;
382 int tail_trim = 0;
383 compute_trims (ref, live, &head_trim, &tail_trim, stmt);
385 /* Now we want to replace the constructor initializer
386 with memset (object + head_trim, 0, size - head_trim - tail_trim). */
387 if (head_trim || tail_trim)
389 /* We want &lhs for the MEM_REF expression. */
390 tree lhs_addr = build_fold_addr_expr (gimple_assign_lhs (stmt));
392 if (! is_gimple_min_invariant (lhs_addr))
393 return;
395 /* The number of bytes for the new constructor. */
396 poly_int64 ref_bytes = exact_div (ref->size, BITS_PER_UNIT);
397 poly_int64 count = ref_bytes - head_trim - tail_trim;
399 /* And the new type for the CONSTRUCTOR. Essentially it's just
400 a char array large enough to cover the non-trimmed parts of
401 the original CONSTRUCTOR. Note we want explicit bounds here
402 so that we know how many bytes to clear when expanding the
403 CONSTRUCTOR. */
404 tree type = build_array_type_nelts (char_type_node, count);
406 /* Build a suitable alias type rather than using alias set zero
407 to avoid pessimizing. */
408 tree alias_type = reference_alias_ptr_type (gimple_assign_lhs (stmt));
410 /* Build a MEM_REF representing the whole accessed area, starting
411 at the first byte not trimmed. */
412 tree exp = fold_build2 (MEM_REF, type, lhs_addr,
413 build_int_cst (alias_type, head_trim));
415 /* Now update STMT with a new RHS and LHS. */
416 gimple_assign_set_lhs (stmt, exp);
417 gimple_assign_set_rhs1 (stmt, build_constructor (type, NULL));
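/* Illustrative example (added commentary): for a zero initialization

     struct S { char buf[16]; } s;

     s = (struct S) { };

   in which the first 4 and the last 4 bytes are dead, the statement is
   rewritten to clear only the middle 8 bytes, conceptually

     MEM <char[8]> [(struct S *)&s + 4B] = { };

   which expands much like memset (&s.buf[4], 0, 8).  */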
421 /* STMT is a memcpy, memmove or memset. Decrement the number of bytes
422 copied/set by DECREMENT. */
423 static void
424 decrement_count (gimple *stmt, int decrement)
426 tree *countp = gimple_call_arg_ptr (stmt, 2);
427 gcc_assert (TREE_CODE (*countp) == INTEGER_CST);
428 *countp = wide_int_to_tree (TREE_TYPE (*countp), (TREE_INT_CST_LOW (*countp)
429 - decrement));
432 static void
433 increment_start_addr (gimple *stmt, tree *where, int increment)
435 if (tree lhs = gimple_call_lhs (stmt))
436 if (where == gimple_call_arg_ptr (stmt, 0))
438 gassign *newop = gimple_build_assign (lhs, unshare_expr (*where));
439 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
440 gsi_insert_after (&gsi, newop, GSI_SAME_STMT);
441 gimple_call_set_lhs (stmt, NULL_TREE);
442 update_stmt (stmt);
445 if (TREE_CODE (*where) == SSA_NAME)
447 tree tem = make_ssa_name (TREE_TYPE (*where));
448 gassign *newop
449 = gimple_build_assign (tem, POINTER_PLUS_EXPR, *where,
450 build_int_cst (sizetype, increment));
451 gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
452 gsi_insert_before (&gsi, newop, GSI_SAME_STMT);
453 *where = tem;
454 update_stmt (stmt);
455 return;
458 *where = build_fold_addr_expr (fold_build2 (MEM_REF, char_type_node,
459 *where,
460 build_int_cst (ptr_type_node,
461 increment)));
464 /* STMT is a builtin call that writes the bytes described by REF; the
465 bytes not set in LIVE are dead and need not be stored. Try to rewrite STMT to reduce
466 the amount of data it actually writes.
468 Right now we only support trimming from the head or the tail of the
469 memory region. In theory we could split the mem* call, but it's
470 likely of marginal value. */
472 static void
473 maybe_trim_memstar_call (ao_ref *ref, sbitmap live, gimple *stmt)
475 int head_trim, tail_trim;
476 switch (DECL_FUNCTION_CODE (gimple_call_fndecl (stmt)))
478 case BUILT_IN_STRNCPY:
479 case BUILT_IN_STRNCPY_CHK:
480 compute_trims (ref, live, &head_trim, &tail_trim, stmt);
481 if (head_trim)
483 /* Head trimming of strncpy is only possible if we can
484 prove all bytes we would trim are non-zero (or we could
485 turn the strncpy into memset if there must be zero
486 among the head trimmed bytes). If we don't know anything
487 about those bytes, the presence or absence of '\0' bytes
488 in there will affect whether it acts for the non-trimmed
489 bytes as memset or memcpy/strncpy. */
490 c_strlen_data lendata = { };
491 int orig_head_trim = head_trim;
492 tree srcstr = gimple_call_arg (stmt, 1);
493 if (!get_range_strlen (srcstr, &lendata, /*eltsize=*/1)
494 || !tree_fits_uhwi_p (lendata.minlen))
495 head_trim = 0;
496 else if (tree_to_uhwi (lendata.minlen) < (unsigned) head_trim)
498 head_trim = tree_to_uhwi (lendata.minlen);
499 if ((orig_head_trim & (UNITS_PER_WORD - 1)) == 0)
500 head_trim &= ~(UNITS_PER_WORD - 1);
502 if (orig_head_trim != head_trim
503 && dump_file
504 && (dump_flags & TDF_DETAILS))
505 fprintf (dump_file,
506 " Adjusting strncpy trimming to (head = %d,"
507 " tail = %d)\n", head_trim, tail_trim);
509 goto do_memcpy;
511 case BUILT_IN_MEMCPY:
512 case BUILT_IN_MEMMOVE:
513 case BUILT_IN_MEMCPY_CHK:
514 case BUILT_IN_MEMMOVE_CHK:
515 compute_trims (ref, live, &head_trim, &tail_trim, stmt);
517 do_memcpy:
518 /* Tail trimming is easy, we can just reduce the count. */
519 if (tail_trim)
520 decrement_count (stmt, tail_trim);
522 /* Head trimming requires adjusting all the arguments. */
523 if (head_trim)
525 /* For __*_chk need to adjust also the last argument. */
526 if (gimple_call_num_args (stmt) == 4)
528 tree size = gimple_call_arg (stmt, 3);
529 if (!tree_fits_uhwi_p (size))
530 break;
531 if (!integer_all_onesp (size))
533 unsigned HOST_WIDE_INT sz = tree_to_uhwi (size);
534 if (sz < (unsigned) head_trim)
535 break;
536 tree arg = wide_int_to_tree (TREE_TYPE (size),
537 sz - head_trim);
538 gimple_call_set_arg (stmt, 3, arg);
541 tree *dst = gimple_call_arg_ptr (stmt, 0);
542 increment_start_addr (stmt, dst, head_trim);
543 tree *src = gimple_call_arg_ptr (stmt, 1);
544 increment_start_addr (stmt, src, head_trim);
545 decrement_count (stmt, head_trim);
547 break;
549 case BUILT_IN_MEMSET:
550 case BUILT_IN_MEMSET_CHK:
551 compute_trims (ref, live, &head_trim, &tail_trim, stmt);
553 /* Tail trimming is easy, we can just reduce the count. */
554 if (tail_trim)
555 decrement_count (stmt, tail_trim);
557 /* Head trimming requires adjusting all the arguments. */
558 if (head_trim)
560 /* For __*_chk need to adjust also the last argument. */
561 if (gimple_call_num_args (stmt) == 4)
563 tree size = gimple_call_arg (stmt, 3);
564 if (!tree_fits_uhwi_p (size))
565 break;
566 if (!integer_all_onesp (size))
568 unsigned HOST_WIDE_INT sz = tree_to_uhwi (size);
569 if (sz < (unsigned) head_trim)
570 break;
571 tree arg = wide_int_to_tree (TREE_TYPE (size),
572 sz - head_trim);
573 gimple_call_set_arg (stmt, 3, arg);
576 tree *dst = gimple_call_arg_ptr (stmt, 0);
577 increment_start_addr (stmt, dst, head_trim);
578 decrement_count (stmt, head_trim);
580 break;
582 default:
583 break;
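/* Illustrative example (added commentary): if byte tracking shows that the
   first 8 and the last 4 bytes written by

     memcpy (dst, src, 32);

   are dead, the call is rewritten in place to the equivalent of

     memcpy (dst + 8, src + 8, 20);

   that is, both start addresses are advanced by the head trim and the
   length argument is reduced by the head and tail trims combined.  */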
587 /* STMT is a memory write where one or more bytes written are dead
588 stores. REF describes the memory written by STMT. LIVE is the
589 bitmap of bytes that are actually live.
591 Attempt to rewrite STMT so that it writes fewer memory locations. Right
592 now we only support trimming at the start or end of the memory region.
593 It's not clear how much there is to be gained by trimming from the middle
594 of the region. */
596 static void
597 maybe_trim_partially_dead_store (ao_ref *ref, sbitmap live, gimple *stmt)
599 if (is_gimple_assign (stmt)
600 && TREE_CODE (gimple_assign_lhs (stmt)) != TARGET_MEM_REF)
602 switch (gimple_assign_rhs_code (stmt))
604 case CONSTRUCTOR:
605 maybe_trim_constructor_store (ref, live, stmt);
606 break;
607 case COMPLEX_CST:
608 maybe_trim_complex_store (ref, live, stmt);
609 break;
610 default:
611 break;
616 /* Return TRUE if USE_REF reads one or more bytes still live in LIVE,
617 where LIVE is derived from REF, a write reference.
619 While this routine may modify USE_REF, it's passed by value, not
620 location. So callers do not see those modifications. */
622 static bool
623 live_bytes_read (ao_ref use_ref, ao_ref *ref, sbitmap live)
625 /* We have already verified that USE_REF and REF hit the same object.
626 Now verify that there's actually an overlap between USE_REF and REF. */
627 HOST_WIDE_INT start, size;
628 if (normalize_ref (&use_ref, ref)
629 && (use_ref.offset - ref->offset).is_constant (&start)
630 && use_ref.size.is_constant (&size))
632 /* If USE_REF covers all of REF, then it will hit one or more
633 live bytes. This avoids useless iteration over the bitmap
634 below. */
635 if (start == 0 && known_eq (size, ref->size))
636 return true;
638 /* Now check if any of the remaining bits in use_ref are set in LIVE. */
639 return bitmap_bit_in_range_p (live, start / BITS_PER_UNIT,
640 (start + size - 1) / BITS_PER_UNIT);
642 return true;
645 /* Callback for dse_classify_store calling for_each_index. Verify that
646 indices are invariant in the loop with backedge PHI in basic-block DATA. */
648 static bool
649 check_name (tree, tree *idx, void *data)
651 basic_block phi_bb = (basic_block) data;
652 if (TREE_CODE (*idx) == SSA_NAME
653 && !SSA_NAME_IS_DEFAULT_DEF (*idx)
654 && dominated_by_p (CDI_DOMINATORS, gimple_bb (SSA_NAME_DEF_STMT (*idx)),
655 phi_bb))
656 return false;
657 return true;
660 /* STMT stores the value 0 into one or more memory locations
661 (via memset, empty constructor, calloc call, etc).
663 See if there is a subsequent store of the value 0 to one
664 or more of the same memory location(s). If so, the subsequent
665 store is redundant and can be removed.
667 The subsequent stores could be via memset, empty constructors,
668 simple MEM stores, etc. */
670 static void
671 dse_optimize_redundant_stores (gimple *stmt)
673 int cnt = 0;
675 /* TBAA state of STMT, if it is a call it is effectively alias-set zero. */
676 alias_set_type earlier_set = 0;
677 alias_set_type earlier_base_set = 0;
678 if (is_gimple_assign (stmt))
680 ao_ref lhs_ref;
681 ao_ref_init (&lhs_ref, gimple_assign_lhs (stmt));
682 earlier_set = ao_ref_alias_set (&lhs_ref);
683 earlier_base_set = ao_ref_base_alias_set (&lhs_ref);
686 /* We could do something fairly complex and look through PHIs
687 like DSE_CLASSIFY_STORE, but it doesn't seem to be worth
688 the effort.
690 Look at all the immediate uses of the VDEF (which are obviously
691 dominated by STMT). See if one or more stores 0 into the same
692 memory locations as STMT; if so, remove those immediate use statements.
693 tree defvar = gimple_vdef (stmt);
694 imm_use_iterator ui;
695 gimple *use_stmt;
696 FOR_EACH_IMM_USE_STMT (use_stmt, ui, defvar)
698 /* Limit stmt walking. */
699 if (++cnt > param_dse_max_alias_queries_per_store)
700 break;
702 /* If USE_STMT stores 0 into one or more of the same locations
703 as STMT and STMT would kill USE_STMT, then we can just remove
704 USE_STMT. */
705 tree fndecl;
706 if ((is_gimple_assign (use_stmt)
707 && gimple_vdef (use_stmt)
708 && (gimple_assign_single_p (use_stmt)
709 && initializer_zerop (gimple_assign_rhs1 (use_stmt))))
710 || (gimple_call_builtin_p (use_stmt, BUILT_IN_NORMAL)
711 && (fndecl = gimple_call_fndecl (use_stmt)) != NULL
712 && (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
713 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET_CHK)
714 && integer_zerop (gimple_call_arg (use_stmt, 1))))
716 ao_ref write;
718 if (!initialize_ao_ref_for_dse (use_stmt, &write))
719 break;
721 if (valid_ao_ref_for_dse (&write)
722 && stmt_kills_ref_p (stmt, &write))
724 gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
725 if (is_gimple_assign (use_stmt))
727 ao_ref lhs_ref;
728 ao_ref_init (&lhs_ref, gimple_assign_lhs (use_stmt));
729 if ((earlier_set == ao_ref_alias_set (&lhs_ref)
730 || alias_set_subset_of (ao_ref_alias_set (&lhs_ref),
731 earlier_set))
732 && (earlier_base_set == ao_ref_base_alias_set (&lhs_ref)
733 || alias_set_subset_of
734 (ao_ref_base_alias_set (&lhs_ref),
735 earlier_base_set)))
736 delete_dead_or_redundant_assignment (&gsi, "redundant",
737 need_eh_cleanup);
739 else if (is_gimple_call (use_stmt))
741 if ((earlier_set == 0
742 || alias_set_subset_of (0, earlier_set))
743 && (earlier_base_set == 0
744 || alias_set_subset_of (0, earlier_base_set)))
745 delete_dead_or_redundant_call (&gsi, "redundant");
747 else
748 gcc_unreachable ();
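/* Illustrative example (added commentary): in

     p = calloc (16, sizeof (int));
     memset (p, 0, 4 * sizeof (int));

   the memset stores zero into memory the calloc already cleared, so the
   routine above deletes it as redundant.  An ordinary assignment that
   stores zero into part of the same region is handled the same way.  */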
754 /* A helper of dse_optimize_stmt.
755 Given a GIMPLE_ASSIGN in STMT that writes to REF, classify it
756 according to downstream uses and defs. Sets *BY_CLOBBER_P to true
757 if only clobber statements influenced the classification result.
758 Returns the classification. */
760 dse_store_status
761 dse_classify_store (ao_ref *ref, gimple *stmt,
762 bool byte_tracking_enabled, sbitmap live_bytes,
763 bool *by_clobber_p, tree stop_at_vuse)
765 gimple *temp;
766 int cnt = 0;
767 auto_bitmap visited;
769 if (by_clobber_p)
770 *by_clobber_p = true;
772 /* Find the first dominated statement that clobbers (part of) the
773 memory stmt stores to, with no intermediate statement that may use
774 part of that memory. That is, find a store that may
775 prove stmt to be a dead store. */
776 temp = stmt;
779 gimple *use_stmt;
780 imm_use_iterator ui;
781 bool fail = false;
782 tree defvar;
784 if (gimple_code (temp) == GIMPLE_PHI)
786 /* If we visit this PHI by following a backedge then we have to
787 make sure ref->ref only refers to SSA names that are invariant
788 with respect to the loop represented by this PHI node. */
789 if (dominated_by_p (CDI_DOMINATORS, gimple_bb (stmt),
790 gimple_bb (temp))
791 && !for_each_index (ref->ref ? &ref->ref : &ref->base,
792 check_name, gimple_bb (temp)))
793 return DSE_STORE_LIVE;
794 defvar = PHI_RESULT (temp);
795 bitmap_set_bit (visited, SSA_NAME_VERSION (defvar));
797 else
798 defvar = gimple_vdef (temp);
800 /* If we're instructed to stop walking at region boundary, do so. */
801 if (defvar == stop_at_vuse)
802 return DSE_STORE_LIVE;
804 auto_vec<gimple *, 10> defs;
805 gimple *first_phi_def = NULL;
806 gimple *last_phi_def = NULL;
807 FOR_EACH_IMM_USE_STMT (use_stmt, ui, defvar)
809 /* Limit stmt walking. */
810 if (++cnt > param_dse_max_alias_queries_per_store)
812 fail = true;
813 break;
816 /* In simple cases we can look through PHI nodes, but we
817 have to be careful with loops and with memory references
818 containing operands that are also operands of PHI nodes.
819 See gcc.c-torture/execute/20051110-*.c. */
820 if (gimple_code (use_stmt) == GIMPLE_PHI)
822 /* If we already visited this PHI ignore it for further
823 processing. */
824 if (!bitmap_bit_p (visited,
825 SSA_NAME_VERSION (PHI_RESULT (use_stmt))))
827 defs.safe_push (use_stmt);
828 if (!first_phi_def)
829 first_phi_def = use_stmt;
830 last_phi_def = use_stmt;
833 /* If the statement is a use the store is not dead. */
834 else if (ref_maybe_used_by_stmt_p (use_stmt, ref))
836 /* Handle common cases where we can easily build an ao_ref
837 structure for USE_STMT and in doing so we find that the
838 references hit non-live bytes and thus can be ignored. */
839 if (byte_tracking_enabled
840 && is_gimple_assign (use_stmt))
842 ao_ref use_ref;
843 ao_ref_init (&use_ref, gimple_assign_rhs1 (use_stmt));
844 if (valid_ao_ref_for_dse (&use_ref)
845 && use_ref.base == ref->base
846 && known_eq (use_ref.size, use_ref.max_size)
847 && !live_bytes_read (use_ref, ref, live_bytes))
849 /* If this is a store, remember it as we possibly
850 need to walk the defs uses. */
851 if (gimple_vdef (use_stmt))
852 defs.safe_push (use_stmt);
853 continue;
857 fail = true;
858 break;
860 /* We have visited ourselves already so ignore STMT for the
861 purpose of chaining. */
862 else if (use_stmt == stmt)
864 /* If this is a store, remember it as we possibly need to walk the
865 defs uses. */
866 else if (gimple_vdef (use_stmt))
867 defs.safe_push (use_stmt);
870 if (fail)
872 /* STMT might be partially dead and we may be able to reduce
873 how many memory locations it stores into. */
874 if (byte_tracking_enabled && !gimple_clobber_p (stmt))
875 return DSE_STORE_MAYBE_PARTIAL_DEAD;
876 return DSE_STORE_LIVE;
879 /* If we didn't find any definition this means the store is dead
880 if it isn't a store to globally reachable memory. In this case
881 just pretend the stmt makes itself dead. Otherwise fail. */
882 if (defs.is_empty ())
884 if (ref_may_alias_global_p (ref))
885 return DSE_STORE_LIVE;
887 if (by_clobber_p)
888 *by_clobber_p = false;
889 return DSE_STORE_DEAD;
892 /* Process defs and remove those we need not process further. */
893 for (unsigned i = 0; i < defs.length ();)
895 gimple *def = defs[i];
896 gimple *use_stmt;
897 use_operand_p use_p;
898 tree vdef = (gimple_code (def) == GIMPLE_PHI
899 ? gimple_phi_result (def) : gimple_vdef (def));
900 /* If the path to check starts with a kill we do not need to
901 process it further.
902 ??? With byte tracking we need only kill the bytes currently
903 live. */
904 if (stmt_kills_ref_p (def, ref))
906 if (by_clobber_p && !gimple_clobber_p (def))
907 *by_clobber_p = false;
908 defs.unordered_remove (i);
910 /* If the path ends here we do not need to process it further.
911 This for example happens with calls to noreturn functions. */
912 else if (has_zero_uses (vdef))
914 /* But if the store is to global memory it is definitely
915 not dead. */
916 if (ref_may_alias_global_p (ref))
917 return DSE_STORE_LIVE;
918 defs.unordered_remove (i);
920 /* In addition to kills we can remove defs whose only use
921 is another def in defs. That can only ever be PHIs of which
922 we track two for simplicity reasons, the first and last in
923 {first,last}_phi_def (we fail for multiple PHIs anyways).
924 We can also ignore defs that feed only into
925 already visited PHIs. */
926 else if (single_imm_use (vdef, &use_p, &use_stmt)
927 && (use_stmt == first_phi_def
928 || use_stmt == last_phi_def
929 || (gimple_code (use_stmt) == GIMPLE_PHI
930 && bitmap_bit_p (visited,
931 SSA_NAME_VERSION
932 (PHI_RESULT (use_stmt))))))
933 defs.unordered_remove (i);
934 else
935 ++i;
938 /* If all defs kill the ref we are done. */
939 if (defs.is_empty ())
940 return DSE_STORE_DEAD;
941 /* If more than one def survives fail. */
942 if (defs.length () > 1)
944 /* STMT might be partially dead and we may be able to reduce
945 how many memory locations it stores into. */
946 if (byte_tracking_enabled && !gimple_clobber_p (stmt))
947 return DSE_STORE_MAYBE_PARTIAL_DEAD;
948 return DSE_STORE_LIVE;
950 temp = defs[0];
952 /* Track partial kills. */
953 if (byte_tracking_enabled)
955 clear_bytes_written_by (live_bytes, temp, ref);
956 if (bitmap_empty_p (live_bytes))
958 if (by_clobber_p && !gimple_clobber_p (temp))
959 *by_clobber_p = false;
960 return DSE_STORE_DEAD;
964 /* Continue walking until there are no more live bytes. */
965 while (1);
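/* An illustrative walk (added commentary): with a global int x and

     x = 1;
     if (cond)
       x = 2;
     else
       x = 3;

   classifying the store x = 1 follows its virtual definition to the two
   stores in the branches.  Each of them kills the whole reference, so both
   are dropped from the work list, the list becomes empty and the store is
   classified DSE_STORE_DEAD.  If one branch instead read x, the walk would
   stop; with byte tracking enabled the store is then reported as
   DSE_STORE_MAYBE_PARTIAL_DEAD so the caller can try trimming it,
   otherwise as DSE_STORE_LIVE.  */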
969 /* Delete a dead or redundant call at GSI, which is a mem* call of some kind. */
970 static void
971 delete_dead_or_redundant_call (gimple_stmt_iterator *gsi, const char *type)
973 gimple *stmt = gsi_stmt (*gsi);
974 if (dump_file && (dump_flags & TDF_DETAILS))
976 fprintf (dump_file, " Deleted %s call: ", type);
977 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
978 fprintf (dump_file, "\n");
981 basic_block bb = gimple_bb (stmt);
982 tree lhs = gimple_call_lhs (stmt);
983 if (lhs)
985 tree ptr = gimple_call_arg (stmt, 0);
986 gimple *new_stmt = gimple_build_assign (lhs, ptr);
987 unlink_stmt_vdef (stmt);
988 if (gsi_replace (gsi, new_stmt, true))
989 bitmap_set_bit (need_eh_cleanup, bb->index);
991 else
993 /* Then we need to fix the operand of the consuming stmt. */
994 unlink_stmt_vdef (stmt);
996 /* Remove the dead store. */
997 if (gsi_remove (gsi, true))
998 bitmap_set_bit (need_eh_cleanup, bb->index);
999 release_defs (stmt);
1003 /* Delete a dead or redundant store at GSI, which is a gimple assignment. */
1005 void
1006 delete_dead_or_redundant_assignment (gimple_stmt_iterator *gsi, const char *type,
1007 bitmap need_eh_cleanup)
1009 gimple *stmt = gsi_stmt (*gsi);
1010 if (dump_file && (dump_flags & TDF_DETAILS))
1012 fprintf (dump_file, " Deleted %s store: ", type);
1013 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
1014 fprintf (dump_file, "\n");
1017 /* Then we need to fix the operand of the consuming stmt. */
1018 unlink_stmt_vdef (stmt);
1020 /* Remove the dead store. */
1021 basic_block bb = gimple_bb (stmt);
1022 if (gsi_remove (gsi, true) && need_eh_cleanup)
1023 bitmap_set_bit (need_eh_cleanup, bb->index);
1025 /* And release any SSA_NAMEs set in this statement back to the
1026 SSA_NAME manager. */
1027 release_defs (stmt);
1030 /* Attempt to eliminate dead stores in the statement referenced by GSI.
1032 A dead store is a store into a memory location which will later be
1033 overwritten by another store without any intervening loads. In this
1034 case the earlier store can be deleted.
1036 In our SSA + virtual operand world we use immediate uses of virtual
1037 operands to detect dead stores. If a store's virtual definition
1038 is used precisely once by a later store to the same location which
1039 post dominates the first store, then the first store is dead. */
1041 static void
1042 dse_optimize_stmt (function *fun, gimple_stmt_iterator *gsi, sbitmap live_bytes)
1044 gimple *stmt = gsi_stmt (*gsi);
1046 /* Don't return early on *this_2(D) ={v} {CLOBBER}. */
1047 if (gimple_has_volatile_ops (stmt)
1048 && (!gimple_clobber_p (stmt)
1049 || TREE_CODE (gimple_assign_lhs (stmt)) != MEM_REF))
1050 return;
1052 ao_ref ref;
1053 if (!initialize_ao_ref_for_dse (stmt, &ref))
1054 return;
1056 /* We know we have virtual definitions. We can handle assignments and
1057 some builtin calls. */
1058 if (gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
1060 tree fndecl = gimple_call_fndecl (stmt);
1061 switch (DECL_FUNCTION_CODE (fndecl))
1063 case BUILT_IN_MEMCPY:
1064 case BUILT_IN_MEMMOVE:
1065 case BUILT_IN_STRNCPY:
1066 case BUILT_IN_MEMSET:
1067 case BUILT_IN_MEMCPY_CHK:
1068 case BUILT_IN_MEMMOVE_CHK:
1069 case BUILT_IN_STRNCPY_CHK:
1070 case BUILT_IN_MEMSET_CHK:
1072 /* Occasionally calls with an explicit length of zero
1073 show up in the IL. It's pointless to do analysis
1074 on them; they're trivially dead. */
1075 tree size = gimple_call_arg (stmt, 2);
1076 if (integer_zerop (size))
1078 delete_dead_or_redundant_call (gsi, "dead");
1079 return;
1082 /* If this is a memset call that initializes an object
1083 to zero, it may be redundant with an earlier memset
1084 or empty CONSTRUCTOR of a larger object. */
1085 if ((DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET
1086 || DECL_FUNCTION_CODE (fndecl) == BUILT_IN_MEMSET_CHK)
1087 && integer_zerop (gimple_call_arg (stmt, 1)))
1088 dse_optimize_redundant_stores (stmt);
1090 enum dse_store_status store_status;
1091 bool byte_tracking_enabled
1092 = setup_live_bytes_from_ref (&ref, live_bytes);
1093 store_status = dse_classify_store (&ref, stmt,
1094 byte_tracking_enabled,
1095 live_bytes);
1096 if (store_status == DSE_STORE_LIVE)
1097 return;
1099 if (store_status == DSE_STORE_MAYBE_PARTIAL_DEAD)
1101 maybe_trim_memstar_call (&ref, live_bytes, stmt);
1102 return;
1105 if (store_status == DSE_STORE_DEAD)
1106 delete_dead_or_redundant_call (gsi, "dead");
1107 return;
1110 case BUILT_IN_CALLOC:
1111 /* We already know the arguments are integer constants. */
1112 dse_optimize_redundant_stores (stmt);
1113 return;
1115 default:
1116 return;
1120 bool by_clobber_p = false;
1122 /* Check if this statement stores zero to a memory location,
1123 and if there is a subsequent store of zero to the same
1124 memory location. If so, remove the subsequent store. */
1125 if (gimple_assign_single_p (stmt)
1126 && initializer_zerop (gimple_assign_rhs1 (stmt)))
1127 dse_optimize_redundant_stores (stmt);
1129 /* Self-assignments are zombies. */
1130 if (is_gimple_assign (stmt)
1131 && operand_equal_p (gimple_assign_rhs1 (stmt),
1132 gimple_assign_lhs (stmt), 0))
1134 else
1136 bool byte_tracking_enabled
1137 = setup_live_bytes_from_ref (&ref, live_bytes);
1138 enum dse_store_status store_status;
1139 store_status = dse_classify_store (&ref, stmt,
1140 byte_tracking_enabled,
1141 live_bytes, &by_clobber_p);
1142 if (store_status == DSE_STORE_LIVE)
1143 return;
1145 if (store_status == DSE_STORE_MAYBE_PARTIAL_DEAD)
1147 maybe_trim_partially_dead_store (&ref, live_bytes, stmt);
1148 return;
1152 /* Now we know that use_stmt kills the LHS of stmt. */
1154 /* But only remove *this_2(D) ={v} {CLOBBER} if killed by
1155 another clobber stmt. */
1156 if (gimple_clobber_p (stmt)
1157 && !by_clobber_p)
1158 return;
1160 if (is_gimple_call (stmt)
1161 && (gimple_has_side_effects (stmt)
1162 || (stmt_could_throw_p (fun, stmt)
1163 && !fun->can_delete_dead_exceptions)))
1165 /* Make sure we do not remove a return slot we cannot reconstruct
1166 later. */
1167 if (gimple_call_return_slot_opt_p (as_a <gcall *>(stmt))
1168 && (TREE_ADDRESSABLE (TREE_TYPE (gimple_call_fntype (stmt)))
1169 || !poly_int_tree_p
1170 (TYPE_SIZE (TREE_TYPE (gimple_call_fntype (stmt))))))
1171 return;
1172 if (dump_file && (dump_flags & TDF_DETAILS))
1174 fprintf (dump_file, " Deleted dead store in call LHS: ");
1175 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
1176 fprintf (dump_file, "\n");
1178 gimple_call_set_lhs (stmt, NULL_TREE);
1179 update_stmt (stmt);
1181 else
1182 delete_dead_or_redundant_assignment (gsi, "dead", need_eh_cleanup);
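/* Illustrative examples (added commentary): a call with an explicit zero
   length such as

     memcpy (dst, src, 0);

   is deleted outright as trivially dead, and when a dead mem* call has a
   used return value, say

     q = memset (buf, 0, 32);

   the deletion above replaces it with the equivalent of q = buf so the
   left-hand side keeps its value.  */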
1185 namespace {
1187 const pass_data pass_data_dse =
1189 GIMPLE_PASS, /* type */
1190 "dse", /* name */
1191 OPTGROUP_NONE, /* optinfo_flags */
1192 TV_TREE_DSE, /* tv_id */
1193 ( PROP_cfg | PROP_ssa ), /* properties_required */
1194 0, /* properties_provided */
1195 0, /* properties_destroyed */
1196 0, /* todo_flags_start */
1197 0, /* todo_flags_finish */
1200 class pass_dse : public gimple_opt_pass
1202 public:
1203 pass_dse (gcc::context *ctxt)
1204 : gimple_opt_pass (pass_data_dse, ctxt)
1207 /* opt_pass methods: */
1208 opt_pass * clone () { return new pass_dse (m_ctxt); }
1209 virtual bool gate (function *) { return flag_tree_dse != 0; }
1210 virtual unsigned int execute (function *);
1212 }; // class pass_dse
1214 unsigned int
1215 pass_dse::execute (function *fun)
1217 unsigned todo = 0;
1218 need_eh_cleanup = BITMAP_ALLOC (NULL);
1219 auto_sbitmap live_bytes (param_dse_max_object_size);
1221 renumber_gimple_stmt_uids (fun);
1223 calculate_dominance_info (CDI_DOMINATORS);
1225 /* Dead store elimination is fundamentally a reverse program order walk. */
1226 int *rpo = XNEWVEC (int, n_basic_blocks_for_fn (fun) - NUM_FIXED_BLOCKS);
1227 int n = pre_and_rev_post_order_compute_fn (fun, NULL, rpo, false);
1228 for (int i = n; i != 0; --i)
1230 basic_block bb = BASIC_BLOCK_FOR_FN (fun, rpo[i-1]);
1231 gimple_stmt_iterator gsi;
1233 for (gsi = gsi_last_bb (bb); !gsi_end_p (gsi);)
1235 gimple *stmt = gsi_stmt (gsi);
1237 if (gimple_vdef (stmt))
1238 dse_optimize_stmt (fun, &gsi, live_bytes);
1239 else if (def_operand_p
1240 def_p = single_ssa_def_operand (stmt, SSA_OP_DEF))
1242 /* When we remove dead stores make sure to also delete trivially
1243 dead SSA defs. */
1244 if (has_zero_uses (DEF_FROM_PTR (def_p))
1245 && !gimple_has_side_effects (stmt)
1246 && !is_ctrl_altering_stmt (stmt)
1247 && (!stmt_could_throw_p (fun, stmt)
1248 || fun->can_delete_dead_exceptions))
1250 if (dump_file && (dump_flags & TDF_DETAILS))
1252 fprintf (dump_file, " Deleted trivially dead stmt: ");
1253 print_gimple_stmt (dump_file, stmt, 0, dump_flags);
1254 fprintf (dump_file, "\n");
1256 if (gsi_remove (&gsi, true) && need_eh_cleanup)
1257 bitmap_set_bit (need_eh_cleanup, bb->index);
1258 release_defs (stmt);
1261 if (gsi_end_p (gsi))
1262 gsi = gsi_last_bb (bb);
1263 else
1264 gsi_prev (&gsi);
1266 bool removed_phi = false;
1267 for (gphi_iterator si = gsi_start_phis (bb); !gsi_end_p (si);)
1269 gphi *phi = si.phi ();
1270 if (has_zero_uses (gimple_phi_result (phi)))
1272 if (dump_file && (dump_flags & TDF_DETAILS))
1274 fprintf (dump_file, " Deleted trivially dead PHI: ");
1275 print_gimple_stmt (dump_file, phi, 0, dump_flags);
1276 fprintf (dump_file, "\n");
1278 remove_phi_node (&si, true);
1279 removed_phi = true;
1281 else
1282 gsi_next (&si);
1284 if (removed_phi && gimple_seq_empty_p (phi_nodes (bb)))
1285 todo |= TODO_cleanup_cfg;
1287 free (rpo);
1289 /* Removal of stores may make some EH edges dead. Purge such edges from
1290 the CFG as needed. */
1291 if (!bitmap_empty_p (need_eh_cleanup))
1293 gimple_purge_all_dead_eh_edges (need_eh_cleanup);
1294 todo |= TODO_cleanup_cfg;
1297 BITMAP_FREE (need_eh_cleanup);
1299 return todo;
1302 } // anon namespace
1304 gimple_opt_pass *
1305 make_pass_dse (gcc::context *ctxt)
1307 return new pass_dse (ctxt);