/* Post reload partially redundant load elimination
   Copyright (C) 2004-2018 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "predict.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "insn-config.h"
#include "emit-rtl.h"
#include "recog.h"

#include "cfgrtl.h"
#include "profile.h"
#include "expr.h"
#include "params.h"
#include "tree-pass.h"
#include "dbgcnt.h"
#include "gcse-common.h"

/* The following code implements gcse after reload.  The purpose of this
   pass is to clean up redundant loads generated by reload and other
   optimizations that come after gcse.  It searches for simple inter-block
   redundancies and tries to eliminate them by adding moves and loads
   in cold places.

   Perform partially redundant load elimination, trying to eliminate
   redundant loads created by the reload pass.  We look for fully or
   partially redundant loads fed by one or more loads/stores in
   predecessor BBs, and try adding loads to make them fully redundant.
   We also check whether it's worth adding loads to be able to delete
   the redundant load.

   Algorithm:
   1. Build available expressions hash table:
      For each load/store instruction, if the loaded/stored memory didn't
      change until the end of the basic block add this memory expression to
      the hash table.
   2. Perform redundancy elimination:
      For each load instruction do the following:
	 perform partial redundancy elimination, check if it's worth adding
	 loads to make the load fully redundant.  If so add loads and
	 register copies and delete the load.
   3. Delete instructions made redundant in step 2.

   Future enhancement:
     If the loaded register is used/defined between load and some store,
     look for some other free register between load and all its stores,
     and replace the load with a copy from this register to the loaded
     register.  */
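
/* As an illustrative sketch of the transformation (hypothetical RTL,
   not taken from any particular testcase): suppose block B3 begins
   with a load

       (set (reg:SI 0) (mem:SI (reg:SI 1)))

   whose memory expression is available at the end of predecessor B1
   (say through the store (set (mem:SI (reg:SI 1)) (reg:SI 2))) but
   not at the end of predecessor B2.  The pass then inserts the
   register move (set (reg:SI 0) (reg:SI 2)) on the edge B1->B3 and a
   copy of the load itself on the edge B2->B3, after which the load in
   B3 is fully redundant and can be deleted.  */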

/* Keep statistics of this pass.  */
static struct
{
  int moves_inserted;
  int copies_inserted;
  int insns_deleted;
} stats;

/* We need to keep a hash table of expressions.  The table entries are of
   type 'struct expr', and for each expression there is a singly-linked
   list of occurrences.  */

/* Expression elements in the hash table.  */
struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;

  /* The hash value of the expression.  */
  hashval_t hash;

  /* Index in the transparent bitmaps.  */
  unsigned int bitmap_index;

  /* List of available occurrences in basic blocks in the function.  */
  struct occr *avail_occr;
};

/* Hashtable helpers.  */

struct expr_hasher : nofree_ptr_hash <expr>
{
  static inline hashval_t hash (const expr *);
  static inline bool equal (const expr *, const expr *);
};

/* Hash expression X.
   DO_NOT_RECORD_P is a boolean indicating if a volatile operand is found
   or if the expression contains something we don't want to insert in the
   table.  */

static hashval_t
hash_expr (rtx x, int *do_not_record_p)
{
  *do_not_record_p = 0;
  return hash_rtx (x, GET_MODE (x), do_not_record_p,
		   NULL, /*have_reg_qty=*/false);
}

/* Callback for hashtab.
   Return the hash value for expression EXP.  We don't actually hash
   here, we just return the cached hash value.  */

inline hashval_t
expr_hasher::hash (const expr *exp)
{
  return exp->hash;
}

/* Callback for hashtab.
   Return nonzero if exp1 is equivalent to exp2.  */

inline bool
expr_hasher::equal (const expr *exp1, const expr *exp2)
{
  int equiv_p = exp_equiv_p (exp1->expr, exp2->expr, 0, true);

  gcc_assert (!equiv_p || exp1->hash == exp2->hash);
  return equiv_p;
}

/* The table itself.  */
static hash_table<expr_hasher> *expr_table;

static struct obstack expr_obstack;

/* Occurrence of an expression.
   There is at most one occurrence per basic block.  If a pattern appears
   more than once, the last appearance is used.  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx_insn *insn;
  /* Nonzero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
};

static struct obstack occr_obstack;

/* The following structure holds the information about the occurrences of
   the redundant instructions.  */
struct unoccr
{
  struct unoccr *next;
  edge pred;
  rtx_insn *insn;
};

static struct obstack unoccr_obstack;

/* Array indexed by hard register number, where each element is the CUID
   of the insn that last set that register, since the start of the
   current basic block.

   This array is used during the building of the hash table (step 1) to
   determine if a reg is killed before the end of a basic block.

   It is also used when eliminating partial redundancies (step 2) to see
   if a reg was modified since the start of a basic block.  */
static int *reg_avail_info;

/* A list of insns that may modify memory within the current basic block.  */
struct modifies_mem
{
  rtx_insn *insn;
  struct modifies_mem *next;
};

static struct modifies_mem *modifies_mem_list;

/* The modifies_mem structs also go on an obstack, only this obstack is
   freed each time after completing the analysis or transformations on
   a basic block.  So we allocate a dummy modifies_mem_obstack_bottom
   object on the obstack to keep track of the bottom of the obstack.  */
static struct obstack modifies_mem_obstack;
static struct modifies_mem *modifies_mem_obstack_bottom;

/* Mapping of insn UIDs to CUIDs.
   CUIDs are like UIDs except they increase monotonically in each basic
   block, have no gaps, and only apply to real insns.  */
static int *uid_cuid;
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
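
/* For example (an illustrative sketch, derived from the assignment in
   alloc_mem below): in a block whose insns have UIDs 10 (a real insn),
   11 (a note) and 12 (a real insn), the CUIDs assigned would be 1 for
   UID 10, 2 for UID 11, and 2 for UID 12 -- a non-insn shares the CUID
   of the next real insn.  */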

/* Bitmap of blocks which have memory stores.  */
static bitmap modify_mem_list_set;

/* Bitmap of blocks which have calls.  */
static bitmap blocks_with_calls;

/* Vector indexed by block # with a list of all the insns that
   modify memory within the block.  */
static vec<rtx_insn *> *modify_mem_list;

/* Vector indexed by block # with a canonicalized list of insns
   that modify memory in the block.  */
static vec<modify_pair> *canon_modify_mem_list;

/* Vector of simple bitmaps indexed by block number.  Each component sbitmap
   indicates which expressions are transparent through the block.  */
static sbitmap *transp;

/* Helpers for memory allocation/freeing.  */
static void alloc_mem (void);
static void free_mem (void);

/* Support for hash table construction and transformations.  */
static bool oprs_unchanged_p (rtx, rtx_insn *, bool);
static void record_last_reg_set_info (rtx_insn *, rtx);
static void record_last_reg_set_info_regno (rtx_insn *, int);
static void record_last_mem_set_info (rtx_insn *);
static void record_last_set_info (rtx, const_rtx, void *);
static void record_opr_changes (rtx_insn *);

static void find_mem_conflicts (rtx, const_rtx, void *);
static int load_killed_in_block_p (int, rtx, bool);
static void reset_opr_set_tables (void);

/* Hash table support.  */
static hashval_t hash_expr (rtx, int *);
static void insert_expr_in_table (rtx, rtx_insn *);
static struct expr *lookup_expr_in_table (rtx);
static void dump_hash_table (FILE *);

/* Helpers for eliminate_partially_redundant_load.  */
static bool reg_killed_on_edge (rtx, edge);
static bool reg_used_on_edge (rtx, edge);

static rtx get_avail_load_store_reg (rtx_insn *);

static bool bb_has_well_behaved_predecessors (basic_block);
static struct occr* get_bb_avail_insn (basic_block, struct occr *, int);
static void hash_scan_set (rtx_insn *);
static void compute_hash_table (void);

/* The work horses of this pass.  */
static void eliminate_partially_redundant_load (basic_block,
						rtx_insn *,
						struct expr *);
static void eliminate_partially_redundant_loads (void);

/* Allocate memory for the CUID mapping array and register/memory
   tracking tables.  */

static void
alloc_mem (void)
{
  int i;
  basic_block bb;
  rtx_insn *insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.  */
  uid_cuid = XCNEWVEC (int, get_max_uid () + 1);
  i = 1;
  FOR_EACH_BB_FN (bb, cfun)
    FOR_BB_INSNS (bb, insn)
      {
	if (INSN_P (insn))
	  uid_cuid[INSN_UID (insn)] = i++;
	else
	  uid_cuid[INSN_UID (insn)] = i;
      }

  /* Allocate the available expressions hash table.  We don't want to
     make the hash table too small, but unnecessarily making it too large
     also doesn't help.  The i/4 is a gcse.c relic, and seems like a
     reasonable choice.  */
  expr_table = new hash_table<expr_hasher> (MAX (i / 4, 13));

  /* We allocate everything on obstacks because we often can roll back
     the whole obstack to some point.  Freeing obstacks is very fast.  */
  gcc_obstack_init (&expr_obstack);
  gcc_obstack_init (&occr_obstack);
  gcc_obstack_init (&unoccr_obstack);
  gcc_obstack_init (&modifies_mem_obstack);

  /* Working array used to track the last set for each register
     in the current block.  */
  reg_avail_info = (int *) xmalloc (FIRST_PSEUDO_REGISTER * sizeof (int));

  /* Put a dummy modifies_mem object on the modifies_mem_obstack, so we
     can roll it back in reset_opr_set_tables.  */
  modifies_mem_obstack_bottom =
    (struct modifies_mem *) obstack_alloc (&modifies_mem_obstack,
					   sizeof (struct modifies_mem));

  blocks_with_calls = BITMAP_ALLOC (NULL);
  modify_mem_list_set = BITMAP_ALLOC (NULL);

  modify_mem_list = (vec_rtx_heap *) xcalloc (last_basic_block_for_fn (cfun),
					      sizeof (vec_rtx_heap));
  canon_modify_mem_list
    = (vec_modify_pair_heap *) xcalloc (last_basic_block_for_fn (cfun),
					sizeof (vec_modify_pair_heap));
}

/* Free memory allocated by alloc_mem.  */

static void
free_mem (void)
{
  free (uid_cuid);

  delete expr_table;
  expr_table = NULL;

  obstack_free (&expr_obstack, NULL);
  obstack_free (&occr_obstack, NULL);
  obstack_free (&unoccr_obstack, NULL);
  obstack_free (&modifies_mem_obstack, NULL);

  unsigned i;
  bitmap_iterator bi;
  EXECUTE_IF_SET_IN_BITMAP (modify_mem_list_set, 0, i, bi)
    {
      modify_mem_list[i].release ();
      canon_modify_mem_list[i].release ();
    }

  BITMAP_FREE (blocks_with_calls);
  BITMAP_FREE (modify_mem_list_set);
  free (reg_avail_info);
  free (modify_mem_list);
  free (canon_modify_mem_list);
}

/* Insert expression X in INSN in the hash table.
   If it is already present, record it as the last occurrence in INSN's
   basic block.  */

static void
insert_expr_in_table (rtx x, rtx_insn *insn)
{
  int do_not_record_p;
  hashval_t hash;
  struct expr *cur_expr, **slot;
  struct occr *avail_occr, *last_occr = NULL;

  hash = hash_expr (x, &do_not_record_p);

  /* Do not insert expression in the table if it contains volatile operands,
     or if hash_expr determines the expression is something we don't want
     to or can't handle.  */
  if (do_not_record_p)
    return;

  /* We anticipate that redundant expressions are rare, so for convenience
     allocate a new hash table element here already and set its fields.
     If we don't do this, we need a hack with a static struct expr.  Anyway,
     obstack_free is really fast and one more obstack_alloc doesn't hurt if
     we're going to see more expressions later on.  */
  cur_expr = (struct expr *) obstack_alloc (&expr_obstack,
					    sizeof (struct expr));
  cur_expr->expr = x;
  cur_expr->hash = hash;
  cur_expr->avail_occr = NULL;

  slot = expr_table->find_slot_with_hash (cur_expr, hash, INSERT);

  if (! (*slot))
    {
      /* The expression isn't found, so insert it.  */
      *slot = cur_expr;

      /* Anytime we add an entry to the table, record the index
	 of the new entry.  The bitmap index starts counting
	 at zero.  */
      cur_expr->bitmap_index = expr_table->elements () - 1;
    }
  else
    {
      /* The expression is already in the table, so roll back the
	 obstack and use the existing table entry.  */
      obstack_free (&expr_obstack, cur_expr);
      cur_expr = *slot;
    }

  /* Search for another occurrence in the same basic block.  */
  avail_occr = cur_expr->avail_occr;
  while (avail_occr
	 && BLOCK_FOR_INSN (avail_occr->insn) != BLOCK_FOR_INSN (insn))
    {
      /* If an occurrence isn't found, save a pointer to the end of
	 the list.  */
      last_occr = avail_occr;
      avail_occr = avail_occr->next;
    }

  if (avail_occr)
    /* Found another instance of the expression in the same basic block.
       Prefer this occurrence to the currently recorded one.  We want
       the last one in the block and the block is scanned from start
       to end.  */
    avail_occr->insn = insn;
  else
    {
      /* First occurrence of this expression in this basic block.  */
      avail_occr = (struct occr *) obstack_alloc (&occr_obstack,
						  sizeof (struct occr));

      /* First occurrence of this expression in any block?  */
      if (cur_expr->avail_occr == NULL)
	cur_expr->avail_occr = avail_occr;
      else
	last_occr->next = avail_occr;

      avail_occr->insn = insn;
      avail_occr->next = NULL;
      avail_occr->deleted_p = 0;
    }
}

/* Lookup pattern PAT in the expression hash table.
   The result is a pointer to the table entry, or NULL if not found.  */

static struct expr *
lookup_expr_in_table (rtx pat)
{
  int do_not_record_p;
  struct expr **slot, *tmp_expr;
  hashval_t hash = hash_expr (pat, &do_not_record_p);

  if (do_not_record_p)
    return NULL;

  tmp_expr = (struct expr *) obstack_alloc (&expr_obstack,
					    sizeof (struct expr));
  tmp_expr->expr = pat;
  tmp_expr->hash = hash;
  tmp_expr->avail_occr = NULL;

  slot = expr_table->find_slot_with_hash (tmp_expr, hash, INSERT);
  obstack_free (&expr_obstack, tmp_expr);

  if (!slot)
    return NULL;
  else
    return (*slot);
}

/* Dump all expressions and occurrences that are currently in the
   expression hash table to FILE.  */

/* This helper is called via htab_traverse.  */
static int
dump_expr_hash_table_entry (expr **slot, FILE *file)
{
  struct expr *exprs = *slot;
  struct occr *occr;

  fprintf (file, "expr: ");
  print_rtl (file, exprs->expr);
  fprintf (file, "\nhashcode: %u\n", exprs->hash);
  fprintf (file, "list of occurrences:\n");
  occr = exprs->avail_occr;
  while (occr)
    {
      rtx_insn *insn = occr->insn;
      print_rtl_single (file, insn);
      fprintf (file, "\n");
      occr = occr->next;
    }
  fprintf (file, "\n");
  return 1;
}

static void
dump_hash_table (FILE *file)
{
  fprintf (file, "\n\nexpression hash table\n");
  fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n",
	   (long) expr_table->size (),
	   (long) expr_table->elements (),
	   expr_table->collisions ());
  if (expr_table->elements () > 0)
    {
      fprintf (file, "\n\ntable entries:\n");
      expr_table->traverse <FILE *, dump_expr_hash_table_entry> (file);
    }
  fprintf (file, "\n");
}

/* Return true if register X is recorded as being set by an instruction
   whose CUID is greater than the one given.  */

static bool
reg_changed_after_insn_p (rtx x, int cuid)
{
  unsigned int regno, end_regno;

  regno = REGNO (x);
  end_regno = END_REGNO (x);
  do
    if (reg_avail_info[regno] > cuid)
      return true;
  while (++regno < end_regno);
  return false;
}

/* Return nonzero if the operands of expression X are unchanged
   1) from the start of INSN's basic block up to but not including INSN
      if AFTER_INSN is false, or
   2) from INSN to the end of INSN's basic block if AFTER_INSN is true.  */
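
/* For example (an illustrative sketch): given the load
   (set (reg:SI 0) (mem:SI (reg:SI 1))), calling this on its SET_SRC
   with AFTER_INSN true asks whether reg 1 and the loaded memory
   location stay unchanged through the end of the block, i.e. whether
   the expression is still available at the block end; with AFTER_INSN
   false it asks whether they are unchanged from the block start up to
   the load, i.e. whether the load reads the same value it would have
   read at the block start.  */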

static bool
oprs_unchanged_p (rtx x, rtx_insn *insn, bool after_insn)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      /* We are called after register allocation.  */
      gcc_assert (REGNO (x) < FIRST_PSEUDO_REGISTER);
      if (after_insn)
	return !reg_changed_after_insn_p (x, INSN_CUID (insn) - 1);
      else
	return !reg_changed_after_insn_p (x, 0);

    case MEM:
      if (load_killed_in_block_p (INSN_CUID (insn), x, after_insn))
	return 0;
      else
	return oprs_unchanged_p (XEXP (x, 0), insn, after_insn);

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      if (after_insn)
	return 0;
      break;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  if (! oprs_unchanged_p (XEXP (x, i), insn, after_insn))
	    return 0;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, after_insn))
	    return 0;
    }

  return 1;
}

/* Used for communication between find_mem_conflicts and
   load_killed_in_block_p.  Nonzero if find_mem_conflicts finds a
   conflict between two memory references.
   This is a bit of a hack to work around the limitations of note_stores.  */
static int mems_conflict_p;

/* DEST is the output of an instruction.  If it is a memory reference, and
   possibly conflicts with the load found in DATA, then set mems_conflict_p
   to a nonzero value.  */

static void
find_mem_conflicts (rtx dest, const_rtx setter ATTRIBUTE_UNUSED,
		    void *data)
{
  rtx mem_op = (rtx) data;

  while (GET_CODE (dest) == SUBREG
	 || GET_CODE (dest) == ZERO_EXTRACT
	 || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with the load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */
  if (! MEM_P (dest))
    return;

  if (true_dependence (dest, GET_MODE (dest), mem_op))
    mems_conflict_p = 1;
}

/* Return nonzero if the expression in X (a memory reference) is killed
   in the current basic block before (if AFTER_INSN is false) or after
   (if AFTER_INSN is true) the insn with the CUID in UID_LIMIT.

   This function assumes that the modifies_mem table is flushed when
   the hash table construction or redundancy elimination phases start
   processing a new basic block.  */

static int
load_killed_in_block_p (int uid_limit, rtx x, bool after_insn)
{
  struct modifies_mem *list_entry = modifies_mem_list;

  while (list_entry)
    {
      rtx_insn *setter = list_entry->insn;

      /* Ignore entries in the list that do not apply.  */
      if ((after_insn
	   && INSN_CUID (setter) < uid_limit)
	  || (! after_insn
	      && INSN_CUID (setter) > uid_limit))
	{
	  list_entry = list_entry->next;
	  continue;
	}

      /* If SETTER is a call everything is clobbered.  Note that calls
	 to pure functions are never put on the list, so we need not
	 worry about them.  */
      if (CALL_P (setter))
	return 1;

      /* SETTER must be an insn of some kind that sets memory.  Call
	 note_stores to examine each hunk of memory that is modified.
	 It will set mems_conflict_p to nonzero if there may be a
	 conflict between X and SETTER.  */
      mems_conflict_p = 0;
      note_stores (PATTERN (setter), find_mem_conflicts, x);
      if (mems_conflict_p)
	return 1;

      list_entry = list_entry->next;
    }
  return 0;
}

/* Record register first/last/block set information for REG in INSN.  */

static inline void
record_last_reg_set_info (rtx_insn *insn, rtx reg)
{
  unsigned int regno, end_regno;

  regno = REGNO (reg);
  end_regno = END_REGNO (reg);
  do
    reg_avail_info[regno] = INSN_CUID (insn);
  while (++regno < end_regno);
}

static inline void
record_last_reg_set_info_regno (rtx_insn *insn, int regno)
{
  reg_avail_info[regno] = INSN_CUID (insn);
}

/* Record memory modification information for INSN.  We do not actually care
   about the memory location(s) that are set, or even how they are set
   (consider a CALL_INSN).  We merely need to record which insns modify
   memory.  */

static void
record_last_mem_set_info (rtx_insn *insn)
{
  struct modifies_mem *list_entry;

  list_entry = (struct modifies_mem *) obstack_alloc (&modifies_mem_obstack,
						      sizeof (struct modifies_mem));
  list_entry->insn = insn;
  list_entry->next = modifies_mem_list;
  modifies_mem_list = list_entry;

  record_last_mem_set_info_common (insn, modify_mem_list,
				   canon_modify_mem_list,
				   modify_mem_list_set,
				   blocks_with_calls);
}

/* Called from compute_hash_table via note_stores to handle one
   SET or CLOBBER in an insn.  DATA is really the instruction in which
   the SET is taking place.  */

static void
record_last_set_info (rtx dest, const_rtx setter ATTRIBUTE_UNUSED, void *data)
{
  rtx_insn *last_set_insn = (rtx_insn *) data;

  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (REG_P (dest))
    record_last_reg_set_info (last_set_insn, dest);
  else if (MEM_P (dest))
    {
      /* Ignore pushes, they don't clobber memory.  They may still
	 clobber the stack pointer though.  Some targets do argument
	 pushes without adding REG_INC notes.  See e.g. PR25196,
	 where a pushsi2 on i386 doesn't have REG_INC notes.  Note
	 such changes here too.  */
      if (! push_operand (dest, GET_MODE (dest)))
	record_last_mem_set_info (last_set_insn);
      else
	record_last_reg_set_info_regno (last_set_insn, STACK_POINTER_REGNUM);
    }
}

/* Reset tables used to keep track of what's still available since the
   start of the block.  */

static void
reset_opr_set_tables (void)
{
  memset (reg_avail_info, 0, FIRST_PSEUDO_REGISTER * sizeof (int));
  obstack_free (&modifies_mem_obstack, modifies_mem_obstack_bottom);
  modifies_mem_list = NULL;
}

/* Record things set by INSN.
   This data is used by oprs_unchanged_p.  */

static void
record_opr_changes (rtx_insn *insn)
{
  rtx note;

  /* Find all stores and record them.  */
  note_stores (PATTERN (insn), record_last_set_info, insn);

  /* Also record autoincremented REGs for this insn as changed.  */
  for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
    if (REG_NOTE_KIND (note) == REG_INC)
      record_last_reg_set_info (insn, XEXP (note, 0));

  /* Finally, if this is a call, record all call clobbers.  */
  if (CALL_P (insn))
    {
      unsigned int regno;
      rtx link, x;
      hard_reg_set_iterator hrsi;
      EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call, 0, regno, hrsi)
	record_last_reg_set_info_regno (insn, regno);

      for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
	{
	  gcc_assert (GET_CODE (XEXP (link, 0)) != CLOBBER_HIGH);
	  if (GET_CODE (XEXP (link, 0)) == CLOBBER)
	    {
	      x = XEXP (XEXP (link, 0), 0);
	      if (REG_P (x))
		{
		  gcc_assert (HARD_REGISTER_P (x));
		  record_last_reg_set_info (insn, x);
		}
	    }
	}

      if (! RTL_CONST_OR_PURE_CALL_P (insn))
	record_last_mem_set_info (insn);
    }
}

/* Scan the pattern of INSN and add an entry to the hash table.
   After reload we are interested in loads/stores only.  */

static void
hash_scan_set (rtx_insn *insn)
{
  rtx pat = PATTERN (insn);
  rtx src = SET_SRC (pat);
  rtx dest = SET_DEST (pat);

  /* We are only interested in loads and stores.  */
  if (! MEM_P (src) && ! MEM_P (dest))
    return;

  /* Don't mess with jumps and nops.  */
  if (JUMP_P (insn) || set_noop_p (pat))
    return;

  if (REG_P (dest))
    {
      if (/* Don't CSE something if we can't do a reg/reg copy.  */
	  can_copy_p (GET_MODE (dest))
	  /* Is SET_SRC something we want to gcse?  */
	  && general_operand (src, GET_MODE (src))
#ifdef STACK_REGS
	  /* Never consider insns touching the register stack.  It may
	     create situations that reg-stack cannot handle (e.g. a stack
	     register live across an abnormal edge).  */
	  && (REGNO (dest) < FIRST_STACK_REG || REGNO (dest) > LAST_STACK_REG)
#endif
	  /* An expression is not available if its operands are
	     subsequently modified, including this insn.  */
	  && oprs_unchanged_p (src, insn, true))
	{
	  insert_expr_in_table (src, insn);
	}
    }
  else if (REG_P (src))
    {
      /* Only record sets of pseudo-regs in the hash table.  */
      if (/* Don't CSE something if we can't do a reg/reg copy.  */
	  can_copy_p (GET_MODE (src))
	  /* Is SET_DEST something we want to gcse?  */
	  && general_operand (dest, GET_MODE (dest))
#ifdef STACK_REGS
	  /* As above for STACK_REGS.  */
	  && (REGNO (src) < FIRST_STACK_REG || REGNO (src) > LAST_STACK_REG)
#endif
	  && ! (flag_float_store && FLOAT_MODE_P (GET_MODE (dest)))
	  /* Check if the memory expression is killed after insn.  */
	  && ! load_killed_in_block_p (INSN_CUID (insn) + 1, dest, true)
	  && oprs_unchanged_p (XEXP (dest, 0), insn, true))
	{
	  insert_expr_in_table (dest, insn);
	}
    }
}

/* Create hash table of memory expressions available at end of basic
   blocks.  Basically you should think of this hash table as the
   representation of AVAIL_OUT.  This is the set of expressions that
   is generated in a basic block and not killed before the end of the
   same basic block.  Notice that this is really a local computation.  */
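
/* For instance (an illustrative sketch, hypothetical RTL): if a block
   contains

       (set (reg:SI 2) (mem:SI (reg:SI 1)))
       ...
       (set (mem:SI (reg:SI 3)) (reg:SI 4))

   then (mem:SI (reg:SI 1)) is in the table -- i.e. in AVAIL_OUT -- only
   if neither reg 1 nor the loaded location may be modified by the rest
   of the block, and (mem:SI (reg:SI 3)) is in the table under the
   analogous conditions for the store.  */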

static void
compute_hash_table (void)
{
  basic_block bb;

  FOR_EACH_BB_FN (bb, cfun)
    {
      rtx_insn *insn;

      /* First pass over the instructions records information used to
	 determine when registers and memory are last set.
	 Since we compute a "local" AVAIL_OUT, reset the tables that
	 help us keep track of what has been modified since the start
	 of the block.  */
      reset_opr_set_tables ();
      FOR_BB_INSNS (bb, insn)
	{
	  if (INSN_P (insn))
	    record_opr_changes (insn);
	}

      /* The next pass actually builds the hash table.  */
      FOR_BB_INSNS (bb, insn)
	if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == SET)
	  hash_scan_set (insn);
    }
}

/* Check if register REG is killed in any insn waiting to be inserted on
   edge E.  This function is required to check that our data flow analysis
   is still valid prior to commit_edge_insertions.  */

static bool
reg_killed_on_edge (rtx reg, edge e)
{
  rtx_insn *insn;

  for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))
      return true;

  return false;
}

/* Similar to above - check if register REG is used in any insn waiting
   to be inserted on edge E.
   Assumes no such insn can be a CALL_INSN; if so call reg_used_between_p
   with PREV(insn),NEXT(insn) instead of calling reg_overlap_mentioned_p.  */

static bool
reg_used_on_edge (rtx reg, edge e)
{
  rtx_insn *insn;

  for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
      return true;

  return false;
}

/* Return the loaded/stored register of a load/store instruction.  */

static rtx
get_avail_load_store_reg (rtx_insn *insn)
{
  if (REG_P (SET_DEST (PATTERN (insn))))
    /* A load.  */
    return SET_DEST (PATTERN (insn));
  else
    {
      /* A store.  */
      gcc_assert (REG_P (SET_SRC (PATTERN (insn))));
      return SET_SRC (PATTERN (insn));
    }
}

/* Return nonzero if the predecessors of BB are "well behaved".  */

static bool
bb_has_well_behaved_predecessors (basic_block bb)
{
  edge pred;
  edge_iterator ei;

  if (EDGE_COUNT (bb->preds) == 0)
    return false;

  FOR_EACH_EDGE (pred, ei, bb->preds)
    {
      /* commit_one_edge_insertion refuses to insert on abnormal edges even if
	 the source has only one successor so EDGE_CRITICAL_P is too weak.  */
      if ((pred->flags & EDGE_ABNORMAL) && !single_pred_p (pred->dest))
	return false;

      if ((pred->flags & EDGE_ABNORMAL_CALL) && cfun->has_nonlocal_label)
	return false;

      if (tablejump_p (BB_END (pred->src), NULL, NULL))
	return false;
    }
  return true;
}

/* Search for an occurrence of the expression in BB.  */

static struct occr*
get_bb_avail_insn (basic_block bb, struct occr *orig_occr, int bitmap_index)
{
  struct occr *occr = orig_occr;

  for (; occr != NULL; occr = occr->next)
    if (BLOCK_FOR_INSN (occr->insn) == bb)
      return occr;

  /* If we could not find an occurrence in BB, see if BB
     has a single predecessor with an occurrence that is
     transparent through BB.  */
  if (single_pred_p (bb)
      && bitmap_bit_p (transp[bb->index], bitmap_index)
      && (occr = get_bb_avail_insn (single_pred (bb), orig_occr, bitmap_index)))
    {
      rtx avail_reg = get_avail_load_store_reg (occr->insn);
      if (!reg_set_between_p (avail_reg,
			      PREV_INSN (BB_HEAD (bb)),
			      NEXT_INSN (BB_END (bb)))
	  && !reg_killed_on_edge (avail_reg, single_pred_edge (bb)))
	return occr;
    }

  return NULL;
}

/* This helper is called via htab_traverse.  */
static int
compute_expr_transp (expr **slot, FILE *dump_file ATTRIBUTE_UNUSED)
{
  struct expr *expr = *slot;

  compute_transp (expr->expr, expr->bitmap_index, transp,
		  blocks_with_calls, modify_mem_list_set,
		  canon_modify_mem_list);
  return 1;
}

/* This handles the case where several stores feed a partially redundant
   load.  It checks if the redundancy elimination is possible and if it's
   worth it.

   Redundancy elimination is possible if:
   1) None of the operands of an insn have been modified since the start
      of the current basic block.
   2) In any predecessor of the current basic block, the same expression
      is generated.

   See the function body for the heuristics that determine if eliminating
   a redundancy is also worth doing, assuming it is possible.  */
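
/* A sketch of the profitability test applied below (a paraphrase of
   the code, using the params it reads): the elimination is performed
   only when

       ok_count >= GCSE_AFTER_RELOAD_PARTIAL_FRACTION * not_ok_count
       && ok_count >= GCSE_AFTER_RELOAD_CRITICAL_FRACTION * critical_count

   where ok_count is the combined profile count of the predecessor
   edges on which the value is already available, not_ok_count that of
   the edges needing a newly inserted load, and critical_count that of
   the critical edges involved.  */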

static void
eliminate_partially_redundant_load (basic_block bb, rtx_insn *insn,
				    struct expr *expr)
{
  edge pred;
  rtx_insn *avail_insn = NULL;
  rtx avail_reg;
  rtx dest, pat;
  struct occr *a_occr;
  struct unoccr *occr, *avail_occrs = NULL;
  struct unoccr *unoccr, *unavail_occrs = NULL, *rollback_unoccr = NULL;
  int npred_ok = 0;
  /* Redundant load execution count.  */
  profile_count ok_count = profile_count::zero ();
  /* Execution count of critical edges.  */
  profile_count critical_count = profile_count::zero ();
  edge_iterator ei;
  bool critical_edge_split = false;

  /* The execution count of the loads to be added to make the
     load fully redundant.  */
  profile_count not_ok_count = profile_count::zero ();
  basic_block pred_bb;

  pat = PATTERN (insn);
  dest = SET_DEST (pat);

  /* Check that the loaded register is not used, set, or killed from the
     beginning of the block.  */
  if (reg_changed_after_insn_p (dest, 0)
      || reg_used_between_p (dest, PREV_INSN (BB_HEAD (bb)), insn))
    return;

  /* Check potential for replacing load with copy for predecessors.  */
  FOR_EACH_EDGE (pred, ei, bb->preds)
    {
      rtx_insn *next_pred_bb_end;

      avail_insn = NULL;
      avail_reg = NULL_RTX;
      pred_bb = pred->src;
      for (a_occr = get_bb_avail_insn (pred_bb,
				       expr->avail_occr,
				       expr->bitmap_index);
	   a_occr;
	   a_occr = get_bb_avail_insn (pred_bb,
				       a_occr->next,
				       expr->bitmap_index))
	{
	  /* Check if the loaded register is not used.  */
	  avail_insn = a_occr->insn;
	  avail_reg = get_avail_load_store_reg (avail_insn);
	  gcc_assert (avail_reg);

	  /* Make sure we can generate a move from register avail_reg to
	     dest.  */
	  rtx_insn *move = gen_move_insn (copy_rtx (dest),
					  copy_rtx (avail_reg));
	  extract_insn (move);
	  if (! constrain_operands (1, get_preferred_alternatives (insn,
								   pred_bb))
	      || reg_killed_on_edge (avail_reg, pred)
	      || reg_used_on_edge (dest, pred))
	    {
	      avail_insn = NULL;
	      continue;
	    }
	  next_pred_bb_end = NEXT_INSN (BB_END (BLOCK_FOR_INSN (avail_insn)));
	  if (!reg_set_between_p (avail_reg, avail_insn, next_pred_bb_end))
	    /* AVAIL_INSN remains non-null.  */
	    break;
	  else
	    avail_insn = NULL;
	}

      if (EDGE_CRITICAL_P (pred) && pred->count ().initialized_p ())
	critical_count += pred->count ();

      if (avail_insn != NULL_RTX)
	{
	  npred_ok++;
	  if (pred->count ().initialized_p ())
	    ok_count = ok_count + pred->count ();
	  if (! set_noop_p (PATTERN (gen_move_insn (copy_rtx (dest),
						    copy_rtx (avail_reg)))))
	    {
	      /* Check if there is going to be a split.  */
	      if (EDGE_CRITICAL_P (pred))
		critical_edge_split = true;
	    }
	  else /* It's a dead move, no need to generate.  */
	    continue;
	  occr = (struct unoccr *) obstack_alloc (&unoccr_obstack,
						  sizeof (struct unoccr));
	  occr->insn = avail_insn;
	  occr->pred = pred;
	  occr->next = avail_occrs;
	  avail_occrs = occr;
	  if (! rollback_unoccr)
	    rollback_unoccr = occr;
	}
      else
	{
	  /* Adding a load on a critical edge will cause a split.  */
	  if (EDGE_CRITICAL_P (pred))
	    critical_edge_split = true;
	  if (pred->count ().initialized_p ())
	    not_ok_count = not_ok_count + pred->count ();
	  unoccr = (struct unoccr *) obstack_alloc (&unoccr_obstack,
						    sizeof (struct unoccr));
	  unoccr->insn = NULL;
	  unoccr->pred = pred;
	  unoccr->next = unavail_occrs;
	  unavail_occrs = unoccr;
	  if (! rollback_unoccr)
	    rollback_unoccr = unoccr;
	}
    }

  if (/* No load can be replaced by copy.  */
      npred_ok == 0
      /* Prevent exploding the code.  */
      || (optimize_bb_for_size_p (bb) && npred_ok > 1)
      /* If we don't have profile information we cannot tell if splitting
	 a critical edge is profitable or not so don't do it.  */
      || ((! profile_info || profile_status_for_fn (cfun) != PROFILE_READ
	   || targetm.cannot_modify_jumps_p ())
	  && critical_edge_split))
    goto cleanup;

  /* Check if it's worth applying the partial redundancy elimination.  */
  if (ok_count.to_gcov_type ()
      < GCSE_AFTER_RELOAD_PARTIAL_FRACTION * not_ok_count.to_gcov_type ())
    goto cleanup;
  if (ok_count.to_gcov_type ()
      < GCSE_AFTER_RELOAD_CRITICAL_FRACTION * critical_count.to_gcov_type ())
    goto cleanup;

  /* Generate moves to the loaded register from where
     the memory is available.  */
  for (occr = avail_occrs; occr; occr = occr->next)
    {
      avail_insn = occr->insn;
      pred = occr->pred;
      /* Set avail_reg to be the register having the value of the
	 memory.  */
      avail_reg = get_avail_load_store_reg (avail_insn);
      gcc_assert (avail_reg);

      insert_insn_on_edge (gen_move_insn (copy_rtx (dest),
					  copy_rtx (avail_reg)),
			   pred);
      stats.moves_inserted++;

      if (dump_file)
	fprintf (dump_file,
		 "generating move from %d to %d on edge from %d to %d\n",
		 REGNO (avail_reg),
		 REGNO (dest),
		 pred->src->index,
		 pred->dest->index);
    }

  /* Regenerate loads where the memory is unavailable.  */
  for (unoccr = unavail_occrs; unoccr; unoccr = unoccr->next)
    {
      pred = unoccr->pred;
      insert_insn_on_edge (copy_insn (PATTERN (insn)), pred);
      stats.copies_inserted++;

      if (dump_file)
	{
	  fprintf (dump_file,
		   "generating on edge from %d to %d a copy of load: ",
		   pred->src->index,
		   pred->dest->index);
	  print_rtl (dump_file, PATTERN (insn));
	  fprintf (dump_file, "\n");
	}
    }

  /* Delete the insn if it is not available in this block and mark it
     for deletion if it is available.  If insn is available it may help
     discover additional redundancies, so mark it for later deletion.  */
  for (a_occr = get_bb_avail_insn (bb, expr->avail_occr, expr->bitmap_index);
       a_occr && (a_occr->insn != insn);
       a_occr = get_bb_avail_insn (bb, a_occr->next, expr->bitmap_index))
    ;

  if (!a_occr)
    {
      stats.insns_deleted++;

      if (dump_file)
	{
	  fprintf (dump_file, "deleting insn:\n");
	  print_rtl_single (dump_file, insn);
	  fprintf (dump_file, "\n");
	}
      delete_insn (insn);
    }
  else
    a_occr->deleted_p = 1;

cleanup:
  if (rollback_unoccr)
    obstack_free (&unoccr_obstack, rollback_unoccr);
}

/* Perform the redundancy elimination as described before.  */

static void
eliminate_partially_redundant_loads (void)
{
  rtx_insn *insn;
  basic_block bb;

  /* Note we start at block 1.  */

  if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
    return;

  FOR_BB_BETWEEN (bb,
		  ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->next_bb,
		  EXIT_BLOCK_PTR_FOR_FN (cfun),
		  next_bb)
    {
      /* Don't try anything on basic blocks with strange predecessors.  */
      if (! bb_has_well_behaved_predecessors (bb))
	continue;

      /* Do not try anything on cold basic blocks.  */
      if (optimize_bb_for_size_p (bb))
	continue;

      /* Reset the table of things changed since the start of the current
	 basic block.  */
      reset_opr_set_tables ();

      /* Look at all insns in the current basic block and see if there are
	 any loads in it that we can record.  */
      FOR_BB_INSNS (bb, insn)
	{
	  /* Is it a load - of the form (set (reg) (mem))?  */
	  if (NONJUMP_INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) == SET
	      && REG_P (SET_DEST (PATTERN (insn)))
	      && MEM_P (SET_SRC (PATTERN (insn))))
	    {
	      rtx pat = PATTERN (insn);
	      rtx src = SET_SRC (pat);
	      struct expr *expr;

	      if (!MEM_VOLATILE_P (src)
		  && GET_MODE (src) != BLKmode
		  && general_operand (src, GET_MODE (src))
		  /* Are the operands unchanged since the start of the
		     block?  */
		  && oprs_unchanged_p (src, insn, false)
		  && !(cfun->can_throw_non_call_exceptions && may_trap_p (src))
		  && !side_effects_p (src)
		  /* Is the expression recorded?  */
		  && (expr = lookup_expr_in_table (src)) != NULL)
		{
		  /* We now have a load (insn) and an available memory at
		     its BB start (expr).  Try to remove the load if it is
		     redundant.  */
		  eliminate_partially_redundant_load (bb, insn, expr);
		}
	    }

	  /* Keep track of everything modified by this insn, so that we
	     know what has been modified since the start of the current
	     basic block.  */
	  if (INSN_P (insn))
	    record_opr_changes (insn);
	}
    }

  commit_edge_insertions ();
}

/* Go over the expression hash table and delete insns that were
   marked for later deletion.  */

/* This helper is called via htab_traverse.  */
static int
delete_redundant_insns_1 (expr **slot, void *data ATTRIBUTE_UNUSED)
{
  struct expr *exprs = *slot;
  struct occr *occr;

  for (occr = exprs->avail_occr; occr != NULL; occr = occr->next)
    {
      if (occr->deleted_p && dbg_cnt (gcse2_delete))
	{
	  delete_insn (occr->insn);
	  stats.insns_deleted++;

	  if (dump_file)
	    {
	      fprintf (dump_file, "deleting insn:\n");
	      print_rtl_single (dump_file, occr->insn);
	      fprintf (dump_file, "\n");
	    }
	}
    }

  return 1;
}

static void
delete_redundant_insns (void)
{
  expr_table->traverse <void *, delete_redundant_insns_1> (NULL);
  if (dump_file)
    fprintf (dump_file, "\n");
}

/* Main entry point of the GCSE after reload - clean some redundant loads
   due to spilling.  */

static void
gcse_after_reload_main (rtx f ATTRIBUTE_UNUSED)
{
  memset (&stats, 0, sizeof (stats));

  /* Allocate memory for this pass.
     Also computes and initializes the insns' CUIDs.  */
  alloc_mem ();

  /* We need alias analysis.  */
  init_alias_analysis ();

  compute_hash_table ();

  if (dump_file)
    dump_hash_table (dump_file);

  if (expr_table->elements () > 0)
    {
      /* Knowing which MEMs are transparent through a block can significantly
	 increase the number of redundant loads found.  So compute transparency
	 information for each memory expression in the hash table.  */
      df_analyze ();
      /* This cannot be part of the normal allocation routine because
	 we have to know the number of elements in the hash table.  */
      transp = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
				     expr_table->elements ());
      bitmap_vector_ones (transp, last_basic_block_for_fn (cfun));
      expr_table->traverse <FILE *, compute_expr_transp> (dump_file);
      eliminate_partially_redundant_loads ();
      delete_redundant_insns ();
      sbitmap_vector_free (transp);

      if (dump_file)
	{
	  fprintf (dump_file, "GCSE AFTER RELOAD stats:\n");
	  fprintf (dump_file, "copies inserted: %d\n", stats.copies_inserted);
	  fprintf (dump_file, "moves inserted: %d\n", stats.moves_inserted);
	  fprintf (dump_file, "insns deleted: %d\n", stats.insns_deleted);
	  fprintf (dump_file, "\n\n");
	}

      statistics_counter_event (cfun, "copies inserted",
				stats.copies_inserted);
      statistics_counter_event (cfun, "moves inserted",
				stats.moves_inserted);
      statistics_counter_event (cfun, "insns deleted",
				stats.insns_deleted);
    }

  /* We are finished with alias.  */
  end_alias_analysis ();

  free_mem ();
}

static unsigned int
rest_of_handle_gcse2 (void)
{
  gcse_after_reload_main (get_insns ());
  rebuild_jump_labels (get_insns ());
  return 0;
}

namespace {

const pass_data pass_data_gcse2 =
{
  RTL_PASS, /* type */
  "gcse2", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_GCSE_AFTER_RELOAD, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_gcse2 : public rtl_opt_pass
{
public:
  pass_gcse2 (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_gcse2, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *fun)
    {
      return (optimize > 0 && flag_gcse_after_reload
	      && optimize_function_for_speed_p (fun));
    }

  virtual unsigned int execute (function *) { return rest_of_handle_gcse2 (); }

}; // class pass_gcse2

} // anon namespace

rtl_opt_pass *
make_pass_gcse2 (gcc::context *ctxt)
{
  return new pass_gcse2 (ctxt);
}