gcc/postreload-gcse.c
1 /* Post reload partially redundant load elimination
2 Copyright (C) 2004-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "tm.h"
24 #include "diagnostic-core.h"
26 #include "rtl.h"
27 #include "input.h"
28 #include "alias.h"
29 #include "symtab.h"
30 #include "tree.h"
31 #include "tm_p.h"
32 #include "regs.h"
33 #include "hard-reg-set.h"
34 #include "flags.h"
35 #include "insn-config.h"
36 #include "recog.h"
37 #include "predict.h"
38 #include "function.h"
39 #include "dominance.h"
40 #include "cfg.h"
41 #include "cfgrtl.h"
42 #include "basic-block.h"
43 #include "profile.h"
44 #include "expmed.h"
45 #include "dojump.h"
46 #include "explow.h"
47 #include "calls.h"
48 #include "emit-rtl.h"
49 #include "varasm.h"
50 #include "stmt.h"
51 #include "expr.h"
52 #include "except.h"
53 #include "intl.h"
54 #include "obstack.h"
55 #include "params.h"
56 #include "target.h"
57 #include "tree-pass.h"
58 #include "dbgcnt.h"
59 #include "df.h"
60 #include "gcse-common.h"
62 /* The following code implements gcse after reload; the purpose of this
63 pass is to clean up redundant loads generated by reload and other
64 optimizations that come after gcse. It searches for simple inter-block
65 redundancies and tries to eliminate them by adding moves and loads
66 in cold places.
68 Perform partially redundant load elimination, trying to eliminate
69 redundant loads created by the reload pass. We look for fully or
70 partially redundant loads fed by one or more loads/stores in predecessor
71 BBs, and try adding loads to make them fully redundant. We also check
72 whether it's worth adding loads to be able to delete the redundant load.
74 Algorithm:
75 1. Build available expressions hash table:
76 For each load/store instruction, if the loaded/stored memory didn't
77 change until the end of the basic block add this memory expression to
78 the hash table.
79 2. Perform Redundancy elimination:
80 For each load instruction do the following:
81 perform partial redundancy elimination, check if it's worth adding
82 loads to make the load fully redundant. If so add loads and
83 register copies and delete the load.
84 3. Delete instructions made redundant in step 2.
86 Future enhancement:
87 If the loaded register is used/defined between load and some store,
88 look for some other free register between load and all its stores,
89 and replace the load with a copy from this register to the loaded
90 register. */
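/* An illustrative (hypothetical) example of the transformation performed
   here; the block numbers, registers and memory address are invented.
   Suppose bb3 loads from [sp+8] and only one of its two predecessors
   already has that value:

     bb1:  r10 <- [sp+8]      ; value of [sp+8] available in r10
     bb2:  ...                ; [sp+8] not available here
     bb3:  r12 <- [sp+8]      ; partially redundant load

   If the heuristics consider it worthwhile, the pass inserts a register
   copy on the edge bb1->bb3 and a copy of the load on the edge bb2->bb3,
   making the load in bb3 fully redundant so that it can be deleted:

     bb1->bb3:  r12 <- r10    ; inserted move
     bb2->bb3:  r12 <- [sp+8] ; inserted load
     bb3:       (load deleted)  */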
94 /* Keep statistics of this pass. */
95 static struct
97 int moves_inserted;
98 int copies_inserted;
99 int insns_deleted;
100 } stats;
102 /* We need to keep a hash table of expressions. The table entries are of
103 type 'struct expr', and for each expression there is a singly linked
104 list of occurrences. */
106 /* Expression elements in the hash table. */
107 struct expr
109 /* The expression (SET_SRC for expressions, PATTERN for assignments). */
110 rtx expr;
113 /* The cached hash value for this entry. */
113 hashval_t hash;
115 /* Index in the transparent bitmaps. */
116 unsigned int bitmap_index;
118 /* List of available occurrences in basic blocks in the function. */
119 struct occr *avail_occr;
122 /* Hashtable helpers. */
124 struct expr_hasher : typed_noop_remove <expr>
126 typedef expr *value_type;
127 typedef expr *compare_type;
128 static inline hashval_t hash (const expr *);
129 static inline bool equal (const expr *, const expr *);
133 /* Hash expression X.
134 DO_NOT_RECORD_P is a boolean indicating if a volatile operand is found
135 or if the expression contains something we don't want to insert in the
136 table. */
138 static hashval_t
139 hash_expr (rtx x, int *do_not_record_p)
141 *do_not_record_p = 0;
142 return hash_rtx (x, GET_MODE (x), do_not_record_p,
143 NULL, /*have_reg_qty=*/false);
146 /* Callback for hashtab.
147 Return the hash value for expression EXP. We don't actually hash
148 here, we just return the cached hash value. */
150 inline hashval_t
151 expr_hasher::hash (const expr *exp)
153 return exp->hash;
156 /* Callback for hashtab.
157 Return nonzero if exp1 is equivalent to exp2. */
159 inline bool
160 expr_hasher::equal (const expr *exp1, const expr *exp2)
162 int equiv_p = exp_equiv_p (exp1->expr, exp2->expr, 0, true);
164 gcc_assert (!equiv_p || exp1->hash == exp2->hash);
165 return equiv_p;
168 /* The table itself. */
169 static hash_table<expr_hasher> *expr_table;
172 static struct obstack expr_obstack;
174 /* Occurrence of an expression.
175 There is at most one occurrence per basic block. If a pattern appears
176 more than once, the last appearance is used. */
178 struct occr
180 /* Next occurrence of this expression. */
181 struct occr *next;
182 /* The insn that computes the expression. */
183 rtx_insn *insn;
184 /* Nonzero if this [anticipatable] occurrence has been deleted. */
185 char deleted_p;
188 static struct obstack occr_obstack;
190 /* The following structure holds the information about the occurrences of
191 the redundant instructions. */
192 struct unoccr
194 struct unoccr *next;
195 edge pred;
196 rtx_insn *insn;
199 static struct obstack unoccr_obstack;
201 /* Array where each element is the CUID of the insn that last set the hard
202 register with the number of the element, since the start of the current
203 basic block.
205 This array is used during the building of the hash table (step 1) to
206 determine if a reg is killed before the end of a basic block.
208 It is also used when eliminating partial redundancies (step 2) to see
209 if a reg was modified since the start of a basic block. */
210 static int *reg_avail_info;
212 /* A list of insns that may modify memory within the current basic block. */
213 struct modifies_mem
215 rtx_insn *insn;
216 struct modifies_mem *next;
218 static struct modifies_mem *modifies_mem_list;
220 /* The modifies_mem structs also go on an obstack, only this obstack is
221 freed each time after completing the analysis or transformations on
222 a basic block. So we allocate a dummy modifies_mem_obstack_bottom
223 object on the obstack to keep track of the bottom of the obstack. */
224 static struct obstack modifies_mem_obstack;
225 static struct modifies_mem *modifies_mem_obstack_bottom;
227 /* Mapping of insn UIDs to CUIDs.
228 CUIDs are like UIDs except they increase monotonically in each basic
229 block, have no gaps, and only apply to real insns. */
230 static int *uid_cuid;
231 #define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
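/* An illustrative (made-up) example of the UID->CUID mapping built by
   alloc_mem below.  Within a basic block, notes and other non-insns get
   the CUID of the next real insn, so CUID comparisons order the real
   insns without gaps:

     insn  UID 17  ->  CUID 5
     note  UID 18  ->  CUID 6   (shared with the following real insn)
     insn  UID 23  ->  CUID 6
     insn  UID 19  ->  CUID 7                                           */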
233 /* Bitmap of blocks which have memory stores. */
234 static bitmap modify_mem_list_set;
236 /* Bitmap of blocks which have calls. */
237 static bitmap blocks_with_calls;
239 /* Vector indexed by block # with a list of all the insns that
240 modify memory within the block. */
241 static vec<rtx_insn *> *modify_mem_list;
243 /* Vector indexed by block # with a canonicalized list of insns
244 that modify memory in the block. */
245 static vec<modify_pair> *canon_modify_mem_list;
247 /* Vector of simple bitmaps indexed by block number. Each component sbitmap
248 indicates which expressions are transparent through the block. */
249 static sbitmap *transp;
252 /* Helpers for memory allocation/freeing. */
253 static void alloc_mem (void);
254 static void free_mem (void);
256 /* Support for hash table construction and transformations. */
257 static bool oprs_unchanged_p (rtx, rtx_insn *, bool);
258 static void record_last_reg_set_info (rtx_insn *, rtx);
259 static void record_last_reg_set_info_regno (rtx_insn *, int);
260 static void record_last_mem_set_info (rtx_insn *);
261 static void record_last_set_info (rtx, const_rtx, void *);
262 static void record_opr_changes (rtx_insn *);
264 static void find_mem_conflicts (rtx, const_rtx, void *);
265 static int load_killed_in_block_p (int, rtx, bool);
266 static void reset_opr_set_tables (void);
268 /* Hash table support. */
269 static hashval_t hash_expr (rtx, int *);
270 static void insert_expr_in_table (rtx, rtx_insn *);
271 static struct expr *lookup_expr_in_table (rtx);
272 static void dump_hash_table (FILE *);
274 /* Helpers for eliminate_partially_redundant_load. */
275 static bool reg_killed_on_edge (rtx, edge);
276 static bool reg_used_on_edge (rtx, edge);
278 static rtx get_avail_load_store_reg (rtx_insn *);
280 static bool bb_has_well_behaved_predecessors (basic_block);
281 static struct occr* get_bb_avail_insn (basic_block, struct occr *, int);
282 static void hash_scan_set (rtx_insn *);
283 static void compute_hash_table (void);
285 /* The work horses of this pass. */
286 static void eliminate_partially_redundant_load (basic_block,
287 rtx_insn *,
288 struct expr *);
289 static void eliminate_partially_redundant_loads (void);
292 /* Allocate memory for the CUID mapping array and register/memory
293 tracking tables. */
295 static void
296 alloc_mem (void)
298 int i;
299 basic_block bb;
300 rtx_insn *insn;
302 /* Find the largest UID and create a mapping from UIDs to CUIDs. */
303 uid_cuid = XCNEWVEC (int, get_max_uid () + 1);
304 i = 1;
305 FOR_EACH_BB_FN (bb, cfun)
306 FOR_BB_INSNS (bb, insn)
308 if (INSN_P (insn))
309 uid_cuid[INSN_UID (insn)] = i++;
310 else
311 uid_cuid[INSN_UID (insn)] = i;
314 /* Allocate the available expressions hash table. We don't want to
315 make the hash table too small, but unnecessarily making it too large
316 also doesn't help. The i/4 is a gcse.c relic, and seems like a
317 reasonable choice. */
318 expr_table = new hash_table<expr_hasher> (MAX (i / 4, 13));
320 /* We allocate everything on obstacks because we often can roll back
321 the whole obstack to some point. Freeing obstacks is very fast. */
322 gcc_obstack_init (&expr_obstack);
323 gcc_obstack_init (&occr_obstack);
324 gcc_obstack_init (&unoccr_obstack);
325 gcc_obstack_init (&modifies_mem_obstack);
327 /* Working array used to track the last set for each register
328 in the current block. */
329 reg_avail_info = (int *) xmalloc (FIRST_PSEUDO_REGISTER * sizeof (int));
331 /* Put a dummy modifies_mem object on the modifies_mem_obstack, so we
332 can roll it back in reset_opr_set_tables. */
333 modifies_mem_obstack_bottom =
334 (struct modifies_mem *) obstack_alloc (&modifies_mem_obstack,
335 sizeof (struct modifies_mem));
337 blocks_with_calls = BITMAP_ALLOC (NULL);
338 modify_mem_list_set = BITMAP_ALLOC (NULL);
340 modify_mem_list = (vec_rtx_heap *) xcalloc (last_basic_block_for_fn (cfun),
341 sizeof (vec_rtx_heap));
342 canon_modify_mem_list
343 = (vec_modify_pair_heap *) xcalloc (last_basic_block_for_fn (cfun),
344 sizeof (vec_modify_pair_heap));
347 /* Free memory allocated by alloc_mem. */
349 static void
350 free_mem (void)
352 free (uid_cuid);
354 delete expr_table;
355 expr_table = NULL;
357 obstack_free (&expr_obstack, NULL);
358 obstack_free (&occr_obstack, NULL);
359 obstack_free (&unoccr_obstack, NULL);
360 obstack_free (&modifies_mem_obstack, NULL);
362 unsigned i;
363 bitmap_iterator bi;
364 EXECUTE_IF_SET_IN_BITMAP (modify_mem_list_set, 0, i, bi)
366 modify_mem_list[i].release ();
367 canon_modify_mem_list[i].release ();
370 BITMAP_FREE (blocks_with_calls);
371 BITMAP_FREE (modify_mem_list_set);
372 free (reg_avail_info);
376 /* Insert expression X in INSN in the hash TABLE.
377 If it is already present, record it as the last occurrence in INSN's
378 basic block. */
380 static void
381 insert_expr_in_table (rtx x, rtx_insn *insn)
383 int do_not_record_p;
384 hashval_t hash;
385 struct expr *cur_expr, **slot;
386 struct occr *avail_occr, *last_occr = NULL;
388 hash = hash_expr (x, &do_not_record_p);
390 /* Do not insert expression in the table if it contains volatile operands,
391 or if hash_expr determines the expression is something we don't want
392 to or can't handle. */
393 if (do_not_record_p)
394 return;
396 /* We anticipate that redundant expressions are rare, so for convenience
397 allocate a new hash table element here already and set its fields.
398 If we don't do this, we need a hack with a static struct expr. Anyway,
399 obstack_free is really fast and one more obstack_alloc doesn't hurt if
400 we're going to see more expressions later on. */
401 cur_expr = (struct expr *) obstack_alloc (&expr_obstack,
402 sizeof (struct expr));
403 cur_expr->expr = x;
404 cur_expr->hash = hash;
405 cur_expr->avail_occr = NULL;
407 slot = expr_table->find_slot_with_hash (cur_expr, hash, INSERT);
409 if (! (*slot))
411 /* The expression isn't found, so insert it. */
412 *slot = cur_expr;
414 /* Anytime we add an entry to the table, record the index
415 of the new entry. The bitmap index starts counting
416 at zero. */
417 cur_expr->bitmap_index = expr_table->elements () - 1;
419 else
421 /* The expression is already in the table, so roll back the
422 obstack and use the existing table entry. */
423 obstack_free (&expr_obstack, cur_expr);
424 cur_expr = *slot;
427 /* Search for another occurrence in the same basic block. */
428 avail_occr = cur_expr->avail_occr;
429 while (avail_occr
430 && BLOCK_FOR_INSN (avail_occr->insn) != BLOCK_FOR_INSN (insn))
432 /* If an occurrence isn't found, save a pointer to the end of
433 the list. */
434 last_occr = avail_occr;
435 avail_occr = avail_occr->next;
438 if (avail_occr)
439 /* Found another instance of the expression in the same basic block.
440 Prefer this occurrence to the currently recorded one. We want
441 the last one in the block and the block is scanned from start
442 to end. */
443 avail_occr->insn = insn;
444 else
446 /* First occurrence of this expression in this basic block. */
447 avail_occr = (struct occr *) obstack_alloc (&occr_obstack,
448 sizeof (struct occr));
450 /* First occurrence of this expression in any block? */
451 if (cur_expr->avail_occr == NULL)
452 cur_expr->avail_occr = avail_occr;
453 else
454 last_occr->next = avail_occr;
456 avail_occr->insn = insn;
457 avail_occr->next = NULL;
458 avail_occr->deleted_p = 0;
463 /* Lookup pattern PAT in the expression hash table.
464 The result is a pointer to the table entry, or NULL if not found. */
466 static struct expr *
467 lookup_expr_in_table (rtx pat)
469 int do_not_record_p;
470 struct expr **slot, *tmp_expr;
471 hashval_t hash = hash_expr (pat, &do_not_record_p);
473 if (do_not_record_p)
474 return NULL;
476 tmp_expr = (struct expr *) obstack_alloc (&expr_obstack,
477 sizeof (struct expr));
478 tmp_expr->expr = pat;
479 tmp_expr->hash = hash;
480 tmp_expr->avail_occr = NULL;
482 slot = expr_table->find_slot_with_hash (tmp_expr, hash, INSERT);
483 obstack_free (&expr_obstack, tmp_expr);
485 if (!slot)
486 return NULL;
487 else
488 return (*slot);
492 /* Dump all expressions and occurrences that are currently in the
493 expression hash table to FILE. */
495 /* This helper is called via htab_traverse. */
497 static int dump_expr_hash_table_entry (expr **slot, FILE *file)
499 struct expr *exprs = *slot;
500 struct occr *occr;
502 fprintf (file, "expr: ");
503 print_rtl (file, exprs->expr);
504 fprintf (file,"\nhashcode: %u\n", exprs->hash);
505 fprintf (file,"list of occurrences:\n");
506 occr = exprs->avail_occr;
507 while (occr)
509 rtx_insn *insn = occr->insn;
510 print_rtl_single (file, insn);
511 fprintf (file, "\n");
512 occr = occr->next;
514 fprintf (file, "\n");
515 return 1;
518 static void
519 dump_hash_table (FILE *file)
521 fprintf (file, "\n\nexpression hash table\n");
522 fprintf (file, "size %ld, %ld elements, %f collision/search ratio\n",
523 (long) expr_table->size (),
524 (long) expr_table->elements (),
525 expr_table->collisions ());
526 if (expr_table->elements () > 0)
528 fprintf (file, "\n\ntable entries:\n");
529 expr_table->traverse <FILE *, dump_expr_hash_table_entry> (file);
531 fprintf (file, "\n");
534 /* Return true if register X is recorded as being set by an instruction
535 whose CUID is greater than the one given. */
537 static bool
538 reg_changed_after_insn_p (rtx x, int cuid)
540 unsigned int regno, end_regno;
542 regno = REGNO (x);
543 end_regno = END_REGNO (x);
545 do if (reg_avail_info[regno] > cuid)
546 return true;
547 while (++regno < end_regno);
548 return false;
551 /* Return nonzero if the operands of expression X are unchanged
552 1) from the start of INSN's basic block up to but not including INSN
553 if AFTER_INSN is false, or
554 2) from INSN to the end of INSN's basic block if AFTER_INSN is true. */
556 static bool
557 oprs_unchanged_p (rtx x, rtx_insn *insn, bool after_insn)
559 int i, j;
560 enum rtx_code code;
561 const char *fmt;
563 if (x == 0)
564 return 1;
566 code = GET_CODE (x);
567 switch (code)
569 case REG:
570 /* We are called after register allocation. */
571 gcc_assert (REGNO (x) < FIRST_PSEUDO_REGISTER);
572 if (after_insn)
573 return !reg_changed_after_insn_p (x, INSN_CUID (insn) - 1);
574 else
575 return !reg_changed_after_insn_p (x, 0);
577 case MEM:
578 if (load_killed_in_block_p (INSN_CUID (insn), x, after_insn))
579 return 0;
580 else
581 return oprs_unchanged_p (XEXP (x, 0), insn, after_insn);
583 case PC:
584 case CC0: /*FIXME*/
585 case CONST:
586 CASE_CONST_ANY:
587 case SYMBOL_REF:
588 case LABEL_REF:
589 case ADDR_VEC:
590 case ADDR_DIFF_VEC:
591 return 1;
593 case PRE_DEC:
594 case PRE_INC:
595 case POST_DEC:
596 case POST_INC:
597 case PRE_MODIFY:
598 case POST_MODIFY:
599 if (after_insn)
600 return 0;
601 break;
603 default:
604 break;
607 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
609 if (fmt[i] == 'e')
611 if (! oprs_unchanged_p (XEXP (x, i), insn, after_insn))
612 return 0;
614 else if (fmt[i] == 'E')
615 for (j = 0; j < XVECLEN (x, i); j++)
616 if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, after_insn))
617 return 0;
620 return 1;
624 /* Used for communication between find_mem_conflicts and
625 load_killed_in_block_p. Nonzero if find_mem_conflicts finds a
626 conflict between two memory references.
627 This is a bit of a hack to work around the limitations of note_stores. */
628 static int mems_conflict_p;
630 /* DEST is the output of an instruction. If it is a memory reference, and
631 possibly conflicts with the load found in DATA, then set mems_conflict_p
632 to a nonzero value. */
634 static void
635 find_mem_conflicts (rtx dest, const_rtx setter ATTRIBUTE_UNUSED,
636 void *data)
638 rtx mem_op = (rtx) data;
640 while (GET_CODE (dest) == SUBREG
641 || GET_CODE (dest) == ZERO_EXTRACT
642 || GET_CODE (dest) == STRICT_LOW_PART)
643 dest = XEXP (dest, 0);
645 /* If DEST is not a MEM, then it will not conflict with the load. Note
646 that function calls are assumed to clobber memory, but are handled
647 elsewhere. */
648 if (! MEM_P (dest))
649 return;
651 if (true_dependence (dest, GET_MODE (dest), mem_op))
652 mems_conflict_p = 1;
656 /* Return nonzero if the expression in X (a memory reference) is killed
657 in the current basic block before (if AFTER_INSN is false) or after
658 (if AFTER_INSN is true) the insn with the CUID in UID_LIMIT.
660 This function assumes that the modifies_mem table is flushed when
661 the hash table construction or redundancy elimination phases start
662 processing a new basic block. */
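/* An illustrative (hypothetical) example of the CUID filtering below; the
   registers, offsets and CUIDs are invented, and the accesses are assumed
   to be word-sized and non-overlapping:

     (CUID 4)  [sp+16] <- r2    ; store, on the modifies_mem list
     (CUID 5)  r3 <- [sp+32]    ; the load X being asked about
     (CUID 6)  [sp+32] <- r4    ; store, on the modifies_mem list

   With AFTER_INSN false and UID_LIMIT 5, only the store at CUID 4 is
   considered; it does not conflict with [sp+32], so X is not killed
   before the load.  With AFTER_INSN true, only the store at CUID 6 is
   considered; it conflicts, so X is killed after the load.              */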
664 static int
665 load_killed_in_block_p (int uid_limit, rtx x, bool after_insn)
667 struct modifies_mem *list_entry = modifies_mem_list;
669 while (list_entry)
671 rtx_insn *setter = list_entry->insn;
673 /* Ignore entries in the list that do not apply. */
674 if ((after_insn
675 && INSN_CUID (setter) < uid_limit)
676 || (! after_insn
677 && INSN_CUID (setter) > uid_limit))
679 list_entry = list_entry->next;
680 continue;
683 /* If SETTER is a call everything is clobbered. Note that calls
684 to pure functions are never put on the list, so we need not
685 worry about them. */
686 if (CALL_P (setter))
687 return 1;
689 /* SETTER must be an insn of some kind that sets memory. Call
690 note_stores to examine each hunk of memory that is modified.
691 It will set mems_conflict_p to nonzero if there may be a
692 conflict between X and SETTER. */
693 mems_conflict_p = 0;
694 note_stores (PATTERN (setter), find_mem_conflicts, x);
695 if (mems_conflict_p)
696 return 1;
698 list_entry = list_entry->next;
700 return 0;
704 /* Record register first/last/block set information for REG in INSN. */
706 static inline void
707 record_last_reg_set_info (rtx_insn *insn, rtx reg)
709 unsigned int regno, end_regno;
711 regno = REGNO (reg);
712 end_regno = END_REGNO (reg);
714 do reg_avail_info[regno] = INSN_CUID (insn);
715 while (++regno < end_regno);
718 static inline void
719 record_last_reg_set_info_regno (rtx_insn *insn, int regno)
721 reg_avail_info[regno] = INSN_CUID (insn);
725 /* Record memory modification information for INSN. We do not actually care
726 about the memory location(s) that are set, or even how they are set (consider
727 a CALL_INSN). We merely need to record which insns modify memory. */
729 static void
730 record_last_mem_set_info (rtx_insn *insn)
732 struct modifies_mem *list_entry;
734 list_entry = (struct modifies_mem *) obstack_alloc (&modifies_mem_obstack,
735 sizeof (struct modifies_mem));
736 list_entry->insn = insn;
737 list_entry->next = modifies_mem_list;
738 modifies_mem_list = list_entry;
740 record_last_mem_set_info_common (insn, modify_mem_list,
741 canon_modify_mem_list,
742 modify_mem_list_set,
743 blocks_with_calls);
746 /* Called from compute_hash_table via note_stores to handle one
747 SET or CLOBBER in an insn. DATA is really the instruction in which
748 the SET is taking place. */
750 static void
751 record_last_set_info (rtx dest, const_rtx setter ATTRIBUTE_UNUSED, void *data)
753 rtx_insn *last_set_insn = (rtx_insn *) data;
755 if (GET_CODE (dest) == SUBREG)
756 dest = SUBREG_REG (dest);
758 if (REG_P (dest))
759 record_last_reg_set_info (last_set_insn, dest);
760 else if (MEM_P (dest))
762 /* Ignore pushes, they don't clobber memory. They may still
763 clobber the stack pointer though. Some targets do argument
764 pushes without adding REG_INC notes. See e.g. PR25196,
765 where a pushsi2 on i386 doesn't have REG_INC notes. Note
766 such changes here too. */
767 if (! push_operand (dest, GET_MODE (dest)))
768 record_last_mem_set_info (last_set_insn);
769 else
770 record_last_reg_set_info_regno (last_set_insn, STACK_POINTER_REGNUM);
775 /* Reset tables used to keep track of what's still available since the
776 start of the block. */
778 static void
779 reset_opr_set_tables (void)
781 memset (reg_avail_info, 0, FIRST_PSEUDO_REGISTER * sizeof (int));
782 obstack_free (&modifies_mem_obstack, modifies_mem_obstack_bottom);
783 modifies_mem_list = NULL;
787 /* Record things set by INSN.
788 This data is used by oprs_unchanged_p. */
790 static void
791 record_opr_changes (rtx_insn *insn)
793 rtx note;
795 /* Find all stores and record them. */
796 note_stores (PATTERN (insn), record_last_set_info, insn);
798 /* Also record autoincremented REGs for this insn as changed. */
799 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
800 if (REG_NOTE_KIND (note) == REG_INC)
801 record_last_reg_set_info (insn, XEXP (note, 0));
803 /* Finally, if this is a call, record all call clobbers. */
804 if (CALL_P (insn))
806 unsigned int regno;
807 rtx link, x;
808 hard_reg_set_iterator hrsi;
809 EXECUTE_IF_SET_IN_HARD_REG_SET (regs_invalidated_by_call, 0, regno, hrsi)
810 record_last_reg_set_info_regno (insn, regno);
812 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
813 if (GET_CODE (XEXP (link, 0)) == CLOBBER)
815 x = XEXP (XEXP (link, 0), 0);
816 if (REG_P (x))
818 gcc_assert (HARD_REGISTER_P (x));
819 record_last_reg_set_info (insn, x);
823 if (! RTL_CONST_OR_PURE_CALL_P (insn))
824 record_last_mem_set_info (insn);
829 /* Scan the pattern of INSN and add an entry to the expression hash table.
830 After reload we are interested in loads/stores only. */
832 static void
833 hash_scan_set (rtx_insn *insn)
835 rtx pat = PATTERN (insn);
836 rtx src = SET_SRC (pat);
837 rtx dest = SET_DEST (pat);
839 /* We are only interested in loads and stores. */
840 if (! MEM_P (src) && ! MEM_P (dest))
841 return;
843 /* Don't mess with jumps and nops. */
844 if (JUMP_P (insn) || set_noop_p (pat))
845 return;
847 if (REG_P (dest))
849 if (/* Don't CSE something if we can't do a reg/reg copy. */
850 can_copy_p (GET_MODE (dest))
851 /* Is SET_SRC something we want to gcse? */
852 && general_operand (src, GET_MODE (src))
853 #ifdef STACK_REGS
854 /* Never consider insns touching the register stack. It may
855 create situations that reg-stack cannot handle (e.g. a stack
856 register live across an abnormal edge). */
857 && (REGNO (dest) < FIRST_STACK_REG || REGNO (dest) > LAST_STACK_REG)
858 #endif
859 /* An expression is not available if its operands are
860 subsequently modified, including this insn. */
861 && oprs_unchanged_p (src, insn, true))
863 insert_expr_in_table (src, insn);
866 else if (REG_P (src))
868 /* This is a store of a register into memory; record the destination MEM. */
869 if (/* Don't CSE something if we can't do a reg/reg copy. */
870 can_copy_p (GET_MODE (src))
871 /* Is SET_DEST something we want to gcse? */
872 && general_operand (dest, GET_MODE (dest))
873 #ifdef STACK_REGS
874 /* As above for STACK_REGS. */
875 && (REGNO (src) < FIRST_STACK_REG || REGNO (src) > LAST_STACK_REG)
876 #endif
877 && ! (flag_float_store && FLOAT_MODE_P (GET_MODE (dest)))
878 /* Check if the memory expression is killed after insn. */
879 && ! load_killed_in_block_p (INSN_CUID (insn) + 1, dest, true)
880 && oprs_unchanged_p (XEXP (dest, 0), insn, true))
882 insert_expr_in_table (dest, insn);
888 /* Create hash table of memory expressions available at end of basic
889 blocks. Basically you should think of this hash table as the
890 representation of AVAIL_OUT. This is the set of expressions that
891 is generated in a basic block and not killed before the end of the
892 same basic block. Notice that this is really a local computation. */
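/* An illustrative (made-up) example of the "local AVAIL_OUT" idea above;
   the registers and offsets are invented:

     r3 <- [r10+4]        ; load A
     r4 <- r3 + 1
     [r10+8] <- r4        ; store B
     r10 <- r10 + 16      ; invalidates A and B (their address changes)
     r5 <- [r11]          ; load C

   Only C survives to the end of the block, so only the expression [r11]
   (with r5 holding its value) is recorded as available out of this block. */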
894 static void
895 compute_hash_table (void)
897 basic_block bb;
899 FOR_EACH_BB_FN (bb, cfun)
901 rtx_insn *insn;
903 /* First pass over the instructions records information used to
904 determine when registers and memory are last set.
905 Since we compute a "local" AVAIL_OUT, reset the tables that
906 help us keep track of what has been modified since the start
907 of the block. */
908 reset_opr_set_tables ();
909 FOR_BB_INSNS (bb, insn)
911 if (INSN_P (insn))
912 record_opr_changes (insn);
915 /* The next pass actually builds the hash table. */
916 FOR_BB_INSNS (bb, insn)
917 if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == SET)
918 hash_scan_set (insn);
923 /* Check if register REG is killed in any insn waiting to be inserted on
924 edge E. This function is required to check that our data flow analysis
925 is still valid prior to commit_edge_insertions. */
927 static bool
928 reg_killed_on_edge (rtx reg, edge e)
930 rtx_insn *insn;
932 for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
933 if (INSN_P (insn) && reg_set_p (reg, insn))
934 return true;
936 return false;
939 /* Similar to above - check if register REG is used in any insn waiting
940 to be inserted on edge E.
941 Assumes no such insn can be a CALL_INSN; if so call reg_used_between_p
942 with PREV(insn),NEXT(insn) instead of calling reg_overlap_mentioned_p. */
944 static bool
945 reg_used_on_edge (rtx reg, edge e)
947 rtx_insn *insn;
949 for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
950 if (INSN_P (insn) && reg_overlap_mentioned_p (reg, PATTERN (insn)))
951 return true;
953 return false;
956 /* Return the loaded/stored register of a load/store instruction. */
958 static rtx
959 get_avail_load_store_reg (rtx_insn *insn)
961 if (REG_P (SET_DEST (PATTERN (insn))))
962 /* A load. */
963 return SET_DEST (PATTERN (insn));
964 else
966 /* A store. */
967 gcc_assert (REG_P (SET_SRC (PATTERN (insn))));
968 return SET_SRC (PATTERN (insn));
972 /* Return nonzero if the predecessors of BB are "well behaved". */
974 static bool
975 bb_has_well_behaved_predecessors (basic_block bb)
977 edge pred;
978 edge_iterator ei;
980 if (EDGE_COUNT (bb->preds) == 0)
981 return false;
983 FOR_EACH_EDGE (pred, ei, bb->preds)
985 if ((pred->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (pred))
986 return false;
988 if ((pred->flags & EDGE_ABNORMAL_CALL) && cfun->has_nonlocal_label)
989 return false;
991 if (tablejump_p (BB_END (pred->src), NULL, NULL))
992 return false;
994 return true;
998 /* Search for the occurrences of expression in BB. */
1000 static struct occr*
1001 get_bb_avail_insn (basic_block bb, struct occr *orig_occr, int bitmap_index)
1003 struct occr *occr = orig_occr;
1005 for (; occr != NULL; occr = occr->next)
1006 if (BLOCK_FOR_INSN (occr->insn) == bb)
1007 return occr;
1009 /* If we could not find an occurrence in BB, see if BB
1010 has a single predecessor with an occurrence that is
1011 transparent through BB. */
1012 if (single_pred_p (bb)
1013 && bitmap_bit_p (transp[bb->index], bitmap_index)
1014 && (occr = get_bb_avail_insn (single_pred (bb), orig_occr, bitmap_index)))
1016 rtx avail_reg = get_avail_load_store_reg (occr->insn);
1017 if (!reg_set_between_p (avail_reg,
1018 PREV_INSN (BB_HEAD (bb)),
1019 NEXT_INSN (BB_END (bb)))
1020 && !reg_killed_on_edge (avail_reg, single_pred_edge (bb)))
1021 return occr;
1024 return NULL;
1028 /* This helper is called via htab_traverse. */
1030 static int compute_expr_transp (expr **slot, FILE *dump_file ATTRIBUTE_UNUSED)
1032 struct expr *expr = *slot;
1034 compute_transp (expr->expr, expr->bitmap_index, transp,
1035 blocks_with_calls, modify_mem_list_set,
1036 canon_modify_mem_list);
1037 return 1;
1040 /* This handles the case where several stores feed a partially redundant
1041 load. It checks if the redundancy elimination is possible and if it's
1042 worth it.
1044 Redundancy elimination is possible if:
1045 1) None of the operands of the load have been modified since the start
1046 of the current basic block.
1047 2) In at least one predecessor of the current basic block, the same
1048 expression is generated.
1050 See the function body for the heuristics that determine if eliminating
1051 a redundancy is also worth doing, assuming it is possible. */
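/* An illustrative (hypothetical) example of the profitability checks made
   below; the edge counts are invented and the parameter value is only an
   assumption about its default.  Suppose gcse-after-reload-partial-fraction
   is 3 and the load's block has two predecessors:

     pred A: expression available,   edge count 900  ->  ok_count     = 900
     pred B: expression unavailable, edge count 200  ->  not_ok_count = 200

   The transformation is applied only if
     ok_count >= PARTIAL_FRACTION * not_ok_count      (900 >= 3 * 200)
   and, when critical edges would have to be split, additionally
     ok_count >= CRITICAL_FRACTION * critical_count.                    */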
1053 static void
1054 eliminate_partially_redundant_load (basic_block bb, rtx_insn *insn,
1055 struct expr *expr)
1057 edge pred;
1058 rtx_insn *avail_insn = NULL;
1059 rtx avail_reg;
1060 rtx dest, pat;
1061 struct occr *a_occr;
1062 struct unoccr *occr, *avail_occrs = NULL;
1063 struct unoccr *unoccr, *unavail_occrs = NULL, *rollback_unoccr = NULL;
1064 int npred_ok = 0;
1065 gcov_type ok_count = 0; /* Redundant load execution count. */
1066 gcov_type critical_count = 0; /* Execution count of critical edges. */
1067 edge_iterator ei;
1068 bool critical_edge_split = false;
1070 /* The execution count of the loads to be added to make the
1071 load fully redundant. */
1072 gcov_type not_ok_count = 0;
1073 basic_block pred_bb;
1075 pat = PATTERN (insn);
1076 dest = SET_DEST (pat);
1078 /* Check that the loaded register is not used, set, or killed from the
1079 beginning of the block. */
1080 if (reg_changed_after_insn_p (dest, 0)
1081 || reg_used_between_p (dest, PREV_INSN (BB_HEAD (bb)), insn))
1082 return;
1084 /* Check potential for replacing load with copy for predecessors. */
1085 FOR_EACH_EDGE (pred, ei, bb->preds)
1087 rtx_insn *next_pred_bb_end;
1089 avail_insn = NULL;
1090 avail_reg = NULL_RTX;
1091 pred_bb = pred->src;
1092 for (a_occr = get_bb_avail_insn (pred_bb,
1093 expr->avail_occr,
1094 expr->bitmap_index);
1095 a_occr;
1096 a_occr = get_bb_avail_insn (pred_bb,
1097 a_occr->next,
1098 expr->bitmap_index))
1100 /* Check if the loaded register is not used. */
1101 avail_insn = a_occr->insn;
1102 avail_reg = get_avail_load_store_reg (avail_insn);
1103 gcc_assert (avail_reg);
1105 /* Make sure we can generate a move from register avail_reg to
1106 dest. */
1107 rtx_insn *move = gen_move_insn (copy_rtx (dest),
1108 copy_rtx (avail_reg));
1109 extract_insn (move);
1110 if (! constrain_operands (1, get_preferred_alternatives (insn,
1111 pred_bb))
1112 || reg_killed_on_edge (avail_reg, pred)
1113 || reg_used_on_edge (dest, pred))
1115 avail_insn = NULL;
1116 continue;
1118 next_pred_bb_end = NEXT_INSN (BB_END (BLOCK_FOR_INSN (avail_insn)));
1119 if (!reg_set_between_p (avail_reg, avail_insn, next_pred_bb_end))
1120 /* AVAIL_INSN remains non-null. */
1121 break;
1122 else
1123 avail_insn = NULL;
1126 if (EDGE_CRITICAL_P (pred))
1127 critical_count += pred->count;
1129 if (avail_insn != NULL_RTX)
1131 npred_ok++;
1132 ok_count += pred->count;
1133 if (! set_noop_p (PATTERN (gen_move_insn (copy_rtx (dest),
1134 copy_rtx (avail_reg)))))
1136 /* Check if there is going to be a split. */
1137 if (EDGE_CRITICAL_P (pred))
1138 critical_edge_split = true;
1140 else /* It's a dead move; no need to generate it. */
1141 continue;
1142 occr = (struct unoccr *) obstack_alloc (&unoccr_obstack,
1143 sizeof (struct unoccr));
1144 occr->insn = avail_insn;
1145 occr->pred = pred;
1146 occr->next = avail_occrs;
1147 avail_occrs = occr;
1148 if (! rollback_unoccr)
1149 rollback_unoccr = occr;
1151 else
1153 /* Adding a load on a critical edge will cause a split. */
1154 if (EDGE_CRITICAL_P (pred))
1155 critical_edge_split = true;
1156 not_ok_count += pred->count;
1157 unoccr = (struct unoccr *) obstack_alloc (&unoccr_obstack,
1158 sizeof (struct unoccr));
1159 unoccr->insn = NULL;
1160 unoccr->pred = pred;
1161 unoccr->next = unavail_occrs;
1162 unavail_occrs = unoccr;
1163 if (! rollback_unoccr)
1164 rollback_unoccr = unoccr;
1168 if (/* No load can be replaced by copy. */
1169 npred_ok == 0
1170 /* Prevent exploding the code. */
1171 || (optimize_bb_for_size_p (bb) && npred_ok > 1)
1172 /* If we don't have profile information we cannot tell if splitting
1173 a critical edge is profitable or not, so don't do it. */
1174 || ((! profile_info || ! flag_branch_probabilities
1175 || targetm.cannot_modify_jumps_p ())
1176 && critical_edge_split))
1177 goto cleanup;
1179 /* Check if it's worth applying the partial redundancy elimination. */
1180 if (ok_count < GCSE_AFTER_RELOAD_PARTIAL_FRACTION * not_ok_count)
1181 goto cleanup;
1182 if (ok_count < GCSE_AFTER_RELOAD_CRITICAL_FRACTION * critical_count)
1183 goto cleanup;
1185 /* Generate moves to the loaded register from where
1186 the memory is available. */
1187 for (occr = avail_occrs; occr; occr = occr->next)
1189 avail_insn = occr->insn;
1190 pred = occr->pred;
1191 /* Set avail_reg to be the register having the value of the
1192 memory. */
1193 avail_reg = get_avail_load_store_reg (avail_insn);
1194 gcc_assert (avail_reg);
1196 insert_insn_on_edge (gen_move_insn (copy_rtx (dest),
1197 copy_rtx (avail_reg)),
1198 pred);
1199 stats.moves_inserted++;
1201 if (dump_file)
1202 fprintf (dump_file,
1203 "generating move from %d to %d on edge from %d to %d\n",
1204 REGNO (avail_reg),
1205 REGNO (dest),
1206 pred->src->index,
1207 pred->dest->index);
1210 /* Regenerate loads where the memory is unavailable. */
1211 for (unoccr = unavail_occrs; unoccr; unoccr = unoccr->next)
1213 pred = unoccr->pred;
1214 insert_insn_on_edge (copy_insn (PATTERN (insn)), pred);
1215 stats.copies_inserted++;
1217 if (dump_file)
1219 fprintf (dump_file,
1220 "generating on edge from %d to %d a copy of load: ",
1221 pred->src->index,
1222 pred->dest->index);
1223 print_rtl (dump_file, PATTERN (insn));
1224 fprintf (dump_file, "\n");
1228 /* If the insn is not recorded as available in this block, delete it now;
1229 otherwise only mark it for later deletion, since keeping it available
1230 may help discover additional redundancies. */
1231 for (a_occr = get_bb_avail_insn (bb, expr->avail_occr, expr->bitmap_index);
1232 a_occr && (a_occr->insn != insn);
1233 a_occr = get_bb_avail_insn (bb, a_occr->next, expr->bitmap_index))
1236 if (!a_occr)
1238 stats.insns_deleted++;
1240 if (dump_file)
1242 fprintf (dump_file, "deleting insn:\n");
1243 print_rtl_single (dump_file, insn);
1244 fprintf (dump_file, "\n");
1246 delete_insn (insn);
1248 else
1249 a_occr->deleted_p = 1;
1251 cleanup:
1252 if (rollback_unoccr)
1253 obstack_free (&unoccr_obstack, rollback_unoccr);
1256 /* Perform the redundancy elimination as described above. */
1258 static void
1259 eliminate_partially_redundant_loads (void)
1261 rtx_insn *insn;
1262 basic_block bb;
1264 /* Note we start at block 1. */
1266 if (ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb == EXIT_BLOCK_PTR_FOR_FN (cfun))
1267 return;
1269 FOR_BB_BETWEEN (bb,
1270 ENTRY_BLOCK_PTR_FOR_FN (cfun)->next_bb->next_bb,
1271 EXIT_BLOCK_PTR_FOR_FN (cfun),
1272 next_bb)
1274 /* Don't try anything on basic blocks with strange predecessors. */
1275 if (! bb_has_well_behaved_predecessors (bb))
1276 continue;
1278 /* Do not try anything on cold basic blocks. */
1279 if (optimize_bb_for_size_p (bb))
1280 continue;
1282 /* Reset the table of things changed since the start of the current
1283 basic block. */
1284 reset_opr_set_tables ();
1286 /* Look at all insns in the current basic block and see if there are
1287 any loads in it that we can record. */
1288 FOR_BB_INSNS (bb, insn)
1290 /* Is it a load - of the form (set (reg) (mem))? */
1291 if (NONJUMP_INSN_P (insn)
1292 && GET_CODE (PATTERN (insn)) == SET
1293 && REG_P (SET_DEST (PATTERN (insn)))
1294 && MEM_P (SET_SRC (PATTERN (insn))))
1296 rtx pat = PATTERN (insn);
1297 rtx src = SET_SRC (pat);
1298 struct expr *expr;
1300 if (!MEM_VOLATILE_P (src)
1301 && GET_MODE (src) != BLKmode
1302 && general_operand (src, GET_MODE (src))
1303 /* Are the operands unchanged since the start of the
1304 block? */
1305 && oprs_unchanged_p (src, insn, false)
1306 && !(cfun->can_throw_non_call_exceptions && may_trap_p (src))
1307 && !side_effects_p (src)
1308 /* Is the expression recorded? */
1309 && (expr = lookup_expr_in_table (src)) != NULL)
1311 /* We now have a load (insn) and an available memory at
1312 its BB start (expr). Try to remove the load if it is
1313 redundant. */
1314 eliminate_partially_redundant_load (bb, insn, expr);
1318 /* Keep track of everything modified by this insn, so that we
1319 know what has been modified since the start of the current
1320 basic block. */
1321 if (INSN_P (insn))
1322 record_opr_changes (insn);
1326 commit_edge_insertions ();
1329 /* Go over the expression hash table and delete insns that were
1330 marked for later deletion. */
1332 /* This helper is called via htab_traverse. */
1334 static int delete_redundant_insns_1 (expr **slot, void *data ATTRIBUTE_UNUSED)
1336 struct expr *exprs = *slot;
1337 struct occr *occr;
1339 for (occr = exprs->avail_occr; occr != NULL; occr = occr->next)
1341 if (occr->deleted_p && dbg_cnt (gcse2_delete))
1343 delete_insn (occr->insn);
1344 stats.insns_deleted++;
1346 if (dump_file)
1348 fprintf (dump_file, "deleting insn:\n");
1349 print_rtl_single (dump_file, occr->insn);
1350 fprintf (dump_file, "\n");
1355 return 1;
1358 static void
1359 delete_redundant_insns (void)
1361 expr_table->traverse <void *, delete_redundant_insns_1> (NULL);
1362 if (dump_file)
1363 fprintf (dump_file, "\n");
1366 /* Main entry point of GCSE after reload - clean up some redundant loads
1367 due to spilling. */
1369 static void
1370 gcse_after_reload_main (rtx f ATTRIBUTE_UNUSED)
1373 memset (&stats, 0, sizeof (stats));
1375 /* Allocate memory for this pass.
1376 Also computes and initializes the insns' CUIDs. */
1377 alloc_mem ();
1379 /* We need alias analysis. */
1380 init_alias_analysis ();
1382 compute_hash_table ();
1384 if (dump_file)
1385 dump_hash_table (dump_file);
1387 if (expr_table->elements () > 0)
1389 /* Knowing which MEMs are transparent through a block can significantly
1390 increase the number of redundant loads found. So compute transparency
1391 information for each memory expression in the hash table. */
1392 df_analyze ();
1393 /* This cannot be part of the normal allocation routine because
1394 we have to know the number of elements in the hash table. */
1395 transp = sbitmap_vector_alloc (last_basic_block_for_fn (cfun),
1396 expr_table->elements ());
1397 bitmap_vector_ones (transp, last_basic_block_for_fn (cfun));
1398 expr_table->traverse <FILE *, compute_expr_transp> (dump_file);
1399 eliminate_partially_redundant_loads ();
1400 delete_redundant_insns ();
1401 sbitmap_vector_free (transp);
1403 if (dump_file)
1405 fprintf (dump_file, "GCSE AFTER RELOAD stats:\n");
1406 fprintf (dump_file, "copies inserted: %d\n", stats.copies_inserted);
1407 fprintf (dump_file, "moves inserted: %d\n", stats.moves_inserted);
1408 fprintf (dump_file, "insns deleted: %d\n", stats.insns_deleted);
1409 fprintf (dump_file, "\n\n");
1412 statistics_counter_event (cfun, "copies inserted",
1413 stats.copies_inserted);
1414 statistics_counter_event (cfun, "moves inserted",
1415 stats.moves_inserted);
1416 statistics_counter_event (cfun, "insns deleted",
1417 stats.insns_deleted);
1420 /* We are finished with alias analysis. */
1421 end_alias_analysis ();
1423 free_mem ();
1428 static unsigned int
1429 rest_of_handle_gcse2 (void)
1431 gcse_after_reload_main (get_insns ());
1432 rebuild_jump_labels (get_insns ());
1433 return 0;
1436 namespace {
1438 const pass_data pass_data_gcse2 =
1440 RTL_PASS, /* type */
1441 "gcse2", /* name */
1442 OPTGROUP_NONE, /* optinfo_flags */
1443 TV_GCSE_AFTER_RELOAD, /* tv_id */
1444 0, /* properties_required */
1445 0, /* properties_provided */
1446 0, /* properties_destroyed */
1447 0, /* todo_flags_start */
1448 0, /* todo_flags_finish */
1451 class pass_gcse2 : public rtl_opt_pass
1453 public:
1454 pass_gcse2 (gcc::context *ctxt)
1455 : rtl_opt_pass (pass_data_gcse2, ctxt)
1458 /* opt_pass methods: */
1459 virtual bool gate (function *fun)
1461 return (optimize > 0 && flag_gcse_after_reload
1462 && optimize_function_for_speed_p (fun));
1465 virtual unsigned int execute (function *) { return rest_of_handle_gcse2 (); }
1467 }; // class pass_gcse2
1469 } // anon namespace
1471 rtl_opt_pass *
1472 make_pass_gcse2 (gcc::context *ctxt)
1474 return new pass_gcse2 (ctxt);