gcc/early-remat.c
1 /* Early (pre-RA) rematerialization
2 Copyright (C) 2017 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 #include "config.h"
21 #include "system.h"
22 #include "coretypes.h"
23 #include "backend.h"
24 #include "rtl.h"
25 #include "df.h"
26 #include "tree-pass.h"
27 #include "memmodel.h"
28 #include "emit-rtl.h"
29 #include "insn-config.h"
30 #include "recog.h"
31 /* FIXME: The next two are only needed for gen_move_insn. */
32 #include "tree.h"
33 #include "expr.h"
34 #include "target.h"
35 #include "inchash.h"
36 #include "rtlhash.h"
37 #include "print-rtl.h"
38 #include "rtl-iter.h"
40 /* This pass runs before register allocation and implements an aggressive
41 form of rematerialization. It looks for pseudo registers R of mode M
42 for which:
44 (a) there are no call-preserved registers of mode M; and
45 (b) spilling R to the stack is expensive.
47 The assumption is that it's better to recompute R after each call instead
48 of spilling it, even if this extends the live ranges of other registers.
 50 The motivating examples for which these conditions hold are AArch64 SVE
51 vectors and predicates. Spilling them to the stack makes the frame
52 variable-sized, which we'd like to avoid if possible. It's also very
53 rare for SVE values to be "naturally" live across a call: usually this
54 happens as a result of CSE or other code motion.
56 The pass is split into the following phases:
58 Collection phase
59 ================
61 First we go through all pseudo registers looking for any that meet
62 the conditions above. For each such register R, we go through each
63 instruction that defines R to see whether any of them are suitable
64 rematerialization candidates. If at least one is, we treat all the
65 instructions that define R as candidates, but record which ones are
66 not in fact suitable. These unsuitable candidates exist only for the
67 sake of calculating reaching definitions (see below).
69 A "candidate" is a single instruction that we want to rematerialize
70 and a "candidate register" is a register that is set by at least one
71 candidate.
73 Candidate sorting
74 =================
76 Next we sort the candidates based on the cfg postorder, so that if
77 candidate C1 uses candidate C2, C1 has a lower index than C2.
78 This is useful when iterating through candidate bitmaps.
80 Reaching definition calculation
81 ===============================
83 We then compute standard reaching-definition sets for each candidate.
84 Each set specifies which candidates might provide the current definition
85 of a live candidate register.
87 From here on, a candidate C is "live" at a point P if the candidate
88 register defined by C is live at P and if C's definition reaches P.
89 An instruction I "uses" a candidate C if I takes the register defined by
90 C as input and if C is one of the reaching definitions of that register.
92 Candidate validation and value numbering
93 ========================================
95 Next we simultaneously decide which candidates are valid and look
96 for candidates that are equivalent to each other, assigning numbers
97 to each unique candidate value. A candidate C is invalid if:
99 (a) C uses an invalid candidate;
101 (b) there is a cycle of candidate uses involving C; or
103 (c) C takes a candidate register R as input and the reaching
104 definitions of R do not have the same value number.
 106 We assign a "representative" candidate C to each value number and from
 107 here on replace references to other candidates that have that value
 108 number with references to C. It is then only possible to rematerialize a
109 register R at point P if (after this replacement) there is a single
110 reaching definition of R at P.
112 Local phase
113 ===========
115 During this phase we go through each block and look for cases in which:
117 (a) an instruction I comes between two call instructions CI1 and CI2;
119 (b) I uses a candidate register R;
121 (c) a candidate C provides the only reaching definition of R; and
123 (d) C does not come between CI1 and I.
125 We then emit a copy of C after CI1, as well as the transitive closure
126 TC of the candidates used by C. The copies of TC might use the original
127 candidate registers or new temporary registers, depending on circumstances.
129 For example, if elsewhere we have:
131 C3: R3 <- f3 (...)
133 C2: R2 <- f2 (...)
135 C1: R1 <- f1 (R2, R3, ...) // uses C2 and C3
137 then for a block containing:
139 CI1: call
141 I: use R1 // uses C1
143 CI2: call
145 we would emit:
147 CI1: call
148 C3': R3' <- f3 (...)
149 C2': R2' <- f2 (...)
150 C1': R1 <- f1 (R2', R3', ...)
152 I: use R1
154 CI2: call
156 where R2' and R3' might be fresh registers. If instead we had:
158 CI1: call
160 I1: use R1 // uses C1
162 I2: use R3 // uses C3
164 CI2: call
166 we would keep the original R3:
168 CI1: call
169 C3': R3 <- f3 (...)
170 C2': R2' <- f2 (...)
171 C1': R1 <- f1 (R2', R3, ...)
173 I1: use R1 // uses C1
175 I2: use R3 // uses C3
177 CI2: call
179 We also record the last call in each block (if any) and compute:
181 rd_after_call:
182 The set of candidates that either (a) are defined outside the block
183 and are live after the last call or (b) are defined within the block
184 and reach the end of the last call. (We don't track whether the
185 latter values are live or not.)
187 required_after_call:
188 The set of candidates that need to be rematerialized after the
189 last call in order to satisfy uses in the block itself.
191 required_in:
192 The set of candidates that are live on entry to the block and are
193 used without an intervening call.
195 In addition, we compute the initial values of the sets required by
196 the global phase below.
198 Global phase
199 ============
201 We next compute a maximal solution to the following availability
202 problem:
204 available_in:
205 The set of candidates that are live on entry to a block and can
206 be used at that point without rematerialization.
208 available_out:
209 The set of candidates that are live on exit from a block and can
210 be used at that point without rematerialization.
212 available_locally:
213 The subset of available_out that is due to code in the block itself.
214 It contains candidates that are defined or used in the block and
215 not invalidated by a later call.
217 We then go through each block B and look for an appropriate place
218 to insert copies of required_in - available_in. Conceptually we
219 start by placing the copies at the head of B, but then move the
220 copy of a candidate C to predecessors if:
222 (a) that seems cheaper;
224 (b) there is more than one reaching definition of C's register at
225 the head of B; or
227 (c) copying C would clobber a hard register that is live on entry to B.
229 Moving a copy of C to a predecessor block PB involves:
231 (1) adding C to PB's required_after_call, if PB contains a call; or
 233 (2) adding C to PB's required_in otherwise.
235 C is then available on output from each PB and on input to B.
237 Once all this is done, we emit instructions for the final required_in
238 and required_after_call sets. */
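
The local phase described above emits a copy of the needed candidate C together with the transitive closure of the candidates that C uses. As a standalone illustration of that closure step (not part of this file; the `uses` map and the candidate numbering are hypothetical and unrelated to GCC's bitmap-based candidate sets), a simple worklist is enough:

// Standalone sketch: the candidate itself plus everything it transitively
// uses, i.e. the set of copies the local phase would emit after a call.
#include <cstdio>
#include <map>
#include <set>
#include <vector>

static std::set<unsigned>
use_closure (const std::map<unsigned, std::vector<unsigned>> &uses,
             unsigned root)
{
  std::set<unsigned> closure;
  std::vector<unsigned> worklist = { root };
  while (!worklist.empty ())
    {
      unsigned c = worklist.back ();
      worklist.pop_back ();
      if (!closure.insert (c).second)
        continue;
      auto it = uses.find (c);
      if (it != uses.end ())
        for (unsigned used : it->second)
          worklist.push_back (used);
    }
  return closure;
}

int
main ()
{
  /* Mirror the example above: C1 uses C2 and C3.  */
  std::map<unsigned, std::vector<unsigned>> uses = { { 1, { 2, 3 } } };
  for (unsigned c : use_closure (uses, 1))
    std::printf ("emit a copy of C%u\n", c);
}

Run on the C1/C2/C3 example from the comment, the closure of C1 is {C1, C2, C3}, which corresponds to the copies C1', C2' and C3' emitted after CI1.
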
240 namespace {
242 /* An invalid candidate index, used to indicate that there is more than
243 one reaching definition. */
244 const unsigned int MULTIPLE_CANDIDATES = -1U;
246 /* Pass-specific information about one basic block. */
247 struct remat_block_info {
248 /* The last call instruction in the block. */
249 rtx_insn *last_call;
251 /* The set of candidates that are live on entry to the block. NULL is
252 equivalent to an empty set. */
253 bitmap rd_in;
255 /* The set of candidates that are live on exit from the block. This might
256 reuse rd_in. NULL is equivalent to an empty set. */
257 bitmap rd_out;
259 /* The subset of RD_OUT that comes from local definitions. NULL is
260 equivalent to an empty set. */
261 bitmap rd_gen;
263 /* The set of candidates that the block invalidates (because it defines
264 the register to something else, or because the register's value is
265 no longer important). NULL is equivalent to an empty set. */
266 bitmap rd_kill;
268 /* The set of candidates that either (a) are defined outside the block
269 and are live after LAST_CALL or (b) are defined within the block
270 and reach the instruction after LAST_CALL. (We don't track whether
271 the latter values are live or not.)
273 Only used if LAST_CALL is nonnull. NULL is equivalent to an
274 empty set. */
275 bitmap rd_after_call;
277 /* Candidates that are live and available without rematerialization
278 on entry to the block. NULL is equivalent to an empty set. */
279 bitmap available_in;
281 /* Candidates that become available without rematerialization within the
282 block, and remain so on exit. NULL is equivalent to an empty set. */
283 bitmap available_locally;
285 /* Candidates that are available without rematerialization on exit from
286 the block. This might reuse available_in or available_locally. */
287 bitmap available_out;
289 /* Candidates that need to be rematerialized either at the start of the
290 block or before entering the block. */
291 bitmap required_in;
293 /* Candidates that need to be rematerialized after LAST_CALL.
294 Only used if LAST_CALL is nonnull. */
295 bitmap required_after_call;
297 /* The number of candidates in the block. */
298 unsigned int num_candidates;
300 /* The earliest candidate in the block (i.e. the one with the
301 highest index). Only valid if NUM_CANDIDATES is nonzero. */
302 unsigned int first_candidate;
304 /* The best (lowest) execution frequency for rematerializing REQUIRED_IN.
305 This is the execution frequency of the block if LOCAL_REMAT_CHEAPER_P,
306 otherwise it is the sum of the execution frequencies of whichever
307 predecessor blocks would do the rematerialization. */
308 int remat_frequency;
310 /* True if the block ends with an abnormal call. */
311 unsigned int abnormal_call_p : 1;
313 /* Used to record whether a graph traversal has visited this block. */
314 unsigned int visited_p : 1;
316 /* True if we have calculated REMAT_FREQUENCY. */
317 unsigned int remat_frequency_valid_p : 1;
319 /* True if it is cheaper to rematerialize candidates at the start of
320 the block, rather than moving them to predecessor blocks. */
321 unsigned int local_remat_cheaper_p : 1;
324 /* Information about a group of candidates with the same value number. */
325 struct remat_equiv_class {
326 /* The candidates that have the same value number. */
327 bitmap members;
329 /* The candidate that was first added to MEMBERS. */
330 unsigned int earliest;
332 /* The candidate that represents the others. This is always the one
333 with the highest index. */
334 unsigned int representative;
337 /* Information about an instruction that we might want to rematerialize. */
338 struct remat_candidate {
339 /* The pseudo register that the instruction sets. */
340 unsigned int regno;
342 /* A temporary register used when rematerializing uses of this candidate,
343 if REGNO doesn't have the right value or isn't worth using. */
344 unsigned int copy_regno;
346 /* True if we intend to rematerialize this instruction by emitting
347 a move of a constant into REGNO, false if we intend to emit a
348 copy of the original instruction. */
349 unsigned int constant_p : 1;
351 /* True if we still think it's possible to rematerialize INSN. */
352 unsigned int can_copy_p : 1;
354 /* Used to record whether a graph traversal has visited this candidate. */
355 unsigned int visited_p : 1;
357 /* True if we have verified that it's possible to rematerialize INSN.
358 Once this is true, both it and CAN_COPY_P remain true. */
359 unsigned int validated_p : 1;
361 /* True if we have "stabilized" INSN, i.e. ensured that all non-candidate
362 registers read by INSN will have the same value when rematerializing INSN.
363 Only ever true if CAN_COPY_P. */
364 unsigned int stabilized_p : 1;
366 /* Hash value used for value numbering. */
367 hashval_t hash;
369 /* The instruction that sets REGNO. */
370 rtx_insn *insn;
372 /* If CONSTANT_P, the value that should be moved into REGNO when
373 rematerializing, otherwise the pattern of the instruction that
374 should be used. */
375 rtx remat_rtx;
377 /* The set of candidates that INSN takes as input. NULL is equivalent
378 to the empty set. All candidates in this set have a higher index
379 than the current candidate. */
380 bitmap uses;
382 /* The set of hard registers that would be clobbered by rematerializing
383 the candidate, including (transitively) all those that would be
384 clobbered by rematerializing USES. */
385 bitmap clobbers;
387 /* The equivalence class to which the candidate belongs, or null if none. */
388 remat_equiv_class *equiv_class;
391 /* Hash functions used for value numbering. */
392 struct remat_candidate_hasher : nofree_ptr_hash <remat_candidate>
394 typedef value_type compare_type;
395 static hashval_t hash (const remat_candidate *);
396 static bool equal (const remat_candidate *, const remat_candidate *);
399 /* Main class for this pass. */
400 class early_remat {
401 public:
402 early_remat (function *, sbitmap);
403 ~early_remat ();
405 void run (void);
407 private:
408 bitmap alloc_bitmap (void);
409 bitmap get_bitmap (bitmap *);
410 void init_temp_bitmap (bitmap *);
411 void copy_temp_bitmap (bitmap *, bitmap *);
413 void dump_insn_id (rtx_insn *);
414 void dump_candidate_bitmap (bitmap);
415 void dump_all_candidates (void);
416 void dump_edge_list (basic_block, bool);
417 void dump_block_info (basic_block);
418 void dump_all_blocks (void);
420 bool interesting_regno_p (unsigned int);
421 remat_candidate *add_candidate (rtx_insn *, unsigned int, bool);
422 bool maybe_add_candidate (rtx_insn *, unsigned int);
423 bool collect_candidates (void);
424 void init_block_info (void);
425 void sort_candidates (void);
426 void finalize_candidate_indices (void);
427 void record_equiv_candidates (unsigned int, unsigned int);
428 static bool rd_confluence_n (edge);
429 static bool rd_transfer (int);
430 void compute_rd (void);
431 unsigned int canon_candidate (unsigned int);
432 void canon_bitmap (bitmap *);
433 unsigned int resolve_reaching_def (bitmap);
434 bool check_candidate_uses (unsigned int);
435 void compute_clobbers (unsigned int);
436 void assign_value_number (unsigned int);
437 void decide_candidate_validity (void);
438 bool stable_use_p (unsigned int);
439 void emit_copy_before (unsigned int, rtx, rtx);
440 void stabilize_pattern (unsigned int);
441 void replace_dest_with_copy (unsigned int);
442 void stabilize_candidate_uses (unsigned int, bitmap, bitmap, bitmap,
443 bitmap);
444 void emit_remat_insns (bitmap, bitmap, bitmap, rtx_insn *);
445 bool set_available_out (remat_block_info *);
446 void process_block (basic_block);
447 void local_phase (void);
448 static bool avail_confluence_n (edge);
449 static bool avail_transfer (int);
450 void compute_availability (void);
451 void unshare_available_sets (remat_block_info *);
452 bool can_move_across_edge_p (edge);
453 bool local_remat_cheaper_p (unsigned int);
454 bool need_to_move_candidate_p (unsigned int, unsigned int);
455 void compute_minimum_move_set (unsigned int, bitmap);
456 void move_to_predecessors (unsigned int, bitmap, bitmap);
457 void choose_rematerialization_points (void);
458 void emit_remat_insns_for_block (basic_block);
459 void global_phase (void);
461 /* The function that we're optimizing. */
462 function *m_fn;
464 /* The modes that we want to rematerialize. */
465 sbitmap m_selected_modes;
467 /* All rematerialization candidates, identified by their index into the
468 vector. */
469 auto_vec<remat_candidate> m_candidates;
471 /* The set of candidate registers. */
472 bitmap_head m_candidate_regnos;
474 /* Temporary sets. */
475 bitmap_head m_tmp_bitmap;
476 bitmap m_available;
477 bitmap m_required;
479 /* Information about each basic block. */
480 auto_vec<remat_block_info> m_block_info;
482 /* A mapping from register numbers to the set of associated candidates.
483 Only valid for registers in M_CANDIDATE_REGNOS. */
484 auto_vec<bitmap> m_regno_to_candidates;
486 /* An obstack used for allocating bitmaps, so that we can free them all
487 in one go. */
488 bitmap_obstack m_obstack;
490 /* A hash table of candidates used for value numbering. If a candidate
491 in the table is in an equivalence class, the candidate is marked as
492 the earliest member of the class. */
493 hash_table<remat_candidate_hasher> m_value_table;
495 /* Used temporarily by callback functions. */
496 static early_remat *er;
501 early_remat *early_remat::er;
503 /* rtx_equal_p_cb callback that treats any two SCRATCHes as equal.
504 This allows us to compare two copies of a pattern, even though their
505 SCRATCHes are always distinct. */
507 static int
508 scratch_equal (const_rtx *x, const_rtx *y, rtx *nx, rtx *ny)
510 if (GET_CODE (*x) == SCRATCH && GET_CODE (*y) == SCRATCH)
512 *nx = const0_rtx;
513 *ny = const0_rtx;
514 return 1;
516 return 0;
519 /* Hash callback functions for remat_candidate. */
521 hashval_t
522 remat_candidate_hasher::hash (const remat_candidate *cand)
524 return cand->hash;
527 bool
528 remat_candidate_hasher::equal (const remat_candidate *cand1,
529 const remat_candidate *cand2)
531 return (cand1->regno == cand2->regno
532 && cand1->constant_p == cand2->constant_p
533 && (cand1->constant_p
534 ? rtx_equal_p (cand1->remat_rtx, cand2->remat_rtx)
535 : rtx_equal_p_cb (cand1->remat_rtx, cand2->remat_rtx,
536 scratch_equal))
537 && (!cand1->uses || bitmap_equal_p (cand1->uses, cand2->uses)));
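
This hasher feeds the value-numbering step from the header comment: candidates hash on their destination register and pattern, and compare equal only when the patterns match structurally and their (already canonicalized) use sets agree. A minimal standalone sketch of the same idea, using a string-keyed std::unordered_map instead of GCC's rtx hashing (the key encoding and all names here are illustrative):

// Standalone sketch: candidates that set the same register from structurally
// equal expressions (with matching input value numbers) share a value number.
#include <cstdio>
#include <string>
#include <unordered_map>
#include <vector>

struct value_table
{
  std::unordered_map<std::string, unsigned> numbers;

  unsigned
  number_of (unsigned regno, const std::string &expr,
             const std::vector<unsigned> &input_vns)
  {
    /* Encode (regno, expression, input value numbers) as the lookup key.  */
    std::string key = std::to_string (regno) + '|' + expr;
    for (unsigned vn : input_vns)
      key += '|' + std::to_string (vn);
    /* The first candidate seen with a given key defines its value number.  */
    return numbers.emplace (key, numbers.size ()).first->second;
  }
};

int
main ()
{
  value_table vt;
  unsigned a = vt.number_of (80, "(plus r81 r82)", {});
  unsigned b = vt.number_of (80, "(plus r81 r82)", {});
  std::printf ("equivalent: %d\n", a == b);   /* prints 1 */
}

The first candidate to claim a key plays the role of the earliest member of its class, loosely mirroring how the first entry inserted into m_value_table becomes the class's "earliest" candidate.
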
540 /* Return true if B is null or empty. */
542 inline bool
543 empty_p (bitmap b)
545 return !b || bitmap_empty_p (b);
548 /* Allocate a new bitmap. It will be automatically freed at the end of
549 the pass. */
551 inline bitmap
552 early_remat::alloc_bitmap (void)
554 return bitmap_alloc (&m_obstack);
557 /* Initialize *PTR to an empty bitmap if it is currently null. */
559 inline bitmap
560 early_remat::get_bitmap (bitmap *ptr)
562 if (!*ptr)
563 *ptr = alloc_bitmap ();
564 return *ptr;
567 /* *PTR is either null or empty. If it is null, initialize it to an
568 empty bitmap. */
570 inline void
571 early_remat::init_temp_bitmap (bitmap *ptr)
573 if (!*ptr)
574 *ptr = alloc_bitmap ();
575 else
576 gcc_checking_assert (bitmap_empty_p (*ptr));
579 /* Move *SRC to *DEST and leave *SRC empty. */
581 inline void
582 early_remat::copy_temp_bitmap (bitmap *dest, bitmap *src)
584 if (!empty_p (*src))
586 *dest = *src;
587 *src = NULL;
589 else
590 *dest = NULL;
593 /* Print INSN's identifier to the dump file. */
595 void
596 early_remat::dump_insn_id (rtx_insn *insn)
598 fprintf (dump_file, "%d[bb:%d]", INSN_UID (insn),
599 BLOCK_FOR_INSN (insn)->index);
602 /* Print candidate set CANDIDATES to the dump file, with a leading space. */
604 void
605 early_remat::dump_candidate_bitmap (bitmap candidates)
607 if (empty_p (candidates))
609 fprintf (dump_file, " none");
610 return;
613 unsigned int cand_index;
614 bitmap_iterator bi;
615 EXECUTE_IF_SET_IN_BITMAP (candidates, 0, cand_index, bi)
616 fprintf (dump_file, " %d", cand_index);
619 /* Print information about all candidates to the dump file. */
621 void
622 early_remat::dump_all_candidates (void)
624 fprintf (dump_file, "\n;; Candidates:\n;;\n");
625 fprintf (dump_file, ";; %5s %5s %8s %s\n", "#", "reg", "mode", "insn");
626 fprintf (dump_file, ";; %5s %5s %8s %s\n", "=", "===", "====", "====");
627 unsigned int cand_index;
628 remat_candidate *cand;
629 FOR_EACH_VEC_ELT (m_candidates, cand_index, cand)
631 fprintf (dump_file, ";; %5d %5d %8s ", cand_index, cand->regno,
632 GET_MODE_NAME (GET_MODE (regno_reg_rtx[cand->regno])));
633 dump_insn_id (cand->insn);
634 if (!cand->can_copy_p)
635 fprintf (dump_file, " -- can't copy");
636 fprintf (dump_file, "\n");
639 fprintf (dump_file, "\n;; Register-to-candidate mapping:\n;;\n");
640 unsigned int regno;
641 bitmap_iterator bi;
642 EXECUTE_IF_SET_IN_BITMAP (&m_candidate_regnos, 0, regno, bi)
644 fprintf (dump_file, ";; %5d:", regno);
645 dump_candidate_bitmap (m_regno_to_candidates[regno]);
646 fprintf (dump_file, "\n");
650 /* Print the predecessors or successors of BB to the dump file, with a
651 leading space. DO_SUCC is true to print successors and false to print
652 predecessors. */
654 void
655 early_remat::dump_edge_list (basic_block bb, bool do_succ)
657 edge e;
658 edge_iterator ei;
659 FOR_EACH_EDGE (e, ei, do_succ ? bb->succs : bb->preds)
660 dump_edge_info (dump_file, e, 0, do_succ);
663 /* Print information about basic block BB to the dump file. */
665 void
666 early_remat::dump_block_info (basic_block bb)
668 remat_block_info *info = &m_block_info[bb->index];
669 fprintf (dump_file, ";;\n;; Block %d:", bb->index);
670 int width = 25;
672 fprintf (dump_file, "\n;;%*s:", width, "predecessors");
673 dump_edge_list (bb, false);
675 fprintf (dump_file, "\n;;%*s:", width, "successors");
676 dump_edge_list (bb, true);
678 fprintf (dump_file, "\n;;%*s: %d", width, "frequency",
679 bb->count.to_frequency (m_fn));
681 if (info->last_call)
682 fprintf (dump_file, "\n;;%*s: %d", width, "last call",
683 INSN_UID (info->last_call));
685 if (!empty_p (info->rd_in))
687 fprintf (dump_file, "\n;;%*s:", width, "RD in");
688 dump_candidate_bitmap (info->rd_in);
690 if (!empty_p (info->rd_kill))
692 fprintf (dump_file, "\n;;%*s:", width, "RD kill");
693 dump_candidate_bitmap (info->rd_kill);
695 if (!empty_p (info->rd_gen))
697 fprintf (dump_file, "\n;;%*s:", width, "RD gen");
698 dump_candidate_bitmap (info->rd_gen);
700 if (!empty_p (info->rd_after_call))
702 fprintf (dump_file, "\n;;%*s:", width, "RD after call");
703 dump_candidate_bitmap (info->rd_after_call);
705 if (!empty_p (info->rd_out))
707 fprintf (dump_file, "\n;;%*s:", width, "RD out");
708 if (info->rd_in == info->rd_out)
709 fprintf (dump_file, " RD in");
710 else
711 dump_candidate_bitmap (info->rd_out);
713 if (!empty_p (info->available_in))
715 fprintf (dump_file, "\n;;%*s:", width, "available in");
716 dump_candidate_bitmap (info->available_in);
718 if (!empty_p (info->available_locally))
720 fprintf (dump_file, "\n;;%*s:", width, "available locally");
721 dump_candidate_bitmap (info->available_locally);
723 if (!empty_p (info->available_out))
725 fprintf (dump_file, "\n;;%*s:", width, "available out");
726 if (info->available_in == info->available_out)
727 fprintf (dump_file, " available in");
728 else if (info->available_locally == info->available_out)
729 fprintf (dump_file, " available locally");
730 else
731 dump_candidate_bitmap (info->available_out);
733 if (!empty_p (info->required_in))
735 fprintf (dump_file, "\n;;%*s:", width, "required in");
736 dump_candidate_bitmap (info->required_in);
738 if (!empty_p (info->required_after_call))
740 fprintf (dump_file, "\n;;%*s:", width, "required after call");
741 dump_candidate_bitmap (info->required_after_call);
743 fprintf (dump_file, "\n");
746 /* Print information about all basic blocks to the dump file. */
748 void
749 early_remat::dump_all_blocks (void)
751 basic_block bb;
752 FOR_EACH_BB_FN (bb, m_fn)
753 dump_block_info (bb);
756 /* Return true if REGNO is worth rematerializing. */
758 bool
759 early_remat::interesting_regno_p (unsigned int regno)
761 /* Ignore unused registers. */
762 rtx reg = regno_reg_rtx[regno];
763 if (!reg || DF_REG_DEF_COUNT (regno) == 0)
764 return false;
766 /* Make sure the register has a mode that we want to rematerialize. */
767 if (!bitmap_bit_p (m_selected_modes, GET_MODE (reg)))
768 return false;
770 /* Ignore values that might sometimes be used uninitialized. We could
771 instead add dummy candidates for the entry block definition, and so
772 handle uses that are definitely not uninitialized, but the combination
773 of the two should be rare in practice. */
774 if (bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (m_fn)), regno))
775 return false;
777 return true;
780 /* Record the set of register REGNO in instruction INSN as a
781 rematerialization candidate. CAN_COPY_P is true unless we already
782 know that rematerialization is impossible (in which case the candidate
783 only exists for the reaching definition calculation).
785 The candidate's index is not fixed at this stage. */
787 remat_candidate *
788 early_remat::add_candidate (rtx_insn *insn, unsigned int regno,
789 bool can_copy_p)
791 remat_candidate cand;
792 memset (&cand, 0, sizeof (cand));
793 cand.regno = regno;
794 cand.insn = insn;
795 cand.remat_rtx = PATTERN (insn);
796 cand.can_copy_p = can_copy_p;
797 m_candidates.safe_push (cand);
799 bitmap_set_bit (&m_candidate_regnos, regno);
801 return &m_candidates.last ();
804 /* Return true if we can rematerialize the set of register REGNO in
805 instruction INSN, and add it as a candidate if so. When returning
806 false, print the reason to the dump file. */
808 bool
809 early_remat::maybe_add_candidate (rtx_insn *insn, unsigned int regno)
811 #define FAILURE_FORMAT ";; Can't rematerialize set of reg %d in %d[bb:%d]: "
812 #define FAILURE_ARGS regno, INSN_UID (insn), BLOCK_FOR_INSN (insn)->index
814 /* The definition must come from an ordinary instruction. */
815 basic_block bb = BLOCK_FOR_INSN (insn);
816 if (!NONJUMP_INSN_P (insn)
817 || (insn == BB_END (bb)
818 && has_abnormal_or_eh_outgoing_edge_p (bb)))
820 if (dump_file)
821 fprintf (dump_file, FAILURE_FORMAT "insn alters control flow\n",
822 FAILURE_ARGS);
823 return false;
826 /* Prefer to rematerialize constants directly -- it's much easier. */
827 machine_mode mode = GET_MODE (regno_reg_rtx[regno]);
828 if (rtx note = find_reg_equal_equiv_note (insn))
830 rtx val = XEXP (note, 0);
831 if (CONSTANT_P (val)
832 && targetm.legitimate_constant_p (mode, val))
834 remat_candidate *cand = add_candidate (insn, regno, true);
835 cand->constant_p = true;
836 cand->remat_rtx = val;
837 return true;
841 /* See whether the target has reasons to prevent a copy. */
842 if (targetm.cannot_copy_insn_p && targetm.cannot_copy_insn_p (insn))
844 if (dump_file)
845 fprintf (dump_file, FAILURE_FORMAT "target forbids copying\n",
846 FAILURE_ARGS);
847 return false;
850 /* We can't copy trapping instructions. */
851 rtx pat = PATTERN (insn);
852 if (may_trap_p (pat))
854 if (dump_file)
855 fprintf (dump_file, FAILURE_FORMAT "insn might trap\n", FAILURE_ARGS);
856 return false;
859 /* We can't copy instructions that read memory, unless we know that
860 the contents never change. */
861 subrtx_iterator::array_type array;
862 FOR_EACH_SUBRTX (iter, array, pat, ALL)
863 if (MEM_P (*iter) && !MEM_READONLY_P (*iter))
865 if (dump_file)
866 fprintf (dump_file, FAILURE_FORMAT "insn references non-constant"
867 " memory\n", FAILURE_ARGS);
868 return false;
871 /* Check each defined register. */
872 df_ref ref;
873 FOR_EACH_INSN_DEF (ref, insn)
875 unsigned int def_regno = DF_REF_REGNO (ref);
876 if (def_regno == regno)
878 /* Make sure the definition is write-only. (Partial definitions,
879 such as setting the low part and clobbering the high part,
880 are otherwise OK.) */
881 if (DF_REF_FLAGS_IS_SET (ref, DF_REF_READ_WRITE))
883 if (dump_file)
884 fprintf (dump_file, FAILURE_FORMAT "destination is"
885 " read-modify-write\n", FAILURE_ARGS);
886 return false;
889 else
891 /* The instruction can set additional registers, provided that
892 they're call-clobbered hard registers. This is useful for
893 instructions that alter the condition codes. */
894 if (!HARD_REGISTER_NUM_P (def_regno))
896 if (dump_file)
897 fprintf (dump_file, FAILURE_FORMAT "insn also sets"
898 " pseudo reg %d\n", FAILURE_ARGS, def_regno);
899 return false;
901 if (global_regs[def_regno])
903 if (dump_file)
904 fprintf (dump_file, FAILURE_FORMAT "insn also sets"
905 " global reg %d\n", FAILURE_ARGS, def_regno);
906 return false;
908 if (!TEST_HARD_REG_BIT (regs_invalidated_by_call, def_regno))
910 if (dump_file)
911 fprintf (dump_file, FAILURE_FORMAT "insn also sets"
912 " call-preserved reg %d\n", FAILURE_ARGS, def_regno);
913 return false;
918 /* If the instruction uses fixed hard registers, check that those
919 registers have the same value throughout the function. If the
920 instruction uses non-fixed hard registers, check that we can
921 replace them with pseudos. */
922 FOR_EACH_INSN_USE (ref, insn)
924 unsigned int use_regno = DF_REF_REGNO (ref);
925 if (HARD_REGISTER_NUM_P (use_regno) && fixed_regs[use_regno])
927 if (rtx_unstable_p (DF_REF_REAL_REG (ref)))
929 if (dump_file)
930 fprintf (dump_file, FAILURE_FORMAT "insn uses fixed hard reg"
931 " %d\n", FAILURE_ARGS, use_regno);
932 return false;
935 else if (HARD_REGISTER_NUM_P (use_regno))
937 /* Allocate a dummy pseudo register and temporarily install it.
938 Make the register number depend on the mode, which should
939 provide enough sharing for match_dup while also weeding
940 out cases in which operands with different modes are
941 explicitly tied. */
942 rtx *loc = DF_REF_REAL_LOC (ref);
943 unsigned int size = RTX_CODE_SIZE (REG);
944 rtx new_reg = (rtx) alloca (size);
945 memset (new_reg, 0, size);
946 PUT_CODE (new_reg, REG);
947 set_mode_and_regno (new_reg, GET_MODE (*loc),
948 LAST_VIRTUAL_REGISTER + 1 + GET_MODE (*loc));
949 validate_change (insn, loc, new_reg, 1);
952 bool ok_p = verify_changes (0);
953 cancel_changes (0);
954 if (!ok_p)
956 if (dump_file)
957 fprintf (dump_file, FAILURE_FORMAT "insn does not allow hard"
958 " register inputs to be replaced\n", FAILURE_ARGS);
959 return false;
962 #undef FAILURE_ARGS
963 #undef FAILURE_FORMAT
965 add_candidate (insn, regno, true);
966 return true;
969 /* Calculate the set of rematerialization candidates. Return true if
970 we find at least one. */
972 bool
973 early_remat::collect_candidates (void)
975 unsigned int nregs = DF_REG_SIZE (df);
976 for (unsigned int regno = FIRST_PSEUDO_REGISTER; regno < nregs; ++regno)
977 if (interesting_regno_p (regno))
979 /* Create candidates for all suitable definitions. */
980 bitmap_clear (&m_tmp_bitmap);
981 unsigned int bad = 0;
982 unsigned int id = 0;
983 for (df_ref ref = DF_REG_DEF_CHAIN (regno); ref;
984 ref = DF_REF_NEXT_REG (ref))
986 rtx_insn *insn = DF_REF_INSN (ref);
987 if (maybe_add_candidate (insn, regno))
988 bitmap_set_bit (&m_tmp_bitmap, id);
989 else
990 bad += 1;
991 id += 1;
994 /* If we found at least one suitable definition, add dummy
995 candidates for the rest, so that we can see which definitions
996 are live where. */
997 if (!bitmap_empty_p (&m_tmp_bitmap) && bad)
999 id = 0;
1000 for (df_ref ref = DF_REG_DEF_CHAIN (regno); ref;
1001 ref = DF_REF_NEXT_REG (ref))
1003 if (!bitmap_bit_p (&m_tmp_bitmap, id))
1004 add_candidate (DF_REF_INSN (ref), regno, false);
1005 id += 1;
1011 return !m_candidates.is_empty ();
1014 /* Initialize the m_block_info array. */
1016 void
1017 early_remat::init_block_info (void)
1019 unsigned int n_blocks = last_basic_block_for_fn (m_fn);
1020 m_block_info.safe_grow_cleared (n_blocks);
1023 /* Maps basic block indices to their position in the post order. */
1024 static unsigned int *postorder_index;
1026 /* Order remat_candidates X_IN and Y_IN according to the cfg postorder. */
1028 static int
1029 compare_candidates (const void *x_in, const void *y_in)
1031 const remat_candidate *x = (const remat_candidate *) x_in;
1032 const remat_candidate *y = (const remat_candidate *) y_in;
1033 basic_block x_bb = BLOCK_FOR_INSN (x->insn);
1034 basic_block y_bb = BLOCK_FOR_INSN (y->insn);
1035 if (x_bb != y_bb)
1036 /* Make X and Y follow block postorder. */
1037 return postorder_index[x_bb->index] - postorder_index[y_bb->index];
1039 /* Make X and Y follow a backward traversal of the containing block. */
1040 return DF_INSN_LUID (y->insn) - DF_INSN_LUID (x->insn);
1043 /* Sort the collected rematerialization candidates so that they follow
1044 cfg postorder. */
1046 void
1047 early_remat::sort_candidates (void)
1049 /* Make sure the DF LUIDs are up-to-date for all the blocks we
1050 care about. */
1051 bitmap_clear (&m_tmp_bitmap);
1052 unsigned int cand_index;
1053 remat_candidate *cand;
1054 FOR_EACH_VEC_ELT (m_candidates, cand_index, cand)
1056 basic_block bb = BLOCK_FOR_INSN (cand->insn);
1057 if (bitmap_set_bit (&m_tmp_bitmap, bb->index))
1058 df_recompute_luids (bb);
1061 /* Create a mapping from block numbers to their position in the
1062 postorder. */
1063 unsigned int n_blocks = last_basic_block_for_fn (m_fn);
1064 int *postorder = df_get_postorder (DF_BACKWARD);
1065 unsigned int postorder_len = df_get_n_blocks (DF_BACKWARD);
1066 postorder_index = new unsigned int[n_blocks];
1067 for (unsigned int i = 0; i < postorder_len; ++i)
1068 postorder_index[postorder[i]] = i;
1070 m_candidates.qsort (compare_candidates);
 1072 delete[] postorder_index;
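
sort_candidates keys the qsort on each block's position in the CFG postorder, so a candidate always receives a lower index than the candidates it uses, with a backward LUID order breaking ties within a block. A self-contained sketch of the same indexing trick on a toy three-block CFG (the DFS, block ids and item numbering are illustrative, not df_get_postorder):

// Standalone sketch: give every block a postorder index, then sort items by
// the index of their containing block, as sort_candidates does above.
#include <algorithm>
#include <cstdio>
#include <vector>

static void
postorder_dfs (unsigned bb, const std::vector<std::vector<unsigned>> &succs,
               std::vector<bool> &visited, std::vector<unsigned> &order)
{
  visited[bb] = true;
  for (unsigned succ : succs[bb])
    if (!visited[succ])
      postorder_dfs (succ, succs, visited, order);
  order.push_back (bb);
}

int
main ()
{
  /* A straight-line CFG: bb0 -> bb1 -> bb2.  */
  std::vector<std::vector<unsigned>> succs = { { 1 }, { 2 }, {} };
  std::vector<bool> visited (succs.size (), false);
  std::vector<unsigned> order;
  postorder_dfs (0, succs, visited, order);

  std::vector<unsigned> postorder_index (succs.size ());
  for (unsigned i = 0; i < order.size (); ++i)
    postorder_index[order[i]] = i;

  /* Item 0 lives in bb2 (and conceptually uses item 1, which lives in bb1),
     so it must sort first and receive the lower index.  */
  std::vector<unsigned> item_block = { 2, 1 };
  std::vector<unsigned> items = { 0, 1 };
  std::sort (items.begin (), items.end (),
             [&] (unsigned x, unsigned y)
             {
               return postorder_index[item_block[x]]
                      < postorder_index[item_block[y]];
             });
  for (unsigned item : items)
    std::printf ("item %u (bb%u)\n", item, item_block[item]);
}

Blocks that appear later on a path get the smaller postorder index, so their items sort first, which is exactly the property the collection-phase comment relies on.
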
1075 /* Commit to the current candidate indices and initialize cross-references. */
1077 void
1078 early_remat::finalize_candidate_indices (void)
1080 /* Create a bitmap for each candidate register. */
1081 m_regno_to_candidates.safe_grow (max_reg_num ());
1082 unsigned int regno;
1083 bitmap_iterator bi;
1084 EXECUTE_IF_SET_IN_BITMAP (&m_candidate_regnos, 0, regno, bi)
1085 m_regno_to_candidates[regno] = alloc_bitmap ();
1087 /* Go through each candidate and record its index. */
1088 unsigned int cand_index;
1089 remat_candidate *cand;
1090 FOR_EACH_VEC_ELT (m_candidates, cand_index, cand)
1092 basic_block bb = BLOCK_FOR_INSN (cand->insn);
1093 remat_block_info *info = &m_block_info[bb->index];
1094 info->num_candidates += 1;
1095 info->first_candidate = cand_index;
1096 bitmap_set_bit (m_regno_to_candidates[cand->regno], cand_index);
1100 /* Record that candidates CAND1_INDEX and CAND2_INDEX are equivalent.
1101 CAND1_INDEX might already have an equivalence class, but CAND2_INDEX
1102 doesn't. */
1104 void
1105 early_remat::record_equiv_candidates (unsigned int cand1_index,
1106 unsigned int cand2_index)
1108 if (dump_file)
1109 fprintf (dump_file, ";; Candidate %d is equivalent to candidate %d\n",
1110 cand2_index, cand1_index);
1112 remat_candidate *cand1 = &m_candidates[cand1_index];
1113 remat_candidate *cand2 = &m_candidates[cand2_index];
1114 gcc_checking_assert (!cand2->equiv_class);
1116 remat_equiv_class *ec = cand1->equiv_class;
1117 if (!ec)
1119 ec = XOBNEW (&m_obstack.obstack, remat_equiv_class);
1120 ec->members = alloc_bitmap ();
1121 bitmap_set_bit (ec->members, cand1_index);
1122 ec->earliest = cand1_index;
1123 ec->representative = cand1_index;
1124 cand1->equiv_class = ec;
1126 cand1 = &m_candidates[ec->representative];
1127 cand2->equiv_class = ec;
1128 bitmap_set_bit (ec->members, cand2_index);
1129 if (cand2_index > ec->representative)
1130 ec->representative = cand2_index;
1133 /* Propagate information from the rd_out set of E->src to the rd_in set
1134 of E->dest, when computing global reaching definitions. Return true
1135 if something changed. */
1137 bool
1138 early_remat::rd_confluence_n (edge e)
1140 remat_block_info *src = &er->m_block_info[e->src->index];
1141 remat_block_info *dest = &er->m_block_info[e->dest->index];
1143 /* available_in temporarily contains the set of candidates whose
1144 registers are live on entry. */
1145 if (empty_p (src->rd_out) || empty_p (dest->available_in))
1146 return false;
1148 return bitmap_ior_and_into (er->get_bitmap (&dest->rd_in),
1149 src->rd_out, dest->available_in);
1152 /* Propagate information from the rd_in set of block BB_INDEX to rd_out.
1153 Return true if something changed. */
1155 bool
1156 early_remat::rd_transfer (int bb_index)
1158 remat_block_info *info = &er->m_block_info[bb_index];
1160 if (empty_p (info->rd_in))
1161 return false;
1163 if (empty_p (info->rd_kill))
1165 gcc_checking_assert (empty_p (info->rd_gen));
1166 if (!info->rd_out)
1167 info->rd_out = info->rd_in;
1168 else
1169 gcc_checking_assert (info->rd_out == info->rd_in);
1170 /* Assume that we only get called if something changed. */
1171 return true;
1174 if (empty_p (info->rd_gen))
1175 return bitmap_and_compl (er->get_bitmap (&info->rd_out),
1176 info->rd_in, info->rd_kill);
1178 return bitmap_ior_and_compl (er->get_bitmap (&info->rd_out), info->rd_gen,
1179 info->rd_in, info->rd_kill);
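
rd_confluence_n and rd_transfer are the confluence and transfer functions of a standard gen/kill reaching-definitions problem, with the extra twist that the confluence also masks the incoming set by the candidates whose registers are live on entry. A self-contained sketch of the underlying fixed-point iteration, using std::bitset in place of GCC's obstack bitmaps and omitting the liveness mask (the block structure and definition numbering are illustrative):

// Standalone sketch: forward gen/kill dataflow,
//   in[b]  = union of out[p] over predecessors p
//   out[b] = gen[b] | (in[b] & ~kill[b])
#include <bitset>
#include <cstdio>
#include <vector>

constexpr std::size_t num_defs = 8;
typedef std::bitset<num_defs> defset;

int
main ()
{
  /* Two blocks, 0 -> 1.  Block 0 generates definition 0; block 1 kills it
     (a redefinition of the same register) and generates definition 1.  */
  std::vector<std::vector<unsigned>> preds = { {}, { 0 } };
  std::vector<defset> gen (2), kill (2), in (2), out (2);
  gen[0].set (0);
  kill[1].set (0);
  gen[1].set (1);

  /* Iterate to a fixed point, as df_simple_dataflow does for the pass.  */
  bool changed = true;
  while (changed)
    {
      changed = false;
      for (unsigned b = 0; b < preds.size (); ++b)
        {
          defset new_in;
          for (unsigned p : preds[b])
            new_in |= out[p];
          defset new_out = gen[b] | (new_in & ~kill[b]);
          if (new_in != in[b] || new_out != out[b])
            {
              in[b] = new_in;
              out[b] = new_out;
              changed = true;
            }
        }
    }
  std::printf ("out[1] = %s\n", out[1].to_string ().c_str ());
}
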
1182 /* Calculate the rd_* sets for each block. */
1184 void
1185 early_remat::compute_rd (void)
1187 /* First calculate the rd_kill and rd_gen sets, using the fact
1188 that m_candidates is sorted in order of decreasing LUID. */
1189 unsigned int cand_index;
1190 remat_candidate *cand;
1191 FOR_EACH_VEC_ELT_REVERSE (m_candidates, cand_index, cand)
1193 rtx_insn *insn = cand->insn;
1194 basic_block bb = BLOCK_FOR_INSN (insn);
1195 remat_block_info *info = &m_block_info[bb->index];
1196 bitmap kill = m_regno_to_candidates[cand->regno];
1197 bitmap_ior_into (get_bitmap (&info->rd_kill), kill);
1198 if (bitmap_bit_p (DF_LR_OUT (bb), cand->regno))
1200 bitmap_and_compl_into (get_bitmap (&info->rd_gen), kill);
1201 bitmap_set_bit (info->rd_gen, cand_index);
1205 /* Set up the initial values of the other sets. */
1206 basic_block bb;
1207 FOR_EACH_BB_FN (bb, m_fn)
1209 remat_block_info *info = &m_block_info[bb->index];
1210 unsigned int regno;
1211 bitmap_iterator bi;
1212 EXECUTE_IF_AND_IN_BITMAP (DF_LR_IN (bb), &m_candidate_regnos,
1213 0, regno, bi)
1215 /* Use available_in to record the set of candidates whose
1216 registers are live on entry (i.e. a maximum bound on rd_in). */
1217 bitmap_ior_into (get_bitmap (&info->available_in),
1218 m_regno_to_candidates[regno]);
1220 /* Add registers that die in a block to the block's kill set,
1221 so that we don't needlessly propagate them through the rest
1222 of the function. */
1223 if (!bitmap_bit_p (DF_LR_OUT (bb), regno))
1224 bitmap_ior_into (get_bitmap (&info->rd_kill),
1225 m_regno_to_candidates[regno]);
1228 /* Initialize each block's rd_out to the minimal set (the set of
1229 local definitions). */
1230 if (!empty_p (info->rd_gen))
1231 bitmap_copy (get_bitmap (&info->rd_out), info->rd_gen);
1234 /* Iterate until we reach a fixed point. */
1235 er = this;
1236 bitmap_clear (&m_tmp_bitmap);
1237 bitmap_set_range (&m_tmp_bitmap, 0, last_basic_block_for_fn (m_fn));
1238 df_simple_dataflow (DF_FORWARD, NULL, NULL, rd_confluence_n, rd_transfer,
1239 &m_tmp_bitmap, df_get_postorder (DF_FORWARD),
1240 df_get_n_blocks (DF_FORWARD));
1241 er = 0;
1243 /* Work out which definitions reach which candidates, again taking
1244 advantage of the candidate order. */
1245 bitmap_head reaching;
1246 bitmap_initialize (&reaching, &m_obstack);
1247 basic_block old_bb = NULL;
1248 FOR_EACH_VEC_ELT_REVERSE (m_candidates, cand_index, cand)
1250 bb = BLOCK_FOR_INSN (cand->insn);
1251 if (bb != old_bb)
1253 /* Get the definitions that reach the start of the new block. */
1254 remat_block_info *info = &m_block_info[bb->index];
1255 if (info->rd_in)
1256 bitmap_copy (&reaching, info->rd_in);
1257 else
1258 bitmap_clear (&reaching);
1259 old_bb = bb;
1261 else
1263 /* Process the definitions of the previous instruction. */
1264 bitmap kill = m_regno_to_candidates[cand[1].regno];
1265 bitmap_and_compl_into (&reaching, kill);
1266 bitmap_set_bit (&reaching, cand_index + 1);
1269 if (cand->can_copy_p && !cand->constant_p)
1271 df_ref ref;
1272 FOR_EACH_INSN_USE (ref, cand->insn)
1274 unsigned int regno = DF_REF_REGNO (ref);
1275 if (bitmap_bit_p (&m_candidate_regnos, regno))
1277 bitmap defs = m_regno_to_candidates[regno];
1278 bitmap_and (&m_tmp_bitmap, defs, &reaching);
1279 bitmap_ior_into (get_bitmap (&cand->uses), &m_tmp_bitmap);
1284 bitmap_clear (&reaching);
1287 /* If CAND_INDEX is in an equivalence class, return the representative
1288 of the class, otherwise return CAND_INDEX. */
1290 inline unsigned int
1291 early_remat::canon_candidate (unsigned int cand_index)
1293 if (remat_equiv_class *ec = m_candidates[cand_index].equiv_class)
1294 return ec->representative;
1295 return cand_index;
1298 /* Make candidate set *PTR refer to candidates using the representative
1299 of each equivalence class. */
1301 void
1302 early_remat::canon_bitmap (bitmap *ptr)
1304 bitmap old_set = *ptr;
1305 if (empty_p (old_set))
1306 return;
1308 bitmap new_set = NULL;
1309 unsigned int old_index;
1310 bitmap_iterator bi;
1311 EXECUTE_IF_SET_IN_BITMAP (old_set, 0, old_index, bi)
1313 unsigned int new_index = canon_candidate (old_index);
1314 if (old_index != new_index)
1316 if (!new_set)
1318 new_set = alloc_bitmap ();
1319 bitmap_copy (new_set, old_set);
1321 bitmap_clear_bit (new_set, old_index);
1322 bitmap_set_bit (new_set, new_index);
1325 if (new_set)
1327 BITMAP_FREE (*ptr);
1328 *ptr = new_set;
1332 /* If the candidates in REACHING all have the same value, return the
1333 earliest instance of that value (i.e. the first one to be added
1334 to m_value_table), otherwise return MULTIPLE_CANDIDATES. */
1336 unsigned int
1337 early_remat::resolve_reaching_def (bitmap reaching)
1339 unsigned int cand_index = bitmap_first_set_bit (reaching);
1340 if (remat_equiv_class *ec = m_candidates[cand_index].equiv_class)
1342 if (!bitmap_intersect_compl_p (reaching, ec->members))
1343 return ec->earliest;
1345 else if (bitmap_single_bit_set_p (reaching))
1346 return cand_index;
1348 return MULTIPLE_CANDIDATES;
1351 /* Check whether all candidate registers used by candidate CAND_INDEX have
1352 unique definitions. Return true if so, replacing the candidate's uses
1353 set with the appropriate form for value numbering. */
1355 bool
1356 early_remat::check_candidate_uses (unsigned int cand_index)
1358 remat_candidate *cand = &m_candidates[cand_index];
1360 /* Process the uses for each register in turn. */
1361 bitmap_head uses;
1362 bitmap_initialize (&uses, &m_obstack);
1363 bitmap_copy (&uses, cand->uses);
1364 bitmap uses_ec = alloc_bitmap ();
1365 while (!bitmap_empty_p (&uses))
1367 /* Get the register for the lowest-indexed candidate remaining,
1368 and the reaching definitions of that register. */
1369 unsigned int first = bitmap_first_set_bit (&uses);
1370 unsigned int regno = m_candidates[first].regno;
1371 bitmap_and (&m_tmp_bitmap, &uses, m_regno_to_candidates[regno]);
1373 /* See whether all reaching definitions have the same value and if
1374 so get the index of the first candidate we saw with that value. */
1375 unsigned int def = resolve_reaching_def (&m_tmp_bitmap);
1376 if (def == MULTIPLE_CANDIDATES)
1378 if (dump_file)
1379 fprintf (dump_file, ";; Removing candidate %d because there is"
1380 " more than one reaching definition of reg %d\n",
1381 cand_index, regno);
1382 cand->can_copy_p = false;
1383 break;
1385 bitmap_set_bit (uses_ec, def);
1386 bitmap_and_compl_into (&uses, &m_tmp_bitmap);
1388 BITMAP_FREE (cand->uses);
1389 cand->uses = uses_ec;
1390 return cand->can_copy_p;
1393 /* Calculate the set of hard registers that would be clobbered by
1394 rematerializing candidate CAND_INDEX. At this point the candidate's
1395 set of uses is final. */
1397 void
1398 early_remat::compute_clobbers (unsigned int cand_index)
1400 remat_candidate *cand = &m_candidates[cand_index];
1401 if (cand->uses)
1403 unsigned int use_index;
1404 bitmap_iterator bi;
1405 EXECUTE_IF_SET_IN_BITMAP (cand->uses, 0, use_index, bi)
1406 if (bitmap clobbers = m_candidates[use_index].clobbers)
1407 bitmap_ior_into (get_bitmap (&cand->clobbers), clobbers);
1410 df_ref ref;
1411 FOR_EACH_INSN_DEF (ref, cand->insn)
1413 unsigned int def_regno = DF_REF_REGNO (ref);
1414 if (def_regno != cand->regno)
1415 bitmap_set_bit (get_bitmap (&cand->clobbers), def_regno);
1419 /* Mark candidate CAND_INDEX as validated and add it to the value table. */
1421 void
1422 early_remat::assign_value_number (unsigned int cand_index)
1424 remat_candidate *cand = &m_candidates[cand_index];
1425 gcc_checking_assert (cand->can_copy_p && !cand->validated_p);
1427 compute_clobbers (cand_index);
1428 cand->validated_p = true;
1430 inchash::hash h;
1431 h.add_int (cand->regno);
1432 inchash::add_rtx (cand->remat_rtx, h);
1433 cand->hash = h.end ();
1435 remat_candidate **slot
1436 = m_value_table.find_slot_with_hash (cand, cand->hash, INSERT);
1437 if (!*slot)
1439 *slot = cand;
1440 if (dump_file)
1441 fprintf (dump_file, ";; Candidate %d is not equivalent to"
1442 " others seen so far\n", cand_index);
1444 else
1445 record_equiv_candidates (*slot - m_candidates.address (), cand_index);
1448 /* Make a final decision about which candidates are valid and assign
1449 value numbers to those that are. */
1451 void
1452 early_remat::decide_candidate_validity (void)
1454 auto_vec<unsigned int, 16> stack;
1455 unsigned int cand1_index;
1456 remat_candidate *cand1;
1457 FOR_EACH_VEC_ELT_REVERSE (m_candidates, cand1_index, cand1)
1459 if (!cand1->can_copy_p || cand1->validated_p)
1460 continue;
1462 if (empty_p (cand1->uses))
1464 assign_value_number (cand1_index);
1465 continue;
1468 stack.safe_push (cand1_index);
1469 while (!stack.is_empty ())
1471 unsigned int cand2_index = stack.last ();
1472 unsigned int watermark = stack.length ();
1473 remat_candidate *cand2 = &m_candidates[cand2_index];
1474 if (!cand2->can_copy_p || cand2->validated_p)
1476 stack.pop ();
1477 continue;
1479 cand2->visited_p = true;
1480 unsigned int cand3_index;
1481 bitmap_iterator bi;
1482 EXECUTE_IF_SET_IN_BITMAP (cand2->uses, 0, cand3_index, bi)
1484 remat_candidate *cand3 = &m_candidates[cand3_index];
1485 if (!cand3->can_copy_p)
1487 if (dump_file)
1488 fprintf (dump_file, ";; Removing candidate %d because"
1489 " it uses removed candidate %d\n", cand2_index,
1490 cand3_index);
1491 cand2->can_copy_p = false;
1492 break;
1494 if (!cand3->validated_p)
1496 if (empty_p (cand3->uses))
1497 assign_value_number (cand3_index);
1498 else if (cand3->visited_p)
1500 if (dump_file)
1501 fprintf (dump_file, ";; Removing candidate %d"
1502 " because its definition is cyclic\n",
1503 cand2_index);
1504 cand2->can_copy_p = false;
1505 break;
1507 else
1508 stack.safe_push (cand3_index);
1511 if (!cand2->can_copy_p)
1513 cand2->visited_p = false;
1514 stack.truncate (watermark - 1);
1516 else if (watermark == stack.length ())
1518 cand2->visited_p = false;
1519 if (check_candidate_uses (cand2_index))
1520 assign_value_number (cand2_index);
1521 stack.pop ();
1526 /* Ensure that the candidates always use the same candidate index
1527 to refer to an equivalence class. */
1528 FOR_EACH_VEC_ELT_REVERSE (m_candidates, cand1_index, cand1)
1529 if (cand1->can_copy_p && !empty_p (cand1->uses))
1531 canon_bitmap (&cand1->uses);
1532 gcc_checking_assert (bitmap_first_set_bit (cand1->uses) > cand1_index);
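
decide_candidate_validity walks the candidate use graph with an explicit stack, and a candidate whose uses lead back to a candidate that is still being visited is treated as cyclic and dropped. The cycle detection on its own looks like the following self-contained sketch (node ids and the `uses` map are illustrative; the pass additionally interleaves value numbering and invalidation, which is omitted here):

// Standalone sketch: walk "uses" edges with an explicit stack and report
// whether we ever revisit a node that is still in progress (a cycle).
#include <cstdio>
#include <map>
#include <utility>
#include <vector>

static bool
uses_are_cyclic (const std::map<unsigned, std::vector<unsigned>> &uses,
                 unsigned root)
{
  enum state { unseen, in_progress, done };
  std::map<unsigned, state> states;
  std::vector<std::pair<unsigned, unsigned>> stack = { { root, 0 } };
  states[root] = in_progress;
  while (!stack.empty ())
    {
      auto &[node, next_edge] = stack.back ();
      auto it = uses.find (node);
      unsigned num_edges = it == uses.end () ? 0 : it->second.size ();
      if (next_edge == num_edges)
        {
          /* All uses processed; the node's definition is acyclic.  */
          states[node] = done;
          stack.pop_back ();
          continue;
        }
      unsigned target = it->second[next_edge++];
      state &s = states[target];        /* defaults to unseen */
      if (s == in_progress)
        return true;                    /* back edge: cyclic definition */
      if (s == unseen)
        {
          s = in_progress;
          stack.emplace_back (target, 0);
        }
    }
  return false;
}

int
main ()
{
  /* Candidate 1 uses candidate 2, which in turn uses candidate 1.  */
  std::map<unsigned, std::vector<unsigned>> uses = { { 1, { 2 } }, { 2, { 1 } } };
  std::printf ("cyclic: %d\n", uses_are_cyclic (uses, 1));   /* prints 1 */
}
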
1536 /* Assuming that every path reaching a point P contains a copy of a
1537 use U of REGNO, return true if another copy of U at P would have
1538 access to the same value of REGNO. */
1540 bool
1541 early_remat::stable_use_p (unsigned int regno)
1543 /* Conservatively assume not for hard registers. */
1544 if (HARD_REGISTER_NUM_P (regno))
1545 return false;
1547 /* See if REGNO has a single definition and is never used uninitialized.
1548 In this case the definition of REGNO dominates the common dominator
1549 of the uses U, which in turn dominates P. */
1550 if (DF_REG_DEF_COUNT (regno) == 1
1551 && !bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (m_fn)), regno))
1552 return true;
1554 return false;
 1557 /* Emit a copy from register SRC to register DEST before candidate
1558 CAND_INDEX's instruction. */
1560 void
1561 early_remat::emit_copy_before (unsigned int cand_index, rtx dest, rtx src)
1563 remat_candidate *cand = &m_candidates[cand_index];
1564 if (dump_file)
1566 fprintf (dump_file, ";; Stabilizing insn ");
1567 dump_insn_id (cand->insn);
1568 fprintf (dump_file, " by copying source reg %d:%s to temporary reg %d\n",
1569 REGNO (src), GET_MODE_NAME (GET_MODE (src)), REGNO (dest));
1571 emit_insn_before (gen_move_insn (dest, src), cand->insn);
1574 /* Check whether any inputs to candidate CAND_INDEX's instruction could
1575 change at rematerialization points and replace them with new pseudo
1576 registers if so. */
1578 void
1579 early_remat::stabilize_pattern (unsigned int cand_index)
1581 remat_candidate *cand = &m_candidates[cand_index];
1582 if (cand->stabilized_p)
1583 return;
1585 remat_equiv_class *ec = cand->equiv_class;
1586 gcc_checking_assert (!ec || cand_index == ec->representative);
1588 /* Record the replacements we've made so far, so that we don't
1589 create two separate registers for match_dups. Lookup is O(n),
1590 but the n is very small. */
1591 typedef std::pair<rtx, rtx> reg_pair;
1592 auto_vec<reg_pair, 16> reg_map;
1594 rtx_insn *insn = cand->insn;
1595 df_ref ref;
1596 FOR_EACH_INSN_USE (ref, insn)
1598 unsigned int old_regno = DF_REF_REGNO (ref);
1599 rtx *loc = DF_REF_REAL_LOC (ref);
1601 if (HARD_REGISTER_NUM_P (old_regno) && fixed_regs[old_regno])
1603 /* We checked when adding the candidate that the value is stable. */
1604 gcc_checking_assert (!rtx_unstable_p (*loc));
1605 continue;
1608 if (bitmap_bit_p (&m_candidate_regnos, old_regno))
1609 /* We already know which candidate provides the definition
1610 and will handle it during copying. */
1611 continue;
1613 if (stable_use_p (old_regno))
1614 /* We can continue to use the existing register. */
1615 continue;
1617 /* We need to replace the register. See whether we've already
1618 created a suitable copy. */
1619 rtx old_reg = *loc;
1620 rtx new_reg = NULL_RTX;
1621 machine_mode mode = GET_MODE (old_reg);
1622 reg_pair *p;
1623 unsigned int pi;
1624 FOR_EACH_VEC_ELT (reg_map, pi, p)
1625 if (REGNO (p->first) == old_regno
1626 && GET_MODE (p->first) == mode)
1628 new_reg = p->second;
1629 break;
1632 if (!new_reg)
1634 /* Create a new register and initialize it just before
1635 the instruction. */
1636 new_reg = gen_reg_rtx (mode);
1637 reg_map.safe_push (reg_pair (old_reg, new_reg));
1638 if (ec)
1640 unsigned int member_index;
1641 bitmap_iterator bi;
1642 EXECUTE_IF_SET_IN_BITMAP (ec->members, 0, member_index, bi)
1643 emit_copy_before (member_index, new_reg, old_reg);
1645 else
1646 emit_copy_before (cand_index, new_reg, old_reg);
1648 validate_change (insn, loc, new_reg, true);
1650 if (num_changes_pending ())
1652 if (!apply_change_group ())
1653 /* We checked when adding the candidates that the pattern allows
1654 hard registers to be replaced. Nothing else should make the
1655 changes invalid. */
1656 gcc_unreachable ();
1658 if (ec)
1660 /* Copy the new pattern to other members of the equivalence
1661 class. */
1662 unsigned int member_index;
1663 bitmap_iterator bi;
1664 EXECUTE_IF_SET_IN_BITMAP (ec->members, 0, member_index, bi)
1665 if (cand_index != member_index)
1667 rtx_insn *other_insn = m_candidates[member_index].insn;
1668 if (!validate_change (other_insn, &PATTERN (other_insn),
1669 copy_insn (PATTERN (insn)), 0))
1670 /* If the original instruction was valid then the copy
1671 should be too. */
1672 gcc_unreachable ();
1677 cand->stabilized_p = true;
1680 /* Change CAND's instruction so that it sets CAND->copy_regno instead
1681 of CAND->regno. */
1683 void
1684 early_remat::replace_dest_with_copy (unsigned int cand_index)
1686 remat_candidate *cand = &m_candidates[cand_index];
1687 df_ref def;
1688 FOR_EACH_INSN_DEF (def, cand->insn)
1689 if (DF_REF_REGNO (def) == cand->regno)
1690 validate_change (cand->insn, DF_REF_REAL_LOC (def),
1691 regno_reg_rtx[cand->copy_regno], 1);
1694 /* Make sure that the candidates used by candidate CAND_INDEX are available.
1695 There are two ways of doing this for an input candidate I:
1697 (1) Using the existing register number and ensuring that I is available.
1699 (2) Using a new register number (recorded in copy_regno) and adding I
1700 to VIA_COPY. This guarantees that making I available does not
1701 conflict with other uses of the original register.
1703 REQUIRED is the set of candidates that are required but not available
1704 before the copy of CAND_INDEX. AVAILABLE is the set of candidates
1705 that are already available before the copy of CAND_INDEX. REACHING
1706 is the set of candidates that reach the copy of CAND_INDEX. VIA_COPY
1707 is the set of candidates that will use new register numbers recorded
1708 in copy_regno instead of the original ones. */
1710 void
1711 early_remat::stabilize_candidate_uses (unsigned int cand_index,
1712 bitmap required, bitmap available,
1713 bitmap reaching, bitmap via_copy)
1715 remat_candidate *cand = &m_candidates[cand_index];
1716 df_ref use;
1717 FOR_EACH_INSN_USE (use, cand->insn)
1719 unsigned int regno = DF_REF_REGNO (use);
1720 if (!bitmap_bit_p (&m_candidate_regnos, regno))
1721 continue;
1723 /* Work out which candidate provides the definition. */
1724 bitmap defs = m_regno_to_candidates[regno];
1725 bitmap_and (&m_tmp_bitmap, cand->uses, defs);
1726 gcc_checking_assert (bitmap_single_bit_set_p (&m_tmp_bitmap));
1727 unsigned int def_index = bitmap_first_set_bit (&m_tmp_bitmap);
1729 /* First see if DEF_INDEX is the only reaching definition of REGNO
1730 at this point too and if it is or will become available. We can
1731 continue to use REGNO if so. */
1732 bitmap_and (&m_tmp_bitmap, reaching, defs);
1733 if (bitmap_single_bit_set_p (&m_tmp_bitmap)
1734 && bitmap_first_set_bit (&m_tmp_bitmap) == def_index
1735 && ((available && bitmap_bit_p (available, def_index))
1736 || bitmap_bit_p (required, def_index)))
1738 if (dump_file)
1739 fprintf (dump_file, ";; Keeping reg %d for use of candidate %d"
1740 " in candidate %d\n", regno, def_index, cand_index);
1741 continue;
1744 /* Otherwise fall back to using a copy. There are other cases
1745 in which we *could* continue to use REGNO, but there's not
1746 really much point. Using a separate register ought to make
1747 things easier for the register allocator. */
1748 remat_candidate *def_cand = &m_candidates[def_index];
1749 rtx *loc = DF_REF_REAL_LOC (use);
1750 rtx new_reg;
1751 if (bitmap_set_bit (via_copy, def_index))
1753 new_reg = gen_reg_rtx (GET_MODE (*loc));
1754 def_cand->copy_regno = REGNO (new_reg);
1755 if (dump_file)
1756 fprintf (dump_file, ";; Creating reg %d for use of candidate %d"
1757 " in candidate %d\n", REGNO (new_reg), def_index,
1758 cand_index);
1760 else
1761 new_reg = regno_reg_rtx[def_cand->copy_regno];
1762 validate_change (cand->insn, loc, new_reg, 1);
1766 /* Rematerialize the candidates in REQUIRED after instruction INSN,
1767 given that the candidates in AVAILABLE are already available
1768 and that REACHING is the set of candidates live after INSN.
1769 REQUIRED and AVAILABLE are disjoint on entry.
1771 Clear REQUIRED on exit. */

void
early_remat::emit_remat_insns (bitmap required, bitmap available,
                               bitmap reaching, rtx_insn *insn)
{
  /* Quick exit if there's nothing to do.  */
  if (empty_p (required))
    return;

  /* Only reaching definitions should be available or required.  */
  gcc_checking_assert (!bitmap_intersect_compl_p (required, reaching));
  if (available)
    gcc_checking_assert (!bitmap_intersect_compl_p (available, reaching));

  bitmap_head via_copy;
  bitmap_initialize (&via_copy, &m_obstack);
  while (!bitmap_empty_p (required) || !bitmap_empty_p (&via_copy))
    {
      /* Pick the lowest-indexed candidate left.  */
      unsigned int required_index = (bitmap_empty_p (required)
                                     ? ~0U : bitmap_first_set_bit (required));
      unsigned int via_copy_index = (bitmap_empty_p (&via_copy)
                                     ? ~0U : bitmap_first_set_bit (&via_copy));
      unsigned int cand_index = MIN (required_index, via_copy_index);
      remat_candidate *cand = &m_candidates[cand_index];

      bool via_copy_p = (cand_index == via_copy_index);
      if (via_copy_p)
        bitmap_clear_bit (&via_copy, cand_index);
      else
        {
          /* Remove all candidates for the same register from REQUIRED.  */
          bitmap_and (&m_tmp_bitmap, reaching,
                      m_regno_to_candidates[cand->regno]);
          bitmap_and_compl_into (required, &m_tmp_bitmap);
          gcc_checking_assert (!bitmap_bit_p (required, cand_index));

          /* Only rematerialize if we have a single reaching definition
             of the register.  */
          if (!bitmap_single_bit_set_p (&m_tmp_bitmap))
            {
              if (dump_file)
                {
                  fprintf (dump_file, ";; Can't rematerialize reg %d after ",
                           cand->regno);
                  dump_insn_id (insn);
                  fprintf (dump_file, ": more than one reaching definition\n");
                }
              continue;
            }

          /* Skip candidates that can't be rematerialized.  */
          if (!cand->can_copy_p)
            continue;

          /* Check the function precondition.  */
          gcc_checking_assert (!available
                               || !bitmap_bit_p (available, cand_index));
        }

      /* Invalid candidates should have been weeded out by now.  */
      gcc_assert (cand->can_copy_p);

      rtx new_pattern;
      if (cand->constant_p)
        {
          /* Emit a simple move.  */
          unsigned int regno = via_copy_p ? cand->copy_regno : cand->regno;
          new_pattern = gen_move_insn (regno_reg_rtx[regno], cand->remat_rtx);
        }
      else
        {
          /* If this is the first time we've copied the instruction, make
             sure that any inputs will have the same value after INSN.  */
          stabilize_pattern (cand_index);

          /* Temporarily adjust the original instruction so that it has
             the right form for the copy.  */
          if (via_copy_p)
            replace_dest_with_copy (cand_index);
          if (cand->uses)
            stabilize_candidate_uses (cand_index, required, available,
                                      reaching, &via_copy);

          /* Get the new instruction pattern.  */
          new_pattern = copy_insn (cand->remat_rtx);

          /* Undo the temporary changes.  */
          cancel_changes (0);
        }

      /* Emit the new instruction.  */
      rtx_insn *new_insn = emit_insn_after (new_pattern, insn);

      if (dump_file)
        {
          fprintf (dump_file, ";; Rematerializing candidate %d after ",
                   cand_index);
          dump_insn_id (insn);
          if (via_copy_p)
            fprintf (dump_file, " with new destination reg %d",
                     cand->copy_regno);
          fprintf (dump_file, ":\n\n");
          print_rtl_single (dump_file, new_insn);
          fprintf (dump_file, "\n");
        }
    }
}

/* Recompute INFO's available_out set, given that it's distinct from
   available_in and available_locally.  */
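
/* The update below is equivalent to:

     available_out = available_locally | (available_in & ~rd_kill)

   where the three cases simply avoid redundant bitmap work when one of
   the operands is known to be empty.  */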

bool
early_remat::set_available_out (remat_block_info *info)
{
  if (empty_p (info->available_locally))
    return bitmap_and_compl (get_bitmap (&info->available_out),
                             info->available_in, info->rd_kill);

  if (empty_p (info->rd_kill))
    return bitmap_ior (get_bitmap (&info->available_out),
                       info->available_locally, info->available_in);

  return bitmap_ior_and_compl (get_bitmap (&info->available_out),
                               info->available_locally, info->available_in,
                               info->rd_kill);
}

/* If BB has more than one call, decide which candidates should be
   rematerialized after the non-final calls and emit the associated
   instructions.  Record other information about the block in preparation
   for the global phase.  */

void
early_remat::process_block (basic_block bb)
{
  remat_block_info *info = &m_block_info[bb->index];
  rtx_insn *last_call = NULL;
  rtx_insn *insn;

  /* Ensure that we always use the same candidate index to refer to an
     equivalence class.  */
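  /* rd_in and rd_out may share a single bitmap; in that case canonicalize
     it only once so that the two sets continue to share it.  */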
  if (info->rd_out == info->rd_in)
    {
      canon_bitmap (&info->rd_in);
      info->rd_out = info->rd_in;
    }
  else
    {
      canon_bitmap (&info->rd_in);
      canon_bitmap (&info->rd_out);
    }
  canon_bitmap (&info->rd_kill);
  canon_bitmap (&info->rd_gen);

  /* The set of candidates that should be rematerialized on entry to the
     block or after the previous call (whichever is more recent).  */
  init_temp_bitmap (&m_required);

  /* The set of candidates that reach the current instruction (i.e. are
     live just before the instruction).  */
  bitmap_head reaching;
  bitmap_initialize (&reaching, &m_obstack);
  if (info->rd_in)
    bitmap_copy (&reaching, info->rd_in);

  /* The set of candidates that are live and available without
     rematerialization just before the current instruction.  This only
     accounts for earlier candidates in the block, or those that become
     available by being added to M_REQUIRED.  */
  init_temp_bitmap (&m_available);

  /* Get the range of candidates in the block.  */
  unsigned int next_candidate = info->first_candidate;
  unsigned int num_candidates = info->num_candidates;
  remat_candidate *next_def = (num_candidates > 0
                               ? &m_candidates[next_candidate]
                               : NULL);

  FOR_BB_INSNS (bb, insn)
    {
      if (!NONDEBUG_INSN_P (insn))
        continue;

      /* First process uses, since this is a forward walk.  */
      df_ref ref;
      FOR_EACH_INSN_USE (ref, insn)
        {
          unsigned int regno = DF_REF_REGNO (ref);
          if (bitmap_bit_p (&m_candidate_regnos, regno))
            {
              bitmap defs = m_regno_to_candidates[regno];
              bitmap_and (&m_tmp_bitmap, defs, &reaching);
              gcc_checking_assert (!bitmap_empty_p (&m_tmp_bitmap));
              if (!bitmap_intersect_p (defs, m_available))
                {
                  /* There has been no definition of the register since
                     the last call or the start of the block (whichever
                     is most recent).  Mark the reaching definitions
                     as required at that point and thus available here.  */
                  bitmap_ior_into (m_required, &m_tmp_bitmap);
                  bitmap_ior_into (m_available, &m_tmp_bitmap);
                }
            }
        }

      if (CALL_P (insn))
        {
          if (!last_call)
            {
              /* The first call in the block.  Record which candidates are
                 required at the start of the block.  */
              copy_temp_bitmap (&info->required_in, &m_required);
              init_temp_bitmap (&m_required);
            }
          else
            /* The fully-local case: candidates that need to be
               rematerialized after a previous call in the block.  */
            emit_remat_insns (m_required, NULL, info->rd_after_call,
                              last_call);
          last_call = insn;
          bitmap_clear (m_available);
          gcc_checking_assert (empty_p (m_required));
        }

      /* Now process definitions.  */
      if (next_def && insn == next_def->insn)
        {
          unsigned int gen = canon_candidate (next_candidate);

          /* Other candidates with the same regno are not available
             any more.  */
          bitmap kill = m_regno_to_candidates[next_def->regno];
          bitmap_and_compl_into (m_available, kill);
          bitmap_and_compl_into (&reaching, kill);

          /* Record that this candidate is available without
             rematerialization.  */
          bitmap_set_bit (m_available, gen);
          bitmap_set_bit (&reaching, gen);

          /* Find the next candidate in the block.  */
          num_candidates -= 1;
          next_candidate -= 1;
          if (num_candidates > 0)
            next_def -= 1;
          else
            next_def = NULL;
        }

      if (insn == last_call)
        bitmap_copy (get_bitmap (&info->rd_after_call), &reaching);
    }
  bitmap_clear (&reaching);
  gcc_checking_assert (num_candidates == 0);

  /* Remove values from the available set if they aren't live (and so
     aren't interesting to successor blocks).  */
  if (info->rd_out)
    bitmap_and_into (m_available, info->rd_out);

  /* Record the accumulated information.  */
  info->last_call = last_call;
  info->abnormal_call_p = (last_call
                           && last_call == BB_END (bb)
                           && has_abnormal_or_eh_outgoing_edge_p (bb));
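  /* can_move_across_edge_p later refuses to move rematerializations into
     a block whose abnormal_call_p is set, since there is no place to emit
     them after such a call.  */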
  copy_temp_bitmap (&info->available_locally, &m_available);
  if (last_call)
    copy_temp_bitmap (&info->required_after_call, &m_required);
  else
    copy_temp_bitmap (&info->required_in, &m_required);

  /* Assume at first that all live-in values are available without
     rematerialization (i.e. start with the most optimistic assumption).  */
  if (info->available_in)
    {
      if (info->rd_in)
        bitmap_copy (info->available_in, info->rd_in);
      else
        BITMAP_FREE (info->available_in);
    }

  if (last_call || empty_p (info->available_in))
    /* The values available on exit from the block are exactly those that
       are available locally.  This set doesn't change.  */
    info->available_out = info->available_locally;
  else if (empty_p (info->available_locally) && empty_p (info->rd_kill))
    /* The values available on exit are the same as those available on entry.
       Updating one updates the other.  */
    info->available_out = info->available_in;
  else
    set_available_out (info);
}

/* Process each block as for process_block, visiting dominators before
   the blocks they dominate.  */

void
early_remat::local_phase (void)
{
  if (dump_file)
    fprintf (dump_file, "\n;; Local phase:\n");

  int *postorder = df_get_postorder (DF_BACKWARD);
  unsigned int postorder_len = df_get_n_blocks (DF_BACKWARD);
  for (unsigned int i = postorder_len; i-- > 0; )
    if (postorder[i] >= NUM_FIXED_BLOCKS)
      process_block (BASIC_BLOCK_FOR_FN (m_fn, postorder[i]));
}

/* Return true if available values survive across edge E.  */

static inline bool
available_across_edge_p (edge e)
{
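  /* Rematerializations are emitted after a call, so they are not executed
     when the call throws; treat values as unavailable across EH edges.  */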
  return (e->flags & EDGE_EH) == 0;
}

/* Propagate information from the available_out set of E->src to the
   available_in set of E->dest, when computing global availability.
   Return true if something changed.  */

bool
early_remat::avail_confluence_n (edge e)
{
  remat_block_info *src = &er->m_block_info[e->src->index];
  remat_block_info *dest = &er->m_block_info[e->dest->index];

  if (!available_across_edge_p (e))
    return false;

  if (empty_p (dest->available_in))
    return false;

  if (!src->available_out)
    {
      bitmap_clear (dest->available_in);
      return true;
    }

  return bitmap_and_into (dest->available_in, src->available_out);
}

/* Propagate information from the available_in set of block BB_INDEX
   to available_out.  Return true if something changed.  */

bool
early_remat::avail_transfer (int bb_index)
{
  remat_block_info *info = &er->m_block_info[bb_index];

  if (info->available_out == info->available_locally)
    return false;

  if (info->available_out == info->available_in)
    /* Assume that we are only called if the input changed.  */
    return true;

  return er->set_available_out (info);
}

/* Compute global availability for the function, starting with the local
   information computed by local_phase.  */

void
early_remat::compute_availability (void)
{
  /* We use df_simple_dataflow instead of the lcm routines for three reasons:

     (1) it avoids recomputing the traversal order;
     (2) many of the sets are likely to be sparse, so we don't necessarily
         want to use sbitmaps; and
     (3) it means we can avoid creating an explicit kill set for the call.  */
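
  /* This sets up a forward dataflow problem in which the confluence
     function intersects the predecessors' available_out sets (ignoring
     EH edges) and the transfer function applies set_available_out to
     each block.  */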
  er = this;
  bitmap_clear (&m_tmp_bitmap);
  bitmap_set_range (&m_tmp_bitmap, 0, last_basic_block_for_fn (m_fn));
  df_simple_dataflow (DF_FORWARD, NULL, NULL,
                      avail_confluence_n, avail_transfer,
                      &m_tmp_bitmap, df_get_postorder (DF_FORWARD),
                      df_get_n_blocks (DF_FORWARD));
  er = 0;

  /* Restrict the required_in sets to values that aren't available.  */
  basic_block bb;
  FOR_EACH_BB_FN (bb, m_fn)
    {
      remat_block_info *info = &m_block_info[bb->index];
      if (info->required_in && info->available_in)
        bitmap_and_compl_into (info->required_in, info->available_in);
    }
}

/* Make sure that INFO's available_out and available_in sets are unique.  */

inline void
early_remat::unshare_available_sets (remat_block_info *info)
{
  if (info->available_in && info->available_in == info->available_out)
    {
      info->available_in = alloc_bitmap ();
      bitmap_copy (info->available_in, info->available_out);
    }
}

/* Return true if it is possible to move rematerializations from the
   destination of E to the source of E.  */

inline bool
early_remat::can_move_across_edge_p (edge e)
{
  return (available_across_edge_p (e)
          && !m_block_info[e->src->index].abnormal_call_p);
}

/* Return true if it is cheaper to rematerialize values at the head of
   block QUERY_BB_INDEX instead of rematerializing in its predecessors.  */

bool
early_remat::local_remat_cheaper_p (unsigned int query_bb_index)
{
  if (m_block_info[query_bb_index].remat_frequency_valid_p)
    return m_block_info[query_bb_index].local_remat_cheaper_p;

  /* Iteratively compute the cost of rematerializing values in the
     predecessor blocks, then compare that with the cost of
     rematerializing at the head of the block.

     A cycle indicates that there is no call on that execution path,
     so it isn't necessary to rematerialize on that path.  */
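
  /* In effect, for each block B that still needs values on entry,
     this computes:

       remat_frequency (B)
         = min (freq (B),
                sum over preds P of (freq (P) if P ends in a call,
                                     otherwise remat_frequency (P)))

     and local_remat_cheaper_p (B) records whether the first operand of
     the min was chosen (ties prefer B itself).  If some predecessor edge
     cannot be used, the block is forced to rematerialize locally.  */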
  auto_vec<basic_block, 16> stack;
  stack.quick_push (BASIC_BLOCK_FOR_FN (m_fn, query_bb_index));
  while (!stack.is_empty ())
    {
      basic_block bb = stack.last ();
      remat_block_info *info = &m_block_info[bb->index];
      if (info->remat_frequency_valid_p)
        {
          stack.pop ();
          continue;
        }

      info->visited_p = true;
      int frequency = 0;
      bool can_move_p = true;
      edge e;
      edge_iterator ei;
      FOR_EACH_EDGE (e, ei, bb->preds)
        if (!can_move_across_edge_p (e))
          {
            can_move_p = false;
            break;
          }
        else if (m_block_info[e->src->index].last_call)
          /* We'll rematerialize after the call.  */
          frequency += e->src->count.to_frequency (m_fn);
        else if (m_block_info[e->src->index].remat_frequency_valid_p)
          /* Add the cost of rematerializing at the head of E->src
             or in its predecessors (whichever is cheaper).  */
          frequency += m_block_info[e->src->index].remat_frequency;
        else if (!m_block_info[e->src->index].visited_p)
          /* Queue E->src and then revisit this block again.  */
          stack.safe_push (e->src);

      /* Come back to this block later if we need to process some of
         its predecessors.  */
      if (stack.last () != bb)
        continue;

      /* If rematerializing in and before the block have equal cost, prefer
         rematerializing in the block.  This should shorten the live range.  */
      int bb_frequency = bb->count.to_frequency (m_fn);
      if (!can_move_p || frequency >= bb_frequency)
        {
          info->local_remat_cheaper_p = true;
          info->remat_frequency = bb_frequency;
        }
      else
        info->remat_frequency = frequency;
      info->remat_frequency_valid_p = true;
      info->visited_p = false;
      if (dump_file)
        {
          if (!can_move_p)
            fprintf (dump_file, ";; Need to rematerialize at the head of"
                     " block %d; cannot move to predecessors.\n", bb->index);
          else
            {
              fprintf (dump_file, ";; Block %d has frequency %d,"
                       " rematerializing in predecessors has frequency %d",
                       bb->index, bb_frequency, frequency);
              if (info->local_remat_cheaper_p)
                fprintf (dump_file, "; prefer to rematerialize"
                         " in the block\n");
              else
                fprintf (dump_file, "; prefer to rematerialize"
                         " in predecessors\n");
            }
        }
      stack.pop ();
    }
  return m_block_info[query_bb_index].local_remat_cheaper_p;
}

/* Return true if we cannot rematerialize candidate CAND_INDEX at the head of
   block BB_INDEX.  */

bool
early_remat::need_to_move_candidate_p (unsigned int bb_index,
                                       unsigned int cand_index)
{
  remat_block_info *info = &m_block_info[bb_index];
  remat_candidate *cand = &m_candidates[cand_index];
  basic_block bb = BASIC_BLOCK_FOR_FN (m_fn, bb_index);

  /* If there is more than one reaching definition of REGNO,
     we'll need to rematerialize in predecessors instead.  */
  bitmap_and (&m_tmp_bitmap, info->rd_in, m_regno_to_candidates[cand->regno]);
  if (!bitmap_single_bit_set_p (&m_tmp_bitmap))
    {
      if (dump_file)
        fprintf (dump_file, ";; Cannot rematerialize %d at the"
                 " head of block %d because there is more than one"
                 " reaching definition of reg %d\n", cand_index,
                 bb_index, cand->regno);
      return true;
    }

  /* Likewise if rematerializing CAND here would clobber a live register.  */
  if (cand->clobbers
      && bitmap_intersect_p (cand->clobbers, DF_LR_IN (bb)))
    {
      if (dump_file)
        fprintf (dump_file, ";; Cannot rematerialize %d at the"
                 " head of block %d because it would clobber live"
                 " registers\n", cand_index, bb_index);
      return true;
    }

  return false;
}

/* Set REQUIRED to the minimum set of candidates that must be rematerialized
   in predecessors of block BB_INDEX instead of at the start of the block.  */

void
early_remat::compute_minimum_move_set (unsigned int bb_index,
                                       bitmap required)
{
  remat_block_info *info = &m_block_info[bb_index];
  bitmap_head remaining;

  bitmap_clear (required);
  bitmap_initialize (&remaining, &m_obstack);
  bitmap_copy (&remaining, info->required_in);
  while (!bitmap_empty_p (&remaining))
    {
      unsigned int cand_index = bitmap_first_set_bit (&remaining);
      remat_candidate *cand = &m_candidates[cand_index];
      bitmap_clear_bit (&remaining, cand_index);

      /* Leave invalid candidates where they are.  */
      if (!cand->can_copy_p)
        continue;

      /* Decide whether to move this candidate.  */
      if (!bitmap_bit_p (required, cand_index))
        {
          if (!need_to_move_candidate_p (bb_index, cand_index))
            continue;
          bitmap_set_bit (required, cand_index);
        }

      /* Also move values used by the candidate, so that we don't
         rematerialize them twice.  */
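      /* bitmap_ior_and_into (A, B, C) computes A |= (B & C), so only uses
         that are themselves required on entry to the block are added to
         the move set and to the worklist.  */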
      if (cand->uses)
        {
          bitmap_ior_and_into (required, cand->uses, info->required_in);
          bitmap_ior_and_into (&remaining, cand->uses, info->required_in);
        }
    }
}

/* Make the predecessors of BB_INDEX rematerialize the candidates in
   REQUIRED.  Add any blocks whose required_in set changes to
   PENDING_BLOCKS.  */

void
early_remat::move_to_predecessors (unsigned int bb_index, bitmap required,
                                   bitmap pending_blocks)
{
  if (empty_p (required))
    return;
  remat_block_info *dest_info = &m_block_info[bb_index];
  basic_block bb = BASIC_BLOCK_FOR_FN (m_fn, bb_index);
  edge e;
  edge_iterator ei;
  FOR_EACH_EDGE (e, ei, bb->preds)
    {
      remat_block_info *src_info = &m_block_info[e->src->index];

      /* Restrict the set we add to the reaching definitions.  */
      bitmap_and (&m_tmp_bitmap, required, src_info->rd_out);
      if (bitmap_empty_p (&m_tmp_bitmap))
        continue;

      if (!can_move_across_edge_p (e))
        {
          /* We can't move the rematerialization and we can't do it at
             the start of the block either.  In this case we just give up
             and rely on spilling to make the values available across E.  */
          if (dump_file)
            {
              fprintf (dump_file, ";; Cannot rematerialize the following"
                       " candidates in block %d:", e->src->index);
              dump_candidate_bitmap (required);
              fprintf (dump_file, "\n");
            }
          continue;
        }

      /* Remove candidates that are already available.  */
      if (src_info->available_out)
        {
          bitmap_and_compl_into (&m_tmp_bitmap, src_info->available_out);
          if (bitmap_empty_p (&m_tmp_bitmap))
            continue;
        }

      /* Add the remaining candidates to the appropriate required set.  */
      if (dump_file)
        {
          fprintf (dump_file, ";; Moving this set from block %d"
                   " to block %d:", bb_index, e->src->index);
          dump_candidate_bitmap (&m_tmp_bitmap);
          fprintf (dump_file, "\n");
        }
      /* If the source block contains a call, we want to rematerialize
         after the call, otherwise we want to rematerialize at the start
         of the block.  */
      bitmap src_required = get_bitmap (src_info->last_call
                                        ? &src_info->required_after_call
                                        : &src_info->required_in);
      if (bitmap_ior_into (src_required, &m_tmp_bitmap))
        {
          if (!src_info->last_call)
            bitmap_set_bit (pending_blocks, e->src->index);
          unshare_available_sets (src_info);
          bitmap_ior_into (get_bitmap (&src_info->available_out),
                           &m_tmp_bitmap);
        }
    }

  /* The candidates are now available on entry to the block.  */
  bitmap_and_compl_into (dest_info->required_in, required);
  unshare_available_sets (dest_info);
  bitmap_ior_into (get_bitmap (&dest_info->available_in), required);
}

/* Go through the candidates that are currently marked as being
   rematerialized at the beginning of a block.  Decide in each case
   whether that's valid and profitable; if it isn't, move the
   rematerialization to predecessor blocks instead.  */

void
early_remat::choose_rematerialization_points (void)
{
  bitmap_head required;
  bitmap_head pending_blocks;

  int *postorder = df_get_postorder (DF_BACKWARD);
  unsigned int postorder_len = df_get_n_blocks (DF_BACKWARD);
  bitmap_initialize (&required, &m_obstack);
  bitmap_initialize (&pending_blocks, &m_obstack);
  do
    /* Process the blocks in postorder, to reduce the number of iterations
       of the outer loop.  */
    for (unsigned int i = 0; i < postorder_len; ++i)
      {
        unsigned int bb_index = postorder[i];
        remat_block_info *info = &m_block_info[bb_index];
        bitmap_clear_bit (&pending_blocks, bb_index);

        if (empty_p (info->required_in))
          continue;

        if (info->available_in)
          gcc_checking_assert (!bitmap_intersect_p (info->required_in,
                                                    info->available_in));

        if (local_remat_cheaper_p (bb_index))
          {
            /* We'd prefer to rematerialize at the head of the block.
               Only move candidates if we need to.  */
            compute_minimum_move_set (bb_index, &required);
            move_to_predecessors (bb_index, &required, &pending_blocks);
          }
        else
          move_to_predecessors (bb_index, info->required_in,
                                &pending_blocks);
      }
  while (!bitmap_empty_p (&pending_blocks));
  bitmap_clear (&required);
}

/* Emit all rematerialization instructions queued for BB.  */

void
early_remat::emit_remat_insns_for_block (basic_block bb)
{
  remat_block_info *info = &m_block_info[bb->index];

  if (info->last_call && !empty_p (info->required_after_call))
    emit_remat_insns (info->required_after_call, NULL,
                      info->rd_after_call, info->last_call);

  if (!empty_p (info->required_in))
    {
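      /* Skip the notes and labels at the head of the block so that the
         rematerializations go immediately before the block's first real
         instruction (if any).  */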
      rtx_insn *insn = BB_HEAD (bb);
      while (insn != BB_END (bb)
             && !INSN_P (NEXT_INSN (insn)))
        insn = NEXT_INSN (insn);
      emit_remat_insns (info->required_in, info->available_in,
                        info->rd_in, insn);
    }
}

/* Decide which candidates in each block's REQUIRED_IN set need to be
   rematerialized and decide where the rematerialization instructions
   should go.  Emit queued rematerialization instructions at the start
   of blocks and after the last calls in blocks.  */

void
early_remat::global_phase (void)
{
  compute_availability ();
  if (dump_file)
    {
      fprintf (dump_file, "\n;; Blocks after computing global"
               " availability:\n");
      dump_all_blocks ();
    }

  choose_rematerialization_points ();
  if (dump_file)
    {
      fprintf (dump_file, "\n;; Blocks after choosing rematerialization"
               " points:\n");
      dump_all_blocks ();
    }

  basic_block bb;
  FOR_EACH_BB_FN (bb, m_fn)
    emit_remat_insns_for_block (bb);
}
/* Main function for the pass.  */

void
early_remat::run (void)
{
  df_analyze ();

  if (!collect_candidates ())
    return;

  init_block_info ();
  sort_candidates ();
  finalize_candidate_indices ();
  if (dump_file)
    dump_all_candidates ();

  compute_rd ();
  decide_candidate_validity ();
  local_phase ();
  global_phase ();
}

early_remat::early_remat (function *fn, sbitmap selected_modes)
  : m_fn (fn),
    m_selected_modes (selected_modes),
    m_available (0),
    m_required (0),
    m_value_table (63)
{
  bitmap_obstack_initialize (&m_obstack);
  bitmap_initialize (&m_candidate_regnos, &m_obstack);
  bitmap_initialize (&m_tmp_bitmap, &m_obstack);
}

early_remat::~early_remat ()
{
  bitmap_obstack_release (&m_obstack);
}
namespace {

const pass_data pass_data_early_remat =
{
  RTL_PASS, /* type */
  "early_remat", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_EARLY_REMAT, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};

class pass_early_remat : public rtl_opt_pass
{
public:
  pass_early_remat (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_early_remat, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *)
    {
      return optimize > 1 && NUM_POLY_INT_COEFFS > 1;
    }

  virtual unsigned int execute (function *f)
    {
      auto_sbitmap selected_modes (NUM_MACHINE_MODES);
      bitmap_clear (selected_modes);
      targetm.select_early_remat_modes (selected_modes);
      if (!bitmap_empty_p (selected_modes))
        early_remat (f, selected_modes).run ();
      return 0;
    }
}; // class pass_early_remat

} // anon namespace
rtl_opt_pass *
make_pass_early_remat (gcc::context *ctxt)
{
  return new pass_early_remat (ctxt);
}