/* Global common subexpression elimination/Partial redundancy elimination
   and global constant/copy propagation for GNU compiler.
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/* TODO
   - reordering of memory allocation and freeing to be more space efficient
   - do rough calc of how many regs are needed in each block, and a rough
     calc of how many regs are available in each class and use that to
     throttle back the code in cases where RTX_COST is minimal.
   - a store to the same address as a load does not kill the load if the
     source of the store is also the destination of the load.  Handling this
     allows more load motion, particularly out of loops.
   - ability to realloc sbitmap vectors would allow one initial computation
     of reg_set_in_block with only subsequent additions, rather than
     recomputing it for each pass.
*/
/* References searched while implementing this.

   Compilers Principles, Techniques and Tools
   Aho, Sethi, Ullman
   Addison-Wesley, 1988

   Global Optimization by Suppression of Partial Redundancies
   E. Morel, C. Renvoise
   Communications of the ACM, Vol. 22, Num. 2, Feb. 1979

   A Portable Machine-Independent Global Optimizer - Design and Measurements
   Frederick Chow
   Stanford Ph.D. thesis, Dec. 1983

   A Fast Algorithm for Code Movement Optimization
   D.M. Dhamdhere
   SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988

   A Solution to a Problem with Morel and Renvoise's
   Global Optimization by Suppression of Partial Redundancies
   K-H Drechsler, M.P. Stadel
   ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988

   Practical Adaptation of the Global Optimization
   Algorithm of Morel and Renvoise
   D.M. Dhamdhere
   ACM TOPLAS, Vol. 13, Num. 2, Apr. 1991

   Efficiently Computing Static Single Assignment Form and the Control
   Dependence Graph
   R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
   ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991

   Lazy Code Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   What's In a Region?  Or Computing Control Dependence Regions in Near-Linear
   Time for Reducible Flow Control
   Thomas Ball
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   An Efficient Representation for Sparse Sets
   Preston Briggs, Linda Torczon
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
   K-H Drechsler, M.P. Stadel
   ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993

   Partial Dead Code Elimination
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Effective Partial Redundancy Elimination
   P. Briggs, K.D. Cooper
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   The Program Structure Tree: Computing Control Regions in Linear Time
   R. Johnson, D. Pearson, K. Pingali
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Optimal Code Motion: Theory and Practice
   J. Knoop, O. Ruthing, B. Steffen
   ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994

   The power of assignment motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Global code motion / global value numbering
   C. Click
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Value Driven Redundancy Elimination
   L.T. Simpson
   Rice University Ph.D. thesis, Apr. 1996

   Value Numbering
   L.T. Simpson
   Massively Scalar Compiler Project, Rice University, Sep. 1996

   High Performance Compilers for Parallel Computing
   Michael Wolfe
   Addison-Wesley, 1996

   Advanced Compiler Design and Implementation
   Steven Muchnick
   Morgan Kaufmann, 1997

   Building an Optimizing Compiler
   Robert Morgan
   Digital Press, 1998

   People wishing to speed up the code here should read:
     Elimination Algorithms for Data Flow Analysis
     B.G. Ryder, M.C. Paull
     ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986

     How to Analyze Large Programs Efficiently and Informatively
     D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
     ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   People wishing to do something different can find various possibilities
   in the above papers and elsewhere.
*/
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"

#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "expr.h"
#include "except.h"
#include "ggc.h"
#include "params.h"
#include "cselib.h"
#include "intl.h"
#include "obstack.h"
#include "timevar.h"
#include "tree-pass.h"
#include "hashtab.h"
/* Propagate flow information through back edges and thus enable PRE's
   moving loop invariant calculations out of loops.

   Originally this tended to create worse overall code, but several
   improvements during the development of PRE seem to have made following
   back edges generally a win.

   Note much of the loop invariant code motion done here would normally
   be done by loop.c, which has more heuristics for when to move invariants
   out of loops.  At some point we might need to move some of those
   heuristics into gcse.c.  */
/* We support GCSE via Partial Redundancy Elimination.  PRE optimizations
   are a superset of those done by GCSE.

   We perform the following steps:

   1) Compute basic block information.

   2) Compute table of places where registers are set.

   3) Perform copy/constant propagation.

   4) Perform global cse using lazy code motion if not optimizing
      for size, or code hoisting if we are.

   5) Perform another pass of copy/constant propagation.

   Two passes of copy/constant propagation are done because the first one
   enables more GCSE and the second one helps to clean up the copies that
   GCSE creates.  This is needed more for PRE than for Classic because Classic
   GCSE will try to use an existing register containing the common
   subexpression rather than create a new one.  This is harder to do for PRE
   because of the code motion (which Classic GCSE doesn't do).

   Expressions we are interested in GCSE-ing are of the form
   (set (pseudo-reg) (expression)).
   Function want_to_gcse_p says what these are.

   PRE handles moving invariant expressions out of loops (by treating them as
   partially redundant).

   Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
   assignment) based GVN (global value numbering).  L. T. Simpson's paper
   (Rice University) on value numbering is a useful reference for this.
   **********************

   We used to support multiple passes but there are diminishing returns in
   doing so.  The first pass usually makes 90% of the changes that are doable.
   A second pass can make a few more changes made possible by the first pass.
   Experiments show any further passes don't make enough changes to justify
   the expense.

   A study of spec92 using an unlimited number of passes:
   [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
   [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
   [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1

   It was found doing copy propagation between each pass enables further
   substitutions.

   PRE is quite expensive in complicated functions because the DFA can take
   a while to converge.  Hence we only perform one pass.  The parameter
   max-gcse-passes can be modified if one wants to experiment.
   **********************

   The steps for PRE are:

   1) Build the hash table of expressions we wish to GCSE (expr_hash_table).

   2) Perform the data flow analysis for PRE.

   3) Delete the redundant instructions.

   4) Insert the required copies [if any] that make the partially
      redundant instructions fully redundant.

   5) For other reaching expressions, insert an instruction to copy the value
      to a newly created pseudo that will reach the redundant instruction.

   The deletion is done first so that when we do insertions we
   know which pseudo reg to use.

   Various papers have argued that PRE DFA is expensive (O(n^2)) and others
   argue it is not.  The number of iterations for the algorithm to converge
   is typically 2-4 so I don't view it as that expensive (relatively speaking).

   PRE GCSE depends heavily on the second CSE pass to clean up the copies
   we create.  To make an expression reach the place where it's redundant,
   the result of the expression is copied to a new register, and the redundant
   expression is deleted by replacing it with this new register.  Classic GCSE
   doesn't have this problem as much as it computes the reaching defs of
   each register in each block and thus can try to use an existing
   register.  */
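
/* As an illustrative sketch (not code from this pass), consider a diamond
   where only one arm computes an expression:

       if (cond)                      if (cond)
         x = a + b;                     t = a + b, x = t;
       else              ==>          else
         ;                              t = a + b;
       y = a + b;                     y = t;

   The computation of a + b reaching y is partially redundant.  PRE inserts
   a computation into a new pseudo t on the empty arm to make it fully
   redundant, then replaces the second computation with a use of t.  */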
/* GCSE global vars.  */

static FILE *gcse_file;

/* Note whether or not we should run jump optimization after gcse.  We
   want to do this for two cases.

    * If we changed any jumps via cprop.

    * If we added any labels via edge splitting.  */
static int run_jump_opt_after_gcse;

/* Bitmaps are normally not included in debugging dumps.
   However it's useful to be able to print them from GDB.
   We could create special functions for this, but it's simpler to
   just allow passing stderr to the dump_foo fns.  Since stderr can
   be a macro, we store a copy here.  */
static FILE *debug_stderr;

/* An obstack for our working variables.  */
static struct obstack gcse_obstack;

struct reg_use {rtx reg_rtx; };
/* Hash table of expressions.  */

struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;

  /* Index in the available expression bitmaps.  */
  int bitmap_index;

  /* Next entry with the same hash.  */
  struct expr *next_same_hash;

  /* List of anticipatable occurrences in basic blocks in the function.
     An "anticipatable occurrence" is one that is the first occurrence in the
     basic block, the operands are not modified in the basic block prior
     to the occurrence and the output is not used between the start of
     the block and the occurrence.  */
  struct occr *antic_occr;

  /* List of available occurrences in basic blocks in the function.
     An "available occurrence" is one that is the last occurrence in the
     basic block and the operands are not modified by following statements in
     the basic block [including this insn].  */
  struct occr *avail_occr;

  /* Non-null if the computation is PRE redundant.
     The value is the newly created pseudo-reg to record a copy of the
     expression in all the places that reach the redundant copy.  */
  rtx reaching_reg;
};
/* Occurrence of an expression.
   There is one per basic block.  If a pattern appears more than once the
   last appearance is used [or first for anticipatable expressions].  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;

  /* The insn that computes the expression.  */
  rtx insn;

  /* Nonzero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;

  /* Nonzero if this [available] occurrence has been copied to
     reaching_reg.  */
  /* ??? This is mutually exclusive with deleted_p, so they could share
     the same byte.  */
  char copied_p;
};
/* Expression and copy propagation hash tables.
   Each hash table is an array of buckets.
   ??? It is known that if it were an array of entries, structure elements
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
   not clear whether in the final analysis a sufficient amount of memory would
   be saved as the size of the available expression bitmaps would be larger
   [one could build a mapping table without holes afterwards though].
   Someday I'll perform the computation and figure it out.  */

struct hash_table
{
  /* The table itself.
     This is an array of `expr_hash_table_size' elements.  */
  struct expr **table;

  /* Size of the hash table, in elements.  */
  unsigned int size;

  /* Number of hash table elements.  */
  unsigned int n_elems;

  /* Whether the table is an expression table or a copy propagation
     table.  */
  int set_p;
};

/* Expression hash table.  */
static struct hash_table expr_hash_table;

/* Copy propagation hash table.  */
static struct hash_table set_hash_table;
/* Mapping of uids to cuids.
   Only real insns get cuids.  */
static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */
#ifdef ENABLE_CHECKING
#define INSN_CUID(INSN) \
  (gcc_assert (INSN_UID (INSN) <= max_uid), uid_cuid[INSN_UID (INSN)])
#else
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
#endif

/* Number of cuids.  */
static int max_cuid;

/* Mapping of cuids to insns.  */
static rtx *cuid_insn;

/* Get insn from cuid.  */
#define CUID_INSN(CUID) (cuid_insn[CUID])
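
/* For example (an illustrative note, not from the original source): if the
   real insns of a function carry UIDs 7, 9 and 14 in program order,
   INSN_CUID maps them to the gapless sequence 0, 1, 2 and CUID_INSN
   inverts that mapping.  The point of cuids is that two insns in the same
   block can be ordered by a single integer comparison.  */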
/* Maximum register number in function prior to doing gcse + 1.
   Registers created during this pass have regno >= max_gcse_regno.
   This is named with "gcse" to not collide with global of same name.  */
static unsigned int max_gcse_regno;
/* Table of registers that are modified.

   For each register, each element is a list of places where the pseudo-reg
   is set.

   For simplicity, GCSE is done on sets of pseudo-regs only.  PRE GCSE only
   requires knowledge of which blocks kill which regs [and thus could use
   a bitmap instead of the lists `reg_set_table' uses].

   `reg_set_table' could be turned into an array of bitmaps (num-bbs x
   num-regs) [however perhaps it may be useful to keep the data as is].  One
   advantage of recording things this way is that `reg_set_table' is fairly
   sparse with respect to pseudo regs but for hard regs could be fairly dense
   [relatively speaking].  And recording sets of pseudo-regs in lists speeds
   up functions like compute_transp since in the case of pseudo-regs we only
   need to iterate over the number of times a pseudo-reg is set, not over the
   number of basic blocks [clearly there is a bit of a slow down in the cases
   where a pseudo is set more than once in a block, however it is believed
   that the net effect is to speed things up].  This isn't done for hard-regs
   because recording call-clobbered hard-regs in `reg_set_table' at each
   function call can consume a fair bit of memory, and iterating over
   hard-regs stored this way in compute_transp will be more expensive.  */

typedef struct reg_set
{
  /* The next setting of this register.  */
  struct reg_set *next;

  /* The index of the block where it was set.  */
  int bb_index;
} reg_set;

static reg_set **reg_set_table;

/* Size of `reg_set_table'.
   The table starts out at max_gcse_regno + slop, and is enlarged as
   necessary.  */
static int reg_set_table_size;

/* Amount to grow `reg_set_table' by when it's full.  */
#define REG_SET_TABLE_SLOP 100
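
/* An illustrative sketch: if pseudo 42 is set in block 3 and later in
   block 7, then reg_set_table[42] is the two-element list

       { bb_index = 7 } -> { bb_index = 3 } -> NULL

   since record_one_set pushes new entries on the front of the chain.  */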
/* This is a list of expressions which are MEMs and will be used by load
   or store motion.
   Load motion tracks MEMs which aren't killed by
   anything except itself. (i.e., loads and stores to a single location).
   We can then allow movement of these MEM refs with a little special
   allowance. (all stores copy the same value to the reaching reg used
   for the loads).  This means all values used to store into memory must have
   no side effects so we can re-issue the setter value.
   Store Motion uses this structure as an expression table to track stores
   which look interesting, and might be moveable towards the exit block.  */

struct ls_expr
{
  struct expr * expr;           /* Gcse expression reference for LM.  */
  rtx pattern;                  /* Pattern of this mem.  */
  rtx pattern_regs;             /* List of registers mentioned by the mem.  */
  rtx loads;                    /* INSN list of loads seen.  */
  rtx stores;                   /* INSN list of stores seen.  */
  struct ls_expr * next;        /* Next in the list.  */
  int invalid;                  /* Invalid for some reason.  */
  int index;                    /* If it maps to a bitmap index.  */
  unsigned int hash_index;      /* Index when in a hash table.  */
  rtx reaching_reg;             /* Register to use when re-writing.  */
};
/* Array of implicit set patterns indexed by basic block index.  */
static rtx *implicit_sets;

/* Head of the list of load/store memory refs.  */
static struct ls_expr * pre_ldst_mems = NULL;
/* Bitmap containing one bit for each register in the program.
   Used when performing GCSE to track which registers have been set since
   the start of the basic block.  */
static regset reg_set_bitmap;

/* For each block, a bitmap of registers set in the block.
   This is used by compute_transp.
   It is computed during hash table computation and not by compute_sets
   as it includes registers added since the last pass (or between cprop and
   gcse) and it's currently not easy to realloc sbitmap vectors.  */
static sbitmap *reg_set_in_block;

/* Array, indexed by basic block number for a list of insns which modify
   memory within that block.  */
static rtx * modify_mem_list;
static bitmap modify_mem_list_set;

/* This array parallels modify_mem_list, but is kept canonicalized.  */
static rtx * canon_modify_mem_list;

/* Bitmap indexed by block numbers to record which blocks contain
   function calls.  */
static bitmap blocks_with_calls;
/* Various variables for statistics gathering.  */

/* Memory used in a pass.
   This isn't intended to be absolutely precise.  Its intent is only
   to keep an eye on memory usage.  */
static int bytes_used;

/* GCSE substitutions made.  */
static int gcse_subst_count;
/* Number of copy instructions created.  */
static int gcse_create_count;
/* Number of local constants propagated.  */
static int local_const_prop_count;
/* Number of local copies propagated.  */
static int local_copy_prop_count;
/* Number of global constants propagated.  */
static int global_const_prop_count;
/* Number of global copies propagated.  */
static int global_copy_prop_count;

/* For available exprs */
static sbitmap *ae_kill, *ae_gen;
static void compute_can_copy (void);
static void *gmalloc (size_t) ATTRIBUTE_MALLOC;
static void *gcalloc (size_t, size_t) ATTRIBUTE_MALLOC;
static void *grealloc (void *, size_t);
static void *gcse_alloc (unsigned long);
static void alloc_gcse_mem (void);
static void free_gcse_mem (void);
static void alloc_reg_set_mem (int);
static void free_reg_set_mem (void);
static void record_one_set (int, rtx);
static void record_set_info (rtx, rtx, void *);
static void compute_sets (void);
static void hash_scan_insn (rtx, struct hash_table *, int);
static void hash_scan_set (rtx, rtx, struct hash_table *);
static void hash_scan_clobber (rtx, rtx, struct hash_table *);
static void hash_scan_call (rtx, rtx, struct hash_table *);
static int want_to_gcse_p (rtx);
static bool can_assign_to_reg_p (rtx);
static bool gcse_constant_p (rtx);
static int oprs_unchanged_p (rtx, rtx, int);
static int oprs_anticipatable_p (rtx, rtx);
static int oprs_available_p (rtx, rtx);
static void insert_expr_in_table (rtx, enum machine_mode, rtx, int, int,
                                  struct hash_table *);
static void insert_set_in_table (rtx, rtx, struct hash_table *);
static unsigned int hash_expr (rtx, enum machine_mode, int *, int);
static unsigned int hash_set (int, int);
static int expr_equiv_p (rtx, rtx);
static void record_last_reg_set_info (rtx, int);
static void record_last_mem_set_info (rtx);
static void record_last_set_info (rtx, rtx, void *);
static void compute_hash_table (struct hash_table *);
static void alloc_hash_table (int, struct hash_table *, int);
static void free_hash_table (struct hash_table *);
static void compute_hash_table_work (struct hash_table *);
static void dump_hash_table (FILE *, const char *, struct hash_table *);
static struct expr *lookup_set (unsigned int, struct hash_table *);
static struct expr *next_set (unsigned int, struct expr *);
static void reset_opr_set_tables (void);
static int oprs_not_set_p (rtx, rtx);
static void mark_call (rtx);
static void mark_set (rtx, rtx);
static void mark_clobber (rtx, rtx);
static void mark_oprs_set (rtx);
static void alloc_cprop_mem (int, int);
static void free_cprop_mem (void);
static void compute_transp (rtx, int, sbitmap *, int);
static void compute_transpout (void);
static void compute_local_properties (sbitmap *, sbitmap *, sbitmap *,
                                      struct hash_table *);
static void compute_cprop_data (void);
static void find_used_regs (rtx *, void *);
static int try_replace_reg (rtx, rtx, rtx);
static struct expr *find_avail_set (int, rtx);
static int cprop_jump (basic_block, rtx, rtx, rtx, rtx);
static void mems_conflict_for_gcse_p (rtx, rtx, void *);
static int load_killed_in_block_p (basic_block, int, rtx, int);
static void canon_list_insert (rtx, rtx, void *);
static int cprop_insn (rtx, int);
static int cprop (int);
static void find_implicit_sets (void);
static int one_cprop_pass (int, bool, bool);
static bool constprop_register (rtx, rtx, rtx, bool);
static struct expr *find_bypass_set (int, int);
static bool reg_killed_on_edge (rtx, edge);
static int bypass_block (basic_block, rtx, rtx);
static int bypass_conditional_jumps (void);
static void alloc_pre_mem (int, int);
static void free_pre_mem (void);
static void compute_pre_data (void);
static int pre_expr_reaches_here_p (basic_block, struct expr *,
                                    basic_block);
static void insert_insn_end_bb (struct expr *, basic_block, int);
static void pre_insert_copy_insn (struct expr *, rtx);
static void pre_insert_copies (void);
static int pre_delete (void);
static int pre_gcse (void);
static int one_pre_gcse_pass (int);
static void add_label_notes (rtx, rtx);
static void alloc_code_hoist_mem (int, int);
static void free_code_hoist_mem (void);
static void compute_code_hoist_vbeinout (void);
static void compute_code_hoist_data (void);
static int hoist_expr_reaches_here_p (basic_block, int, basic_block, char *);
static void hoist_code (void);
static int one_code_hoisting_pass (void);
static rtx process_insert_insn (struct expr *);
static int pre_edge_insert (struct edge_list *, struct expr **);
static int pre_expr_reaches_here_p_work (basic_block, struct expr *,
                                         basic_block, char *);
static struct ls_expr * ldst_entry (rtx);
static void free_ldst_entry (struct ls_expr *);
static void free_ldst_mems (void);
static void print_ldst_list (FILE *);
static struct ls_expr * find_rtx_in_ldst (rtx);
static int enumerate_ldsts (void);
static inline struct ls_expr * first_ls_expr (void);
static inline struct ls_expr * next_ls_expr (struct ls_expr *);
static int simple_mem (rtx);
static void invalidate_any_buried_refs (rtx);
static void compute_ld_motion_mems (void);
static void trim_ld_motion_mems (void);
static void update_ld_motion_stores (struct expr *);
static void reg_set_info (rtx, rtx, void *);
static void reg_clear_last_set (rtx, rtx, void *);
static bool store_ops_ok (rtx, int *);
static rtx extract_mentioned_regs (rtx);
static rtx extract_mentioned_regs_helper (rtx, rtx);
static void find_moveable_store (rtx, int *, int *);
static int compute_store_table (void);
static bool load_kills_store (rtx, rtx, int);
static bool find_loads (rtx, rtx, int);
static bool store_killed_in_insn (rtx, rtx, rtx, int);
static bool store_killed_after (rtx, rtx, rtx, basic_block, int *, rtx *);
static bool store_killed_before (rtx, rtx, rtx, basic_block, int *);
static void build_store_vectors (void);
static void insert_insn_start_bb (rtx, basic_block);
static int insert_store (struct ls_expr *, edge);
static void remove_reachable_equiv_notes (basic_block, struct ls_expr *);
static void replace_store_insn (rtx, rtx, basic_block, struct ls_expr *);
static void delete_store (struct ls_expr *, basic_block);
static void free_store_memory (void);
static void store_motion (void);
static void free_insn_expr_list_list (rtx *);
static void clear_modify_mem_tables (void);
static void free_modify_mem_tables (void);
static rtx gcse_emit_move_after (rtx, rtx, rtx);
static void local_cprop_find_used_regs (rtx *, void *);
static bool do_local_cprop (rtx, rtx, bool, rtx *);
static bool adjust_libcall_notes (rtx, rtx, rtx, rtx *);
static void local_cprop_pass (bool);
static bool is_too_expensive (const char *);
/* Entry point for global common subexpression elimination.
   F is the first instruction in the function.  Return nonzero if a
   change is made.  */

static int
gcse_main (rtx f ATTRIBUTE_UNUSED, FILE *file)
{
  int changed, pass;
  /* Bytes used at start of pass.  */
  int initial_bytes_used;
  /* Maximum number of bytes used by a pass.  */
  int max_pass_bytes;
  /* Point to release obstack data from for each pass.  */
  char *gcse_obstack_bottom;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* Assume that we do not need to run jump optimizations after gcse.  */
  run_jump_opt_after_gcse = 0;

  /* For calling dump_foo fns from gdb.  */
  debug_stderr = stderr;
  gcse_file = file;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  if (file)
    dump_flow_info (file);

  /* Return if there's nothing to do, or it is too expensive.  */
  if (n_basic_blocks <= 1 || is_too_expensive (_("GCSE disabled")))
    return 0;

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias.  */
  init_alias_analysis ();

  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets ();

  pass = 0;
  initial_bytes_used = bytes_used;
  max_pass_bytes = 0;
  gcse_obstack_bottom = gcse_alloc (1);
  changed = 1;
  while (changed && pass < MAX_GCSE_PASSES)
    {
      changed = 0;
      if (file)
        fprintf (file, "GCSE pass %d\n\n", pass + 1);

      /* Initialize bytes_used to the space for the pred/succ lists,
         and the reg_set_table data.  */
      bytes_used = initial_bytes_used;

      /* Each pass may create new registers, so recalculate each time.  */
      max_gcse_regno = max_reg_num ();

      alloc_gcse_mem ();

      /* Don't allow constant propagation to modify jumps
         during this pass.  */
      timevar_push (TV_CPROP1);
      changed = one_cprop_pass (pass + 1, false, false);
      timevar_pop (TV_CPROP1);

      if (optimize_size)
        /* Do nothing.  */ ;
      else
        {
          timevar_push (TV_PRE);
          changed |= one_pre_gcse_pass (pass + 1);
          /* We may have just created new basic blocks.  Release and
             recompute various things which are sized on the number of
             basic blocks.  */
          if (changed)
            {
              free_modify_mem_tables ();
              modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
              canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
            }
          free_reg_set_mem ();
          alloc_reg_set_mem (max_reg_num ());
          compute_sets ();
          run_jump_opt_after_gcse = 1;
          timevar_pop (TV_PRE);
        }

      if (max_pass_bytes < bytes_used)
        max_pass_bytes = bytes_used;

      /* Free up memory, then reallocate for code hoisting.  We can
         not re-use the existing allocated memory because the tables
         will not have info for the insns or registers created by
         partial redundancy elimination.  */
      free_gcse_mem ();

      /* It does not make sense to run code hoisting unless we are optimizing
         for code size -- it rarely makes programs faster, and can make
         them bigger if we did partial redundancy elimination (when optimizing
         for space, we don't run the partial redundancy algorithms).  */
      if (optimize_size)
        {
          timevar_push (TV_HOIST);
          max_gcse_regno = max_reg_num ();
          alloc_gcse_mem ();
          changed |= one_code_hoisting_pass ();
          free_gcse_mem ();

          if (max_pass_bytes < bytes_used)
            max_pass_bytes = bytes_used;
          timevar_pop (TV_HOIST);
        }

      if (file)
        {
          fprintf (file, "\n");
          fflush (file);
        }

      obstack_free (&gcse_obstack, gcse_obstack_bottom);
      pass++;
    }

  /* Do one last pass of copy propagation, including cprop into
     conditional jumps.  */

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem ();

  /* This time, go ahead and allow cprop to alter jumps.  */
  timevar_push (TV_CPROP2);
  one_cprop_pass (pass + 1, true, false);
  timevar_pop (TV_CPROP2);
  free_gcse_mem ();

  if (file)
    {
      fprintf (file, "GCSE of %s: %d basic blocks, ",
               current_function_name (), n_basic_blocks);
      fprintf (file, "%d pass%s, %d bytes\n\n",
               pass, pass > 1 ? "es" : "", max_pass_bytes);
    }

  obstack_free (&gcse_obstack, NULL);
  free_reg_set_mem ();

  /* We are finished with alias.  */
  end_alias_analysis ();
  allocate_reg_info (max_reg_num (), FALSE, FALSE);

  if (!optimize_size && flag_gcse_sm)
    {
      timevar_push (TV_LSM);
      store_motion ();
      timevar_pop (TV_LSM);
    }

  return run_jump_opt_after_gcse;
}
/* Misc. utilities.  */

/* Nonzero for each mode that supports (set (reg) (reg)).
   This is trivially true for integer and floating point values.
   It may or may not be true for condition codes.  */
static char can_copy[(int) NUM_MACHINE_MODES];
/* Compute which modes support reg/reg copy operations.  */

static void
compute_can_copy (void)
{
  int i;
#ifndef AVOID_CCMODE_COPIES
  rtx reg, insn;
#endif
  memset (can_copy, 0, NUM_MACHINE_MODES);

  start_sequence ();
  for (i = 0; i < NUM_MACHINE_MODES; i++)
    if (GET_MODE_CLASS (i) == MODE_CC)
      {
#ifdef AVOID_CCMODE_COPIES
        can_copy[i] = 0;
#else
        reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
        insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
        if (recog (PATTERN (insn), insn, NULL) >= 0)
          can_copy[i] = 1;
#endif
      }
    else
      can_copy[i] = 1;

  end_sequence ();
}
/* Returns whether the mode supports reg/reg copy operations.  */

bool
can_copy_p (enum machine_mode mode)
{
  static bool can_copy_init_p = false;

  if (! can_copy_init_p)
    {
      compute_can_copy ();
      can_copy_init_p = true;
    }

  return can_copy[mode] != 0;
}
/* Cover function to xmalloc to record bytes allocated.  */

static void *
gmalloc (size_t size)
{
  bytes_used += size;
  return xmalloc (size);
}

/* Cover function to xcalloc to record bytes allocated.  */

static void *
gcalloc (size_t nelem, size_t elsize)
{
  bytes_used += nelem * elsize;
  return xcalloc (nelem, elsize);
}

/* Cover function to xrealloc.
   We don't record the additional size since we don't know it.
   It won't affect memory usage stats much anyway.  */

static void *
grealloc (void *ptr, size_t size)
{
  return xrealloc (ptr, size);
}

/* Cover function to obstack_alloc.  */

static void *
gcse_alloc (unsigned long size)
{
  bytes_used += size;
  return obstack_alloc (&gcse_obstack, size);
}
/* Allocate memory for the cuid mapping array,
   and reg/memory set tracking tables.

   This is called at the start of each pass.  */

static void
alloc_gcse_mem (void)
{
  int i;
  basic_block bb;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.
     CUIDs are like UIDs except they increase monotonically, have no gaps,
     and only apply to real insns.
     (Actually, there are gaps, for insn that are not inside a basic block.
     but we should never see those anyway, so this is OK.)  */

  max_uid = get_max_uid ();
  uid_cuid = gcalloc (max_uid + 1, sizeof (int));
  i = 0;
  FOR_EACH_BB (bb)
    FOR_BB_INSNS (bb, insn)
      {
        if (INSN_P (insn))
          uid_cuid[INSN_UID (insn)] = i++;
        else
          uid_cuid[INSN_UID (insn)] = i;
      }

  /* Create a table mapping cuids to insns.  */

  max_cuid = i;
  cuid_insn = gcalloc (max_cuid + 1, sizeof (rtx));
  i = 0;
  FOR_EACH_BB (bb)
    FOR_BB_INSNS (bb, insn)
      if (INSN_P (insn))
        CUID_INSN (i++) = insn;

  /* Allocate vars to track sets of regs.  */
  reg_set_bitmap = BITMAP_ALLOC (NULL);

  /* Allocate vars to track sets of regs, memory per block.  */
  reg_set_in_block = sbitmap_vector_alloc (last_basic_block, max_gcse_regno);
  /* Allocate array to keep a list of insns which modify memory in each
     basic block.  */
  modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
  canon_modify_mem_list = gcalloc (last_basic_block, sizeof (rtx));
  modify_mem_list_set = BITMAP_ALLOC (NULL);
  blocks_with_calls = BITMAP_ALLOC (NULL);
}

/* Free memory allocated by alloc_gcse_mem.  */

static void
free_gcse_mem (void)
{
  free (uid_cuid);
  free (cuid_insn);

  BITMAP_FREE (reg_set_bitmap);

  sbitmap_vector_free (reg_set_in_block);
  free_modify_mem_tables ();
  BITMAP_FREE (modify_mem_list_set);
  BITMAP_FREE (blocks_with_calls);
}
/* Compute the local properties of each recorded expression.

   Local properties are those that are defined by the block, irrespective of
   other blocks.

   An expression is transparent in a block if its operands are not modified
   in the block.

   An expression is computed (locally available) in a block if it is computed
   at least once and expression would contain the same value if the
   computation was moved to the end of the block.

   An expression is locally anticipatable in a block if it is computed at
   least once and expression would contain the same value if the computation
   was moved to the beginning of the block.

   We call this routine for cprop, pre and code hoisting.  They all compute
   basically the same information and thus can easily share this code.

   TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
   properties.  If NULL, then it is not necessary to compute or record that
   particular property.

   TABLE controls which hash table to look at.  If it is the set hash table,
   additionally, TRANSP is computed as ~TRANSP, since this is really cprop's
   ABSALTERED.  */
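
/* For instance (an illustrative sketch, not from the original source), in a
   block containing

       r1 = r2 + r3
       r2 = ...
       r4 = r2 + r3

   the expression r2 + r3 is locally anticipatable (first computed with its
   operands unmodified beforehand), locally available (last computed with
   its operands unmodified afterwards), but not transparent, since r2 is
   set within the block.  */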
static void
compute_local_properties (sbitmap *transp, sbitmap *comp, sbitmap *antloc,
                          struct hash_table *table)
{
  unsigned int i;

  /* Initialize any bitmaps that were passed in.  */
  if (transp)
    {
      if (table->set_p)
        sbitmap_vector_zero (transp, last_basic_block);
      else
        sbitmap_vector_ones (transp, last_basic_block);
    }

  if (comp)
    sbitmap_vector_zero (comp, last_basic_block);
  if (antloc)
    sbitmap_vector_zero (antloc, last_basic_block);

  for (i = 0; i < table->size; i++)
    {
      struct expr *expr;

      for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
        {
          int indx = expr->bitmap_index;
          struct occr *occr;

          /* The expression is transparent in this block if it is not killed.
             We start by assuming all are transparent [none are killed], and
             then reset the bits for those that are.  */
          if (transp)
            compute_transp (expr->expr, indx, transp, table->set_p);

          /* The occurrences recorded in antic_occr are exactly those that
             we want to set to nonzero in ANTLOC.  */
          if (antloc)
            for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
              {
                SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);

                /* While we're scanning the table, this is a good place to
                   initialize this.  */
                occr->deleted_p = 0;
              }

          /* The occurrences recorded in avail_occr are exactly those that
             we want to set to nonzero in COMP.  */
          if (comp)
            for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
              {
                SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);

                /* While we're scanning the table, this is a good place to
                   initialize this.  */
                occr->copied_p = 0;
              }

          /* While we're scanning the table, this is a good place to
             initialize this.  */
          expr->reaching_reg = 0;
        }
    }
}
/* Register set information.

   `reg_set_table' records where each register is set or otherwise
   modified.  */

static struct obstack reg_set_obstack;

static void
alloc_reg_set_mem (int n_regs)
{
  reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
  reg_set_table = gcalloc (reg_set_table_size, sizeof (struct reg_set *));

  gcc_obstack_init (&reg_set_obstack);
}

static void
free_reg_set_mem (void)
{
  free (reg_set_table);
  obstack_free (&reg_set_obstack, NULL);
}
/* Record REGNO in the reg_set table.  */

static void
record_one_set (int regno, rtx insn)
{
  /* Allocate a new reg_set element and link it onto the list.  */
  struct reg_set *new_reg_info;

  /* If the table isn't big enough, enlarge it.  */
  if (regno >= reg_set_table_size)
    {
      int new_size = regno + REG_SET_TABLE_SLOP;

      reg_set_table = grealloc (reg_set_table,
                                new_size * sizeof (struct reg_set *));
      memset (reg_set_table + reg_set_table_size, 0,
              (new_size - reg_set_table_size) * sizeof (struct reg_set *));
      reg_set_table_size = new_size;
    }

  new_reg_info = obstack_alloc (&reg_set_obstack, sizeof (struct reg_set));
  bytes_used += sizeof (struct reg_set);
  new_reg_info->bb_index = BLOCK_NUM (insn);
  new_reg_info->next = reg_set_table[regno];
  reg_set_table[regno] = new_reg_info;
}
/* Called from compute_sets via note_stores to handle one SET or CLOBBER in
   an insn.  The DATA is really the instruction in which the SET is
   occurring.  */

static void
record_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
{
  rtx record_set_insn = (rtx) data;

  if (REG_P (dest) && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
    record_one_set (REGNO (dest), record_set_insn);
}

/* Scan the function and record each set of each pseudo-register.

   This is called once, at the start of the gcse pass.  See the comments for
   `reg_set_table' for further documentation.  */

static void
compute_sets (void)
{
  basic_block bb;
  rtx insn;

  FOR_EACH_BB (bb)
    FOR_BB_INSNS (bb, insn)
      if (INSN_P (insn))
        note_stores (PATTERN (insn), record_set_info, insn);
}
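
/* A usage sketch (illustrative, not from the original source): for an insn
   whose pattern is (parallel [(set (reg 70) ...) (clobber (reg:CC 17))]),
   note_stores invokes record_set_info once per SET and CLOBBER destination;
   only the pseudo (reg 70) is entered into reg_set_table, while the
   hard-register clobber falls below FIRST_PSEUDO_REGISTER and is ignored.  */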
/* Hash table support.  */

struct reg_avail_info
{
  basic_block last_bb;
  int first_set;
  int last_set;
};

static struct reg_avail_info *reg_avail_info;
static basic_block current_bb;
/* See whether X, the source of a set, is something we want to consider for
   GCSE.  */

static int
want_to_gcse_p (rtx x)
{
  switch (GET_CODE (x))
    {
    case REG:
    case SUBREG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CALL:
      return 0;

    default:
      return can_assign_to_reg_p (x);
    }
}
/* Used internally by can_assign_to_reg_p.  */

static GTY(()) rtx test_insn;

/* Return true if we can assign X to a pseudo register.  */

static bool
can_assign_to_reg_p (rtx x)
{
  int num_clobbers = 0;
  int icode;

  /* If this is a valid operand, we are OK.  If it's VOIDmode, we aren't.  */
  if (general_operand (x, GET_MODE (x)))
    return 1;
  else if (GET_MODE (x) == VOIDmode)
    return 0;

  /* Otherwise, check if we can make a valid insn from it.  First initialize
     our test insn if we haven't already.  */
  if (test_insn == 0)
    {
      test_insn
        = make_insn_raw (gen_rtx_SET (VOIDmode,
                                      gen_rtx_REG (word_mode,
                                                   FIRST_PSEUDO_REGISTER * 2),
                                      const0_rtx));
      NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
    }

  /* Now make an insn like the one we would make when GCSE'ing and see if
     valid.  */
  PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
  SET_SRC (PATTERN (test_insn)) = x;
  return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
          && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
}
/* Return nonzero if the operands of expression X are unchanged from the
   start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
   or from INSN to the end of INSN's basic block (if AVAIL_P != 0).  */
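
/* For example (an illustrative sketch): given a block

       insn A:  r5 = r6 + 1
       insn B:  r6 = 0

   oprs_unchanged_p (r6, A, 0) is nonzero, because the first set of r6 is
   at B, which is not before A; oprs_unchanged_p (r6, A, 1) is zero, because
   the last set of r6 (at B) follows A.  So r6 + 1 is anticipatable at A
   but not available there.  */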
static int
oprs_unchanged_p (rtx x, rtx insn, int avail_p)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
        struct reg_avail_info *info = &reg_avail_info[REGNO (x)];

        if (info->last_bb != current_bb)
          return 1;
        if (avail_p)
          return info->last_set < INSN_CUID (insn);
        else
          return info->first_set >= INSN_CUID (insn);
      }

    case MEM:
      if (load_killed_in_block_p (current_bb, INSN_CUID (insn),
                                  x, avail_p))
        return 0;
      else
        return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return 0;

    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          /* If we are about to do the last recursive call needed at this
             level, change it into iteration.  This function is called enough
             to be worth it.  */
          if (i == 0)
            return oprs_unchanged_p (XEXP (x, i), insn, avail_p);

          else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
            return 0;
        }
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
            return 0;
    }

  return 1;
}
/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  Nonzero if mems_conflict_for_gcse_p finds a
   conflict between two memory references.  */
static int gcse_mems_conflict_p;

/* Used for communication between mems_conflict_for_gcse_p and
   load_killed_in_block_p.  A memory reference for a load instruction,
   mems_conflict_for_gcse_p will see if a memory store conflicts with
   this memory load.  */
static rtx gcse_mem_operand;
/* DEST is the output of an instruction.  If it is a memory reference, and
   possibly conflicts with the load found in gcse_mem_operand, then set
   gcse_mems_conflict_p to a nonzero value.  */

static void
mems_conflict_for_gcse_p (rtx dest, rtx setter ATTRIBUTE_UNUSED,
                          void *data ATTRIBUTE_UNUSED)
{
  while (GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == ZERO_EXTRACT
         || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with the load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */
  if (! MEM_P (dest))
    return;

  /* If we are setting a MEM in our list of specially recognized MEMs,
     don't mark as killed this time.  */
  if (expr_equiv_p (dest, gcse_mem_operand) && pre_ldst_mems != NULL)
    {
      if (!find_rtx_in_ldst (dest))
        gcse_mems_conflict_p = 1;
      return;
    }

  if (true_dependence (dest, GET_MODE (dest), gcse_mem_operand,
                       rtx_addr_varies_p))
    gcse_mems_conflict_p = 1;
}
/* Return nonzero if the expression in X (a memory reference) is killed
   in block BB before or after the insn with the CUID in UID_LIMIT.
   AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
   before UID_LIMIT.

   To check the entire block, set UID_LIMIT to max_uid + 1 and
   AVAIL_P to 0.  */
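
/* As a usage sketch (illustrative): oprs_unchanged_p checks availability of
   a load X with

       load_killed_in_block_p (current_bb, INSN_CUID (insn), x, 1)

   which scans every memory-modifying insn of the block whose cuid is at or
   after INSN's, asking mems_conflict_for_gcse_p whether any store (or any
   call, which is assumed to clobber everything) can alias X.  */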
static int
load_killed_in_block_p (basic_block bb, int uid_limit, rtx x, int avail_p)
{
  rtx list_entry = modify_mem_list[bb->index];

  /* If this is a readonly then we aren't going to be changing it.  */
  if (MEM_READONLY_P (x))
    return 0;

  while (list_entry)
    {
      rtx setter;

      /* Ignore entries in the list that do not apply.  */
      if ((avail_p
           && INSN_CUID (XEXP (list_entry, 0)) < uid_limit)
          || (! avail_p
              && INSN_CUID (XEXP (list_entry, 0)) > uid_limit))
        {
          list_entry = XEXP (list_entry, 1);
          continue;
        }

      setter = XEXP (list_entry, 0);

      /* If SETTER is a call everything is clobbered.  Note that calls
         to pure functions are never put on the list, so we need not
         worry about them.  */
      if (CALL_P (setter))
        return 1;

      /* SETTER must be an INSN of some kind that sets memory.  Call
         note_stores to examine each hunk of memory that is modified.

         The note_stores interface is pretty limited, so we have to
         communicate via global variables.  Yuk.  */
      gcse_mem_operand = x;
      gcse_mems_conflict_p = 0;
      note_stores (PATTERN (setter), mems_conflict_for_gcse_p, NULL);
      if (gcse_mems_conflict_p)
        return 1;
      list_entry = XEXP (list_entry, 1);
    }
  return 0;
}
/* Return nonzero if the operands of expression X are unchanged from
   the start of INSN's basic block up to but not including INSN.  */

static int
oprs_anticipatable_p (rtx x, rtx insn)
{
  return oprs_unchanged_p (x, insn, 0);
}

/* Return nonzero if the operands of expression X are unchanged from
   INSN to the end of INSN's basic block.  */

static int
oprs_available_p (rtx x, rtx insn)
{
  return oprs_unchanged_p (x, insn, 1);
}
/* Hash expression X.

   MODE is only used if X is a CONST_INT.  DO_NOT_RECORD_P is a boolean
   indicating if a volatile operand is found or if the expression contains
   something we don't want to insert in the table.  HASH_TABLE_SIZE is
   the current size of the hash table to be probed.  */

static unsigned int
hash_expr (rtx x, enum machine_mode mode, int *do_not_record_p,
           int hash_table_size)
{
  unsigned int hash;

  *do_not_record_p = 0;

  hash = hash_rtx (x, mode, do_not_record_p,
                   NULL, /*have_reg_qty=*/false);
  return hash % hash_table_size;
}

/* Hash a set of register REGNO.

   Sets are hashed on the register that is set.  This simplifies the PRE copy
   propagation code.

   ??? May need to make things more elaborate.  Later, as necessary.  */

static unsigned int
hash_set (int regno, int hash_table_size)
{
  unsigned int hash;

  hash = regno;
  return hash % hash_table_size;
}
/* Return nonzero if exp1 is equivalent to exp2.  */

static int
expr_equiv_p (rtx x, rtx y)
{
  return exp_equiv_p (x, y, 0, true);
}
/* Insert expression X in INSN in the hash TABLE.
   If it is already present, record it as the last occurrence in INSN's
   basic block.

   MODE is the mode of the value X is being stored into.
   It is only used if X is a CONST_INT.

   ANTIC_P is nonzero if X is an anticipatable expression.
   AVAIL_P is nonzero if X is an available expression.  */

static void
insert_expr_in_table (rtx x, enum machine_mode mode, rtx insn, int antic_p,
                      int avail_p, struct hash_table *table)
{
  int found, do_not_record_p;
  unsigned int hash;
  struct expr *cur_expr, *last_expr = NULL;
  struct occr *antic_occr, *avail_occr;

  hash = hash_expr (x, mode, &do_not_record_p, table->size);

  /* Do not insert expression in table if it contains volatile operands,
     or if hash_expr determines the expression is something we don't want
     to or can't handle.  */
  if (do_not_record_p)
    return;

  cur_expr = table->table[hash];
  found = 0;

  while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
    {
      /* If the expression isn't found, save a pointer to the end of
         the list.  */
      last_expr = cur_expr;
      cur_expr = cur_expr->next_same_hash;
    }

  if (! found)
    {
      cur_expr = gcse_alloc (sizeof (struct expr));
      bytes_used += sizeof (struct expr);
      if (table->table[hash] == NULL)
        /* This is the first pattern that hashed to this index.  */
        table->table[hash] = cur_expr;
      else
        /* Add EXPR to end of this hash chain.  */
        last_expr->next_same_hash = cur_expr;

      /* Set the fields of the expr element.  */
      cur_expr->expr = x;
      cur_expr->bitmap_index = table->n_elems++;
      cur_expr->next_same_hash = NULL;
      cur_expr->antic_occr = NULL;
      cur_expr->avail_occr = NULL;
    }

  /* Now record the occurrence(s).  */
  if (antic_p)
    {
      antic_occr = cur_expr->antic_occr;

      if (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
        antic_occr = NULL;

      if (antic_occr)
        /* Found another instance of the expression in the same basic block.
           Prefer the currently recorded one.  We want the first one in the
           block and the block is scanned from start to end.  */
        ; /* nothing to do */
      else
        {
          /* First occurrence of this expression in this basic block.  */
          antic_occr = gcse_alloc (sizeof (struct occr));
          bytes_used += sizeof (struct occr);
          antic_occr->insn = insn;
          antic_occr->next = cur_expr->antic_occr;
          antic_occr->deleted_p = 0;
          cur_expr->antic_occr = antic_occr;
        }
    }

  if (avail_p)
    {
      avail_occr = cur_expr->avail_occr;

      if (avail_occr && BLOCK_NUM (avail_occr->insn) == BLOCK_NUM (insn))
        {
          /* Found another instance of the expression in the same basic block.
             Prefer this occurrence to the currently recorded one.  We want
             the last one in the block and the block is scanned from start
             to end.  */
          avail_occr->insn = insn;
        }
      else
        {
          /* First occurrence of this expression in this basic block.  */
          avail_occr = gcse_alloc (sizeof (struct occr));
          bytes_used += sizeof (struct occr);
          avail_occr->insn = insn;
          avail_occr->next = cur_expr->avail_occr;
          avail_occr->deleted_p = 0;
          cur_expr->avail_occr = avail_occr;
        }
    }
}
/* Insert pattern X in INSN in the hash table.
   X is a SET of a reg to either another reg or a constant.
   If it is already present, record it as the last occurrence in INSN's
   basic block.  */

static void
insert_set_in_table (rtx x, rtx insn, struct hash_table *table)
{
  int found;
  unsigned int hash;
  struct expr *cur_expr, *last_expr = NULL;
  struct occr *cur_occr;

  gcc_assert (GET_CODE (x) == SET && REG_P (SET_DEST (x)));

  hash = hash_set (REGNO (SET_DEST (x)), table->size);

  cur_expr = table->table[hash];
  found = 0;

  while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
    {
      /* If the expression isn't found, save a pointer to the end of
         the list.  */
      last_expr = cur_expr;
      cur_expr = cur_expr->next_same_hash;
    }

  if (! found)
    {
      cur_expr = gcse_alloc (sizeof (struct expr));
      bytes_used += sizeof (struct expr);
      if (table->table[hash] == NULL)
        /* This is the first pattern that hashed to this index.  */
        table->table[hash] = cur_expr;
      else
        /* Add EXPR to end of this hash chain.  */
        last_expr->next_same_hash = cur_expr;

      /* Set the fields of the expr element.
         We must copy X because it can be modified when copy propagation is
         performed on its operands.  */
      cur_expr->expr = copy_rtx (x);
      cur_expr->bitmap_index = table->n_elems++;
      cur_expr->next_same_hash = NULL;
      cur_expr->antic_occr = NULL;
      cur_expr->avail_occr = NULL;
    }

  /* Now record the occurrence.  */
  cur_occr = cur_expr->avail_occr;

  if (cur_occr && BLOCK_NUM (cur_occr->insn) == BLOCK_NUM (insn))
    {
      /* Found another instance of the expression in the same basic block.
         Prefer this occurrence to the currently recorded one.  We want
         the last one in the block and the block is scanned from start
         to end.  */
      cur_occr->insn = insn;
    }
  else
    {
      /* First occurrence of this expression in this basic block.  */
      cur_occr = gcse_alloc (sizeof (struct occr));
      bytes_used += sizeof (struct occr);

      cur_occr->insn = insn;
      cur_occr->next = cur_expr->avail_occr;
      cur_occr->deleted_p = 0;
      cur_expr->avail_occr = cur_occr;
    }
}
/* Determine whether the rtx X should be treated as a constant for
   the purposes of GCSE's constant propagation.  */

static bool
gcse_constant_p (rtx x)
{
  /* Consider a COMPARE of two integers constant.  */
  if (GET_CODE (x) == COMPARE
      && GET_CODE (XEXP (x, 0)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return true;

  /* Consider a COMPARE of the same registers constant
     if they are not floating point registers.  */
  if (GET_CODE (x) == COMPARE
      && REG_P (XEXP (x, 0)) && REG_P (XEXP (x, 1))
      && REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 1))
      && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 0)))
      && ! FLOAT_MODE_P (GET_MODE (XEXP (x, 1))))
    return true;

  return CONSTANT_P (x);
}
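
/* For instance (an illustrative note): (compare (reg:SI 70) (reg:SI 70))
   is treated as constant, since comparing an integer register with itself
   always yields the same result, whereas the same COMPARE of floating
   point registers is not, because x == x is false for NaNs.  */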
/* Scan pattern PAT of INSN and add an entry to the hash TABLE (set or
   expression one).  */

static void
hash_scan_set (rtx pat, rtx insn, struct hash_table *table)
{
  rtx src = SET_SRC (pat);
  rtx dest = SET_DEST (pat);
  rtx note;

  if (GET_CODE (src) == CALL)
    hash_scan_call (src, insn, table);

  else if (REG_P (dest))
    {
      unsigned int regno = REGNO (dest);
      rtx tmp;

      /* If this is a single set and we are doing constant propagation,
         see if a REG_NOTE shows this equivalent to a constant.  */
      if (table->set_p && (note = find_reg_equal_equiv_note (insn)) != 0
          && gcse_constant_p (XEXP (note, 0)))
        src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);

      /* Only record sets of pseudo-regs in the hash table.  */
      if (! table->set_p
          && regno >= FIRST_PSEUDO_REGISTER
          /* Don't GCSE something if we can't do a reg/reg copy.  */
          && can_copy_p (GET_MODE (dest))
          /* GCSE commonly inserts instruction after the insn.  We can't
             do that easily for EH_REGION notes so disable GCSE on these
             for now.  */
          && !find_reg_note (insn, REG_EH_REGION, NULL_RTX)
          /* Is SET_SRC something we want to gcse?  */
          && want_to_gcse_p (src)
          /* Don't CSE a nop.  */
          && ! set_noop_p (pat)
          /* Don't GCSE if it has attached REG_EQUIV note.
             At this point this only function parameters should have
             REG_EQUIV notes and if the argument slot is used somewhere
             explicitly, it means address of parameter has been taken,
             so we should not extend the lifetime of the pseudo.  */
          && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
              || ! MEM_P (XEXP (note, 0))))
        {
          /* An expression is not anticipatable if its operands are
             modified before this insn or if this is not the only SET in
             this insn.  */
          int antic_p = oprs_anticipatable_p (src, insn) && single_set (insn);
          /* An expression is not available if its operands are
             subsequently modified, including this insn.  It's also not
             available if this is a branch, because we can't insert
             a set after the branch.  */
          int avail_p = (oprs_available_p (src, insn)
                         && ! JUMP_P (insn));

          insert_expr_in_table (src, GET_MODE (dest), insn, antic_p,
                                avail_p, table);
        }

      /* Record sets for constant/copy propagation.  */
      else if (table->set_p
               && regno >= FIRST_PSEUDO_REGISTER
               && ((REG_P (src)
                    && REGNO (src) >= FIRST_PSEUDO_REGISTER
                    && can_copy_p (GET_MODE (dest))
                    && REGNO (src) != regno)
                   || gcse_constant_p (src))
               /* A copy is not available if its src or dest is subsequently
                  modified.  Here we want to search from INSN+1 on, but
                  oprs_available_p searches from INSN on.  */
               && (insn == BB_END (BLOCK_FOR_INSN (insn))
                   || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
                       && oprs_available_p (pat, tmp))))
        insert_set_in_table (pat, insn, table);
    }
  /* In case of store we want to consider the memory value as available in
     the REG stored in that memory.  This makes it possible to remove
     redundant loads due to stores to the same location.  */
  else if (flag_gcse_las && REG_P (src) && MEM_P (dest))
    {
      unsigned int regno = REGNO (src);

      /* Do not do this for constant/copy propagation.  */
      if (! table->set_p
          /* Only record sets of pseudo-regs in the hash table.  */
          && regno >= FIRST_PSEUDO_REGISTER
          /* Don't GCSE something if we can't do a reg/reg copy.  */
          && can_copy_p (GET_MODE (src))
          /* GCSE commonly inserts instruction after the insn.  We can't
             do that easily for EH_REGION notes so disable GCSE on these
             for now.  */
          && ! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
          /* Is SET_DEST something we want to gcse?  */
          && want_to_gcse_p (dest)
          /* Don't CSE a nop.  */
          && ! set_noop_p (pat)
          /* Don't GCSE if it has attached REG_EQUIV note.
             At this point this only function parameters should have
             REG_EQUIV notes and if the argument slot is used somewhere
             explicitly, it means address of parameter has been taken,
             so we should not extend the lifetime of the pseudo.  */
          && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
              || ! MEM_P (XEXP (note, 0))))
        {
          /* Stores are never anticipatable.  */
          int antic_p = 0;
          /* An expression is not available if its operands are
             subsequently modified, including this insn.  It's also not
             available if this is a branch, because we can't insert
             a set after the branch.  */
          int avail_p = oprs_available_p (dest, insn)
                        && ! JUMP_P (insn);

          /* Record the memory expression (DEST) in the hash table.  */
          insert_expr_in_table (dest, GET_MODE (dest), insn,
                                antic_p, avail_p, table);
        }
    }
}
static void
hash_scan_clobber (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
                   struct hash_table *table ATTRIBUTE_UNUSED)
{
  /* Currently nothing to do.  */
}

static void
hash_scan_call (rtx x ATTRIBUTE_UNUSED, rtx insn ATTRIBUTE_UNUSED,
                struct hash_table *table ATTRIBUTE_UNUSED)
{
  /* Currently nothing to do.  */
}
/* Process INSN and add hash table entries as appropriate.

   Only available expressions that set a single pseudo-reg are recorded.

   Single sets in a PARALLEL could be handled, but it's an extra complication
   that isn't dealt with right now.  The trick is handling the CLOBBERs that
   are also in the PARALLEL.  Later.

   If SET_P is nonzero, this is for the assignment hash table,
   otherwise it is for the expression hash table.
   If IN_LIBCALL_BLOCK nonzero, we are in a libcall block, and should
   not record any expressions.  */

static void
hash_scan_insn (rtx insn, struct hash_table *table, int in_libcall_block)
{
  rtx pat = PATTERN (insn);
  int i;

  if (in_libcall_block)
    return;

  /* Pick out the sets of INSN and for other forms of instructions record
     what's been modified.  */

  if (GET_CODE (pat) == SET)
    hash_scan_set (pat, insn, table);
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
        rtx x = XVECEXP (pat, 0, i);

        if (GET_CODE (x) == SET)
          hash_scan_set (x, insn, table);
        else if (GET_CODE (x) == CLOBBER)
          hash_scan_clobber (x, insn, table);
        else if (GET_CODE (x) == CALL)
          hash_scan_call (x, insn, table);
      }

  else if (GET_CODE (pat) == CLOBBER)
    hash_scan_clobber (pat, insn, table);
  else if (GET_CODE (pat) == CALL)
    hash_scan_call (pat, insn, table);
}
/* Dump the hash table TABLE to file FILE under the name NAME.  */

static void
dump_hash_table (FILE *file, const char *name, struct hash_table *table)
{
  int i;
  /* Flattened out table, so it's printed in proper order.  */
  struct expr **flat_table;
  unsigned int *hash_val;
  struct expr *expr;

  flat_table = xcalloc (table->n_elems, sizeof (struct expr *));
  hash_val = xmalloc (table->n_elems * sizeof (unsigned int));

  for (i = 0; i < (int) table->size; i++)
    for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
      {
        flat_table[expr->bitmap_index] = expr;
        hash_val[expr->bitmap_index] = i;
      }

  fprintf (file, "%s hash table (%d buckets, %d entries)\n",
           name, table->size, table->n_elems);

  for (i = 0; i < (int) table->n_elems; i++)
    if (flat_table[i] != 0)
      {
        expr = flat_table[i];
        fprintf (file, "Index %d (hash value %d)\n  ",
                 expr->bitmap_index, hash_val[i]);
        print_rtl (file, expr->expr);
        fprintf (file, "\n");
      }

  fprintf (file, "\n");

  free (flat_table);
  free (hash_val);
}
/* Record register first/last/block set information for REGNO in INSN.

   first_set records the first place in the block where the register
   is set and is used to compute "anticipatability".

   last_set records the last place in the block where the register
   is set and is used to compute "availability".

   last_bb records the block for which first_set and last_set are
   valid, as a quick test to invalidate them.

   reg_set_in_block records whether the register is set in the block
   and is used to compute "transparency".  */

static void
record_last_reg_set_info (rtx insn, int regno)
{
  struct reg_avail_info *info = &reg_avail_info[regno];
  int cuid = INSN_CUID (insn);

  info->last_set = cuid;
  if (info->last_bb != current_bb)
    {
      info->last_bb = current_bb;
      info->first_set = cuid;
      SET_BIT (reg_set_in_block[current_bb->index], regno);
    }
}
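
/* As an illustration of the first/last distinction: in a block containing

     insn A:  (set (reg 5) ...)
     insn B:  ... use of (reg 5) ...
     insn C:  (set (reg 5) ...)

   first_set is the CUID of A, so an expression using reg 5 is locally
   anticipatable only if it occurs before A, and last_set is the CUID of C,
   so such an expression is available at the block end only if computed
   after C.  (Illustrative sketch; see compute_local_properties for the
   actual uses.)  */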
/* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
   Note we store a pair of elements in the list, so they have to be
   taken off pairwise.  */

static void
canon_list_insert (rtx dest ATTRIBUTE_UNUSED, rtx unused1 ATTRIBUTE_UNUSED,
		   void *v_insn)
{
  rtx dest_addr, insn;
  int bb;

  while (GET_CODE (dest) == SUBREG
	 || GET_CODE (dest) == ZERO_EXTRACT
	 || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with a load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */

  if (! MEM_P (dest))
    return;

  dest_addr = get_addr (XEXP (dest, 0));
  dest_addr = canon_rtx (dest_addr);
  insn = (rtx) v_insn;
  bb = BLOCK_NUM (insn);

  canon_modify_mem_list[bb] =
    alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]);
  canon_modify_mem_list[bb] =
    alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]);
}
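
/* The list built above therefore has the shape

     canon_modify_mem_list[bb] = (mem_N, addr_N, ... mem_1, addr_1)

   i.e. each stored MEM followed by its canonicalized address, which is
   why compute_transp below consumes the list two nodes at a time.  */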
/* Record memory modification information for INSN.  We do not actually care
   about the memory location(s) that are set, or even how they are set
   (consider a CALL_INSN).  We merely need to record which insns modify
   memory.  */

static void
record_last_mem_set_info (rtx insn)
{
  int bb = BLOCK_NUM (insn);

  /* load_killed_in_block_p will handle the case of calls clobbering
     everything.  */
  modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]);
  bitmap_set_bit (modify_mem_list_set, bb);

  if (CALL_P (insn))
    {
      /* Note that traversals of this loop (other than for free-ing)
	 will break after encountering a CALL_INSN.  So, there's no
	 need to insert a pair of items, as canon_list_insert does.  */
      canon_modify_mem_list[bb] =
	alloc_INSN_LIST (insn, canon_modify_mem_list[bb]);
      bitmap_set_bit (blocks_with_calls, bb);
    }
  else
    note_stores (PATTERN (insn), canon_list_insert, (void *) insn);
}
/* Called from compute_hash_table via note_stores to handle one
   SET or CLOBBER in an insn.  DATA is really the instruction in which
   the SET is taking place.  */

static void
record_last_set_info (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
{
  rtx last_set_insn = (rtx) data;

  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (REG_P (dest))
    record_last_reg_set_info (last_set_insn, REGNO (dest));
  else if (MEM_P (dest)
	   /* Ignore pushes, they clobber nothing.  */
	   && ! push_operand (dest, GET_MODE (dest)))
    record_last_mem_set_info (last_set_insn);
}
/* Top level function to create an expression or assignment hash table.

   Expression entries are placed in the hash table if
   - they are of the form (set (pseudo-reg) src),
   - src is something we want to perform GCSE on,
   - none of the operands are subsequently modified in the block

   Assignment entries are placed in the hash table if
   - they are of the form (set (pseudo-reg) src),
   - src is something we want to perform const/copy propagation on,
   - none of the operands or target are subsequently modified in the block

   Currently src must be a pseudo-reg or a const_int.

   TABLE is the table computed.  */

static void
compute_hash_table_work (struct hash_table *table)
{
  unsigned int i;

  /* While we compute the hash table we also compute a bit array of which
     registers are set in which blocks.
     ??? This isn't needed during const/copy propagation, but it's cheap to
     compute.  Later.  */
  sbitmap_vector_zero (reg_set_in_block, last_basic_block);

  /* Re-cache any INSN_LIST nodes we have allocated.  */
  clear_modify_mem_tables ();
  /* Some working arrays used to track first and last set in each block.  */
  reg_avail_info = gmalloc (max_gcse_regno * sizeof (struct reg_avail_info));

  for (i = 0; i < max_gcse_regno; ++i)
    reg_avail_info[i].last_bb = NULL;

  FOR_EACH_BB (current_bb)
    {
      rtx insn;
      unsigned int regno;
      int in_libcall_block;

      /* First pass over the instructions records information used to
	 determine when registers and memory are first and last set.
	 ??? hard-reg reg_set_in_block computation
	 could be moved to compute_sets since they currently don't change.  */

      FOR_BB_INSNS (current_bb, insn)
	{
	  if (! INSN_P (insn))
	    continue;

	  if (CALL_P (insn))
	    {
	      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
		if (TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
		  record_last_reg_set_info (insn, regno);

	      mark_call (insn);
	    }

	  note_stores (PATTERN (insn), record_last_set_info, insn);
	}

      /* Insert implicit sets in the hash table.  */
      if (table->set_p
	  && implicit_sets[current_bb->index] != NULL_RTX)
	hash_scan_set (implicit_sets[current_bb->index],
		       BB_HEAD (current_bb), table);

      /* The next pass builds the hash table.  */
      in_libcall_block = 0;
      FOR_BB_INSNS (current_bb, insn)
	if (INSN_P (insn))
	  {
	    if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
	      in_libcall_block = 1;
	    else if (table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
	      in_libcall_block = 0;
	    hash_scan_insn (insn, table, in_libcall_block);
	    if (!table->set_p && find_reg_note (insn, REG_RETVAL, NULL_RTX))
	      in_libcall_block = 0;
	  }
    }

  free (reg_avail_info);
  reg_avail_info = NULL;
}
/* Allocate space for the set/expr hash TABLE.
   N_INSNS is the number of instructions in the function.
   It is used to determine the number of buckets to use.
   SET_P determines whether set or expression table will
   be created.  */

static void
alloc_hash_table (int n_insns, struct hash_table *table, int set_p)
{
  int n;

  table->size = n_insns / 4;
  if (table->size < 11)
    table->size = 11;

  /* Attempt to maintain efficient use of hash table.
     Making it an odd number is simplest for now.
     ??? Later take some measurements.  */
  table->size |= 1;
  n = table->size * sizeof (struct expr *);
  table->table = gmalloc (n);
  table->set_p = set_p;
}
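
/* E.g. a function with 4000 insns gets 4000 / 4 = 1000 buckets, which the
   OR with 1 rounds up to the odd number 1001; functions with fewer than
   44 insns get the 11-bucket minimum.  */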
/* Free things allocated by alloc_hash_table.  */

static void
free_hash_table (struct hash_table *table)
{
  free (table->table);
}
/* Compute the hash TABLE for doing copy/const propagation or
   expression hash table.  */

static void
compute_hash_table (struct hash_table *table)
{
  /* Initialize count of number of entries in hash table.  */
  table->n_elems = 0;
  memset (table->table, 0, table->size * sizeof (struct expr *));

  compute_hash_table_work (table);
}
/* Expression tracking support.  */

/* Lookup REGNO in the set TABLE.  The result is a pointer to the
   table entry, or NULL if not found.  */

static struct expr *
lookup_set (unsigned int regno, struct hash_table *table)
{
  unsigned int hash = hash_set (regno, table->size);
  struct expr *expr;

  expr = table->table[hash];

  while (expr && REGNO (SET_DEST (expr->expr)) != regno)
    expr = expr->next_same_hash;

  return expr;
}
/* Return the next entry for REGNO in list EXPR.  */

static struct expr *
next_set (unsigned int regno, struct expr *expr)
{
  do
    expr = expr->next_same_hash;
  while (expr && REGNO (SET_DEST (expr->expr)) != regno);

  return expr;
}
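
/* Together, lookup_set and next_set enumerate every recorded SET of a
   given register, e.g.:

     for (set = lookup_set (regno, table); set; set = next_set (regno, set))
       ...

   which is how find_avail_set and find_bypass_set below walk the chain.  */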
/* Like free_INSN_LIST_list or free_EXPR_LIST_list, except that the node
   types may be mixed.  */

static void
free_insn_expr_list_list (rtx *listp)
{
  rtx list, next;

  for (list = *listp; list; list = next)
    {
      next = XEXP (list, 1);
      if (GET_CODE (list) == EXPR_LIST)
	free_EXPR_LIST_node (list);
      else
	free_INSN_LIST_node (list);
    }

  *listp = 0;
}
/* Clear canon_modify_mem_list and modify_mem_list tables.  */
static void
clear_modify_mem_tables (void)
{
  unsigned i;
  bitmap_iterator bi;

  EXECUTE_IF_SET_IN_BITMAP (modify_mem_list_set, 0, i, bi)
    {
      free_INSN_LIST_list (modify_mem_list + i);
      free_insn_expr_list_list (canon_modify_mem_list + i);
    }
  bitmap_clear (modify_mem_list_set);
  bitmap_clear (blocks_with_calls);
}
/* Release memory used by modify_mem_list_set.  */

static void
free_modify_mem_tables (void)
{
  clear_modify_mem_tables ();
  free (modify_mem_list);
  free (canon_modify_mem_list);
  modify_mem_list = 0;
  canon_modify_mem_list = 0;
}
/* Reset tables used to keep track of what's still available [since the
   start of the block].  */

static void
reset_opr_set_tables (void)
{
  /* Maintain a bitmap of which regs have been set since beginning of
     the block.  */
  CLEAR_REG_SET (reg_set_bitmap);

  /* Also keep a record of the last instruction to modify memory.
     For now this is very trivial, we only record whether any memory
     location has been modified.  */
  clear_modify_mem_tables ();
}
/* Return nonzero if the operands of X are not set before INSN in
   INSN's basic block.  */

static int
oprs_not_set_p (rtx x, rtx insn)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    case MEM:
      if (load_killed_in_block_p (BLOCK_FOR_INSN (insn),
				  INSN_CUID (insn), x, 0))
	return 0;
      else
	return oprs_not_set_p (XEXP (x, 0), insn);

    case REG:
      return ! REGNO_REG_SET_P (reg_set_bitmap, REGNO (x));

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    return oprs_not_set_p (XEXP (x, i), insn);

	  if (! oprs_not_set_p (XEXP (x, i), insn))
	    return 0;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
	    return 0;
    }

  return 1;
}
/* Mark things set by a CALL.  */

static void
mark_call (rtx insn)
{
  if (! CONST_OR_PURE_CALL_P (insn))
    record_last_mem_set_info (insn);
}
/* Mark things set by a SET.  */

static void
mark_set (rtx pat, rtx insn)
{
  rtx dest = SET_DEST (pat);

  while (GET_CODE (dest) == SUBREG
	 || GET_CODE (dest) == ZERO_EXTRACT
	 || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  if (REG_P (dest))
    SET_REGNO_REG_SET (reg_set_bitmap, REGNO (dest));
  else if (MEM_P (dest))
    record_last_mem_set_info (insn);

  if (GET_CODE (SET_SRC (pat)) == CALL)
    mark_call (insn);
}
/* Record things set by a CLOBBER.  */

static void
mark_clobber (rtx pat, rtx insn)
{
  rtx clob = XEXP (pat, 0);

  while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
    clob = XEXP (clob, 0);

  if (REG_P (clob))
    SET_REGNO_REG_SET (reg_set_bitmap, REGNO (clob));
  else
    record_last_mem_set_info (insn);
}
/* Record things set by INSN.
   This data is used by oprs_not_set_p.  */

static void
mark_oprs_set (rtx insn)
{
  rtx pat = PATTERN (insn);
  int i;

  if (GET_CODE (pat) == SET)
    mark_set (pat, insn);
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
	rtx x = XVECEXP (pat, 0, i);

	if (GET_CODE (x) == SET)
	  mark_set (x, insn);
	else if (GET_CODE (x) == CLOBBER)
	  mark_clobber (x, insn);
	else if (GET_CODE (x) == CALL)
	  mark_call (insn);
      }

  else if (GET_CODE (pat) == CLOBBER)
    mark_clobber (pat, insn);
  else if (GET_CODE (pat) == CALL)
    mark_call (insn);
}
/* Compute copy/constant propagation working variables.  */

/* Local properties of assignments.  */
static sbitmap *cprop_pavloc;
static sbitmap *cprop_absaltered;

/* Global properties of assignments (computed from the local properties).  */
static sbitmap *cprop_avin;
static sbitmap *cprop_avout;

/* Allocate vars used for copy/const propagation.  N_BLOCKS is the number of
   basic blocks.  N_SETS is the number of sets.  */

static void
alloc_cprop_mem (int n_blocks, int n_sets)
{
  cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
  cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);

  cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
  cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
}

/* Free vars used by copy/const propagation.  */

static void
free_cprop_mem (void)
{
  sbitmap_vector_free (cprop_pavloc);
  sbitmap_vector_free (cprop_absaltered);
  sbitmap_vector_free (cprop_avin);
  sbitmap_vector_free (cprop_avout);
}
/* For each block, compute whether X is transparent.  X is either an
   expression or an assignment [though we don't care which, for this context
   an assignment is treated as an expression].  For each block where an
   element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX
   bit in BMAP.  */

static void
compute_transp (rtx x, int indx, sbitmap *bmap, int set_p)
{
  int i, j;
  basic_block bb;
  enum rtx_code code;
  struct reg_set *r;
  const char *fmt;

  /* repeat is used to turn tail-recursion into iteration since GCC
     can't do it when there's no return value.  */
 repeat:

  if (x == 0)
    return;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      if (set_p)
	{
	  if (REGNO (x) < FIRST_PSEUDO_REGISTER)
	    {
	      FOR_EACH_BB (bb)
		if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
		  SET_BIT (bmap[bb->index], indx);
	    }
	  else
	    {
	      for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
		SET_BIT (bmap[r->bb_index], indx);
	    }
	}
      else
	{
	  if (REGNO (x) < FIRST_PSEUDO_REGISTER)
	    {
	      FOR_EACH_BB (bb)
		if (TEST_BIT (reg_set_in_block[bb->index], REGNO (x)))
		  RESET_BIT (bmap[bb->index], indx);
	    }
	  else
	    {
	      for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
		RESET_BIT (bmap[r->bb_index], indx);
	    }
	}

      return;

    case MEM:
      if (! MEM_READONLY_P (x))
	{
	  bitmap_iterator bi;
	  unsigned bb_index;

	  /* First handle all the blocks with calls.  We don't need to
	     do any list walking for them.  */
	  EXECUTE_IF_SET_IN_BITMAP (blocks_with_calls, 0, bb_index, bi)
	    {
	      if (set_p)
		SET_BIT (bmap[bb_index], indx);
	      else
		RESET_BIT (bmap[bb_index], indx);
	    }

	  /* Now iterate over the blocks which have memory modifications
	     but which do not have any calls.  */
	  EXECUTE_IF_AND_COMPL_IN_BITMAP (modify_mem_list_set,
					  blocks_with_calls,
					  0, bb_index, bi)
	    {
	      rtx list_entry = canon_modify_mem_list[bb_index];

	      while (list_entry)
		{
		  rtx dest, dest_addr;

		  /* LIST_ENTRY must be an INSN of some kind that sets memory.
		     Examine each hunk of memory that is modified.  */

		  dest = XEXP (list_entry, 0);
		  list_entry = XEXP (list_entry, 1);
		  dest_addr = XEXP (list_entry, 0);

		  if (canon_true_dependence (dest, GET_MODE (dest), dest_addr,
					     x, rtx_addr_varies_p))
		    {
		      if (set_p)
			SET_BIT (bmap[bb_index], indx);
		      else
			RESET_BIT (bmap[bb_index], indx);
		      break;
		    }
		  list_entry = XEXP (list_entry, 1);
		}
	    }
	}

      x = XEXP (x, 0);
      goto repeat;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = XEXP (x, i);
	      goto repeat;
	    }

	  compute_transp (XEXP (x, i), indx, bmap, set_p);
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
    }
}
/* Top level routine to do the dataflow analysis needed by copy/const
   propagation.  */

static void
compute_cprop_data (void)
{
  compute_local_properties (cprop_absaltered, cprop_pavloc, NULL,
			    &set_hash_table);
  compute_available (cprop_pavloc, cprop_absaltered,
		     cprop_avout, cprop_avin);
}
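
/* Informally, compute_available solves the standard forward dataflow
   problem

     AVOUT(b) = PAVLOC(b) | (AVIN(b) & ~ABSALTERED(b))
     AVIN(b)  = intersection of AVOUT(p) over all predecessors p of b

   with AVIN of the entry block empty; the resulting cprop_avin bit for a
   set is what find_avail_set tests below.  */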
/* Copy/constant propagation.  */

/* Maximum number of register uses in an insn that we handle.  */
#define MAX_USES 8

/* Table of uses found in an insn.
   Allocated statically to avoid alloc/free complexity and overhead.  */
static struct reg_use reg_use_table[MAX_USES];

/* Index into `reg_use_table' while building it.  */
static int reg_use_count;

/* Set up a list of register numbers used in INSN.  The found uses are stored
   in `reg_use_table'.  `reg_use_count' is initialized to zero before entry,
   and contains the number of uses in the table upon exit.

   ??? If a register appears multiple times we will record it multiple times.
   This doesn't hurt anything but it will slow things down.  */

static void
find_used_regs (rtx *xptr, void *data ATTRIBUTE_UNUSED)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;
  rtx x = *xptr;

  /* repeat is used to turn tail-recursion into iteration since GCC
     can't do it when there's no return value.  */
 repeat:
  if (x == 0)
    return;

  code = GET_CODE (x);
  if (REG_P (x))
    {
      if (reg_use_count == MAX_USES)
	return;

      reg_use_table[reg_use_count].reg_rtx = x;
      reg_use_count++;
    }

  /* Recursively scan the operands of this expression.  */

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = XEXP (x, 0);
	      goto repeat;
	    }

	  find_used_regs (&XEXP (x, i), data);
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  find_used_regs (&XVECEXP (x, i, j), data);
    }
}
/* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
   Returns nonzero if successful.  */

static int
try_replace_reg (rtx from, rtx to, rtx insn)
{
  rtx note = find_reg_equal_equiv_note (insn);
  rtx src = 0;
  int success = 0;
  rtx set = single_set (insn);

  validate_replace_src_group (from, to, insn);
  if (num_changes_pending () && apply_change_group ())
    success = 1;

  /* Try to simplify SET_SRC if we have substituted a constant.  */
  if (success && set && CONSTANT_P (to))
    {
      src = simplify_rtx (SET_SRC (set));

      if (src)
	validate_change (insn, &SET_SRC (set), src, 0);
    }

  /* If there is already a NOTE, update the expression in it with our
     replacement.  */
  if (note != 0)
    XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), from, to);

  if (!success && set && reg_mentioned_p (from, SET_SRC (set)))
    {
      /* If above failed and this is a single set, try to simplify the source
	 of the set given our substitution.  We could perhaps try this for
	 multiple SETs, but it probably won't buy us anything.  */
      src = simplify_replace_rtx (SET_SRC (set), from, to);

      if (!rtx_equal_p (src, SET_SRC (set))
	  && validate_change (insn, &SET_SRC (set), src, 0))
	success = 1;
    }

  /* If we've failed to do replacement, have a single SET, don't already
     have a note, and have no special SET, add a REG_EQUAL note to not
     lose information.  */
  if (!success && note == 0 && set != 0
      && GET_CODE (SET_DEST (set)) != ZERO_EXTRACT)
    note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));

  /* A REG_EQUAL note may get simplified into a bare register.
     We don't allow that.  Remove such a note.  This code ought
     not to happen, because previous code ought to synthesize a
     reg-reg move, but be on the safe side.  */
  if (note && REG_P (XEXP (note, 0)))
    remove_note (insn, note);

  return success;
}
/* Find a set of REGNOs that are available on entry to INSN's block.  Returns
   NULL if no such set is found.  */

static struct expr *
find_avail_set (int regno, rtx insn)
{
  /* SET1 contains the last set found that can be returned to the caller for
     use in a substitution.  */
  struct expr *set1 = 0;

  /* Loops are not possible here.  To get a loop we would need two sets
     available at the start of the block containing INSN.  i.e. we would
     need two sets like this available at the start of the block:

       (set (reg X) (reg Y))
       (set (reg Y) (reg X))

     This can not happen since the set of (reg Y) would have killed the
     set of (reg X) making it unavailable at the start of this block.  */
  while (1)
    {
      rtx src;
      struct expr *set = lookup_set (regno, &set_hash_table);

      /* Find a set that is available at the start of the block
	 which contains INSN.  */
      while (set)
	{
	  if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
	    break;
	  set = next_set (regno, set);
	}

      /* If no available set was found we've reached the end of the
	 (possibly empty) copy chain.  */
      if (set == 0)
	break;

      gcc_assert (GET_CODE (set->expr) == SET);

      src = SET_SRC (set->expr);

      /* We know the set is available.
	 Now check that SRC is ANTLOC (i.e. none of the source operands
	 have changed since the start of the block).

	 If the source operand changed, we may still use it for the next
	 iteration of this loop, but we may not use it for substitutions.  */

      if (gcse_constant_p (src) || oprs_not_set_p (src, insn))
	set1 = set;

      /* If the source of the set is anything except a register, then
	 we have reached the end of the copy chain.  */
      if (! REG_P (src))
	break;

      /* Follow the copy chain, i.e. start another iteration of the loop
	 and see if we have an available copy into SRC.  */
      regno = REGNO (src);
    }

  /* SET1 holds the last set that was available and anticipatable at
     INSN.  */
  return set1;
}
/* Subroutine of cprop_insn that tries to propagate constants into
   JUMP_INSNS.  JUMP must be a conditional jump.  If SETCC is non-NULL
   it is the instruction that immediately precedes JUMP, and must be a
   single SET of a register.  FROM is what we will try to replace,
   SRC is the constant we will try to substitute for it.  Returns nonzero
   if a change was made.  */

static int
cprop_jump (basic_block bb, rtx setcc, rtx jump, rtx from, rtx src)
{
  rtx new, set_src, note_src;
  rtx set = pc_set (jump);
  rtx note = find_reg_equal_equiv_note (jump);

  if (note)
    {
      note_src = XEXP (note, 0);
      if (GET_CODE (note_src) == EXPR_LIST)
	note_src = NULL_RTX;
    }
  else note_src = NULL_RTX;

  /* Prefer REG_EQUAL notes except those containing EXPR_LISTs.  */
  set_src = note_src ? note_src : SET_SRC (set);

  /* First substitute the SETCC condition into the JUMP instruction,
     then substitute that given values into this expanded JUMP.  */
  if (setcc != NULL_RTX
      && !modified_between_p (from, setcc, jump)
      && !modified_between_p (src, setcc, jump))
    {
      rtx setcc_src;
      rtx setcc_set = single_set (setcc);
      rtx setcc_note = find_reg_equal_equiv_note (setcc);
      setcc_src = (setcc_note && GET_CODE (XEXP (setcc_note, 0)) != EXPR_LIST)
		? XEXP (setcc_note, 0) : SET_SRC (setcc_set);
      set_src = simplify_replace_rtx (set_src, SET_DEST (setcc_set),
				      setcc_src);
    }
  else
    setcc = NULL_RTX;

  new = simplify_replace_rtx (set_src, from, src);

  /* If no simplification can be made, then try the next register.  */
  if (rtx_equal_p (new, SET_SRC (set)))
    return 0;

  /* If this is now a no-op delete it, otherwise this must be a valid insn.  */
  if (new == pc_rtx)
    delete_insn (jump);
  else
    {
      /* Ensure the value computed inside the jump insn to be equivalent
	 to one computed by setcc.  */
      if (setcc && modified_in_p (new, setcc))
	return 0;
      if (! validate_change (jump, &SET_SRC (set), new, 0))
	{
	  /* When (some) constants are not valid in a comparison, and there
	     are two registers to be replaced by constants before the entire
	     comparison can be folded into a constant, we need to keep
	     intermediate information in REG_EQUAL notes.  For targets with
	     separate compare insns, such notes are added by try_replace_reg.
	     When we have a combined compare-and-branch instruction, however,
	     we need to attach a note to the branch itself to make this
	     optimization work.  */

	  if (!rtx_equal_p (new, note_src))
	    set_unique_reg_note (jump, REG_EQUAL, copy_rtx (new));
	  return 0;
	}

      /* Remove REG_EQUAL note after simplification.  */
      if (note_src)
	remove_note (jump, note);

      /* If this has turned into an unconditional jump,
	 then put a barrier after it so that the unreachable
	 code will be deleted.  */
      if (GET_CODE (SET_SRC (set)) == LABEL_REF)
	emit_barrier_after (jump);
    }

#ifdef HAVE_cc0
  /* Delete the cc0 setter.  */
  if (setcc != NULL && CC0_P (SET_DEST (single_set (setcc))))
    delete_insn (setcc);
#endif

  run_jump_opt_after_gcse = 1;

  global_const_prop_count++;
  if (gcse_file != NULL)
    {
      fprintf (gcse_file,
	       "GLOBAL CONST-PROP: Replacing reg %d in jump_insn %d with constant ",
	       REGNO (from), INSN_UID (jump));
      print_rtl (gcse_file, src);
      fprintf (gcse_file, "\n");
    }
  purge_dead_edges (bb);

  return 1;
}
static bool
constprop_register (rtx insn, rtx from, rtx to, bool alter_jumps)
{
  rtx sset;

  /* Check for reg or cc0 setting instructions followed by
     conditional branch instructions first.  */
  if (alter_jumps
      && (sset = single_set (insn)) != NULL
      && NEXT_INSN (insn)
      && any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn)))
    {
      rtx dest = SET_DEST (sset);
      if ((REG_P (dest) || CC0_P (dest))
	  && cprop_jump (BLOCK_FOR_INSN (insn), insn, NEXT_INSN (insn),
			 from, to))
	return 1;
    }

  /* Handle normal insns next.  */
  if (NONJUMP_INSN_P (insn)
      && try_replace_reg (from, to, insn))
    return 1;

  /* Try to propagate a CONST_INT into a conditional jump.
     We're pretty specific about what we will handle in this
     code, we can extend this as necessary over time.

     Right now the insn in question must look like
     (set (pc) (if_then_else ...))  */
  else if (alter_jumps && any_condjump_p (insn) && onlyjump_p (insn))
    return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, to);
  return 0;
}
/* Perform constant and copy propagation on INSN.
   The result is nonzero if a change was made.  */

static int
cprop_insn (rtx insn, int alter_jumps)
{
  struct reg_use *reg_used;
  int changed = 0;
  rtx note;

  if (!INSN_P (insn))
    return 0;

  reg_use_count = 0;
  note_uses (&PATTERN (insn), find_used_regs, NULL);

  note = find_reg_equal_equiv_note (insn);

  /* We may win even when propagating constants into notes.  */
  if (note)
    find_used_regs (&XEXP (note, 0), NULL);

  for (reg_used = &reg_use_table[0]; reg_use_count > 0;
       reg_used++, reg_use_count--)
    {
      unsigned int regno = REGNO (reg_used->reg_rtx);
      rtx pat, src;
      struct expr *set;

      /* Ignore registers created by GCSE.
	 We do this because ...  */
      if (regno >= max_gcse_regno)
	continue;

      /* If the register has already been set in this block, there's
	 nothing we can do.  */
      if (! oprs_not_set_p (reg_used->reg_rtx, insn))
	continue;

      /* Find an assignment that sets reg_used and is available
	 at the start of the block.  */
      set = find_avail_set (regno, insn);
      if (! set)
	continue;

      pat = set->expr;
      /* ??? We might be able to handle PARALLELs.  Later.  */
      gcc_assert (GET_CODE (pat) == SET);

      src = SET_SRC (pat);

      /* Constant propagation.  */
      if (gcse_constant_p (src))
	{
	  if (constprop_register (insn, reg_used->reg_rtx, src, alter_jumps))
	    {
	      changed = 1;
	      global_const_prop_count++;
	      if (gcse_file != NULL)
		{
		  fprintf (gcse_file,
			   "GLOBAL CONST-PROP: Replacing reg %d in ", regno);
		  fprintf (gcse_file, "insn %d with constant ",
			   INSN_UID (insn));
		  print_rtl (gcse_file, src);
		  fprintf (gcse_file, "\n");
		}
	      if (INSN_DELETED_P (insn))
		return 1;
	    }
	}
      else if (REG_P (src)
	       && REGNO (src) >= FIRST_PSEUDO_REGISTER
	       && REGNO (src) != regno)
	{
	  if (try_replace_reg (reg_used->reg_rtx, src, insn))
	    {
	      changed = 1;
	      global_copy_prop_count++;
	      if (gcse_file != NULL)
		{
		  fprintf (gcse_file,
			   "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
			   regno, INSN_UID (insn));
		  fprintf (gcse_file, " with reg %d\n", REGNO (src));
		}

	      /* The original insn setting reg_used may or may not now be
		 deletable.  We leave the deletion to flow.  */
	      /* FIXME: If it turns out that the insn isn't deletable,
		 then we may have unnecessarily extended register lifetimes
		 and made things worse.  */
	    }
	}
    }

  return changed;
}
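
/* A worked example of the constant-propagation arm above: with

     (set (reg 100) (const_int 4))

   available on entry to the block, an insn

     (set (reg 101) (plus (reg 100) (reg 103)))

   has its use of reg 100 replaced, giving

     (set (reg 101) (plus (const_int 4) (reg 103)))

   provided try_replace_reg can validate the change.  (Register numbers
   are illustrative.)  */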
/* Like find_used_regs, but avoid recording uses that appear in
   input-output contexts such as zero_extract or pre_dec.  This
   restricts the cases we consider to those for which local cprop
   can legitimately make replacements.  */

static void
local_cprop_find_used_regs (rtx *xptr, void *data)
{
  rtx x = *xptr;

  if (x == 0)
    return;

  switch (GET_CODE (x))
    {
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
    case STRICT_LOW_PART:
      return;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      /* Can only legitimately appear this early in the context of
	 stack pushes for function arguments, but handle all of the
	 codes nonetheless.  */
      return;

    case SUBREG:
      /* Setting a subreg of a register larger than word_mode leaves
	 the non-written words unchanged.  */
      if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) > BITS_PER_WORD)
	return;
      break;

    default:
      break;
    }

  find_used_regs (xptr, data);
}
/* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
   their REG_EQUAL notes need updating.  */

static bool
do_local_cprop (rtx x, rtx insn, bool alter_jumps, rtx *libcall_sp)
{
  rtx newreg = NULL, newcnst = NULL;

  /* Rule out USE instructions and ASM statements as we don't want to
     change the hard registers mentioned.  */
  if (REG_P (x)
      && (REGNO (x) >= FIRST_PSEUDO_REGISTER
	  || (GET_CODE (PATTERN (insn)) != USE
	      && asm_noperands (PATTERN (insn)) < 0)))
    {
      cselib_val *val = cselib_lookup (x, GET_MODE (x), 0);
      struct elt_loc_list *l;

      if (!val)
	return false;
      for (l = val->locs; l; l = l->next)
	{
	  rtx this_rtx = l->loc;
	  rtx note;

	  /* Don't CSE non-constant values out of libcall blocks.  */
	  if (l->in_libcall && ! CONSTANT_P (this_rtx))
	    continue;

	  if (gcse_constant_p (this_rtx))
	    newcnst = this_rtx;
	  if (REG_P (this_rtx) && REGNO (this_rtx) >= FIRST_PSEUDO_REGISTER
	      /* Don't copy propagate if it has attached REG_EQUIV note.
		 At this point only function parameters should have
		 REG_EQUIV notes and if the argument slot is used somewhere
		 explicitly, it means the address of the parameter has been
		 taken, so we should not extend the lifetime of the pseudo.  */
	      && (!(note = find_reg_note (l->setting_insn, REG_EQUIV, NULL_RTX))
		  || ! MEM_P (XEXP (note, 0))))
	    newreg = this_rtx;
	}
      if (newcnst && constprop_register (insn, x, newcnst, alter_jumps))
	{
	  /* If we find a case where we can't fix the retval REG_EQUAL notes
	     to match the new register, we either have to abandon this
	     replacement or fix delete_trivially_dead_insns to preserve the
	     setting insn, or make it delete the REG_EQUAL note, and fix up
	     all passes that require the REG_EQUAL note there.  */
	  bool adjusted;

	  adjusted = adjust_libcall_notes (x, newcnst, insn, libcall_sp);
	  gcc_assert (adjusted);

	  if (gcse_file != NULL)
	    {
	      fprintf (gcse_file, "LOCAL CONST-PROP: Replacing reg %d in ",
		       REGNO (x));
	      fprintf (gcse_file, "insn %d with constant ",
		       INSN_UID (insn));
	      print_rtl (gcse_file, newcnst);
	      fprintf (gcse_file, "\n");
	    }
	  local_const_prop_count++;
	  return true;
	}
      else if (newreg && newreg != x && try_replace_reg (x, newreg, insn))
	{
	  adjust_libcall_notes (x, newreg, insn, libcall_sp);
	  if (gcse_file != NULL)
	    {
	      fprintf (gcse_file,
		       "LOCAL COPY-PROP: Replacing reg %d in insn %d",
		       REGNO (x), INSN_UID (insn));
	      fprintf (gcse_file, " with reg %d\n", REGNO (newreg));
	    }
	  local_copy_prop_count++;
	  return true;
	}
    }
  return false;
}
/* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
   their REG_EQUAL notes need updating to reflect that OLDREG has been
   replaced with NEWVAL in INSN.  Return true if all substitutions could
   be made.  */

static bool
adjust_libcall_notes (rtx oldreg, rtx newval, rtx insn, rtx *libcall_sp)
{
  rtx end;

  while ((end = *libcall_sp++))
    {
      rtx note = find_reg_equal_equiv_note (end);

      if (! note)
	continue;

      if (REG_P (newval))
	{
	  if (reg_set_between_p (newval, PREV_INSN (insn), end))
	    {
	      do
		{
		  note = find_reg_equal_equiv_note (end);
		  if (! note)
		    continue;
		  if (reg_mentioned_p (newval, XEXP (note, 0)))
		    return false;
		}
	      while ((end = *libcall_sp++));
	      return true;
	    }
	}
      XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), oldreg, newval);
      insn = end;
    }
  return true;
}

#define MAX_NESTED_LIBCALLS 9
/* Do local const/copy propagation (i.e. within each basic block).
   If ALTER_JUMPS is true, allow propagating into jump insns, which
   could modify the CFG.  */

static void
local_cprop_pass (bool alter_jumps)
{
  basic_block bb;
  rtx insn;
  struct reg_use *reg_used;
  rtx libcall_stack[MAX_NESTED_LIBCALLS + 1], *libcall_sp;
  bool changed = false;

  cselib_init (false);
  libcall_sp = &libcall_stack[MAX_NESTED_LIBCALLS];
  *libcall_sp = 0;
  FOR_EACH_BB (bb)
    {
      FOR_BB_INSNS (bb, insn)
	{
	  if (INSN_P (insn))
	    {
	      rtx note = find_reg_note (insn, REG_LIBCALL, NULL_RTX);

	      if (note)
		{
		  gcc_assert (libcall_sp != libcall_stack);
		  *--libcall_sp = XEXP (note, 0);
		}
	      note = find_reg_note (insn, REG_RETVAL, NULL_RTX);
	      if (note)
		libcall_sp++;
	      note = find_reg_equal_equiv_note (insn);
	      do
		{
		  reg_use_count = 0;
		  note_uses (&PATTERN (insn), local_cprop_find_used_regs,
			     NULL);
		  if (note)
		    local_cprop_find_used_regs (&XEXP (note, 0), NULL);

		  for (reg_used = &reg_use_table[0]; reg_use_count > 0;
		       reg_used++, reg_use_count--)
		    if (do_local_cprop (reg_used->reg_rtx, insn, alter_jumps,
					libcall_sp))
		      {
			changed = true;
			break;
		      }
		  if (INSN_DELETED_P (insn))
		    break;
		}
	      while (reg_use_count);
	    }
	  cselib_process_insn (insn);
	}

      /* Forget everything at the end of a basic block.  Make sure we are
	 not inside a libcall, they should never cross basic blocks.  */
      cselib_clear_table ();
      gcc_assert (libcall_sp == &libcall_stack[MAX_NESTED_LIBCALLS]);
    }

  cselib_finish ();

  /* Global analysis may get into infinite loops for unreachable blocks.  */
  if (changed && alter_jumps)
    {
      delete_unreachable_blocks ();
      free_reg_set_mem ();
      alloc_reg_set_mem (max_reg_num ());
    }
}
/* Forward propagate copies.  This includes copies and constants.  Return
   nonzero if a change was made.  */

static int
cprop (int alter_jumps)
{
  int changed;
  basic_block bb;
  rtx insn;

  /* Note we start at block 1.  */
  if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
    {
      if (gcse_file != NULL)
	fprintf (gcse_file, "\n");
      return 0;
    }

  changed = 0;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb, EXIT_BLOCK_PTR,
		  next_bb)
    {
      /* Reset tables used to keep track of what's still valid [since the
	 start of the block].  */
      reset_opr_set_tables ();

      FOR_BB_INSNS (bb, insn)
	if (INSN_P (insn))
	  {
	    changed |= cprop_insn (insn, alter_jumps);

	    /* Keep track of everything modified by this insn.  */
	    /* ??? Need to be careful w.r.t. mods done to INSN.  Don't
	       call mark_oprs_set if we turned the insn into a NOTE.  */
	    if (! NOTE_P (insn))
	      mark_oprs_set (insn);
	  }
    }

  if (gcse_file != NULL)
    fprintf (gcse_file, "\n");

  return changed;
}
/* Similar to get_condition, only the resulting condition must be
   valid at JUMP, instead of at EARLIEST.

   This differs from noce_get_condition in ifcvt.c in that we prefer not to
   settle for the condition variable in the jump instruction being integral.
   We prefer to be able to record the value of a user variable, rather than
   the value of a temporary used in a condition.  This could be solved by
   recording the value of *every* register scanned by canonicalize_condition,
   but this would require some code reorganization.  */

rtx
fis_get_condition (rtx jump)
{
  return get_condition (jump, NULL, false, true);
}
/* Check the comparison COND to see if we can safely form an implicit set from
   it.  COND is either an EQ or NE comparison.  */

static bool
implicit_set_cond_p (rtx cond)
{
  enum machine_mode mode = GET_MODE (XEXP (cond, 0));
  rtx cst = XEXP (cond, 1);

  /* We can't perform this optimization if either operand might be or might
     contain a signed zero.  */
  if (HONOR_SIGNED_ZEROS (mode))
    {
      /* It is sufficient to check if CST is or contains a zero.  We must
	 handle float, complex, and vector.  If any subpart is a zero, then
	 the optimization can't be performed.  */
      /* ??? The complex and vector checks are not implemented yet.  We just
	 always return zero for them.  */
      if (GET_CODE (cst) == CONST_DOUBLE)
	{
	  REAL_VALUE_TYPE d;
	  REAL_VALUE_FROM_CONST_DOUBLE (d, cst);
	  if (REAL_VALUES_EQUAL (d, dconst0))
	    return 0;
	}
      else
	return 0;
    }

  return gcse_constant_p (cst);
}
/* Find the implicit sets of a function.  An "implicit set" is a constraint
   on the value of a variable, implied by a conditional jump.  For example,
   following "if (x == 2)", the then branch may be optimized as though the
   conditional performed an "explicit set", in this example, "x = 2".  This
   function records the set patterns that are implicit at the start of each
   basic block.  */

static void
find_implicit_sets (void)
{
  basic_block bb, dest;
  unsigned int count;
  rtx cond, new;

  count = 0;
  FOR_EACH_BB (bb)
    /* Check for more than one successor.  */
    if (EDGE_COUNT (bb->succs) > 1)
      {
	cond = fis_get_condition (BB_END (bb));

	if (cond
	    && (GET_CODE (cond) == EQ || GET_CODE (cond) == NE)
	    && REG_P (XEXP (cond, 0))
	    && REGNO (XEXP (cond, 0)) >= FIRST_PSEUDO_REGISTER
	    && implicit_set_cond_p (cond))
	  {
	    dest = GET_CODE (cond) == EQ ? BRANCH_EDGE (bb)->dest
					 : FALLTHRU_EDGE (bb)->dest;

	    if (dest && single_pred_p (dest)
		&& dest != EXIT_BLOCK_PTR)
	      {
		new = gen_rtx_SET (VOIDmode, XEXP (cond, 0),
				   XEXP (cond, 1));
		implicit_sets[dest->index] = new;
		count++;
		if (gcse_file)
		  {
		    fprintf (gcse_file, "Implicit set of reg %d in ",
			     REGNO (XEXP (cond, 0)));
		    fprintf (gcse_file, "basic block %d\n", dest->index);
		  }
	      }
	  }
      }

  if (gcse_file)
    fprintf (gcse_file, "Found %d implicit sets\n", count);
}
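
/* Illustrative example: a block ending in

     (set (pc) (if_then_else (eq (reg 100) (const_int 2))
			     (label_ref L) (pc)))

   yields the implicit set (set (reg 100) (const_int 2)) for the branch
   target, provided that target has no other predecessors; for an NE
   condition it is the fall-through block that gets annotated instead.  */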
/* Perform one copy/constant propagation pass.
   PASS is the pass count.  If CPROP_JUMPS is true, perform constant
   propagation into conditional jumps.  If BYPASS_JUMPS is true,
   perform conditional jump bypassing optimizations.  */

static int
one_cprop_pass (int pass, bool cprop_jumps, bool bypass_jumps)
{
  int changed = 0;

  global_const_prop_count = local_const_prop_count = 0;
  global_copy_prop_count = local_copy_prop_count = 0;

  local_cprop_pass (cprop_jumps);

  /* Determine implicit sets.  */
  implicit_sets = xcalloc (last_basic_block, sizeof (rtx));
  find_implicit_sets ();

  alloc_hash_table (max_cuid, &set_hash_table, 1);
  compute_hash_table (&set_hash_table);

  /* Free implicit_sets before peak usage.  */
  free (implicit_sets);
  implicit_sets = NULL;

  if (gcse_file)
    dump_hash_table (gcse_file, "SET", &set_hash_table);
  if (set_hash_table.n_elems > 0)
    {
      alloc_cprop_mem (last_basic_block, set_hash_table.n_elems);
      compute_cprop_data ();
      changed = cprop (cprop_jumps);
      if (bypass_jumps)
	changed |= bypass_conditional_jumps ();
      free_cprop_mem ();
    }

  free_hash_table (&set_hash_table);

  if (gcse_file)
    {
      fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, ",
	       current_function_name (), pass, bytes_used);
      fprintf (gcse_file, "%d local const props, %d local copy props, ",
	       local_const_prop_count, local_copy_prop_count);
      fprintf (gcse_file, "%d global const props, %d global copy props\n\n",
	       global_const_prop_count, global_copy_prop_count);
    }
  /* Global analysis may get into infinite loops for unreachable blocks.  */
  if (changed && cprop_jumps)
    delete_unreachable_blocks ();

  return changed;
}
/* Bypass conditional jumps.  */

/* The value of last_basic_block at the beginning of the jump_bypass
   pass.  The use of redirect_edge_and_branch_force may introduce new
   basic blocks, but the data flow analysis is only valid for basic
   block indices less than bypass_last_basic_block.  */

static int bypass_last_basic_block;

/* Find a set of REGNO to a constant that is available at the end of basic
   block BB.  Returns NULL if no such set is found.  Based heavily upon
   find_avail_set.  */

static struct expr *
find_bypass_set (int regno, int bb)
{
  struct expr *result = 0;

  for (;;)
    {
      rtx src;
      struct expr *set = lookup_set (regno, &set_hash_table);

      while (set)
	{
	  if (TEST_BIT (cprop_avout[bb], set->bitmap_index))
	    break;
	  set = next_set (regno, set);
	}

      if (set == 0)
	break;

      gcc_assert (GET_CODE (set->expr) == SET);

      src = SET_SRC (set->expr);
      if (gcse_constant_p (src))
	result = set;

      if (! REG_P (src))
	break;

      regno = REGNO (src);
    }
  return result;
}
/* Subroutine of bypass_block that checks whether a pseudo is killed by
   any of the instructions inserted on an edge.  Jump bypassing places
   condition code setters on CFG edges using insert_insn_on_edge.  This
   function is required to check that our data flow analysis is still
   valid prior to commit_edge_insertions.  */

static bool
reg_killed_on_edge (rtx reg, edge e)
{
  rtx insn;

  for (insn = e->insns.r; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))
      return true;

  return false;
}
/* Subroutine of bypass_conditional_jumps that attempts to bypass the given
   basic block BB which has more than one predecessor.  If not NULL, SETCC
   is the first instruction of BB, which is immediately followed by JUMP_INSN
   JUMP.  Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
   Returns nonzero if a change was made.

   During the jump bypassing pass, we may place copies of SETCC instructions
   on CFG edges.  The following routine must be careful to pay attention to
   these inserted insns when performing its transformations.  */

static int
bypass_block (basic_block bb, rtx setcc, rtx jump)
{
  rtx insn, note;
  edge e, edest;
  int i, change;
  int may_be_loop_header;
  unsigned removed_p;
  edge_iterator ei;

  insn = (setcc != NULL) ? setcc : jump;

  /* Determine set of register uses in INSN.  */
  reg_use_count = 0;
  note_uses (&PATTERN (insn), find_used_regs, NULL);
  note = find_reg_equal_equiv_note (insn);
  if (note)
    find_used_regs (&XEXP (note, 0), NULL);

  may_be_loop_header = false;
  FOR_EACH_EDGE (e, ei, bb->preds)
    if (e->flags & EDGE_DFS_BACK)
      {
	may_be_loop_header = true;
	break;
      }

  change = 0;
  for (ei = ei_start (bb->preds); (e = ei_safe_edge (ei)); )
    {
      removed_p = 0;

      if (e->flags & EDGE_COMPLEX)
	{
	  ei_next (&ei);
	  continue;
	}

      /* We can't redirect edges from new basic blocks.  */
      if (e->src->index >= bypass_last_basic_block)
	{
	  ei_next (&ei);
	  continue;
	}

      /* The irreducible loops created by redirecting of edges entering the
	 loop from outside would decrease effectiveness of some of the
	 following optimizations, so prevent this.  */
      if (may_be_loop_header
	  && !(e->flags & EDGE_DFS_BACK))
	{
	  ei_next (&ei);
	  continue;
	}

      for (i = 0; i < reg_use_count; i++)
	{
	  struct reg_use *reg_used = &reg_use_table[i];
	  unsigned int regno = REGNO (reg_used->reg_rtx);
	  basic_block dest, old_dest;
	  struct expr *set;
	  rtx src, new;

	  if (regno >= max_gcse_regno)
	    continue;

	  set = find_bypass_set (regno, e->src->index);

	  if (! set)
	    continue;

	  /* Check the data flow is valid after edge insertions.  */
	  if (e->insns.r && reg_killed_on_edge (reg_used->reg_rtx, e))
	    continue;

	  src = SET_SRC (pc_set (jump));

	  if (setcc != NULL)
	    src = simplify_replace_rtx (src,
					SET_DEST (PATTERN (setcc)),
					SET_SRC (PATTERN (setcc)));

	  new = simplify_replace_rtx (src, reg_used->reg_rtx,
				      SET_SRC (set->expr));

	  /* Jump bypassing may have already placed instructions on
	     edges of the CFG.  We can't bypass an outgoing edge that
	     has instructions associated with it, as these insns won't
	     get executed if the incoming edge is redirected.  */

	  if (new == pc_rtx)
	    {
	      edest = FALLTHRU_EDGE (bb);
	      dest = edest->insns.r ? NULL : edest->dest;
	    }
	  else if (GET_CODE (new) == LABEL_REF)
	    {
	      dest = BLOCK_FOR_INSN (XEXP (new, 0));
	      /* Don't bypass edges containing instructions.  */
	      edest = find_edge (bb, dest);
	      if (edest && edest->insns.r)
		dest = NULL;
	    }
	  else
	    dest = NULL;

	  /* Avoid unification of the edge with other edges from original
	     branch.  We would end up emitting the instruction on "both"
	     edges.  */

	  if (dest && setcc && !CC0_P (SET_DEST (PATTERN (setcc)))
	      && find_edge (e->src, dest))
	    dest = NULL;

	  old_dest = e->dest;
	  if (dest != NULL
	      && dest != old_dest
	      && dest != EXIT_BLOCK_PTR)
	    {
	      redirect_edge_and_branch_force (e, dest);

	      /* Copy the register setter to the redirected edge.
		 Don't copy CC0 setters, as CC0 is dead after jump.  */
	      if (setcc)
		{
		  rtx pat = PATTERN (setcc);
		  if (!CC0_P (SET_DEST (pat)))
		    insert_insn_on_edge (copy_insn (pat), e);
		}

	      if (gcse_file != NULL)
		{
		  fprintf (gcse_file, "JUMP-BYPASS: Proved reg %d "
			   "in jump_insn %d equals constant ",
			   regno, INSN_UID (jump));
		  print_rtl (gcse_file, SET_SRC (set->expr));
		  fprintf (gcse_file, "\nBypass edge from %d->%d to %d\n",
			   e->src->index, old_dest->index, dest->index);
		}
	      change = 1;
	      removed_p = 1;
	      break;
	    }
	}
      if (!removed_p)
	ei_next (&ei);
    }
  return change;
}
/* Find basic blocks with more than one predecessor that only contain a
   single conditional jump.  If the result of the comparison is known at
   compile-time from any incoming edge, redirect that edge to the
   appropriate target.  Returns nonzero if a change was made.

   This function is now mis-named, because we also handle indirect jumps.  */

static int
bypass_conditional_jumps (void)
{
  basic_block bb;
  int changed;
  rtx setcc;
  rtx insn;
  rtx dest;

  /* Note we start at block 1.  */
  if (ENTRY_BLOCK_PTR->next_bb == EXIT_BLOCK_PTR)
    return 0;

  bypass_last_basic_block = last_basic_block;
  mark_dfs_back_edges ();

  changed = 0;
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb->next_bb,
		  EXIT_BLOCK_PTR, next_bb)
    {
      /* Check for more than one predecessor.  */
      if (!single_pred_p (bb))
	{
	  setcc = NULL_RTX;
	  FOR_BB_INSNS (bb, insn)
	    if (NONJUMP_INSN_P (insn))
	      {
		if (setcc)
		  break;
		if (GET_CODE (PATTERN (insn)) != SET)
		  break;

		dest = SET_DEST (PATTERN (insn));
		if (REG_P (dest) || CC0_P (dest))
		  setcc = insn;
		else
		  break;
	      }
	    else if (JUMP_P (insn))
	      {
		if ((any_condjump_p (insn) || computed_jump_p (insn))
		    && onlyjump_p (insn))
		  changed |= bypass_block (bb, setcc, insn);
		break;
	      }
	    else if (INSN_P (insn))
	      break;
	}
    }

  /* If we bypassed any register setting insns, we inserted a
     copy on the redirected edge.  These need to be committed.  */
  if (changed)
    commit_edge_insertions ();

  return changed;
}
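
/* A sketch of the bypass transformation: if the edge P->BB carries the
   available constant set "(set (reg 7) (const_int 0))" and BB consists
   solely of "if (reg 7 != 0) goto L", the condition folds to false along
   that edge, so P is redirected past the jump to BB's fall-through
   successor, with any required setcc copy placed on the redirected edge.  */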
/* Compute PRE+LCM working variables.  */

/* Local properties of expressions.  */
/* Nonzero for expressions that are transparent in the block.  */
static sbitmap *transp;

/* Nonzero for expressions that are transparent at the end of the block.
   This is only zero for expressions killed by an abnormal critical edge
   created by a call.  */
static sbitmap *transpout;

/* Nonzero for expressions that are computed (available) in the block.  */
static sbitmap *comp;

/* Nonzero for expressions that are locally anticipatable in the block.  */
static sbitmap *antloc;

/* Nonzero for expressions where this block is an optimal computation
   point.  */
static sbitmap *pre_optimal;

/* Nonzero for expressions which are redundant in a particular block.  */
static sbitmap *pre_redundant;

/* Nonzero for expressions which should be inserted on a specific edge.  */
static sbitmap *pre_insert_map;

/* Nonzero for expressions which should be deleted in a specific block.  */
static sbitmap *pre_delete_map;

/* Contains the edge_list returned by pre_edge_lcm.  */
static struct edge_list *edge_list;

/* Redundant insns.  */
static sbitmap pre_redundant_insns;

/* Allocate vars used for PRE analysis.  */

static void
alloc_pre_mem (int n_blocks, int n_exprs)
{
  transp = sbitmap_vector_alloc (n_blocks, n_exprs);
  comp = sbitmap_vector_alloc (n_blocks, n_exprs);
  antloc = sbitmap_vector_alloc (n_blocks, n_exprs);

  pre_optimal = NULL;
  pre_redundant = NULL;
  pre_insert_map = NULL;
  pre_delete_map = NULL;
  ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);

  /* pre_insert and pre_delete are allocated later.  */
}

/* Free vars used for PRE analysis.  */

static void
free_pre_mem (void)
{
  sbitmap_vector_free (transp);
  sbitmap_vector_free (comp);

  /* ANTLOC and AE_KILL are freed just after pre_lcm finishes.  */

  if (pre_optimal)
    sbitmap_vector_free (pre_optimal);
  if (pre_redundant)
    sbitmap_vector_free (pre_redundant);
  if (pre_insert_map)
    sbitmap_vector_free (pre_insert_map);
  if (pre_delete_map)
    sbitmap_vector_free (pre_delete_map);

  transp = comp = NULL;
  pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
}
/* Top level routine to do the dataflow analysis needed by PRE.  */

static void
compute_pre_data (void)
{
  sbitmap trapping_expr;
  basic_block bb;
  unsigned int ui;

  compute_local_properties (transp, comp, antloc, &expr_hash_table);
  sbitmap_vector_zero (ae_kill, last_basic_block);

  /* Collect expressions which might trap.  */
  trapping_expr = sbitmap_alloc (expr_hash_table.n_elems);
  sbitmap_zero (trapping_expr);
  for (ui = 0; ui < expr_hash_table.size; ui++)
    {
      struct expr *e;
      for (e = expr_hash_table.table[ui]; e != NULL; e = e->next_same_hash)
	if (may_trap_p (e->expr))
	  SET_BIT (trapping_expr, e->bitmap_index);
    }

  /* Compute ae_kill for each basic block using:

     ~(TRANSP | COMP)
  */

  FOR_EACH_BB (bb)
    {
      edge e;
      edge_iterator ei;

      /* If the current block is the destination of an abnormal edge, we
	 kill all trapping expressions because we won't be able to properly
	 place the instruction on the edge.  So make them neither
	 anticipatable nor transparent.  This is fairly conservative.  */
      FOR_EACH_EDGE (e, ei, bb->preds)
	if (e->flags & EDGE_ABNORMAL)
	  {
	    sbitmap_difference (antloc[bb->index], antloc[bb->index],
				trapping_expr);
	    sbitmap_difference (transp[bb->index], transp[bb->index],
				trapping_expr);
	    break;
	  }

      sbitmap_a_or_b (ae_kill[bb->index], transp[bb->index], comp[bb->index]);
      sbitmap_not (ae_kill[bb->index], ae_kill[bb->index]);
    }

  edge_list = pre_edge_lcm (gcse_file, expr_hash_table.n_elems, transp, comp,
			    antloc, ae_kill, &pre_insert_map, &pre_delete_map);
  sbitmap_vector_free (antloc);
  antloc = NULL;
  sbitmap_vector_free (ae_kill);
  ae_kill = NULL;
  sbitmap_free (trapping_expr);
}
/* Return nonzero if an occurrence of expression EXPR in OCCR_BB would reach
   block BB.

   VISITED is a pointer to a working buffer for tracking which BB's have
   been visited.  It is NULL for the top-level call.

   We treat reaching expressions that go through blocks containing the same
   reaching expression as "not reaching".  E.g. if EXPR is generated in blocks
   2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
   2 as not reaching.  The intent is to improve the probability of finding
   only one reaching expression and to reduce register lifetimes by picking
   the closest such expression.  */

static int
pre_expr_reaches_here_p_work (basic_block occr_bb, struct expr *expr,
			      basic_block bb, char *visited)
{
  edge pred;
  edge_iterator ei;

  FOR_EACH_EDGE (pred, ei, bb->preds)
    {
      basic_block pred_bb = pred->src;

      if (pred->src == ENTRY_BLOCK_PTR
	  /* Has this predecessor already been visited?  */
	  || visited[pred_bb->index])
	;/* Nothing to do.  */

      /* Does this predecessor generate this expression?  */
      else if (TEST_BIT (comp[pred_bb->index], expr->bitmap_index))
	{
	  /* Is this the occurrence we're looking for?
	     Note that there's only one generating occurrence per block
	     so we just need to check the block number.  */
	  if (occr_bb == pred_bb)
	    return 1;

	  visited[pred_bb->index] = 1;
	}
      /* Ignore this predecessor if it kills the expression.  */
      else if (! TEST_BIT (transp[pred_bb->index], expr->bitmap_index))
	visited[pred_bb->index] = 1;

      /* Neither gen nor kill.  */
      else
	{
	  visited[pred_bb->index] = 1;
	  if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
	    return 1;
	}
    }

  /* All paths have been checked.  */
  return 0;
}
/* The wrapper for pre_expr_reaches_here_work that ensures that any
   memory allocated for that function is returned.  */

static int
pre_expr_reaches_here_p (basic_block occr_bb, struct expr *expr,
			 basic_block bb)
{
  int rval;
  char *visited = xcalloc (last_basic_block, 1);

  rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);

  free (visited);
  return rval;
}
/* Given an expr, generate RTL which we can insert at the end of a BB,
   or on an edge.  Set the block number of any insns generated to
   the value of BB.  */

static rtx
process_insert_insn (struct expr *expr)
{
  rtx reg = expr->reaching_reg;
  rtx exp = copy_rtx (expr->expr);
  rtx pat;

  start_sequence ();

  /* If the expression is something that's an operand, like a constant,
     just copy it to a register.  */
  if (general_operand (exp, GET_MODE (reg)))
    emit_move_insn (reg, exp);

  /* Otherwise, make a new insn to compute this expression and make sure the
     insn will be recognized (this also adds any needed CLOBBERs).  Copy the
     expression to make sure we don't have any sharing issues.  */
  else
    {
      rtx insn = emit_insn (gen_rtx_SET (VOIDmode, reg, exp));

      if (insn_invalid_p (insn))
	gcc_unreachable ();
    }

  pat = get_insns ();
  end_sequence ();

  return pat;
}
/* Add EXPR to the end of basic block BB.

   This is used by both the PRE and code hoisting.

   For PRE, we want to verify that the expr is either transparent
   or locally anticipatable in the target block.  This check makes
   no sense for code hoisting.  */

static void
insert_insn_end_bb (struct expr *expr, basic_block bb, int pre)
{
  rtx insn = BB_END (bb);
  rtx new_insn;
  rtx reg = expr->reaching_reg;
  int regno = REGNO (reg);
  rtx pat, pat_end;

  pat = process_insert_insn (expr);
  gcc_assert (pat && INSN_P (pat));

  pat_end = pat;
  while (NEXT_INSN (pat_end) != NULL_RTX)
    pat_end = NEXT_INSN (pat_end);

  /* If the last insn is a jump, insert EXPR in front [taking care to
     handle cc0, etc. properly].  Similarly we need to take care of
     trapping instructions in presence of non-call exceptions.  */

  if (JUMP_P (insn)
      || (NONJUMP_INSN_P (insn)
	  && (!single_succ_p (bb)
	      || single_succ_edge (bb)->flags & EDGE_ABNORMAL)))
    {
#ifdef HAVE_cc0
      rtx note;
#endif
      /* It should always be the case that we can put these instructions
	 anywhere in the basic block when performing PRE optimizations.
	 Check this.  */
      gcc_assert (!NONJUMP_INSN_P (insn) || !pre
		  || TEST_BIT (antloc[bb->index], expr->bitmap_index)
		  || TEST_BIT (transp[bb->index], expr->bitmap_index));

      /* If this is a jump table, then we can't insert stuff here.  Since
	 we know the previous real insn must be the tablejump, we insert
	 the new instruction just before the tablejump.  */
      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
	  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	insn = prev_real_insn (insn);

#ifdef HAVE_cc0
      /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
	 if cc0 isn't set.  */
      note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
      if (note)
	insn = XEXP (note, 0);
      else
	{
	  rtx maybe_cc0_setter = prev_nonnote_insn (insn);
	  if (maybe_cc0_setter
	      && INSN_P (maybe_cc0_setter)
	      && sets_cc0_p (PATTERN (maybe_cc0_setter)))
	    insn = maybe_cc0_setter;
	}
#endif
      /* FIXME: What if something in cc0/jump uses value set in new insn?  */
      new_insn = emit_insn_before_noloc (pat, insn);
    }

  /* Likewise if the last insn is a call, as will happen in the presence
     of exception handling.  */
  else if (CALL_P (insn)
	   && (!single_succ_p (bb)
	       || single_succ_edge (bb)->flags & EDGE_ABNORMAL))
    {
      /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
	 we search backward and place the instructions before the first
	 parameter is loaded.  Do this for everyone for consistency and a
	 presumption that we'll get better code elsewhere as well.

	 It should always be the case that we can put these instructions
	 anywhere in the basic block when performing PRE optimizations.
	 Check this.  */

      gcc_assert (!pre
		  || TEST_BIT (antloc[bb->index], expr->bitmap_index)
		  || TEST_BIT (transp[bb->index], expr->bitmap_index));

      /* Since different machines initialize their parameter registers
	 in different orders, assume nothing.  Collect the set of all
	 parameter registers.  */
      insn = find_first_parameter_load (insn, BB_HEAD (bb));

      /* If we found all the parameter loads, then we want to insert
	 before the first parameter load.

	 If we did not find all the parameter loads, then we might have
	 stopped on the head of the block, which could be a CODE_LABEL.
	 If we inserted before the CODE_LABEL, then we would be putting
	 the insn in the wrong basic block.  In that case, put the insn
	 after the CODE_LABEL.  Also, respect NOTE_INSN_BASIC_BLOCK.  */
      while (LABEL_P (insn)
	     || NOTE_INSN_BASIC_BLOCK_P (insn))
	insn = NEXT_INSN (insn);

      new_insn = emit_insn_before_noloc (pat, insn);
    }
  else
    new_insn = emit_insn_after_noloc (pat, insn);

  while (1)
    {
      if (INSN_P (pat))
	{
	  add_label_notes (PATTERN (pat), new_insn);
	  note_stores (PATTERN (pat), record_set_info, pat);
	}
      if (pat == pat_end)
	break;
      pat = NEXT_INSN (pat);
    }

  gcse_create_count++;

  if (gcse_file)
    {
      fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, ",
	       bb->index, INSN_UID (new_insn));
      fprintf (gcse_file, "copying expression %d to reg %d\n",
	       expr->bitmap_index, regno);
    }
}
/* Insert partially redundant expressions on edges in the CFG to make
   the expressions fully redundant.  */

static int
pre_edge_insert (struct edge_list *edge_list, struct expr **index_map)
{
  int e, i, j, num_edges, set_size, did_insert = 0;
  sbitmap *inserted;

  /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
     if it reaches any of the deleted expressions.  */

  set_size = pre_insert_map[0]->size;
  num_edges = NUM_EDGES (edge_list);
  inserted = sbitmap_vector_alloc (num_edges, expr_hash_table.n_elems);
  sbitmap_vector_zero (inserted, num_edges);

  for (e = 0; e < num_edges; e++)
    {
      int indx;
      basic_block bb = INDEX_EDGE_PRED_BB (edge_list, e);

      for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
	{
	  SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];

	  for (j = indx;
	       insert && j < (int) expr_hash_table.n_elems;
	       j++, insert >>= 1)
	    if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
	      {
		struct expr *expr = index_map[j];
		struct occr *occr;

		/* Now look at each deleted occurrence of this expression.  */
		for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
		  {
		    if (! occr->deleted_p)
		      continue;

		    /* Insert this expression on this edge if it would
		       reach the deleted occurrence in BB.  */
		    if (!TEST_BIT (inserted[e], j))
		      {
			rtx insn;
			edge eg = INDEX_EDGE (edge_list, e);

			/* We can't insert anything on an abnormal and
			   critical edge, so we insert the insn at the end of
			   the previous block.  There are several alternatives
			   detailed in Morgan's book P277 (sec 10.5) for
			   handling this situation.  This one is easiest for
			   now.  */

			if (eg->flags & EDGE_ABNORMAL)
			  insert_insn_end_bb (index_map[j], bb, 0);
			else
			  {
			    insn = process_insert_insn (index_map[j]);
			    insert_insn_on_edge (insn, eg);
			  }

			if (gcse_file)
			  {
			    fprintf (gcse_file, "PRE/HOIST: edge (%d,%d), ",
				     bb->index,
				     INDEX_EDGE_SUCC_BB (edge_list, e)->index);
			    fprintf (gcse_file, "copy expression %d\n",
				     expr->bitmap_index);
			  }

			update_ld_motion_stores (expr);
			SET_BIT (inserted[e], j);
			did_insert = 1;
			gcse_create_count++;
		      }
		  }
	      }
	}
    }

  sbitmap_vector_free (inserted);
  return did_insert;
}
4190 /* Copy the result of EXPR->EXPR generated by INSN to EXPR->REACHING_REG.
4191 Given "old_reg <- expr" (INSN), instead of adding after it
4192 reaching_reg <- old_reg
4193 it's better to do the following:
4194 reaching_reg <- expr
4195 old_reg <- reaching_reg
4196 because this way copy propagation can discover additional PRE
4197 opportunities. But if this fails, we try the old way.
4198 When "expr" is a store, i.e.
4199 given "MEM <- old_reg", instead of adding after it
4200 reaching_reg <- old_reg
4201 it's better to add it before as follows:
4202 reaching_reg <- old_reg
4203 MEM <- reaching_reg. */
4206 pre_insert_copy_insn (struct expr
*expr
, rtx insn
)
4208 rtx reg
= expr
->reaching_reg
;
4209 int regno
= REGNO (reg
);
4210 int indx
= expr
->bitmap_index
;
4211 rtx pat
= PATTERN (insn
);
4216 /* This block matches the logic in hash_scan_insn. */
4217 switch (GET_CODE (pat
))
4224 /* Search through the parallel looking for the set whose
4225 source was the expression that we're interested in. */
4227 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
4229 rtx x
= XVECEXP (pat
, 0, i
);
4230 if (GET_CODE (x
) == SET
4231 && expr_equiv_p (SET_SRC (x
), expr
->expr
))
4243 if (REG_P (SET_DEST (set
)))
4245 old_reg
= SET_DEST (set
);
4246 /* Check if we can modify the set destination in the original insn. */
4247 if (validate_change (insn
, &SET_DEST (set
), reg
, 0))
4249 new_insn
= gen_move_insn (old_reg
, reg
);
4250 new_insn
= emit_insn_after (new_insn
, insn
);
4252 /* Keep register set table up to date. */
4253 record_one_set (regno
, insn
);
4257 new_insn
= gen_move_insn (reg
, old_reg
);
4258 new_insn
= emit_insn_after (new_insn
, insn
);
4260 /* Keep register set table up to date. */
4261 record_one_set (regno
, new_insn
);
4264 else /* This is possible only in case of a store to memory. */
4266 old_reg
= SET_SRC (set
);
4267 new_insn
= gen_move_insn (reg
, old_reg
);
4269 /* Check if we can modify the set source in the original insn. */
4270 if (validate_change (insn
, &SET_SRC (set
), reg
, 0))
4271 new_insn
= emit_insn_before (new_insn
, insn
);
4273 new_insn
= emit_insn_after (new_insn
, insn
);
4275 /* Keep register set table up to date. */
4276 record_one_set (regno
, new_insn
);
4279 gcse_create_count
++;
4283 "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
4284 BLOCK_NUM (insn
), INSN_UID (new_insn
), indx
,
4285 INSN_UID (insn
), regno
);
4288 /* Copy available expressions that reach the redundant expression
4289 to `reaching_reg'. */
4292 pre_insert_copies (void)
4294 unsigned int i
, added_copy
;
4299 /* For each available expression in the table, copy the result to
4300 `reaching_reg' if the expression reaches a deleted one.
4302 ??? The current algorithm is rather brute force.
4303 Need to do some profiling. */
4305 for (i
= 0; i
< expr_hash_table
.size
; i
++)
4306 for (expr
= expr_hash_table
.table
[i
]; expr
!= NULL
; expr
= expr
->next_same_hash
)
4308 /* If the basic block isn't reachable, PPOUT will be TRUE. However,
4309 we don't want to insert a copy here because the expression may not
4310 really be redundant. So only insert an insn if the expression was
4311 deleted. This test also avoids further processing if the
4312 expression wasn't deleted anywhere. */
4313 if (expr
->reaching_reg
== NULL
)
4316 /* Set when we add a copy for that expression. */
4319 for (occr
= expr
->antic_occr
; occr
!= NULL
; occr
= occr
->next
)
4321 if (! occr
->deleted_p
)
4324 for (avail
= expr
->avail_occr
; avail
!= NULL
; avail
= avail
->next
)
4326 rtx insn
= avail
->insn
;
4328 /* No need to handle this one if handled already. */
4329 if (avail
->copied_p
)
4332 /* Don't handle this one if it's a redundant one. */
4333 if (TEST_BIT (pre_redundant_insns
, INSN_CUID (insn
)))
4336 /* Or if the expression doesn't reach the deleted one. */
4337 if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail
->insn
),
4339 BLOCK_FOR_INSN (occr
->insn
)))
4344 /* Copy the result of avail to reaching_reg. */
4345 pre_insert_copy_insn (expr
, insn
);
4346 avail
->copied_p
= 1;
4351 update_ld_motion_stores (expr
);
4355 /* Emit move from SRC to DEST noting the equivalence with expression computed
4358 gcse_emit_move_after (rtx src
, rtx dest
, rtx insn
)
4361 rtx set
= single_set (insn
), set2
;
4365 /* This should never fail since we're creating a reg->reg copy
4366 we've verified to be valid. */
4368 new = emit_insn_after (gen_move_insn (dest
, src
), insn
);
4370 /* Note the equivalence for local CSE pass. */
4371 set2
= single_set (new);
4372 if (!set2
|| !rtx_equal_p (SET_DEST (set2
), dest
))
4374 if ((note
= find_reg_equal_equiv_note (insn
)))
4375 eqv
= XEXP (note
, 0);
4377 eqv
= SET_SRC (set
);
4379 set_unique_reg_note (new, REG_EQUAL
, copy_insn_1 (eqv
));
4384 /* Delete redundant computations.
4385 Deletion is done by changing the insn to copy the `reaching_reg' of
4386 the expression into the result of the SET. It is left to later passes
4387 (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.
4389 Returns nonzero if a change is made. */
4400 for (i
= 0; i
< expr_hash_table
.size
; i
++)
4401 for (expr
= expr_hash_table
.table
[i
];
4403 expr
= expr
->next_same_hash
)
4405 int indx
= expr
->bitmap_index
;
4407 /* We only need to search antic_occr since we require
4410 for (occr
= expr
->antic_occr
; occr
!= NULL
; occr
= occr
->next
)
4412 rtx insn
= occr
->insn
;
4414 basic_block bb
= BLOCK_FOR_INSN (insn
);
4416 /* We only delete insns that have a single_set. */
4417 if (TEST_BIT (pre_delete_map
[bb
->index
], indx
)
4418 && (set
= single_set (insn
)) != 0)
4420 /* Create a pseudo-reg to store the result of reaching
4421 expressions into. Get the mode for the new pseudo from
4422 the mode of the original destination pseudo. */
4423 if (expr
->reaching_reg
== NULL
)
4425 = gen_reg_rtx (GET_MODE (SET_DEST (set
)));
4427 gcse_emit_move_after (expr
->reaching_reg
, SET_DEST (set
), insn
);
4429 occr
->deleted_p
= 1;
4430 SET_BIT (pre_redundant_insns
, INSN_CUID (insn
));
4437 "PRE: redundant insn %d (expression %d) in ",
4438 INSN_UID (insn
), indx
);
4439 fprintf (gcse_file
, "bb %d, reaching reg is %d\n",
4440 bb
->index
, REGNO (expr
->reaching_reg
));
4449 /* Perform GCSE optimizations using PRE.
4450 This is called by one_pre_gcse_pass after all the dataflow analysis
4453 This is based on the original Morel-Renvoise paper Fred Chow's thesis, and
4454 lazy code motion from Knoop, Ruthing and Steffen as described in Advanced
4455 Compiler Design and Implementation.
4457 ??? A new pseudo reg is created to hold the reaching expression. The nice
4458 thing about the classical approach is that it would try to use an existing
4459 reg. If the register can't be adequately optimized [i.e. we introduce
4460 reload problems], one could add a pass here to propagate the new register
4463 ??? We don't handle single sets in PARALLELs because we're [currently] not
4464 able to copy the rest of the parallel when we insert copies to create full
4465 redundancies from partial redundancies. However, there's no reason why we
4466 can't handle PARALLELs in the cases where there are no partial
4473 int did_insert
, changed
;
4474 struct expr
**index_map
;
4477 /* Compute a mapping from expression number (`bitmap_index') to
4478 hash table entry. */
4480 index_map
= xcalloc (expr_hash_table
.n_elems
, sizeof (struct expr
*));
4481 for (i
= 0; i
< expr_hash_table
.size
; i
++)
4482 for (expr
= expr_hash_table
.table
[i
]; expr
!= NULL
; expr
= expr
->next_same_hash
)
4483 index_map
[expr
->bitmap_index
] = expr
;
4485 /* Reset bitmap used to track which insns are redundant. */
4486 pre_redundant_insns
= sbitmap_alloc (max_cuid
);
4487 sbitmap_zero (pre_redundant_insns
);
4489 /* Delete the redundant insns first so that
4490 - we know what register to use for the new insns and for the other
4491 ones with reaching expressions
4492 - we know which insns are redundant when we go to create copies */
4494 changed
= pre_delete ();
4496 did_insert
= pre_edge_insert (edge_list
, index_map
);
4498 /* In other places with reaching expressions, copy the expression to the
4499 specially allocated pseudo-reg that reaches the redundant expr. */
4500 pre_insert_copies ();
4503 commit_edge_insertions ();
4508 sbitmap_free (pre_redundant_insns
);
4512 /* Top level routine to perform one PRE GCSE pass.
4514 Return nonzero if a change was made. */
4517 one_pre_gcse_pass (int pass
)
4521 gcse_subst_count
= 0;
4522 gcse_create_count
= 0;
4524 alloc_hash_table (max_cuid
, &expr_hash_table
, 0);
4525 add_noreturn_fake_exit_edges ();
4527 compute_ld_motion_mems ();
4529 compute_hash_table (&expr_hash_table
);
4530 trim_ld_motion_mems ();
4532 dump_hash_table (gcse_file
, "Expression", &expr_hash_table
);
4534 if (expr_hash_table
.n_elems
> 0)
4536 alloc_pre_mem (last_basic_block
, expr_hash_table
.n_elems
);
4537 compute_pre_data ();
4538 changed
|= pre_gcse ();
4539 free_edge_list (edge_list
);
4544 remove_fake_exit_edges ();
4545 free_hash_table (&expr_hash_table
);
4549 fprintf (gcse_file
, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
4550 current_function_name (), pass
, bytes_used
);
4551 fprintf (gcse_file
, "%d substs, %d insns created\n",
4552 gcse_subst_count
, gcse_create_count
);
4558 /* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
4559 If notes are added to an insn which references a CODE_LABEL, the
4560 LABEL_NUSES count is incremented. We have to add REG_LABEL notes,
4561 because the following loop optimization pass requires them. */
4563 /* ??? This is very similar to the loop.c add_label_notes function. We
4564 could probably share code here. */
4566 /* ??? If there was a jump optimization pass after gcse and before loop,
4567 then we would not need to do this here, because jump would add the
4568 necessary REG_LABEL notes. */
4571 add_label_notes (rtx x
, rtx insn
)
4573 enum rtx_code code
= GET_CODE (x
);
4577 if (code
== LABEL_REF
&& !LABEL_REF_NONLOCAL_P (x
))
4579 /* This code used to ignore labels that referred to dispatch tables to
4580 avoid flow generating (slightly) worse code.
4582 We no longer ignore such label references (see LABEL_REF handling in
4583 mark_jump_label for additional information). */
4585 REG_NOTES (insn
) = gen_rtx_INSN_LIST (REG_LABEL
, XEXP (x
, 0),
4587 if (LABEL_P (XEXP (x
, 0)))
4588 LABEL_NUSES (XEXP (x
, 0))++;
4592 for (i
= GET_RTX_LENGTH (code
) - 1, fmt
= GET_RTX_FORMAT (code
); i
>= 0; i
--)
4595 add_label_notes (XEXP (x
, i
), insn
);
4596 else if (fmt
[i
] == 'E')
4597 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
4598 add_label_notes (XVECEXP (x
, i
, j
), insn
);
4602 /* Compute transparent outgoing information for each block.
4604 An expression is transparent to an edge unless it is killed by
4605 the edge itself. This can only happen with abnormal control flow,
4606 when the edge is traversed through a call. This happens with
4607 non-local labels and exceptions.
4609 This would not be necessary if we split the edge. While this is
4610 normally impossible for abnormal critical edges, with some effort
4611 it should be possible with exception handling, since we still have
4612 control over which handler should be invoked. But due to increased
4613 EH table sizes, this may not be worthwhile. */
4616 compute_transpout (void)
4622 sbitmap_vector_ones (transpout
, last_basic_block
);
4626 /* Note that flow inserted a nop a the end of basic blocks that
4627 end in call instructions for reasons other than abnormal
4629 if (! CALL_P (BB_END (bb
)))
4632 for (i
= 0; i
< expr_hash_table
.size
; i
++)
4633 for (expr
= expr_hash_table
.table
[i
]; expr
; expr
= expr
->next_same_hash
)
4634 if (MEM_P (expr
->expr
))
4636 if (GET_CODE (XEXP (expr
->expr
, 0)) == SYMBOL_REF
4637 && CONSTANT_POOL_ADDRESS_P (XEXP (expr
->expr
, 0)))
4640 /* ??? Optimally, we would use interprocedural alias
4641 analysis to determine if this mem is actually killed
4643 RESET_BIT (transpout
[bb
->index
], expr
->bitmap_index
);
4648 /* Code Hoisting variables and subroutines. */
4650 /* Very busy expressions. */
4651 static sbitmap
*hoist_vbein
;
4652 static sbitmap
*hoist_vbeout
;
4654 /* Hoistable expressions. */
4655 static sbitmap
*hoist_exprs
;
4657 /* ??? We could compute post dominators and run this algorithm in
4658 reverse to perform tail merging, doing so would probably be
4659 more effective than the tail merging code in jump.c.
4661 It's unclear if tail merging could be run in parallel with
4662 code hoisting. It would be nice. */
4664 /* Allocate vars used for code hoisting analysis. */
4667 alloc_code_hoist_mem (int n_blocks
, int n_exprs
)
4669 antloc
= sbitmap_vector_alloc (n_blocks
, n_exprs
);
4670 transp
= sbitmap_vector_alloc (n_blocks
, n_exprs
);
4671 comp
= sbitmap_vector_alloc (n_blocks
, n_exprs
);
4673 hoist_vbein
= sbitmap_vector_alloc (n_blocks
, n_exprs
);
4674 hoist_vbeout
= sbitmap_vector_alloc (n_blocks
, n_exprs
);
4675 hoist_exprs
= sbitmap_vector_alloc (n_blocks
, n_exprs
);
4676 transpout
= sbitmap_vector_alloc (n_blocks
, n_exprs
);
4679 /* Free vars used for code hoisting analysis. */
4682 free_code_hoist_mem (void)
4684 sbitmap_vector_free (antloc
);
4685 sbitmap_vector_free (transp
);
4686 sbitmap_vector_free (comp
);
4688 sbitmap_vector_free (hoist_vbein
);
4689 sbitmap_vector_free (hoist_vbeout
);
4690 sbitmap_vector_free (hoist_exprs
);
4691 sbitmap_vector_free (transpout
);
4693 free_dominance_info (CDI_DOMINATORS
);
4696 /* Compute the very busy expressions at entry/exit from each block.
4698 An expression is very busy if all paths from a given point
4699 compute the expression. */
4702 compute_code_hoist_vbeinout (void)
4704 int changed
, passes
;
4707 sbitmap_vector_zero (hoist_vbeout
, last_basic_block
);
4708 sbitmap_vector_zero (hoist_vbein
, last_basic_block
);
4717 /* We scan the blocks in the reverse order to speed up
4719 FOR_EACH_BB_REVERSE (bb
)
4721 changed
|= sbitmap_a_or_b_and_c_cg (hoist_vbein
[bb
->index
], antloc
[bb
->index
],
4722 hoist_vbeout
[bb
->index
], transp
[bb
->index
]);
4723 if (bb
->next_bb
!= EXIT_BLOCK_PTR
)
4724 sbitmap_intersection_of_succs (hoist_vbeout
[bb
->index
], hoist_vbein
, bb
->index
);
4731 fprintf (gcse_file
, "hoisting vbeinout computation: %d passes\n", passes
);
4734 /* Top level routine to do the dataflow analysis needed by code hoisting. */
4737 compute_code_hoist_data (void)
4739 compute_local_properties (transp
, comp
, antloc
, &expr_hash_table
);
4740 compute_transpout ();
4741 compute_code_hoist_vbeinout ();
4742 calculate_dominance_info (CDI_DOMINATORS
);
4744 fprintf (gcse_file
, "\n");
4747 /* Determine if the expression identified by EXPR_INDEX would
4748 reach BB unimpared if it was placed at the end of EXPR_BB.
4750 It's unclear exactly what Muchnick meant by "unimpared". It seems
4751 to me that the expression must either be computed or transparent in
4752 *every* block in the path(s) from EXPR_BB to BB. Any other definition
4753 would allow the expression to be hoisted out of loops, even if
4754 the expression wasn't a loop invariant.
4756 Contrast this to reachability for PRE where an expression is
4757 considered reachable if *any* path reaches instead of *all*
4761 hoist_expr_reaches_here_p (basic_block expr_bb
, int expr_index
, basic_block bb
, char *visited
)
4765 int visited_allocated_locally
= 0;
4768 if (visited
== NULL
)
4770 visited_allocated_locally
= 1;
4771 visited
= xcalloc (last_basic_block
, 1);
4774 FOR_EACH_EDGE (pred
, ei
, bb
->preds
)
4776 basic_block pred_bb
= pred
->src
;
4778 if (pred
->src
== ENTRY_BLOCK_PTR
)
4780 else if (pred_bb
== expr_bb
)
4782 else if (visited
[pred_bb
->index
])
4785 /* Does this predecessor generate this expression? */
4786 else if (TEST_BIT (comp
[pred_bb
->index
], expr_index
))
4788 else if (! TEST_BIT (transp
[pred_bb
->index
], expr_index
))
4794 visited
[pred_bb
->index
] = 1;
4795 if (! hoist_expr_reaches_here_p (expr_bb
, expr_index
,
4800 if (visited_allocated_locally
)
4803 return (pred
== NULL
);
4806 /* Actually perform code hoisting. */
4811 basic_block bb
, dominated
;
4813 unsigned int domby_len
;
4815 struct expr
**index_map
;
4818 sbitmap_vector_zero (hoist_exprs
, last_basic_block
);
4820 /* Compute a mapping from expression number (`bitmap_index') to
4821 hash table entry. */
4823 index_map
= xcalloc (expr_hash_table
.n_elems
, sizeof (struct expr
*));
4824 for (i
= 0; i
< expr_hash_table
.size
; i
++)
4825 for (expr
= expr_hash_table
.table
[i
]; expr
!= NULL
; expr
= expr
->next_same_hash
)
4826 index_map
[expr
->bitmap_index
] = expr
;
4828 /* Walk over each basic block looking for potentially hoistable
4829 expressions, nothing gets hoisted from the entry block. */
4833 int insn_inserted_p
;
4835 domby_len
= get_dominated_by (CDI_DOMINATORS
, bb
, &domby
);
4836 /* Examine each expression that is very busy at the exit of this
4837 block. These are the potentially hoistable expressions. */
4838 for (i
= 0; i
< hoist_vbeout
[bb
->index
]->n_bits
; i
++)
4842 if (TEST_BIT (hoist_vbeout
[bb
->index
], i
)
4843 && TEST_BIT (transpout
[bb
->index
], i
))
4845 /* We've found a potentially hoistable expression, now
4846 we look at every block BB dominates to see if it
4847 computes the expression. */
4848 for (j
= 0; j
< domby_len
; j
++)
4850 dominated
= domby
[j
];
4851 /* Ignore self dominance. */
4852 if (bb
== dominated
)
4854 /* We've found a dominated block, now see if it computes
4855 the busy expression and whether or not moving that
4856 expression to the "beginning" of that block is safe. */
4857 if (!TEST_BIT (antloc
[dominated
->index
], i
))
4860 /* Note if the expression would reach the dominated block
4861 unimpared if it was placed at the end of BB.
4863 Keep track of how many times this expression is hoistable
4864 from a dominated block into BB. */
4865 if (hoist_expr_reaches_here_p (bb
, i
, dominated
, NULL
))
4869 /* If we found more than one hoistable occurrence of this
4870 expression, then note it in the bitmap of expressions to
4871 hoist. It makes no sense to hoist things which are computed
4872 in only one BB, and doing so tends to pessimize register
4873 allocation. One could increase this value to try harder
4874 to avoid any possible code expansion due to register
4875 allocation issues; however experiments have shown that
4876 the vast majority of hoistable expressions are only movable
4877 from two successors, so raising this threshold is likely
4878 to nullify any benefit we get from code hoisting. */
4881 SET_BIT (hoist_exprs
[bb
->index
], i
);
4886 /* If we found nothing to hoist, then quit now. */
4893 /* Loop over all the hoistable expressions. */
4894 for (i
= 0; i
< hoist_exprs
[bb
->index
]->n_bits
; i
++)
4896 /* We want to insert the expression into BB only once, so
4897 note when we've inserted it. */
4898 insn_inserted_p
= 0;
4900 /* These tests should be the same as the tests above. */
4901 if (TEST_BIT (hoist_exprs
[bb
->index
], i
))
4903 /* We've found a potentially hoistable expression, now
4904 we look at every block BB dominates to see if it
4905 computes the expression. */
4906 for (j
= 0; j
< domby_len
; j
++)
4908 dominated
= domby
[j
];
4909 /* Ignore self dominance. */
4910 if (bb
== dominated
)
4913 /* We've found a dominated block, now see if it computes
4914 the busy expression and whether or not moving that
4915 expression to the "beginning" of that block is safe. */
4916 if (!TEST_BIT (antloc
[dominated
->index
], i
))
4919 /* The expression is computed in the dominated block and
4920 it would be safe to compute it at the start of the
4921 dominated block. Now we have to determine if the
4922 expression would reach the dominated block if it was
4923 placed at the end of BB. */
4924 if (hoist_expr_reaches_here_p (bb
, i
, dominated
, NULL
))
4926 struct expr
*expr
= index_map
[i
];
4927 struct occr
*occr
= expr
->antic_occr
;
4931 /* Find the right occurrence of this expression. */
4932 while (BLOCK_FOR_INSN (occr
->insn
) != dominated
&& occr
)
4937 set
= single_set (insn
);
4940 /* Create a pseudo-reg to store the result of reaching
4941 expressions into. Get the mode for the new pseudo
4942 from the mode of the original destination pseudo. */
4943 if (expr
->reaching_reg
== NULL
)
4945 = gen_reg_rtx (GET_MODE (SET_DEST (set
)));
4947 gcse_emit_move_after (expr
->reaching_reg
, SET_DEST (set
), insn
);
4949 occr
->deleted_p
= 1;
4950 if (!insn_inserted_p
)
4952 insert_insn_end_bb (index_map
[i
], bb
, 0);
4953 insn_inserted_p
= 1;
4965 /* Top level routine to perform one code hoisting (aka unification) pass
4967 Return nonzero if a change was made. */
4970 one_code_hoisting_pass (void)
4974 alloc_hash_table (max_cuid
, &expr_hash_table
, 0);
4975 compute_hash_table (&expr_hash_table
);
4977 dump_hash_table (gcse_file
, "Code Hosting Expressions", &expr_hash_table
);
4979 if (expr_hash_table
.n_elems
> 0)
4981 alloc_code_hoist_mem (last_basic_block
, expr_hash_table
.n_elems
);
4982 compute_code_hoist_data ();
4984 free_code_hoist_mem ();
4987 free_hash_table (&expr_hash_table
);
4992 /* Here we provide the things required to do store motion towards
4993 the exit. In order for this to be effective, gcse also needed to
4994 be taught how to move a load when it is kill only by a store to itself.
4999 void foo(float scale)
5001 for (i=0; i<10; i++)
5005 'i' is both loaded and stored to in the loop. Normally, gcse cannot move
5006 the load out since its live around the loop, and stored at the bottom
5009 The 'Load Motion' referred to and implemented in this file is
5010 an enhancement to gcse which when using edge based lcm, recognizes
5011 this situation and allows gcse to move the load out of the loop.
5013 Once gcse has hoisted the load, store motion can then push this
5014 load towards the exit, and we end up with no loads or stores of 'i'
5017 /* This will search the ldst list for a matching expression. If it
5018 doesn't find one, we create one and initialize it. */
5020 static struct ls_expr
*
5023 int do_not_record_p
= 0;
5024 struct ls_expr
* ptr
;
5027 hash
= hash_rtx (x
, GET_MODE (x
), &do_not_record_p
,
5028 NULL
, /*have_reg_qty=*/false);
5030 for (ptr
= pre_ldst_mems
; ptr
!= NULL
; ptr
= ptr
->next
)
5031 if (ptr
->hash_index
== hash
&& expr_equiv_p (ptr
->pattern
, x
))
5034 ptr
= xmalloc (sizeof (struct ls_expr
));
5036 ptr
->next
= pre_ldst_mems
;
5039 ptr
->pattern_regs
= NULL_RTX
;
5040 ptr
->loads
= NULL_RTX
;
5041 ptr
->stores
= NULL_RTX
;
5042 ptr
->reaching_reg
= NULL_RTX
;
5045 ptr
->hash_index
= hash
;
5046 pre_ldst_mems
= ptr
;
5051 /* Free up an individual ldst entry. */
5054 free_ldst_entry (struct ls_expr
* ptr
)
5056 free_INSN_LIST_list (& ptr
->loads
);
5057 free_INSN_LIST_list (& ptr
->stores
);
5062 /* Free up all memory associated with the ldst list. */
5065 free_ldst_mems (void)
5067 while (pre_ldst_mems
)
5069 struct ls_expr
* tmp
= pre_ldst_mems
;
5071 pre_ldst_mems
= pre_ldst_mems
->next
;
5073 free_ldst_entry (tmp
);
5076 pre_ldst_mems
= NULL
;
5079 /* Dump debugging info about the ldst list. */
5082 print_ldst_list (FILE * file
)
5084 struct ls_expr
* ptr
;
5086 fprintf (file
, "LDST list: \n");
5088 for (ptr
= first_ls_expr(); ptr
!= NULL
; ptr
= next_ls_expr (ptr
))
5090 fprintf (file
, " Pattern (%3d): ", ptr
->index
);
5092 print_rtl (file
, ptr
->pattern
);
5094 fprintf (file
, "\n Loads : ");
5097 print_rtl (file
, ptr
->loads
);
5099 fprintf (file
, "(nil)");
5101 fprintf (file
, "\n Stores : ");
5104 print_rtl (file
, ptr
->stores
);
5106 fprintf (file
, "(nil)");
5108 fprintf (file
, "\n\n");
5111 fprintf (file
, "\n");
5114 /* Returns 1 if X is in the list of ldst only expressions. */
5116 static struct ls_expr
*
5117 find_rtx_in_ldst (rtx x
)
5119 struct ls_expr
* ptr
;
5121 for (ptr
= pre_ldst_mems
; ptr
!= NULL
; ptr
= ptr
->next
)
5122 if (expr_equiv_p (ptr
->pattern
, x
) && ! ptr
->invalid
)
5128 /* Assign each element of the list of mems a monotonically increasing value. */
5131 enumerate_ldsts (void)
5133 struct ls_expr
* ptr
;
5136 for (ptr
= pre_ldst_mems
; ptr
!= NULL
; ptr
= ptr
->next
)
5142 /* Return first item in the list. */
5144 static inline struct ls_expr
*
5145 first_ls_expr (void)
5147 return pre_ldst_mems
;
5150 /* Return the next item in the list after the specified one. */
5152 static inline struct ls_expr
*
5153 next_ls_expr (struct ls_expr
* ptr
)
5158 /* Load Motion for loads which only kill themselves. */
5160 /* Return true if x is a simple MEM operation, with no registers or
5161 side effects. These are the types of loads we consider for the
5162 ld_motion list, otherwise we let the usual aliasing take care of it. */
5170 if (MEM_VOLATILE_P (x
))
5173 if (GET_MODE (x
) == BLKmode
)
5176 /* If we are handling exceptions, we must be careful with memory references
5177 that may trap. If we are not, the behavior is undefined, so we may just
5179 if (flag_non_call_exceptions
&& may_trap_p (x
))
5182 if (side_effects_p (x
))
5185 /* Do not consider function arguments passed on stack. */
5186 if (reg_mentioned_p (stack_pointer_rtx
, x
))
5189 if (flag_float_store
&& FLOAT_MODE_P (GET_MODE (x
)))
5195 /* Make sure there isn't a buried reference in this pattern anywhere.
5196 If there is, invalidate the entry for it since we're not capable
5197 of fixing it up just yet.. We have to be sure we know about ALL
5198 loads since the aliasing code will allow all entries in the
5199 ld_motion list to not-alias itself. If we miss a load, we will get
5200 the wrong value since gcse might common it and we won't know to
5204 invalidate_any_buried_refs (rtx x
)
5208 struct ls_expr
* ptr
;
5210 /* Invalidate it in the list. */
5211 if (MEM_P (x
) && simple_mem (x
))
5213 ptr
= ldst_entry (x
);
5217 /* Recursively process the insn. */
5218 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
5220 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
5223 invalidate_any_buried_refs (XEXP (x
, i
));
5224 else if (fmt
[i
] == 'E')
5225 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
5226 invalidate_any_buried_refs (XVECEXP (x
, i
, j
));
5230 /* Find all the 'simple' MEMs which are used in LOADs and STORES. Simple
5231 being defined as MEM loads and stores to symbols, with no side effects
5232 and no registers in the expression. For a MEM destination, we also
5233 check that the insn is still valid if we replace the destination with a
5234 REG, as is done in update_ld_motion_stores. If there are any uses/defs
5235 which don't match this criteria, they are invalidated and trimmed out
5239 compute_ld_motion_mems (void)
5241 struct ls_expr
* ptr
;
5245 pre_ldst_mems
= NULL
;
5249 FOR_BB_INSNS (bb
, insn
)
5253 if (GET_CODE (PATTERN (insn
)) == SET
)
5255 rtx src
= SET_SRC (PATTERN (insn
));
5256 rtx dest
= SET_DEST (PATTERN (insn
));
5258 /* Check for a simple LOAD... */
5259 if (MEM_P (src
) && simple_mem (src
))
5261 ptr
= ldst_entry (src
);
5263 ptr
->loads
= alloc_INSN_LIST (insn
, ptr
->loads
);
5269 /* Make sure there isn't a buried load somewhere. */
5270 invalidate_any_buried_refs (src
);
5273 /* Check for stores. Don't worry about aliased ones, they
5274 will block any movement we might do later. We only care
5275 about this exact pattern since those are the only
5276 circumstance that we will ignore the aliasing info. */
5277 if (MEM_P (dest
) && simple_mem (dest
))
5279 ptr
= ldst_entry (dest
);
5282 && GET_CODE (src
) != ASM_OPERANDS
5283 /* Check for REG manually since want_to_gcse_p
5284 returns 0 for all REGs. */
5285 && can_assign_to_reg_p (src
))
5286 ptr
->stores
= alloc_INSN_LIST (insn
, ptr
->stores
);
5292 invalidate_any_buried_refs (PATTERN (insn
));
5298 /* Remove any references that have been either invalidated or are not in the
5299 expression list for pre gcse. */
5302 trim_ld_motion_mems (void)
5304 struct ls_expr
* * last
= & pre_ldst_mems
;
5305 struct ls_expr
* ptr
= pre_ldst_mems
;
5311 /* Delete if entry has been made invalid. */
5314 /* Delete if we cannot find this mem in the expression list. */
5315 unsigned int hash
= ptr
->hash_index
% expr_hash_table
.size
;
5317 for (expr
= expr_hash_table
.table
[hash
];
5319 expr
= expr
->next_same_hash
)
5320 if (expr_equiv_p (expr
->expr
, ptr
->pattern
))
5324 expr
= (struct expr
*) 0;
5328 /* Set the expression field if we are keeping it. */
5336 free_ldst_entry (ptr
);
5341 /* Show the world what we've found. */
5342 if (gcse_file
&& pre_ldst_mems
!= NULL
)
5343 print_ldst_list (gcse_file
);
5346 /* This routine will take an expression which we are replacing with
5347 a reaching register, and update any stores that are needed if
5348 that expression is in the ld_motion list. Stores are updated by
5349 copying their SRC to the reaching register, and then storing
5350 the reaching register into the store location. These keeps the
5351 correct value in the reaching register for the loads. */
5354 update_ld_motion_stores (struct expr
* expr
)
5356 struct ls_expr
* mem_ptr
;
5358 if ((mem_ptr
= find_rtx_in_ldst (expr
->expr
)))
5360 /* We can try to find just the REACHED stores, but is shouldn't
5361 matter to set the reaching reg everywhere... some might be
5362 dead and should be eliminated later. */
5364 /* We replace (set mem expr) with (set reg expr) (set mem reg)
5365 where reg is the reaching reg used in the load. We checked in
5366 compute_ld_motion_mems that we can replace (set mem expr) with
5367 (set reg expr) in that insn. */
5368 rtx list
= mem_ptr
->stores
;
5370 for ( ; list
!= NULL_RTX
; list
= XEXP (list
, 1))
5372 rtx insn
= XEXP (list
, 0);
5373 rtx pat
= PATTERN (insn
);
5374 rtx src
= SET_SRC (pat
);
5375 rtx reg
= expr
->reaching_reg
;
5378 /* If we've already copied it, continue. */
5379 if (expr
->reaching_reg
== src
)
5384 fprintf (gcse_file
, "PRE: store updated with reaching reg ");
5385 print_rtl (gcse_file
, expr
->reaching_reg
);
5386 fprintf (gcse_file
, ":\n ");
5387 print_inline_rtx (gcse_file
, insn
, 8);
5388 fprintf (gcse_file
, "\n");
5391 copy
= gen_move_insn ( reg
, copy_rtx (SET_SRC (pat
)));
5392 new = emit_insn_before (copy
, insn
);
5393 record_one_set (REGNO (reg
), new);
5394 SET_SRC (pat
) = reg
;
5396 /* un-recognize this pattern since it's probably different now. */
5397 INSN_CODE (insn
) = -1;
5398 gcse_create_count
++;
5403 /* Store motion code. */
5405 #define ANTIC_STORE_LIST(x) ((x)->loads)
5406 #define AVAIL_STORE_LIST(x) ((x)->stores)
5407 #define LAST_AVAIL_CHECK_FAILURE(x) ((x)->reaching_reg)
5409 /* This is used to communicate the target bitvector we want to use in the
5410 reg_set_info routine when called via the note_stores mechanism. */
5411 static int * regvec
;
5413 /* And current insn, for the same routine. */
5414 static rtx compute_store_table_current_insn
;
5416 /* Used in computing the reverse edge graph bit vectors. */
5417 static sbitmap
* st_antloc
;
5419 /* Global holding the number of store expressions we are dealing with. */
5420 static int num_stores
;
5422 /* Checks to set if we need to mark a register set. Called from
5426 reg_set_info (rtx dest
, rtx setter ATTRIBUTE_UNUSED
,
5429 sbitmap bb_reg
= data
;
5431 if (GET_CODE (dest
) == SUBREG
)
5432 dest
= SUBREG_REG (dest
);
5436 regvec
[REGNO (dest
)] = INSN_UID (compute_store_table_current_insn
);
5438 SET_BIT (bb_reg
, REGNO (dest
));
5442 /* Clear any mark that says that this insn sets dest. Called from
5446 reg_clear_last_set (rtx dest
, rtx setter ATTRIBUTE_UNUSED
,
5449 int *dead_vec
= data
;
5451 if (GET_CODE (dest
) == SUBREG
)
5452 dest
= SUBREG_REG (dest
);
5455 dead_vec
[REGNO (dest
)] == INSN_UID (compute_store_table_current_insn
))
5456 dead_vec
[REGNO (dest
)] = 0;
5459 /* Return zero if some of the registers in list X are killed
5460 due to set of registers in bitmap REGS_SET. */
5463 store_ops_ok (rtx x
, int *regs_set
)
5467 for (; x
; x
= XEXP (x
, 1))
5470 if (regs_set
[REGNO(reg
)])
5477 /* Returns a list of registers mentioned in X. */
5479 extract_mentioned_regs (rtx x
)
5481 return extract_mentioned_regs_helper (x
, NULL_RTX
);
5484 /* Helper for extract_mentioned_regs; ACCUM is used to accumulate used
5487 extract_mentioned_regs_helper (rtx x
, rtx accum
)
5493 /* Repeat is used to turn tail-recursion into iteration. */
5499 code
= GET_CODE (x
);
5503 return alloc_EXPR_LIST (0, x
, accum
);
5513 /* We do not run this function with arguments having side effects. */
5532 i
= GET_RTX_LENGTH (code
) - 1;
5533 fmt
= GET_RTX_FORMAT (code
);
5539 rtx tem
= XEXP (x
, i
);
5541 /* If we are about to do the last recursive call
5542 needed at this level, change it into iteration. */
5549 accum
= extract_mentioned_regs_helper (tem
, accum
);
5551 else if (fmt
[i
] == 'E')
5555 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
5556 accum
= extract_mentioned_regs_helper (XVECEXP (x
, i
, j
), accum
);
5563 /* Determine whether INSN is MEM store pattern that we will consider moving.
5564 REGS_SET_BEFORE is bitmap of registers set before (and including) the
5565 current insn, REGS_SET_AFTER is bitmap of registers set after (and
5566 including) the insn in this basic block. We must be passing through BB from
5567 head to end, as we are using this fact to speed things up.
5569 The results are stored this way:
5571 -- the first anticipatable expression is added into ANTIC_STORE_LIST
5572 -- if the processed expression is not anticipatable, NULL_RTX is added
5573 there instead, so that we can use it as indicator that no further
5574 expression of this type may be anticipatable
5575 -- if the expression is available, it is added as head of AVAIL_STORE_LIST;
5576 consequently, all of them but this head are dead and may be deleted.
5577 -- if the expression is not available, the insn due to that it fails to be
5578 available is stored in reaching_reg.
5580 The things are complicated a bit by fact that there already may be stores
5581 to the same MEM from other blocks; also caller must take care of the
5582 necessary cleanup of the temporary markers after end of the basic block.
5586 find_moveable_store (rtx insn
, int *regs_set_before
, int *regs_set_after
)
5588 struct ls_expr
* ptr
;
5590 int check_anticipatable
, check_available
;
5591 basic_block bb
= BLOCK_FOR_INSN (insn
);
5593 set
= single_set (insn
);
5597 dest
= SET_DEST (set
);
5599 if (! MEM_P (dest
) || MEM_VOLATILE_P (dest
)
5600 || GET_MODE (dest
) == BLKmode
)
5603 if (side_effects_p (dest
))
5606 /* If we are handling exceptions, we must be careful with memory references
5607 that may trap. If we are not, the behavior is undefined, so we may just
5609 if (flag_non_call_exceptions
&& may_trap_p (dest
))
5612 /* Even if the destination cannot trap, the source may. In this case we'd
5613 need to handle updating the REG_EH_REGION note. */
5614 if (find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
))
5617 ptr
= ldst_entry (dest
);
5618 if (!ptr
->pattern_regs
)
5619 ptr
->pattern_regs
= extract_mentioned_regs (dest
);
5621 /* Do not check for anticipatability if we either found one anticipatable
5622 store already, or tested for one and found out that it was killed. */
5623 check_anticipatable
= 0;
5624 if (!ANTIC_STORE_LIST (ptr
))
5625 check_anticipatable
= 1;
5628 tmp
= XEXP (ANTIC_STORE_LIST (ptr
), 0);
5630 && BLOCK_FOR_INSN (tmp
) != bb
)
5631 check_anticipatable
= 1;
5633 if (check_anticipatable
)
5635 if (store_killed_before (dest
, ptr
->pattern_regs
, insn
, bb
, regs_set_before
))
5639 ANTIC_STORE_LIST (ptr
) = alloc_INSN_LIST (tmp
,
5640 ANTIC_STORE_LIST (ptr
));
5643 /* It is not necessary to check whether store is available if we did
5644 it successfully before; if we failed before, do not bother to check
5645 until we reach the insn that caused us to fail. */
5646 check_available
= 0;
5647 if (!AVAIL_STORE_LIST (ptr
))
5648 check_available
= 1;
5651 tmp
= XEXP (AVAIL_STORE_LIST (ptr
), 0);
5652 if (BLOCK_FOR_INSN (tmp
) != bb
)
5653 check_available
= 1;
5655 if (check_available
)
5657 /* Check that we have already reached the insn at that the check
5658 failed last time. */
5659 if (LAST_AVAIL_CHECK_FAILURE (ptr
))
5661 for (tmp
= BB_END (bb
);
5662 tmp
!= insn
&& tmp
!= LAST_AVAIL_CHECK_FAILURE (ptr
);
5663 tmp
= PREV_INSN (tmp
))
5666 check_available
= 0;
5669 check_available
= store_killed_after (dest
, ptr
->pattern_regs
, insn
,
5671 &LAST_AVAIL_CHECK_FAILURE (ptr
));
5673 if (!check_available
)
5674 AVAIL_STORE_LIST (ptr
) = alloc_INSN_LIST (insn
, AVAIL_STORE_LIST (ptr
));
5677 /* Find available and anticipatable stores. */
5680 compute_store_table (void)
5686 int *last_set_in
, *already_set
;
5687 struct ls_expr
* ptr
, **prev_next_ptr_ptr
;
5689 max_gcse_regno
= max_reg_num ();
5691 reg_set_in_block
= sbitmap_vector_alloc (last_basic_block
,
5693 sbitmap_vector_zero (reg_set_in_block
, last_basic_block
);
5695 last_set_in
= xcalloc (max_gcse_regno
, sizeof (int));
5696 already_set
= xmalloc (sizeof (int) * max_gcse_regno
);
5698 /* Find all the stores we care about. */
5701 /* First compute the registers set in this block. */
5702 regvec
= last_set_in
;
5704 FOR_BB_INSNS (bb
, insn
)
5706 if (! INSN_P (insn
))
5711 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
5712 if (TEST_HARD_REG_BIT (regs_invalidated_by_call
, regno
))
5714 last_set_in
[regno
] = INSN_UID (insn
);
5715 SET_BIT (reg_set_in_block
[bb
->index
], regno
);
5719 pat
= PATTERN (insn
);
5720 compute_store_table_current_insn
= insn
;
5721 note_stores (pat
, reg_set_info
, reg_set_in_block
[bb
->index
]);
5724 /* Now find the stores. */
5725 memset (already_set
, 0, sizeof (int) * max_gcse_regno
);
5726 regvec
= already_set
;
5727 FOR_BB_INSNS (bb
, insn
)
5729 if (! INSN_P (insn
))
5734 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
5735 if (TEST_HARD_REG_BIT (regs_invalidated_by_call
, regno
))
5736 already_set
[regno
] = 1;
5739 pat
= PATTERN (insn
);
5740 note_stores (pat
, reg_set_info
, NULL
);
5742 /* Now that we've marked regs, look for stores. */
5743 find_moveable_store (insn
, already_set
, last_set_in
);
5745 /* Unmark regs that are no longer set. */
5746 compute_store_table_current_insn
= insn
;
5747 note_stores (pat
, reg_clear_last_set
, last_set_in
);
5750 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
5751 if (TEST_HARD_REG_BIT (regs_invalidated_by_call
, regno
)
5752 && last_set_in
[regno
] == INSN_UID (insn
))
5753 last_set_in
[regno
] = 0;
5757 #ifdef ENABLE_CHECKING
5758 /* last_set_in should now be all-zero. */
5759 for (regno
= 0; regno
< max_gcse_regno
; regno
++)
5760 gcc_assert (!last_set_in
[regno
]);
5763 /* Clear temporary marks. */
5764 for (ptr
= first_ls_expr (); ptr
!= NULL
; ptr
= next_ls_expr (ptr
))
5766 LAST_AVAIL_CHECK_FAILURE(ptr
) = NULL_RTX
;
5767 if (ANTIC_STORE_LIST (ptr
)
5768 && (tmp
= XEXP (ANTIC_STORE_LIST (ptr
), 0)) == NULL_RTX
)
5769 ANTIC_STORE_LIST (ptr
) = XEXP (ANTIC_STORE_LIST (ptr
), 1);
5773 /* Remove the stores that are not available anywhere, as there will
5774 be no opportunity to optimize them. */
5775 for (ptr
= pre_ldst_mems
, prev_next_ptr_ptr
= &pre_ldst_mems
;
5777 ptr
= *prev_next_ptr_ptr
)
5779 if (!AVAIL_STORE_LIST (ptr
))
5781 *prev_next_ptr_ptr
= ptr
->next
;
5782 free_ldst_entry (ptr
);
5785 prev_next_ptr_ptr
= &ptr
->next
;
5788 ret
= enumerate_ldsts ();
5792 fprintf (gcse_file
, "ST_avail and ST_antic (shown under loads..)\n");
5793 print_ldst_list (gcse_file
);
5801 /* Check to see if the load X is aliased with STORE_PATTERN.
5802 AFTER is true if we are checking the case when STORE_PATTERN occurs
5806 load_kills_store (rtx x
, rtx store_pattern
, int after
)
5809 return anti_dependence (x
, store_pattern
);
5811 return true_dependence (store_pattern
, GET_MODE (store_pattern
), x
,
5815 /* Go through the entire insn X, looking for any loads which might alias
5816 STORE_PATTERN. Return true if found.
5817 AFTER is true if we are checking the case when STORE_PATTERN occurs
5818 after the insn X. */
5821 find_loads (rtx x
, rtx store_pattern
, int after
)
5830 if (GET_CODE (x
) == SET
)
5835 if (load_kills_store (x
, store_pattern
, after
))
5839 /* Recursively process the insn. */
5840 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
5842 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0 && !ret
; i
--)
5845 ret
|= find_loads (XEXP (x
, i
), store_pattern
, after
);
5846 else if (fmt
[i
] == 'E')
5847 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
5848 ret
|= find_loads (XVECEXP (x
, i
, j
), store_pattern
, after
);
5853 /* Check if INSN kills the store pattern X (is aliased with it).
5854 AFTER is true if we are checking the case when store X occurs
5855 after the insn. Return true if it does. */
5858 store_killed_in_insn (rtx x
, rtx x_regs
, rtx insn
, int after
)
5860 rtx reg
, base
, note
;
5867 /* A normal or pure call might read from pattern,
5868 but a const call will not. */
5869 if (! CONST_OR_PURE_CALL_P (insn
) || pure_call_p (insn
))
5872 /* But even a const call reads its parameters. Check whether the
5873 base of some of registers used in mem is stack pointer. */
5874 for (reg
= x_regs
; reg
; reg
= XEXP (reg
, 1))
5876 base
= find_base_term (XEXP (reg
, 0));
5878 || (GET_CODE (base
) == ADDRESS
5879 && GET_MODE (base
) == Pmode
5880 && XEXP (base
, 0) == stack_pointer_rtx
))
5887 if (GET_CODE (PATTERN (insn
)) == SET
)
5889 rtx pat
= PATTERN (insn
);
5890 rtx dest
= SET_DEST (pat
);
5892 if (GET_CODE (dest
) == ZERO_EXTRACT
)
5893 dest
= XEXP (dest
, 0);
5895 /* Check for memory stores to aliased objects. */
5897 && !expr_equiv_p (dest
, x
))
5901 if (output_dependence (dest
, x
))
5906 if (output_dependence (x
, dest
))
5910 if (find_loads (SET_SRC (pat
), x
, after
))
5913 else if (find_loads (PATTERN (insn
), x
, after
))
5916 /* If this insn has a REG_EQUAL or REG_EQUIV note referencing a memory
5917 location aliased with X, then this insn kills X. */
5918 note
= find_reg_equal_equiv_note (insn
);
5921 note
= XEXP (note
, 0);
5923 /* However, if the note represents a must alias rather than a may
5924 alias relationship, then it does not kill X. */
5925 if (expr_equiv_p (note
, x
))
5928 /* See if there are any aliased loads in the note. */
5929 return find_loads (note
, x
, after
);
5932 /* Returns true if the expression X is loaded or clobbered on or after INSN
5933 within basic block BB. REGS_SET_AFTER is bitmap of registers set in
5934 or after the insn. X_REGS is list of registers mentioned in X. If the store
5935 is killed, return the last insn in that it occurs in FAIL_INSN. */
5938 store_killed_after (rtx x
, rtx x_regs
, rtx insn
, basic_block bb
,
5939 int *regs_set_after
, rtx
*fail_insn
)
5941 rtx last
= BB_END (bb
), act
;
5943 if (!store_ops_ok (x_regs
, regs_set_after
))
5945 /* We do not know where it will happen. */
5947 *fail_insn
= NULL_RTX
;
5951 /* Scan from the end, so that fail_insn is determined correctly. */
5952 for (act
= last
; act
!= PREV_INSN (insn
); act
= PREV_INSN (act
))
5953 if (store_killed_in_insn (x
, x_regs
, act
, false))
5963 /* Returns true if the expression X is loaded or clobbered on or before INSN
5964 within basic block BB. X_REGS is list of registers mentioned in X.
5965 REGS_SET_BEFORE is bitmap of registers set before or in this insn. */
5967 store_killed_before (rtx x
, rtx x_regs
, rtx insn
, basic_block bb
,
5968 int *regs_set_before
)
5970 rtx first
= BB_HEAD (bb
);
5972 if (!store_ops_ok (x_regs
, regs_set_before
))
5975 for ( ; insn
!= PREV_INSN (first
); insn
= PREV_INSN (insn
))
5976 if (store_killed_in_insn (x
, x_regs
, insn
, true))
5982 /* Fill in available, anticipatable, transparent and kill vectors in
5983 STORE_DATA, based on lists of available and anticipatable stores. */
5985 build_store_vectors (void)
5988 int *regs_set_in_block
;
5990 struct ls_expr
* ptr
;
5993 /* Build the gen_vector. This is any store in the table which is not killed
5994 by aliasing later in its block. */
5995 ae_gen
= sbitmap_vector_alloc (last_basic_block
, num_stores
);
5996 sbitmap_vector_zero (ae_gen
, last_basic_block
);
5998 st_antloc
= sbitmap_vector_alloc (last_basic_block
, num_stores
);
5999 sbitmap_vector_zero (st_antloc
, last_basic_block
);
6001 for (ptr
= first_ls_expr (); ptr
!= NULL
; ptr
= next_ls_expr (ptr
))
6003 for (st
= AVAIL_STORE_LIST (ptr
); st
!= NULL
; st
= XEXP (st
, 1))
6005 insn
= XEXP (st
, 0);
6006 bb
= BLOCK_FOR_INSN (insn
);
6008 /* If we've already seen an available expression in this block,
6009 we can delete this one (It occurs earlier in the block). We'll
6010 copy the SRC expression to an unused register in case there
6011 are any side effects. */
6012 if (TEST_BIT (ae_gen
[bb
->index
], ptr
->index
))
6014 rtx r
= gen_reg_rtx (GET_MODE (ptr
->pattern
));
6016 fprintf (gcse_file
, "Removing redundant store:\n");
6017 replace_store_insn (r
, XEXP (st
, 0), bb
, ptr
);
6020 SET_BIT (ae_gen
[bb
->index
], ptr
->index
);
6023 for (st
= ANTIC_STORE_LIST (ptr
); st
!= NULL
; st
= XEXP (st
, 1))
6025 insn
= XEXP (st
, 0);
6026 bb
= BLOCK_FOR_INSN (insn
);
6027 SET_BIT (st_antloc
[bb
->index
], ptr
->index
);
6031 ae_kill
= sbitmap_vector_alloc (last_basic_block
, num_stores
);
6032 sbitmap_vector_zero (ae_kill
, last_basic_block
);
6034 transp
= sbitmap_vector_alloc (last_basic_block
, num_stores
);
6035 sbitmap_vector_zero (transp
, last_basic_block
);
6036 regs_set_in_block
= xmalloc (sizeof (int) * max_gcse_regno
);
6040 for (regno
= 0; regno
< max_gcse_regno
; regno
++)
6041 regs_set_in_block
[regno
] = TEST_BIT (reg_set_in_block
[bb
->index
], regno
);
6043 for (ptr
= first_ls_expr (); ptr
!= NULL
; ptr
= next_ls_expr (ptr
))
6045 if (store_killed_after (ptr
->pattern
, ptr
->pattern_regs
, BB_HEAD (bb
),
6046 bb
, regs_set_in_block
, NULL
))
6048 /* It should not be necessary to consider the expression
6049 killed if it is both anticipatable and available. */
6050 if (!TEST_BIT (st_antloc
[bb
->index
], ptr
->index
)
6051 || !TEST_BIT (ae_gen
[bb
->index
], ptr
->index
))
6052 SET_BIT (ae_kill
[bb
->index
], ptr
->index
);
6055 SET_BIT (transp
[bb
->index
], ptr
->index
);
6059 free (regs_set_in_block
);
6063 dump_sbitmap_vector (gcse_file
, "st_antloc", "", st_antloc
, last_basic_block
);
6064 dump_sbitmap_vector (gcse_file
, "st_kill", "", ae_kill
, last_basic_block
);
6065 dump_sbitmap_vector (gcse_file
, "Transpt", "", transp
, last_basic_block
);
6066 dump_sbitmap_vector (gcse_file
, "st_avloc", "", ae_gen
, last_basic_block
);
6070 /* Insert an instruction at the beginning of a basic block, and update
6071 the BB_HEAD if needed. */
6074 insert_insn_start_bb (rtx insn
, basic_block bb
)
6076 /* Insert at start of successor block. */
6077 rtx prev
= PREV_INSN (BB_HEAD (bb
));
6078 rtx before
= BB_HEAD (bb
);
6081 if (! LABEL_P (before
)
6082 && (! NOTE_P (before
)
6083 || NOTE_LINE_NUMBER (before
) != NOTE_INSN_BASIC_BLOCK
))
6086 if (prev
== BB_END (bb
))
6088 before
= NEXT_INSN (before
);
6091 insn
= emit_insn_after_noloc (insn
, prev
);
6095 fprintf (gcse_file
, "STORE_MOTION insert store at start of BB %d:\n",
6097 print_inline_rtx (gcse_file
, insn
, 6);
6098 fprintf (gcse_file
, "\n");
6102 /* This routine will insert a store on an edge. EXPR is the ldst entry for
6103 the memory reference, and E is the edge to insert it on. Returns nonzero
6104 if an edge insertion was performed. */
6107 insert_store (struct ls_expr
* expr
, edge e
)
6114 /* We did all the deleted before this insert, so if we didn't delete a
6115 store, then we haven't set the reaching reg yet either. */
6116 if (expr
->reaching_reg
== NULL_RTX
)
6119 if (e
->flags
& EDGE_FAKE
)
6122 reg
= expr
->reaching_reg
;
6123 insn
= gen_move_insn (copy_rtx (expr
->pattern
), reg
);
6125 /* If we are inserting this expression on ALL predecessor edges of a BB,
6126 insert it at the start of the BB, and reset the insert bits on the other
6127 edges so we don't try to insert it on the other edges. */
6129 FOR_EACH_EDGE (tmp
, ei
, e
->dest
->preds
)
6130 if (!(tmp
->flags
& EDGE_FAKE
))
6132 int index
= EDGE_INDEX (edge_list
, tmp
->src
, tmp
->dest
);
6134 gcc_assert (index
!= EDGE_INDEX_NO_EDGE
);
6135 if (! TEST_BIT (pre_insert_map
[index
], expr
->index
))
6139 /* If tmp is NULL, we found an insertion on every edge, blank the
6140 insertion vector for these edges, and insert at the start of the BB. */
6141 if (!tmp
&& bb
!= EXIT_BLOCK_PTR
)
6143 FOR_EACH_EDGE (tmp
, ei
, e
->dest
->preds
)
6145 int index
= EDGE_INDEX (edge_list
, tmp
->src
, tmp
->dest
);
6146 RESET_BIT (pre_insert_map
[index
], expr
->index
);
6148 insert_insn_start_bb (insn
, bb
);
6152 /* We can't put stores in the front of blocks pointed to by abnormal
6153 edges since that may put a store where one didn't used to be. */
6154 gcc_assert (!(e
->flags
& EDGE_ABNORMAL
));
6156 insert_insn_on_edge (insn
, e
);
6160 fprintf (gcse_file
, "STORE_MOTION insert insn on edge (%d, %d):\n",
6161 e
->src
->index
, e
->dest
->index
);
6162 print_inline_rtx (gcse_file
, insn
, 6);
6163 fprintf (gcse_file
, "\n");
6169 /* Remove any REG_EQUAL or REG_EQUIV notes containing a reference to the
6170 memory location in SMEXPR set in basic block BB.
6172 This could be rather expensive. */
6175 remove_reachable_equiv_notes (basic_block bb
, struct ls_expr
*smexpr
)
6177 edge_iterator
*stack
, ei
;
6180 sbitmap visited
= sbitmap_alloc (last_basic_block
);
6181 rtx last
, insn
, note
;
6182 rtx mem
= smexpr
->pattern
;
6184 stack
= xmalloc (sizeof (edge_iterator
) * n_basic_blocks
);
6186 ei
= ei_start (bb
->succs
);
6188 sbitmap_zero (visited
);
6190 act
= (EDGE_COUNT (ei_container (ei
)) > 0 ? EDGE_I (ei_container (ei
), 0) : NULL
);
6198 sbitmap_free (visited
);
6201 act
= ei_edge (stack
[--sp
]);
6205 if (bb
== EXIT_BLOCK_PTR
6206 || TEST_BIT (visited
, bb
->index
))
6210 act
= (! ei_end_p (ei
)) ? ei_edge (ei
) : NULL
;
6213 SET_BIT (visited
, bb
->index
);
6215 if (TEST_BIT (st_antloc
[bb
->index
], smexpr
->index
))
6217 for (last
= ANTIC_STORE_LIST (smexpr
);
6218 BLOCK_FOR_INSN (XEXP (last
, 0)) != bb
;
6219 last
= XEXP (last
, 1))
6221 last
= XEXP (last
, 0);
6224 last
= NEXT_INSN (BB_END (bb
));
6226 for (insn
= BB_HEAD (bb
); insn
!= last
; insn
= NEXT_INSN (insn
))
6229 note
= find_reg_equal_equiv_note (insn
);
6230 if (!note
|| !expr_equiv_p (XEXP (note
, 0), mem
))
6234 fprintf (gcse_file
, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
6236 remove_note (insn
, note
);
6241 act
= (! ei_end_p (ei
)) ? ei_edge (ei
) : NULL
;
6243 if (EDGE_COUNT (bb
->succs
) > 0)
6247 ei
= ei_start (bb
->succs
);
6248 act
= (EDGE_COUNT (ei_container (ei
)) > 0 ? EDGE_I (ei_container (ei
), 0) : NULL
);
6253 /* This routine will replace a store with a SET to a specified register. */
6256 replace_store_insn (rtx reg
, rtx del
, basic_block bb
, struct ls_expr
*smexpr
)
6258 rtx insn
, mem
, note
, set
, ptr
, pair
;
6260 mem
= smexpr
->pattern
;
6261 insn
= gen_move_insn (reg
, SET_SRC (single_set (del
)));
6262 insn
= emit_insn_after (insn
, del
);
6267 "STORE_MOTION delete insn in BB %d:\n ", bb
->index
);
6268 print_inline_rtx (gcse_file
, del
, 6);
6269 fprintf (gcse_file
, "\nSTORE MOTION replaced with insn:\n ");
6270 print_inline_rtx (gcse_file
, insn
, 6);
6271 fprintf (gcse_file
, "\n");
6274 for (ptr
= ANTIC_STORE_LIST (smexpr
); ptr
; ptr
= XEXP (ptr
, 1))
6275 if (XEXP (ptr
, 0) == del
)
6277 XEXP (ptr
, 0) = insn
;
6281 /* Move the notes from the deleted insn to its replacement, and patch
6282 up the LIBCALL notes. */
6283 REG_NOTES (insn
) = REG_NOTES (del
);
6285 note
= find_reg_note (insn
, REG_RETVAL
, NULL_RTX
);
6288 pair
= XEXP (note
, 0);
6289 note
= find_reg_note (pair
, REG_LIBCALL
, NULL_RTX
);
6290 XEXP (note
, 0) = insn
;
6292 note
= find_reg_note (insn
, REG_LIBCALL
, NULL_RTX
);
6295 pair
= XEXP (note
, 0);
6296 note
= find_reg_note (pair
, REG_RETVAL
, NULL_RTX
);
6297 XEXP (note
, 0) = insn
;
6302 /* Now we must handle REG_EQUAL notes whose contents is equal to the mem;
6303 they are no longer accurate provided that they are reached by this
6304 definition, so drop them. */
6305 for (; insn
!= NEXT_INSN (BB_END (bb
)); insn
= NEXT_INSN (insn
))
6308 set
= single_set (insn
);
6311 if (expr_equiv_p (SET_DEST (set
), mem
))
6313 note
= find_reg_equal_equiv_note (insn
);
6314 if (!note
|| !expr_equiv_p (XEXP (note
, 0), mem
))
6318 fprintf (gcse_file
, "STORE_MOTION drop REG_EQUAL note at insn %d:\n",
6320 remove_note (insn
, note
);
6322 remove_reachable_equiv_notes (bb
, smexpr
);
6326 /* Delete a store, but copy the value that would have been stored into
6327 the reaching_reg for later storing. */
6330 delete_store (struct ls_expr
* expr
, basic_block bb
)
6334 if (expr
->reaching_reg
== NULL_RTX
)
6335 expr
->reaching_reg
= gen_reg_rtx (GET_MODE (expr
->pattern
));
6337 reg
= expr
->reaching_reg
;
6339 for (i
= AVAIL_STORE_LIST (expr
); i
; i
= XEXP (i
, 1))
6342 if (BLOCK_FOR_INSN (del
) == bb
)
6344 /* We know there is only one since we deleted redundant
6345 ones during the available computation. */
6346 replace_store_insn (reg
, del
, bb
, expr
);
6352 /* Free memory used by store motion. */
6355 free_store_memory (void)
6360 sbitmap_vector_free (ae_gen
);
6362 sbitmap_vector_free (ae_kill
);
6364 sbitmap_vector_free (transp
);
6366 sbitmap_vector_free (st_antloc
);
6368 sbitmap_vector_free (pre_insert_map
);
6370 sbitmap_vector_free (pre_delete_map
);
6371 if (reg_set_in_block
)
6372 sbitmap_vector_free (reg_set_in_block
);
6374 ae_gen
= ae_kill
= transp
= st_antloc
= NULL
;
6375 pre_insert_map
= pre_delete_map
= reg_set_in_block
= NULL
;
6378 /* Perform store motion. Much like gcse, except we move expressions the
6379 other way by looking at the flowgraph in reverse. */
6386 struct ls_expr
* ptr
;
6387 int update_flow
= 0;
6391 fprintf (gcse_file
, "before store motion\n");
6392 print_rtl (gcse_file
, get_insns ());
6395 init_alias_analysis ();
6397 /* Find all the available and anticipatable stores. */
6398 num_stores
= compute_store_table ();
6399 if (num_stores
== 0)
6401 sbitmap_vector_free (reg_set_in_block
);
6402 end_alias_analysis ();
6406 /* Now compute kill & transp vectors. */
6407 build_store_vectors ();
6408 add_noreturn_fake_exit_edges ();
6409 connect_infinite_loops_to_exit ();
6411 edge_list
= pre_edge_rev_lcm (gcse_file
, num_stores
, transp
, ae_gen
,
6412 st_antloc
, ae_kill
, &pre_insert_map
,
6415 /* Now we want to insert the new stores which are going to be needed. */
6416 for (ptr
= first_ls_expr (); ptr
!= NULL
; ptr
= next_ls_expr (ptr
))
6418 /* If any of the edges we have above are abnormal, we can't move this
6420 for (x
= NUM_EDGES (edge_list
) - 1; x
>= 0; x
--)
6421 if (TEST_BIT (pre_insert_map
[x
], ptr
->index
)
6422 && (INDEX_EDGE (edge_list
, x
)->flags
& EDGE_ABNORMAL
))
6427 if (gcse_file
!= NULL
)
6429 "Can't replace store %d: abnormal edge from %d to %d\n",
6430 ptr
->index
, INDEX_EDGE (edge_list
, x
)->src
->index
,
6431 INDEX_EDGE (edge_list
, x
)->dest
->index
);
6435 /* Now we want to insert the new stores which are going to be needed. */
6438 if (TEST_BIT (pre_delete_map
[bb
->index
], ptr
->index
))
6439 delete_store (ptr
, bb
);
6441 for (x
= 0; x
< NUM_EDGES (edge_list
); x
++)
6442 if (TEST_BIT (pre_insert_map
[x
], ptr
->index
))
6443 update_flow
|= insert_store (ptr
, INDEX_EDGE (edge_list
, x
));
6447 commit_edge_insertions ();
6449 free_store_memory ();
6450 free_edge_list (edge_list
);
6451 remove_fake_exit_edges ();
6452 end_alias_analysis ();
6456 /* Entry point for jump bypassing optimization pass. */
int
bypass_jumps (FILE *file)
{
  int changed;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* For calling dump_foo fns from gdb.  */
  debug_stderr = stderr;
  gcse_file = file;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  if (file)
    dump_flow_info (file);

  /* Return if there's nothing to do, or it is too expensive.  */
  if (n_basic_blocks <= 1 || is_too_expensive (_ ("jump bypassing disabled")))
    return 0;

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias.  */
  init_alias_analysis ();

  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets ();

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem ();
  /* A copy/constant propagation pass with edge bypassing enabled (the
     last argument) does the actual work.  */
  changed = one_cprop_pass (MAX_GCSE_PASSES + 2, true, true);
  free_gcse_mem ();

  if (file)
    {
      fprintf (file, "BYPASS of %s: %d basic blocks, ",
	       current_function_name (), n_basic_blocks);
      fprintf (file, "%d bytes\n\n", bytes_used);
    }

  obstack_free (&gcse_obstack, NULL);
  free_reg_set_mem ();

  /* We are finished with alias.  */
  end_alias_analysis ();
  allocate_reg_info (max_reg_num (), FALSE, FALSE);

  return changed;
}
/* Return true if the graph is too expensive to optimize.  PASS is the
   optimization about to be performed.  */

static bool
is_too_expensive (const char *pass)
{
  /* Trying to perform global optimizations on flow graphs which have
     a high connectivity will take a long time and is unlikely to be
     particularly useful.

     In normal circumstances a cfg should have about twice as many
     edges as blocks.  But we do not want to punish small functions
     which have a couple of switch statements.  Rather than simply
     thresholding the number of blocks, use something with a more
     graceful degradation.  */
  if (n_edges > 20000 + n_basic_blocks * 4)
    {
      warning (OPT_Wdisabled_optimization,
	       "%s: %d basic blocks and %d edges/basic block",
	       pass, n_basic_blocks, n_edges / n_basic_blocks);

      return true;
    }

  /* If allocating memory for the cprop bitmaps would take up too much
     storage it's better just to disable the optimization.  */
  if ((n_basic_blocks
       * SBITMAP_SET_SIZE (max_reg_num ())
       * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
    {
      warning (OPT_Wdisabled_optimization,
	       "%s: %d basic blocks and %d registers",
	       pass, n_basic_blocks, max_reg_num ());

      return true;
    }

  return false;
}
static bool
gate_handle_jump_bypass (void)
{
  return optimize > 0 && flag_gcse;
}

/* Perform jump bypassing and control flow optimizations.  */
static void
rest_of_handle_jump_bypass (void)
{
  cleanup_cfg (CLEANUP_EXPENSIVE);
  reg_scan (get_insns (), max_reg_num ());

  if (bypass_jumps (dump_file))
    {
      rebuild_jump_labels (get_insns ());
      cleanup_cfg (CLEANUP_EXPENSIVE);
      delete_trivially_dead_insns (get_insns (), max_reg_num ());
    }
}

struct tree_opt_pass pass_jump_bypass =
{
  "bypass",				/* name */
  gate_handle_jump_bypass,		/* gate */
  rest_of_handle_jump_bypass,		/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_BYPASS,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func |
  TODO_ggc_collect | TODO_verify_flow,	/* todo_flags_finish */
  'G'					/* letter */
};
static bool
gate_handle_gcse (void)
{
  return optimize > 0 && flag_gcse;
}

static void
rest_of_handle_gcse (void)
{
  int save_csb, save_cfj;
  int tem2 = 0, tem;

  tem = gcse_main (get_insns (), dump_file);
  rebuild_jump_labels (get_insns ());
  delete_trivially_dead_insns (get_insns (), max_reg_num ());

  save_csb = flag_cse_skip_blocks;
  save_cfj = flag_cse_follow_jumps;
  flag_cse_skip_blocks = flag_cse_follow_jumps = 0;

  /* If -fexpensive-optimizations, re-run CSE to clean up things done
     by gcse.  */
  if (flag_expensive_optimizations)
    {
      timevar_push (TV_CSE);
      reg_scan (get_insns (), max_reg_num ());
      tem2 = cse_main (get_insns (), max_reg_num (), dump_file);
      purge_all_dead_edges ();
      delete_trivially_dead_insns (get_insns (), max_reg_num ());
      timevar_pop (TV_CSE);
      cse_not_expected = !flag_rerun_cse_after_loop;
    }

  /* If gcse or cse altered any jumps, rerun jump optimizations to clean
     things up.  */
  if (tem || tem2)
    {
      timevar_push (TV_JUMP);
      rebuild_jump_labels (get_insns ());
      delete_dead_jumptables ();
      cleanup_cfg (CLEANUP_EXPENSIVE | CLEANUP_PRE_LOOP);
      timevar_pop (TV_JUMP);
    }

  flag_cse_skip_blocks = save_csb;
  flag_cse_follow_jumps = save_cfj;
}

struct tree_opt_pass pass_gcse =
{
  "gcse1",				/* name */
  gate_handle_gcse,			/* gate */
  rest_of_handle_gcse,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_GCSE,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_dump_func |
  TODO_verify_flow | TODO_ggc_collect,	/* todo_flags_finish */
  'G'					/* letter */
};

#include "gt-gcse.h"