/* Global common subexpression elimination/Partial redundancy elimination
   and global constant/copy propagation for GNU compiler.
   Copyright (C) 1997, 1998, 1999, 2000, 2001, 2002, 2003
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* TODO
   - reordering of memory allocation and freeing to be more space efficient
   - do a rough calculation of how many registers are needed in each block,
     and a rough calculation of how many registers are available in each
     class, and use that to throttle back the code in cases where RTX_COST
     is minimal.
   - a store to the same address as a load does not kill the load if the
     source of the store is also the destination of the load.  Handling this
     allows more load motion, particularly out of loops.
   - the ability to realloc sbitmap vectors would allow one initial
     computation of reg_set_in_block with only subsequent additions, rather
     than recomputing it for each pass.
*/
/* References searched while implementing this.

   Compilers Principles, Techniques and Tools
   Aho, Sethi, Ullman
   Addison-Wesley, 1988

   Global Optimization by Suppression of Partial Redundancies
   E. Morel, C. Renvoise
   Communications of the ACM, Vol. 22, Num. 2, Feb. 1979

   A Portable Machine-Independent Global Optimizer - Design and Measurements
   Frederick Chow
   Stanford Ph.D. thesis, Dec. 1983

   A Fast Algorithm for Code Movement Optimization
   D.M. Dhamdhere
   SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988

   A Solution to a Problem with Morel and Renvoise's
   Global Optimization by Suppression of Partial Redundancies
   K-H Drechsler, M.P. Stadel
   ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988

   Practical Adaptation of the Global Optimization
   Algorithm of Morel and Renvoise
   D.M. Dhamdhere
   ACM TOPLAS, Vol. 13, Num. 2, Apr. 1991

   Efficiently Computing Static Single Assignment Form and the Control
   Dependence Graph
   R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
   ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991

   Lazy Code Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   What's In a Region?  Or Computing Control Dependence Regions in Near-Linear
   Time for Reducible Flow Control
   Thomas Ball
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   An Efficient Representation for Sparse Sets
   Preston Briggs, Linda Torczon
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
   K-H Drechsler, M.P. Stadel
   ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993

   Partial Dead Code Elimination
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Effective Partial Redundancy Elimination
   P. Briggs, K.D. Cooper
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   The Program Structure Tree: Computing Control Regions in Linear Time
   R. Johnson, D. Pearson, K. Pingali
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Optimal Code Motion: Theory and Practice
   J. Knoop, O. Ruthing, B. Steffen
   ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994

   The power of assignment motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Global code motion / global value numbering
   C. Click
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Value Driven Redundancy Elimination
   L.T. Simpson
   Rice University Ph.D. thesis, Apr. 1996

   Value Numbering
   L.T. Simpson
   Massively Scalar Compiler Project, Rice University, Sep. 1996

   High Performance Compilers for Parallel Computing
   Michael Wolfe
   Addison-Wesley, 1996

   Advanced Compiler Design and Implementation
   Steven Muchnick
   Morgan Kaufmann, 1997

   Building an Optimizing Compiler
   Robert Morgan
   Digital Press, 1998

   People wishing to speed up the code here should read:
     Elimination Algorithms for Data Flow Analysis
     B.G. Ryder, M.C. Paull
     ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986

     How to Analyze Large Programs Efficiently and Informatively
     D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
     ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   People wishing to do something different can find various possibilities
   in the above papers and elsewhere.
*/
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "basic-block.h"
#include "function.h"
/* Propagate flow information through back edges and thus enable PRE to
   move loop invariant calculations out of loops.

   Originally this tended to create worse overall code, but several
   improvements during the development of PRE seem to have made following
   back edges generally a win.

   Note that much of the loop invariant code motion done here would normally
   be done by loop.c, which has more heuristics for when to move invariants
   out of loops.  At some point we might need to move some of those
   heuristics into gcse.c.  */
/* We support GCSE via Partial Redundancy Elimination.  PRE optimizations
   are a superset of those done by GCSE.

   We perform the following steps:

   1) Compute basic block information.

   2) Compute table of places where registers are set.

   3) Perform copy/constant propagation.

   4) Perform global cse.

   5) Perform another pass of copy/constant propagation.

   Two passes of copy/constant propagation are done because the first one
   enables more GCSE and the second one helps to clean up the copies that
   GCSE creates.  This is needed more for PRE than for Classic GCSE because
   Classic GCSE will try to use an existing register containing the common
   subexpression rather than create a new one.  This is harder to do for PRE
   because of the code motion (which Classic GCSE doesn't do).

   Expressions we are interested in GCSE-ing are of the form
   (set (pseudo-reg) (expression)).
   Function want_to_gcse_p says what these are.

   PRE handles moving invariant expressions out of loops (by treating them as
   partially redundant), as in the sketch below.
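   For instance (an illustrative sketch, not drawn from any particular
   dump), in a loop such as

	while (cond)
	  {
	    x = a * b;		<- `a' and `b' are not modified in the loop
	    ...
	  }

   the computation of a * b is fully redundant along the back edge and only
   partially redundant on entry, so PRE inserts `newreg = a * b' on the loop
   entry edge and rewrites the body to use `newreg'; the effect is loop
   invariant code motion.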
   Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
   assignment) based GVN (global value numbering).  L. T. Simpson's paper
   (Rice University) on value numbering is a useful reference for this.
   **********************

   We used to support multiple passes but there are diminishing returns in
   doing so.  The first pass usually makes 90% of the changes that are doable.
   A second pass can make a few more changes made possible by the first pass.
   Experiments show any further passes don't make enough changes to justify
   the expense.

   A study of spec92 using an unlimited number of passes:
   [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
   [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
   [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1

   It was found that doing copy propagation between each pass enables further
   substitutions.

   PRE is quite expensive in complicated functions because the DFA can take
   a while to converge.  Hence we only perform one pass.  The parameter
   max-gcse-passes can be modified if one wants to experiment.
   **********************

   The steps for PRE are:

   1) Build the hash table of expressions we wish to GCSE (expr_hash_table).

   2) Perform the data flow analysis for PRE.

   3) Delete the redundant instructions.

   4) Insert the required copies [if any] that make the partially
      redundant instructions fully redundant.

   5) For other reaching expressions, insert an instruction to copy the value
      to a newly created pseudo that will reach the redundant instruction.

   The deletion is done first so that when we do insertions we
   know which pseudo reg to use.  (A small example follows.)
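   As an illustration (a made-up sketch, not taken from any particular
   test case), suppose `a + b' is computed on only one of two paths into a
   block that recomputes it:

	bb1:  r5 = a + b        bb2:  (no computation)
	            \              /
	          bb3:  r9 = a + b      <- partially redundant

   Step 3 deletes the recomputation in bb3, replacing it with `r9 = rN' for
   a new pseudo rN (the expression's reaching_reg); step 4 inserts
   `rN = a + b' on the edge from bb2; step 5 inserts `rN = r5' after the
   existing computation in bb1.  Every path then computes a + b exactly
   once.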
   Various papers have argued that PRE DFA is expensive (O(n^2)) and others
   argue it is not.  The number of iterations for the algorithm to converge
   is typically 2-4, so I don't view it as that expensive (relatively
   speaking).

   PRE GCSE depends heavily on the second CSE pass to clean up the copies
   we create.  To make an expression reach the place where it's redundant,
   the result of the expression is copied to a new register, and the redundant
   expression is deleted by replacing it with this new register.  Classic GCSE
   doesn't have this problem as much because it computes the reaching defs of
   each register in each block and thus can try to use an existing register.
   **********************

   A fair bit of simplicity is created by creating small functions for simple
   tasks, even when the function is only called in one place.  This may
   measurably slow things down [or may not] by creating more function call
   overhead than is necessary.  The source is laid out so that it's trivial
   to make the affected functions inline so that one can measure what speed
   up, if any, can be achieved, and maybe later when things settle, things
   can be rearranged.

   Help stamp out big monolithic functions!  */
/* GCSE global vars.  */

static FILE *gcse_file;

/* Note whether or not we should run jump optimization after gcse.  We
   want to do this for two cases.

    * If we changed any jumps via cprop.

    * If we added any labels via edge splitting.  */

static int run_jump_opt_after_gcse;

/* Bitmaps are normally not included in debugging dumps.
   However it's useful to be able to print them from GDB.
   We could create special functions for this, but it's simpler to
   just allow passing stderr to the dump_foo fns.  Since stderr can
   be a macro, we store a copy here.  */

static FILE *debug_stderr;

/* An obstack for our working variables.  */

static struct obstack gcse_obstack;

struct reg_use {rtx reg_rtx; };
/* Hash table of expressions.  */

struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;
  /* Index in the available expression bitmaps.  */
  int bitmap_index;
  /* Next entry with the same hash.  */
  struct expr *next_same_hash;
  /* List of anticipatable occurrences in basic blocks in the function.
     An "anticipatable occurrence" is one that is the first occurrence in the
     basic block, the operands are not modified in the basic block prior
     to the occurrence and the output is not used between the start of
     the block and the occurrence.  */
  struct occr *antic_occr;
  /* List of available occurrences in basic blocks in the function.
     An "available occurrence" is one that is the last occurrence in the
     basic block and the operands are not modified by following statements in
     the basic block [including this insn].  */
  struct occr *avail_occr;
  /* Non-null if the computation is PRE redundant.
     The value is the newly created pseudo-reg to record a copy of the
     expression in all the places that reach the redundant copy.  */
  rtx reaching_reg;
};
/* Occurrence of an expression.
   There is one per basic block.  If a pattern appears more than once the
   last appearance is used [or first for anticipatable expressions].  */

struct occr
{
  struct occr *next;	/* Next occurrence of this expression.  */
  rtx insn;		/* The insn that computes the expression.  */
  char deleted_p;	/* Nonzero if this [anticipatable] occurrence has been
			   deleted.  */
  char copied_p;	/* Nonzero if this [available] occurrence has been
			   copied to reaching_reg.  ??? This is mutually
			   exclusive with deleted_p, so they could share
			   the same attribute.  */
};
/* Expression and copy propagation hash tables.
   Each hash table is an array of buckets.
   ??? It is known that if it were an array of entries, structure elements
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
   not clear whether in the final analysis a sufficient amount of memory would
   be saved as the size of the available expression bitmaps would be larger
   [one could build a mapping table without holes afterwards though].
   Someday I'll perform the computation and figure it out.  */

struct hash_table
{
  /* The table itself.
     This is an array of `expr_hash_table_size' elements.  */
  struct expr **table;

  /* Size of the hash table, in elements.  */
  unsigned int size;

  /* Number of hash table elements.  */
  unsigned int n_elems;

  /* Whether the table is for expressions or for copy propagation.  */
  int set_p;
};

/* Expression hash table.  */
static struct hash_table expr_hash_table;

/* Copy propagation hash table.  */
static struct hash_table set_hash_table;
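/* Illustrative only, not part of the original sources: given the bucket
   layout above, a lookup hashes a pattern and then walks the chain of
   entries with the same hash, which is essentially what lookup_expr
   (declared below) does:

     unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p,
                                    table->size);
     struct expr *cur;
     for (cur = table->table[hash]; cur != NULL; cur = cur->next_same_hash)
       if (expr_equiv_p (cur->expr, pat))
         return cur;
     return NULL;
*/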
/* Mapping of uids to cuids.
   Only real insns get cuids.  */

static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */
#ifdef ENABLE_CHECKING
#define INSN_CUID(INSN) (INSN_UID (INSN) > max_uid ? (abort (), 0) : uid_cuid[INSN_UID (INSN)])
#else
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
#endif

/* Number of cuids.  */
static int max_cuid;

/* Mapping of cuids to insns.  */
static rtx *cuid_insn;

/* Get insn from cuid.  */
#define CUID_INSN(CUID) (cuid_insn[CUID])
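/* Illustrative note (not in the original file): because CUIDs are assigned
   densely and monotonically over real insns, the relative order of two insns
   can be tested with a plain integer comparison, e.g.

     if (INSN_CUID (a) < INSN_CUID (b))
       ... insn A comes before insn B ...

   which is how oprs_unchanged_p below compares an insn against the first and
   last set of a register within a block.  */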
/* Maximum register number in function prior to doing gcse + 1.
   Registers created during this pass have regno >= max_gcse_regno.
   This is named with "gcse" to not collide with global of same name.  */
static unsigned int max_gcse_regno;
/* Table of registers that are modified.

   For each register, each element is a list of places where the pseudo-reg
   is set.

   For simplicity, GCSE is done on sets of pseudo-regs only.  PRE GCSE only
   requires knowledge of which blocks kill which regs [and thus could use
   a bitmap instead of the lists `reg_set_table' uses].

   `reg_set_table' could be turned into an array of bitmaps (num-bbs x
   num-regs) [however perhaps it may be useful to keep the data as is].  One
   advantage of recording things this way is that `reg_set_table' is fairly
   sparse with respect to pseudo regs but for hard regs could be fairly dense
   [relatively speaking].  And recording sets of pseudo-regs in lists speeds
   up functions like compute_transp since in the case of pseudo-regs we only
   need to iterate over the number of times a pseudo-reg is set, not over the
   number of basic blocks [clearly there is a bit of a slow down in the cases
   where a pseudo is set more than once in a block, however it is believed
   that the net effect is to speed things up].  This isn't done for hard-regs
   because recording call-clobbered hard-regs in `reg_set_table' at each
   function call can consume a fair bit of memory, and iterating over
   hard-regs stored this way in compute_transp will be more expensive.  */

typedef struct reg_set
{
  /* The next setting of this register.  */
  struct reg_set *next;
  /* The insn where it was set.  */
  rtx insn;
} reg_set;

static reg_set **reg_set_table;

/* Size of `reg_set_table'.
   The table starts out at max_gcse_regno + slop, and is enlarged as
   necessary.  */
static int reg_set_table_size;

/* Amount to grow `reg_set_table' by when it's full.  */
#define REG_SET_TABLE_SLOP 100
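/* Illustrative only, not part of the original file: the per-register list
   is walked by following the reg_set chain for a pseudo, along the lines
   of what compute_transp does:

     struct reg_set *r;
     for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
       ... look at BLOCK_NUM (r->insn), e.g. to clear a transparency bit ...

   so the cost is proportional to the number of sets of that pseudo, not to
   the number of basic blocks.  */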
/* This is a list of expressions which are MEMs and will be used by load
   or store motion.
   Load motion tracks MEMs which aren't killed by
   anything except itself. (ie, loads and stores to a single location).
   We can then allow movement of these MEM refs with a little special
   allowance. (all stores copy the same value to the reaching reg used
   for the loads).  This means all values used to store into memory must have
   no side effects so we can re-issue the setter value.
   Store Motion uses this structure as an expression table to track stores
   which look interesting, and might be moveable towards the exit block.  */

struct ls_expr
{
  struct expr * expr;		/* Gcse expression reference for LM.  */
  rtx pattern;			/* Pattern of this mem.  */
  rtx pattern_regs;		/* List of registers mentioned by the mem.  */
  rtx loads;			/* INSN list of loads seen.  */
  rtx stores;			/* INSN list of stores seen.  */
  struct ls_expr * next;	/* Next in the list.  */
  int invalid;			/* Invalid for some reason.  */
  int index;			/* If it maps to a bitmap index.  */
  int hash_index;		/* Index when in a hash table.  */
  rtx reaching_reg;		/* Register to use when re-writing.  */
};
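/* Illustrative only (not from the original sources): the list rooted at
   pre_ldst_mems below is normally walked with the helpers declared later,
   roughly

     struct ls_expr *ptr;
     for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
       ... examine ptr->pattern, ptr->loads and ptr->stores ...

   as the load/store motion passes do.  */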
/* Array of implicit set patterns indexed by basic block index.  */
static rtx *implicit_sets;

/* Head of the list of load/store memory refs.  */
static struct ls_expr * pre_ldst_mems = NULL;

/* Bitmap containing one bit for each register in the program.
   Used when performing GCSE to track which registers have been set since
   the start of the basic block.  */
static regset reg_set_bitmap;

/* For each block, a bitmap of registers set in the block.
   This is used by expr_killed_p and compute_transp.
   It is computed during hash table computation and not by compute_sets
   as it includes registers added since the last pass (or between cprop and
   gcse) and it's currently not easy to realloc sbitmap vectors.  */
static sbitmap *reg_set_in_block;

/* Array, indexed by basic block number, for a list of insns which modify
   memory within that block.  */
static rtx * modify_mem_list;
bitmap modify_mem_list_set;

/* This array parallels modify_mem_list, but is kept canonicalized.  */
static rtx * canon_modify_mem_list;
bitmap canon_modify_mem_list_set;
/* Various variables for statistics gathering.  */

/* Memory used in a pass.
   This isn't intended to be absolutely precise.  Its intent is only
   to keep an eye on memory usage.  */
static int bytes_used;

/* GCSE substitutions made.  */
static int gcse_subst_count;
/* Number of copy instructions created.  */
static int gcse_create_count;
/* Number of constants propagated.  */
static int const_prop_count;
/* Number of copies propagated.  */
static int copy_prop_count;

/* These variables are used by classic GCSE.
   Normally they'd be defined a bit later, but `rd_gen' needs to
   be declared sooner.  */
/* Each block has a bitmap of each type.
   The length of each block's bitmap is:

       max_cuid  - for reaching definitions
       n_exprs   - for available expressions

   Thus we view the bitmaps as 2 dimensional arrays, i.e.

       rd_kill[block_num][cuid_num]
       ae_kill[block_num][expr_num]   */
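/* Illustrative only (not in the original file): with that layout, asking
   whether an expression is killed in a block is a single bit test using
   the sbitmap accessors, e.g.

     if (TEST_BIT (ae_kill[bb->index], expr->bitmap_index))
       ... the expression does not survive this block ...
*/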
/* For reaching defs */
static sbitmap *rd_kill, *rd_gen, *reaching_defs, *rd_out;

/* for available exprs */
static sbitmap *ae_kill, *ae_gen, *ae_in, *ae_out;
/* Objects of this type are passed around by the null-pointer check
   removal routines.  */
struct null_pointer_info
{
  /* The basic block being processed.  */
  basic_block current_block;
  /* The first register to be handled in this pass.  */
  unsigned int min_reg;
  /* One greater than the last register to be handled in this pass.  */
  unsigned int max_reg;
  sbitmap *nonnull_local;
  sbitmap *nonnull_killed;
};
static void compute_can_copy PARAMS ((void));
static char *gmalloc PARAMS ((unsigned int));
static char *grealloc PARAMS ((char *, unsigned int));
static char *gcse_alloc PARAMS ((unsigned long));
static void alloc_gcse_mem PARAMS ((rtx));
static void free_gcse_mem PARAMS ((void));
static void alloc_reg_set_mem PARAMS ((int));
static void free_reg_set_mem PARAMS ((void));
static int get_bitmap_width PARAMS ((int, int, int));
static void record_one_set PARAMS ((int, rtx));
static void record_set_info PARAMS ((rtx, rtx, void *));
static void compute_sets PARAMS ((rtx));
static void hash_scan_insn PARAMS ((rtx, struct hash_table *, int));
static void hash_scan_set PARAMS ((rtx, rtx, struct hash_table *));
static void hash_scan_clobber PARAMS ((rtx, rtx, struct hash_table *));
static void hash_scan_call PARAMS ((rtx, rtx, struct hash_table *));
static int want_to_gcse_p PARAMS ((rtx));
static bool gcse_constant_p PARAMS ((rtx));
static int oprs_unchanged_p PARAMS ((rtx, rtx, int));
static int oprs_anticipatable_p PARAMS ((rtx, rtx));
static int oprs_available_p PARAMS ((rtx, rtx));
static void insert_expr_in_table PARAMS ((rtx, enum machine_mode, rtx, int, int, struct hash_table *));
static void insert_set_in_table PARAMS ((rtx, rtx, struct hash_table *));
static unsigned int hash_expr PARAMS ((rtx, enum machine_mode, int *, int));
static unsigned int hash_expr_1 PARAMS ((rtx, enum machine_mode, int *));
static unsigned int hash_string_1 PARAMS ((const char *));
static unsigned int hash_set PARAMS ((int, int));
static int expr_equiv_p PARAMS ((rtx, rtx));
static void record_last_reg_set_info PARAMS ((rtx, int));
static void record_last_mem_set_info PARAMS ((rtx));
static void record_last_set_info PARAMS ((rtx, rtx, void *));
static void compute_hash_table PARAMS ((struct hash_table *));
static void alloc_hash_table PARAMS ((int, struct hash_table *, int));
static void free_hash_table PARAMS ((struct hash_table *));
static void compute_hash_table_work PARAMS ((struct hash_table *));
static void dump_hash_table PARAMS ((FILE *, const char *, struct hash_table *));
static struct expr *lookup_expr PARAMS ((rtx, struct hash_table *));
static struct expr *lookup_set PARAMS ((unsigned int, struct hash_table *));
static struct expr *next_set PARAMS ((unsigned int, struct expr *));
static void reset_opr_set_tables PARAMS ((void));
static int oprs_not_set_p PARAMS ((rtx, rtx));
static void mark_call PARAMS ((rtx));
static void mark_set PARAMS ((rtx, rtx));
static void mark_clobber PARAMS ((rtx, rtx));
static void mark_oprs_set PARAMS ((rtx));
static void alloc_cprop_mem PARAMS ((int, int));
static void free_cprop_mem PARAMS ((void));
static void compute_transp PARAMS ((rtx, int, sbitmap *, int));
static void compute_transpout PARAMS ((void));
static void compute_local_properties PARAMS ((sbitmap *, sbitmap *, sbitmap *, struct hash_table *));
static void compute_cprop_data PARAMS ((void));
static void find_used_regs PARAMS ((rtx *, void *));
static int try_replace_reg PARAMS ((rtx, rtx, rtx));
static struct expr *find_avail_set PARAMS ((int, rtx));
static int cprop_jump PARAMS ((basic_block, rtx, rtx, rtx, rtx));
static void mems_conflict_for_gcse_p PARAMS ((rtx, rtx, void *));
static int load_killed_in_block_p PARAMS ((basic_block, int, rtx, int));
static void canon_list_insert PARAMS ((rtx, rtx, void *));
static int cprop_insn PARAMS ((rtx, int));
static int cprop PARAMS ((int));
static void find_implicit_sets PARAMS ((void));
static int one_cprop_pass PARAMS ((int, int, int));
static bool constprop_register PARAMS ((rtx, rtx, rtx, int));
static struct expr *find_bypass_set PARAMS ((int, int));
static bool reg_killed_on_edge PARAMS ((rtx, edge));
static int bypass_block PARAMS ((basic_block, rtx, rtx));
static int bypass_conditional_jumps PARAMS ((void));
static void alloc_pre_mem PARAMS ((int, int));
static void free_pre_mem PARAMS ((void));
static void compute_pre_data PARAMS ((void));
static int pre_expr_reaches_here_p PARAMS ((basic_block, struct expr *,
static void insert_insn_end_bb PARAMS ((struct expr *, basic_block, int));
static void pre_insert_copy_insn PARAMS ((struct expr *, rtx));
static void pre_insert_copies PARAMS ((void));
static int pre_delete PARAMS ((void));
static int pre_gcse PARAMS ((void));
static int one_pre_gcse_pass PARAMS ((int));
static void add_label_notes PARAMS ((rtx, rtx));
static void alloc_code_hoist_mem PARAMS ((int, int));
static void free_code_hoist_mem PARAMS ((void));
static void compute_code_hoist_vbeinout PARAMS ((void));
static void compute_code_hoist_data PARAMS ((void));
static int hoist_expr_reaches_here_p PARAMS ((basic_block, int, basic_block,
static void hoist_code PARAMS ((void));
static int one_code_hoisting_pass PARAMS ((void));
static void alloc_rd_mem PARAMS ((int, int));
static void free_rd_mem PARAMS ((void));
static void handle_rd_kill_set PARAMS ((rtx, int, basic_block));
static void compute_kill_rd PARAMS ((void));
static void compute_rd PARAMS ((void));
static void alloc_avail_expr_mem PARAMS ((int, int));
static void free_avail_expr_mem PARAMS ((void));
static void compute_ae_gen PARAMS ((struct hash_table *));
static int expr_killed_p PARAMS ((rtx, basic_block));
static void compute_ae_kill PARAMS ((sbitmap *, sbitmap *, struct hash_table *));
static int expr_reaches_here_p PARAMS ((struct occr *, struct expr *,
static rtx computing_insn PARAMS ((struct expr *, rtx));
static int def_reaches_here_p PARAMS ((rtx, rtx));
static int can_disregard_other_sets PARAMS ((struct reg_set **, rtx, int));
static int handle_avail_expr PARAMS ((rtx, struct expr *));
static int classic_gcse PARAMS ((void));
static int one_classic_gcse_pass PARAMS ((int));
static void invalidate_nonnull_info PARAMS ((rtx, rtx, void *));
static int delete_null_pointer_checks_1 PARAMS ((unsigned int *, sbitmap *, sbitmap *, struct null_pointer_info *));
static rtx process_insert_insn PARAMS ((struct expr *));
static int pre_edge_insert PARAMS ((struct edge_list *, struct expr **));
static int expr_reaches_here_p_work PARAMS ((struct occr *, struct expr *, basic_block, int, char *));
static int pre_expr_reaches_here_p_work PARAMS ((basic_block, struct expr *, basic_block, char *));
static struct ls_expr * ldst_entry PARAMS ((rtx));
static void free_ldst_entry PARAMS ((struct ls_expr *));
static void free_ldst_mems PARAMS ((void));
static void print_ldst_list PARAMS ((FILE *));
static struct ls_expr * find_rtx_in_ldst PARAMS ((rtx));
static int enumerate_ldsts PARAMS ((void));
static inline struct ls_expr * first_ls_expr PARAMS ((void));
static inline struct ls_expr * next_ls_expr PARAMS ((struct ls_expr *));
static int simple_mem PARAMS ((rtx));
static void invalidate_any_buried_refs PARAMS ((rtx));
static void compute_ld_motion_mems PARAMS ((void));
static void trim_ld_motion_mems PARAMS ((void));
static void update_ld_motion_stores PARAMS ((struct expr *));
static void reg_set_info PARAMS ((rtx, rtx, void *));
static bool store_ops_ok PARAMS ((rtx, int *));
static rtx extract_mentioned_regs PARAMS ((rtx));
static rtx extract_mentioned_regs_helper PARAMS ((rtx, rtx));
static void find_moveable_store PARAMS ((rtx, int *, int *));
static int compute_store_table PARAMS ((void));
static bool load_kills_store PARAMS ((rtx, rtx));
static bool find_loads PARAMS ((rtx, rtx));
static bool store_killed_in_insn PARAMS ((rtx, rtx, rtx));
static bool store_killed_after PARAMS ((rtx, rtx, rtx, basic_block,
static bool store_killed_before PARAMS ((rtx, rtx, rtx, basic_block,
static void build_store_vectors PARAMS ((void));
static void insert_insn_start_bb PARAMS ((rtx, basic_block));
static int insert_store PARAMS ((struct ls_expr *, edge));
static void replace_store_insn PARAMS ((rtx, rtx, basic_block));
static void delete_store PARAMS ((struct ls_expr *,
static void free_store_memory PARAMS ((void));
static void store_motion PARAMS ((void));
static void free_insn_expr_list_list PARAMS ((rtx *));
static void clear_modify_mem_tables PARAMS ((void));
static void free_modify_mem_tables PARAMS ((void));
static rtx gcse_emit_move_after PARAMS ((rtx, rtx, rtx));
static void local_cprop_find_used_regs PARAMS ((rtx *, void *));
static bool do_local_cprop PARAMS ((rtx, rtx, int, rtx *));
static bool adjust_libcall_notes PARAMS ((rtx, rtx, rtx, rtx *));
static void local_cprop_pass PARAMS ((int));
/* Entry point for global common subexpression elimination.
   F is the first instruction in the function.  */

int
gcse_main (f, file)
     rtx f;
     FILE *file;
{
  int changed, pass;
  /* Bytes used at start of pass.  */
  int initial_bytes_used;
  /* Maximum number of bytes used by a pass.  */
  int max_pass_bytes;
  /* Point to release obstack data from for each pass.  */
  char *gcse_obstack_bottom;
  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* Assume that we do not need to run jump optimizations after gcse.  */
  run_jump_opt_after_gcse = 0;

  /* For calling dump_foo fns from gdb.  */
  debug_stderr = stderr;
  gcse_file = file;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  if (file)
    dump_flow_info (file);
  /* Return if there's nothing to do.  */
  if (n_basic_blocks <= 1)
    return 0;

  /* Trying to perform global optimizations on flow graphs which have
     a high connectivity will take a long time and is unlikely to be
     particularly useful.

     In normal circumstances a cfg should have about twice as many edges
     as blocks.  But we do not want to punish small functions which have
     a couple of switch statements.  So we require a relatively large number
     of basic blocks and the ratio of edges to blocks to be high.  */
  if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
    {
      if (warn_disabled_optimization)
	warning ("GCSE disabled: %d > 1000 basic blocks and %d >= 20 edges/basic block",
		 n_basic_blocks, n_edges / n_basic_blocks);
      return 0;
    }
  /* If allocating memory for the cprop bitmap would take up too much
     storage it's better just to disable the optimization.  */
  if ((n_basic_blocks
       * SBITMAP_SET_SIZE (max_gcse_regno)
       * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
    {
      if (warn_disabled_optimization)
	warning ("GCSE disabled: %d basic blocks and %d registers",
		 n_basic_blocks, max_gcse_regno);

      return 0;
    }

  gcc_obstack_init (&gcse_obstack);
  init_alias_analysis ();

  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets (f);

  pass = 0;
  initial_bytes_used = bytes_used;
  max_pass_bytes = 0;
  gcse_obstack_bottom = gcse_alloc (1);
  changed = 1;
  while (changed && pass < MAX_GCSE_PASSES)
    {
      changed = 0;
      if (file)
	fprintf (file, "GCSE pass %d\n\n", pass + 1);

      /* Initialize bytes_used to the space for the pred/succ lists,
	 and the reg_set_table data.  */
      bytes_used = initial_bytes_used;

      /* Each pass may create new registers, so recalculate each time.  */
      max_gcse_regno = max_reg_num ();

      alloc_gcse_mem (f);

      /* Don't allow constant propagation to modify jumps
	 for this pass.  */
      changed = one_cprop_pass (pass + 1, 0, 0);

      if (optimize_size)
	changed |= one_classic_gcse_pass (pass + 1);
      else
	{
	  changed |= one_pre_gcse_pass (pass + 1);
	  /* We may have just created new basic blocks.  Release and
	     recompute various things which are sized on the number of
	     basic blocks.  */
	  free_modify_mem_tables ();
	  modify_mem_list
	    = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
	  canon_modify_mem_list
	    = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
	  memset ((char *) modify_mem_list, 0, last_basic_block * sizeof (rtx));
	  memset ((char *) canon_modify_mem_list, 0, last_basic_block * sizeof (rtx));
	  alloc_reg_set_mem (max_reg_num ());
	  run_jump_opt_after_gcse = 1;
	}

      if (max_pass_bytes < bytes_used)
	max_pass_bytes = bytes_used;
      /* Free up memory, then reallocate for code hoisting.  We can
	 not re-use the existing allocated memory because the tables
	 will not have info for the insns or registers created by
	 partial redundancy elimination.  */
      free_gcse_mem ();

      /* It does not make sense to run code hoisting unless we are optimizing
	 for code size -- it rarely makes programs faster, and can make
	 them bigger if we did partial redundancy elimination (when optimizing
	 for space, we use a classic gcse algorithm instead of partial
	 redundancy algorithms).  */
      if (optimize_size)
	{
	  max_gcse_regno = max_reg_num ();
	  alloc_gcse_mem (f);
	  changed |= one_code_hoisting_pass ();
	  free_gcse_mem ();

	  if (max_pass_bytes < bytes_used)
	    max_pass_bytes = bytes_used;
	}

      if (file)
	fprintf (file, "\n");
      obstack_free (&gcse_obstack, gcse_obstack_bottom);
      pass++;
    }
  /* Do one last pass of copy propagation, including cprop into
     conditional jumps.  */

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem (f);

  /* This time, go ahead and allow cprop to alter jumps.  */
  one_cprop_pass (pass + 1, 1, 0);
  free_gcse_mem ();
  if (file)
    {
      fprintf (file, "GCSE of %s: %d basic blocks, ",
	       current_function_name, n_basic_blocks);
      fprintf (file, "%d pass%s, %d bytes\n\n",
	       pass, pass > 1 ? "es" : "", max_pass_bytes);
    }

  obstack_free (&gcse_obstack, NULL);

  /* We are finished with alias.  */
  end_alias_analysis ();
  allocate_reg_info (max_reg_num (), FALSE, FALSE);

  if (!optimize_size && flag_gcse_sm)
    store_motion ();

  /* Record where pseudo-registers are set.  */
  return run_jump_opt_after_gcse;
}
/* Misc. utilities.  */

/* Nonzero for each mode that supports (set (reg) (reg)).
   This is trivially true for integer and floating point values.
   It may or may not be true for condition codes.  */
static char can_copy [(int) NUM_MACHINE_MODES];
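/* Illustrative usage (not part of the original sources): callers ask
   whether a mode can be copied with a plain register-register SET, e.g.

     if (can_copy_p (GET_MODE (x)))     ... x being some SET destination ...
       ... safe to introduce a copy instruction in this mode ...

   can_copy_p, defined below, fills the can_copy table lazily the first
   time it is called.  */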
/* Compute which modes support reg/reg copy operations.  */

static void
compute_can_copy ()
{
  int i;
#ifndef AVOID_CCMODE_COPIES
  rtx reg, insn;
#endif
  memset (can_copy, 0, NUM_MACHINE_MODES);

  start_sequence ();
  for (i = 0; i < NUM_MACHINE_MODES; i++)
    if (GET_MODE_CLASS (i) == MODE_CC)
      {
#ifdef AVOID_CCMODE_COPIES
	can_copy[i] = 0;
#else
	reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
	insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
	if (recog (PATTERN (insn), insn, NULL) >= 0)
	  can_copy[i] = 1;
#endif
      }
    else
      can_copy[i] = 1;

  end_sequence ();
}
/* Returns whether the mode supports reg/reg copy operations.  */

bool
can_copy_p (mode)
     enum machine_mode mode;
{
  static bool can_copy_init_p = false;

  if (! can_copy_init_p)
    {
      compute_can_copy ();
      can_copy_init_p = true;
    }

  return can_copy[mode] != 0;
}
/* Cover function to xmalloc to record bytes allocated.  */
static char *
gmalloc (size)
     unsigned int size;
{
  bytes_used += size;
  return xmalloc (size);
}
/* Cover function to xrealloc.
   We don't record the additional size since we don't know it.
   It won't affect memory usage stats much anyway.  */
static char *
grealloc (ptr, size)
     char *ptr;
     unsigned int size;
{
  return xrealloc (ptr, size);
}
/* Cover function to obstack_alloc.  */
static char *
gcse_alloc (size)
     unsigned long size;
{
  bytes_used += size;
  return (char *) obstack_alloc (&gcse_obstack, size);
}
/* Allocate memory for the cuid mapping array,
   and reg/memory set tracking tables.

   This is called at the start of each pass.  */

static void
alloc_gcse_mem (f)
     rtx f;
{
  int i, n;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.
     CUIDs are like UIDs except they increase monotonically, have no gaps,
     and only apply to real insns.  */

  max_uid = get_max_uid ();
  n = (max_uid + 1) * sizeof (int);
  uid_cuid = (int *) gmalloc (n);
  memset ((char *) uid_cuid, 0, n);
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	uid_cuid[INSN_UID (insn)] = i++;
      else
	uid_cuid[INSN_UID (insn)] = i;
    }

  /* Create a table mapping cuids to insns.  */

  max_cuid = i;
  n = (max_cuid + 1) * sizeof (rtx);
  cuid_insn = (rtx *) gmalloc (n);
  memset ((char *) cuid_insn, 0, n);
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      CUID_INSN (i++) = insn;

  /* Allocate vars to track sets of regs.  */
  reg_set_bitmap = BITMAP_XMALLOC ();

  /* Allocate vars to track sets of regs, memory per block.  */
  reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (last_basic_block,
						       max_gcse_regno);

  /* Allocate array to keep a list of insns which modify memory in each
     basic block.  */
  modify_mem_list = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
  canon_modify_mem_list = (rtx *) gmalloc (last_basic_block * sizeof (rtx));
  memset ((char *) modify_mem_list, 0, last_basic_block * sizeof (rtx));
  memset ((char *) canon_modify_mem_list, 0, last_basic_block * sizeof (rtx));
  modify_mem_list_set = BITMAP_XMALLOC ();
  canon_modify_mem_list_set = BITMAP_XMALLOC ();
}
/* Free memory allocated by alloc_gcse_mem.  */

static void
free_gcse_mem ()
{
  free (uid_cuid);
  free (cuid_insn);

  BITMAP_XFREE (reg_set_bitmap);

  sbitmap_vector_free (reg_set_in_block);
  free_modify_mem_tables ();
  BITMAP_XFREE (modify_mem_list_set);
  BITMAP_XFREE (canon_modify_mem_list_set);
}
/* Many of the global optimization algorithms work by solving dataflow
   equations for various expressions.  Initially, some local value is
   computed for each expression in each block.  Then, the values across the
   various blocks are combined (by following flow graph edges) to arrive at
   global values.  Conceptually, each set of equations is independent.  We
   may therefore solve all the equations in parallel, solve them one at a
   time, or pick any intermediate approach.

   When you're going to need N two-dimensional bitmaps, each X (say, the
   number of blocks) by Y (say, the number of expressions), call this
   function.  It's not important what X and Y represent; only that Y
   correspond to the things that can be done in parallel.  This function will
   return an appropriate chunking factor C; you should solve C sets of
   equations in parallel.  By going through this function, we can easily
   trade space against time; by solving fewer equations in parallel we use
   less memory.  */

static int
get_bitmap_width (n, x, y)
     int n;
     int x;
     int y;
{
  /* It's not really worth figuring out *exactly* how much memory will
     be used by a particular choice.  The important thing is to get
     something approximately right.  */
  size_t max_bitmap_memory = 10 * 1024 * 1024;

  /* The number of bytes we'd use for a single column of minimum
     width.  */
  size_t column_size = n * x * sizeof (SBITMAP_ELT_TYPE);

  /* Often, it's reasonable just to solve all the equations in
     parallel.  */
  if (column_size * SBITMAP_SET_SIZE (y) <= max_bitmap_memory)
    return y;

  /* Otherwise, pick the largest width we can, without going over the
     limit.  */
  return SBITMAP_ELT_BITS * ((max_bitmap_memory + column_size - 1)
			     / column_size);
}
/* Compute the local properties of each recorded expression.

   Local properties are those that are defined by the block, irrespective of
   other blocks.

   An expression is transparent in a block if its operands are not modified
   in the block.

   An expression is computed (locally available) in a block if it is computed
   at least once and expression would contain the same value if the
   computation was moved to the end of the block.

   An expression is locally anticipatable in a block if it is computed at
   least once and expression would contain the same value if the computation
   was moved to the beginning of the block.

   We call this routine for cprop, pre and code hoisting.  They all compute
   basically the same information and thus can easily share this code.

   TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
   properties.  If NULL, then it is not necessary to compute or record that
   particular property.

   TABLE controls which hash table to look at.  If it is the set hash table,
   additionally, TRANSP is computed as ~TRANSP, since this is really cprop's
   ABSALTERED.  */

static void
compute_local_properties (transp, comp, antloc, table)
     sbitmap *transp;
     sbitmap *comp;
     sbitmap *antloc;
     struct hash_table *table;
{
  unsigned int i;
  /* Initialize any bitmaps that were passed in.  */
  if (transp)
    {
      if (table->set_p)
	sbitmap_vector_zero (transp, last_basic_block);
      else
	sbitmap_vector_ones (transp, last_basic_block);
    }

  if (comp)
    sbitmap_vector_zero (comp, last_basic_block);
  if (antloc)
    sbitmap_vector_zero (antloc, last_basic_block);

  for (i = 0; i < table->size; i++)
    {
      struct expr *expr;

      for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)
	{
	  int indx = expr->bitmap_index;
	  struct occr *occr;

	  /* The expression is transparent in this block if it is not killed.
	     We start by assuming all are transparent [none are killed], and
	     then reset the bits for those that are.  */
	  if (transp)
	    compute_transp (expr->expr, indx, transp, table->set_p);

	  /* The occurrences recorded in antic_occr are exactly those that
	     we want to set to nonzero in ANTLOC.  */
	  if (antloc)
	    for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
	      {
		SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);

		/* While we're scanning the table, this is a good place to
		   initialize this.  */
		occr->deleted_p = 0;
	      }

	  /* The occurrences recorded in avail_occr are exactly those that
	     we want to set to nonzero in COMP.  */
	  if (comp)
	    for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
	      {
		SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);

		/* While we're scanning the table, this is a good place to
		   initialize this.  */
		occr->copied_p = 0;
	      }

	  /* While we're scanning the table, this is a good place to
	     initialize this.  */
	  expr->reaching_reg = 0;
	}
    }
}
/* Register set information.

   `reg_set_table' records where each register is set or otherwise
   modified.  */

static struct obstack reg_set_obstack;
1213 alloc_reg_set_mem (n_regs
)
1218 reg_set_table_size
= n_regs
+ REG_SET_TABLE_SLOP
;
1219 n
= reg_set_table_size
* sizeof (struct reg_set
*);
1220 reg_set_table
= (struct reg_set
**) gmalloc (n
);
1221 memset ((char *) reg_set_table
, 0, n
);
1223 gcc_obstack_init (®_set_obstack
);
1229 free (reg_set_table
);
1230 obstack_free (®_set_obstack
, NULL
);
1233 /* Record REGNO in the reg_set table. */
1236 record_one_set (regno
, insn
)
1240 /* Allocate a new reg_set element and link it onto the list. */
1241 struct reg_set
*new_reg_info
;
1243 /* If the table isn't big enough, enlarge it. */
1244 if (regno
>= reg_set_table_size
)
1246 int new_size
= regno
+ REG_SET_TABLE_SLOP
;
1249 = (struct reg_set
**) grealloc ((char *) reg_set_table
,
1250 new_size
* sizeof (struct reg_set
*));
1251 memset ((char *) (reg_set_table
+ reg_set_table_size
), 0,
1252 (new_size
- reg_set_table_size
) * sizeof (struct reg_set
*));
1253 reg_set_table_size
= new_size
;
1256 new_reg_info
= (struct reg_set
*) obstack_alloc (®_set_obstack
,
1257 sizeof (struct reg_set
));
1258 bytes_used
+= sizeof (struct reg_set
);
1259 new_reg_info
->insn
= insn
;
1260 new_reg_info
->next
= reg_set_table
[regno
];
1261 reg_set_table
[regno
] = new_reg_info
;
1264 /* Called from compute_sets via note_stores to handle one SET or CLOBBER in
1265 an insn. The DATA is really the instruction in which the SET is
1269 record_set_info (dest
, setter
, data
)
1270 rtx dest
, setter ATTRIBUTE_UNUSED
;
1273 rtx record_set_insn
= (rtx
) data
;
1275 if (GET_CODE (dest
) == REG
&& REGNO (dest
) >= FIRST_PSEUDO_REGISTER
)
1276 record_one_set (REGNO (dest
), record_set_insn
);
1279 /* Scan the function and record each set of each pseudo-register.
1281 This is called once, at the start of the gcse pass. See the comments for
1282 `reg_set_table' for further documentation. */
1290 for (insn
= f
; insn
!= 0; insn
= NEXT_INSN (insn
))
1292 note_stores (PATTERN (insn
), record_set_info
, insn
);
1295 /* Hash table support. */
1297 struct reg_avail_info
1299 basic_block last_bb
;
1304 static struct reg_avail_info
*reg_avail_info
;
1305 static basic_block current_bb
;
1308 /* See whether X, the source of a set, is something we want to consider for
1311 static GTY(()) rtx test_insn
;
1316 int num_clobbers
= 0;
1319 switch (GET_CODE (x
))
1327 case CONSTANT_P_RTX
:
1334 /* If this is a valid operand, we are OK. If it's VOIDmode, we aren't. */
1335 if (general_operand (x
, GET_MODE (x
)))
1337 else if (GET_MODE (x
) == VOIDmode
)
1340 /* Otherwise, check if we can make a valid insn from it. First initialize
1341 our test insn if we haven't already. */
1345 = make_insn_raw (gen_rtx_SET (VOIDmode
,
1346 gen_rtx_REG (word_mode
,
1347 FIRST_PSEUDO_REGISTER
* 2),
1349 NEXT_INSN (test_insn
) = PREV_INSN (test_insn
) = 0;
1352 /* Now make an insn like the one we would make when GCSE'ing and see if
1354 PUT_MODE (SET_DEST (PATTERN (test_insn
)), GET_MODE (x
));
1355 SET_SRC (PATTERN (test_insn
)) = x
;
1356 return ((icode
= recog (PATTERN (test_insn
), test_insn
, &num_clobbers
)) >= 0
1357 && (num_clobbers
== 0 || ! added_clobbers_hard_reg_p (icode
)));
1360 /* Return nonzero if the operands of expression X are unchanged from the
1361 start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
1362 or from INSN to the end of INSN's basic block (if AVAIL_P != 0). */
1365 oprs_unchanged_p (x
, insn
, avail_p
)
1376 code
= GET_CODE (x
);
1381 struct reg_avail_info
*info
= ®_avail_info
[REGNO (x
)];
1383 if (info
->last_bb
!= current_bb
)
1386 return info
->last_set
< INSN_CUID (insn
);
1388 return info
->first_set
>= INSN_CUID (insn
);
1392 if (load_killed_in_block_p (current_bb
, INSN_CUID (insn
),
1396 return oprs_unchanged_p (XEXP (x
, 0), insn
, avail_p
);
1422 for (i
= GET_RTX_LENGTH (code
) - 1, fmt
= GET_RTX_FORMAT (code
); i
>= 0; i
--)
1426 /* If we are about to do the last recursive call needed at this
1427 level, change it into iteration. This function is called enough
1430 return oprs_unchanged_p (XEXP (x
, i
), insn
, avail_p
);
1432 else if (! oprs_unchanged_p (XEXP (x
, i
), insn
, avail_p
))
1435 else if (fmt
[i
] == 'E')
1436 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
1437 if (! oprs_unchanged_p (XVECEXP (x
, i
, j
), insn
, avail_p
))
1444 /* Used for communication between mems_conflict_for_gcse_p and
1445 load_killed_in_block_p. Nonzero if mems_conflict_for_gcse_p finds a
1446 conflict between two memory references. */
1447 static int gcse_mems_conflict_p
;
1449 /* Used for communication between mems_conflict_for_gcse_p and
1450 load_killed_in_block_p. A memory reference for a load instruction,
1451 mems_conflict_for_gcse_p will see if a memory store conflicts with
1452 this memory load. */
1453 static rtx gcse_mem_operand
;
1455 /* DEST is the output of an instruction. If it is a memory reference, and
1456 possibly conflicts with the load found in gcse_mem_operand, then set
1457 gcse_mems_conflict_p to a nonzero value. */
1460 mems_conflict_for_gcse_p (dest
, setter
, data
)
1461 rtx dest
, setter ATTRIBUTE_UNUSED
;
1462 void *data ATTRIBUTE_UNUSED
;
1464 while (GET_CODE (dest
) == SUBREG
1465 || GET_CODE (dest
) == ZERO_EXTRACT
1466 || GET_CODE (dest
) == SIGN_EXTRACT
1467 || GET_CODE (dest
) == STRICT_LOW_PART
)
1468 dest
= XEXP (dest
, 0);
1470 /* If DEST is not a MEM, then it will not conflict with the load. Note
1471 that function calls are assumed to clobber memory, but are handled
1473 if (GET_CODE (dest
) != MEM
)
1476 /* If we are setting a MEM in our list of specially recognized MEMs,
1477 don't mark as killed this time. */
1479 if (expr_equiv_p (dest
, gcse_mem_operand
) && pre_ldst_mems
!= NULL
)
1481 if (!find_rtx_in_ldst (dest
))
1482 gcse_mems_conflict_p
= 1;
1486 if (true_dependence (dest
, GET_MODE (dest
), gcse_mem_operand
,
1488 gcse_mems_conflict_p
= 1;
1491 /* Return nonzero if the expression in X (a memory reference) is killed
1492 in block BB before or after the insn with the CUID in UID_LIMIT.
1493 AVAIL_P is nonzero for kills after UID_LIMIT, and zero for kills
1496 To check the entire block, set UID_LIMIT to max_uid + 1 and
1500 load_killed_in_block_p (bb
, uid_limit
, x
, avail_p
)
1506 rtx list_entry
= modify_mem_list
[bb
->index
];
1510 /* Ignore entries in the list that do not apply. */
1512 && INSN_CUID (XEXP (list_entry
, 0)) < uid_limit
)
1514 && INSN_CUID (XEXP (list_entry
, 0)) > uid_limit
))
1516 list_entry
= XEXP (list_entry
, 1);
1520 setter
= XEXP (list_entry
, 0);
1522 /* If SETTER is a call everything is clobbered. Note that calls
1523 to pure functions are never put on the list, so we need not
1524 worry about them. */
1525 if (GET_CODE (setter
) == CALL_INSN
)
1528 /* SETTER must be an INSN of some kind that sets memory. Call
1529 note_stores to examine each hunk of memory that is modified.
1531 The note_stores interface is pretty limited, so we have to
1532 communicate via global variables. Yuk. */
1533 gcse_mem_operand
= x
;
1534 gcse_mems_conflict_p
= 0;
1535 note_stores (PATTERN (setter
), mems_conflict_for_gcse_p
, NULL
);
1536 if (gcse_mems_conflict_p
)
1538 list_entry
= XEXP (list_entry
, 1);
1543 /* Return nonzero if the operands of expression X are unchanged from
1544 the start of INSN's basic block up to but not including INSN. */
1547 oprs_anticipatable_p (x
, insn
)
1550 return oprs_unchanged_p (x
, insn
, 0);
1553 /* Return nonzero if the operands of expression X are unchanged from
1554 INSN to the end of INSN's basic block. */
1557 oprs_available_p (x
, insn
)
1560 return oprs_unchanged_p (x
, insn
, 1);
1563 /* Hash expression X.
1565 MODE is only used if X is a CONST_INT. DO_NOT_RECORD_P is a boolean
1566 indicating if a volatile operand is found or if the expression contains
1567 something we don't want to insert in the table.
1569 ??? One might want to merge this with canon_hash. Later. */
1572 hash_expr (x
, mode
, do_not_record_p
, hash_table_size
)
1574 enum machine_mode mode
;
1575 int *do_not_record_p
;
1576 int hash_table_size
;
1580 *do_not_record_p
= 0;
1582 hash
= hash_expr_1 (x
, mode
, do_not_record_p
);
1583 return hash
% hash_table_size
;
1586 /* Hash a string. Just add its bytes up. */
1588 static inline unsigned
1593 const unsigned char *p
= (const unsigned char *) ps
;
1602 /* Subroutine of hash_expr to do the actual work. */
1605 hash_expr_1 (x
, mode
, do_not_record_p
)
1607 enum machine_mode mode
;
1608 int *do_not_record_p
;
1615 /* Used to turn recursion into iteration. We can't rely on GCC's
1616 tail-recursion elimination since we need to keep accumulating values
1623 code
= GET_CODE (x
);
1627 hash
+= ((unsigned int) REG
<< 7) + REGNO (x
);
1631 hash
+= (((unsigned int) CONST_INT
<< 7) + (unsigned int) mode
1632 + (unsigned int) INTVAL (x
));
1636 /* This is like the general case, except that it only counts
1637 the integers representing the constant. */
1638 hash
+= (unsigned int) code
+ (unsigned int) GET_MODE (x
);
1639 if (GET_MODE (x
) != VOIDmode
)
1640 for (i
= 2; i
< GET_RTX_LENGTH (CONST_DOUBLE
); i
++)
1641 hash
+= (unsigned int) XWINT (x
, i
);
1643 hash
+= ((unsigned int) CONST_DOUBLE_LOW (x
)
1644 + (unsigned int) CONST_DOUBLE_HIGH (x
));
1652 units
= CONST_VECTOR_NUNITS (x
);
1654 for (i
= 0; i
< units
; ++i
)
1656 elt
= CONST_VECTOR_ELT (x
, i
);
1657 hash
+= hash_expr_1 (elt
, GET_MODE (elt
), do_not_record_p
);
1663 /* Assume there is only one rtx object for any given label. */
1665 /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
1666 differences and differences between each stage's debugging dumps. */
1667 hash
+= (((unsigned int) LABEL_REF
<< 7)
1668 + CODE_LABEL_NUMBER (XEXP (x
, 0)));
1673 /* Don't hash on the symbol's address to avoid bootstrap differences.
1674 Different hash values may cause expressions to be recorded in
1675 different orders and thus different registers to be used in the
1676 final assembler. This also avoids differences in the dump files
1677 between various stages. */
1679 const unsigned char *p
= (const unsigned char *) XSTR (x
, 0);
1682 h
+= (h
<< 7) + *p
++; /* ??? revisit */
1684 hash
+= ((unsigned int) SYMBOL_REF
<< 7) + h
;
1689 if (MEM_VOLATILE_P (x
))
1691 *do_not_record_p
= 1;
1695 hash
+= (unsigned int) MEM
;
1696 /* We used alias set for hashing, but this is not good, since the alias
1697 set may differ in -fprofile-arcs and -fbranch-probabilities compilation
1698 causing the profiles to fail to match. */
1709 case UNSPEC_VOLATILE
:
1710 *do_not_record_p
= 1;
1714 if (MEM_VOLATILE_P (x
))
1716 *do_not_record_p
= 1;
1721 /* We don't want to take the filename and line into account. */
1722 hash
+= (unsigned) code
+ (unsigned) GET_MODE (x
)
1723 + hash_string_1 (ASM_OPERANDS_TEMPLATE (x
))
1724 + hash_string_1 (ASM_OPERANDS_OUTPUT_CONSTRAINT (x
))
1725 + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x
);
1727 if (ASM_OPERANDS_INPUT_LENGTH (x
))
1729 for (i
= 1; i
< ASM_OPERANDS_INPUT_LENGTH (x
); i
++)
1731 hash
+= (hash_expr_1 (ASM_OPERANDS_INPUT (x
, i
),
1732 GET_MODE (ASM_OPERANDS_INPUT (x
, i
)),
1734 + hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT
1738 hash
+= hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT (x
, 0));
1739 x
= ASM_OPERANDS_INPUT (x
, 0);
1740 mode
= GET_MODE (x
);
1750 hash
+= (unsigned) code
+ (unsigned) GET_MODE (x
);
1751 for (i
= GET_RTX_LENGTH (code
) - 1, fmt
= GET_RTX_FORMAT (code
); i
>= 0; i
--)
1755 /* If we are about to do the last recursive call
1756 needed at this level, change it into iteration.
1757 This function is called enough to be worth it. */
1764 hash
+= hash_expr_1 (XEXP (x
, i
), 0, do_not_record_p
);
1765 if (*do_not_record_p
)
1769 else if (fmt
[i
] == 'E')
1770 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
1772 hash
+= hash_expr_1 (XVECEXP (x
, i
, j
), 0, do_not_record_p
);
1773 if (*do_not_record_p
)
1777 else if (fmt
[i
] == 's')
1778 hash
+= hash_string_1 (XSTR (x
, i
));
1779 else if (fmt
[i
] == 'i')
1780 hash
+= (unsigned int) XINT (x
, i
);
1788 /* Hash a set of register REGNO.
1790 Sets are hashed on the register that is set. This simplifies the PRE copy
1793 ??? May need to make things more elaborate. Later, as necessary. */
1796 hash_set (regno
, hash_table_size
)
1798 int hash_table_size
;
1803 return hash
% hash_table_size
;
1806 /* Return nonzero if exp1 is equivalent to exp2.
1807 ??? Borrowed from cse.c. Might want to remerge with cse.c. Later. */
1820 if (x
== 0 || y
== 0)
1823 code
= GET_CODE (x
);
1824 if (code
!= GET_CODE (y
))
1827 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
1828 if (GET_MODE (x
) != GET_MODE (y
))
1838 return INTVAL (x
) == INTVAL (y
);
1841 return XEXP (x
, 0) == XEXP (y
, 0);
1844 return XSTR (x
, 0) == XSTR (y
, 0);
1847 return REGNO (x
) == REGNO (y
);
1850 /* Can't merge two expressions in different alias sets, since we can
1851 decide that the expression is transparent in a block when it isn't,
1852 due to it being set with the different alias set. */
1853 if (MEM_ALIAS_SET (x
) != MEM_ALIAS_SET (y
))
1857 /* For commutative operations, check both orders. */
1865 return ((expr_equiv_p (XEXP (x
, 0), XEXP (y
, 0))
1866 && expr_equiv_p (XEXP (x
, 1), XEXP (y
, 1)))
1867 || (expr_equiv_p (XEXP (x
, 0), XEXP (y
, 1))
1868 && expr_equiv_p (XEXP (x
, 1), XEXP (y
, 0))));
1871 /* We don't use the generic code below because we want to
1872 disregard filename and line numbers. */
1874 /* A volatile asm isn't equivalent to any other. */
1875 if (MEM_VOLATILE_P (x
) || MEM_VOLATILE_P (y
))
1878 if (GET_MODE (x
) != GET_MODE (y
)
1879 || strcmp (ASM_OPERANDS_TEMPLATE (x
), ASM_OPERANDS_TEMPLATE (y
))
1880 || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x
),
1881 ASM_OPERANDS_OUTPUT_CONSTRAINT (y
))
1882 || ASM_OPERANDS_OUTPUT_IDX (x
) != ASM_OPERANDS_OUTPUT_IDX (y
)
1883 || ASM_OPERANDS_INPUT_LENGTH (x
) != ASM_OPERANDS_INPUT_LENGTH (y
))
1886 if (ASM_OPERANDS_INPUT_LENGTH (x
))
1888 for (i
= ASM_OPERANDS_INPUT_LENGTH (x
) - 1; i
>= 0; i
--)
1889 if (! expr_equiv_p (ASM_OPERANDS_INPUT (x
, i
),
1890 ASM_OPERANDS_INPUT (y
, i
))
1891 || strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x
, i
),
1892 ASM_OPERANDS_INPUT_CONSTRAINT (y
, i
)))
1902 /* Compare the elements. If any pair of corresponding elements
1903 fail to match, return 0 for the whole thing. */
1905 fmt
= GET_RTX_FORMAT (code
);
1906 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
1911 if (! expr_equiv_p (XEXP (x
, i
), XEXP (y
, i
)))
1916 if (XVECLEN (x
, i
) != XVECLEN (y
, i
))
1918 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
1919 if (! expr_equiv_p (XVECEXP (x
, i
, j
), XVECEXP (y
, i
, j
)))
1924 if (strcmp (XSTR (x
, i
), XSTR (y
, i
)))
1929 if (XINT (x
, i
) != XINT (y
, i
))
1934 if (XWINT (x
, i
) != XWINT (y
, i
))
1949 /* Insert expression X in INSN in the hash TABLE.
1950 If it is already present, record it as the last occurrence in INSN's
1953 MODE is the mode of the value X is being stored into.
1954 It is only used if X is a CONST_INT.
1956 ANTIC_P is nonzero if X is an anticipatable expression.
1957 AVAIL_P is nonzero if X is an available expression. */
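/* Illustrative only (not from the original sources): hash_scan_set computes
   the two flags from the helpers above before recording an expression,
   along the lines of

     antic_p = oprs_anticipatable_p (src, insn);
     avail_p = oprs_available_p (src, insn);
     insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p,
                           table);

   so a single occurrence can be recorded as both anticipatable and
   available.  */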
1960 insert_expr_in_table (x
, mode
, insn
, antic_p
, avail_p
, table
)
1962 enum machine_mode mode
;
1964 int antic_p
, avail_p
;
1965 struct hash_table
*table
;
1967 int found
, do_not_record_p
;
1969 struct expr
*cur_expr
, *last_expr
= NULL
;
1970 struct occr
*antic_occr
, *avail_occr
;
1971 struct occr
*last_occr
= NULL
;
1973 hash
= hash_expr (x
, mode
, &do_not_record_p
, table
->size
);
1975 /* Do not insert expression in table if it contains volatile operands,
1976 or if hash_expr determines the expression is something we don't want
1977 to or can't handle. */
1978 if (do_not_record_p
)
1981 cur_expr
= table
->table
[hash
];
1984 while (cur_expr
&& 0 == (found
= expr_equiv_p (cur_expr
->expr
, x
)))
1986 /* If the expression isn't found, save a pointer to the end of
1988 last_expr
= cur_expr
;
1989 cur_expr
= cur_expr
->next_same_hash
;
1994 cur_expr
= (struct expr
*) gcse_alloc (sizeof (struct expr
));
1995 bytes_used
+= sizeof (struct expr
);
1996 if (table
->table
[hash
] == NULL
)
1997 /* This is the first pattern that hashed to this index. */
1998 table
->table
[hash
] = cur_expr
;
2000 /* Add EXPR to end of this hash chain. */
2001 last_expr
->next_same_hash
= cur_expr
;
2003 /* Set the fields of the expr element. */
2005 cur_expr
->bitmap_index
= table
->n_elems
++;
2006 cur_expr
->next_same_hash
= NULL
;
2007 cur_expr
->antic_occr
= NULL
;
2008 cur_expr
->avail_occr
= NULL
;
2011 /* Now record the occurrence(s). */
2014 antic_occr
= cur_expr
->antic_occr
;
2016 /* Search for another occurrence in the same basic block. */
2017 while (antic_occr
&& BLOCK_NUM (antic_occr
->insn
) != BLOCK_NUM (insn
))
2019 /* If an occurrence isn't found, save a pointer to the end of
2021 last_occr
= antic_occr
;
2022 antic_occr
= antic_occr
->next
;
2026 /* Found another instance of the expression in the same basic block.
2027 Prefer the currently recorded one. We want the first one in the
2028 block and the block is scanned from start to end. */
2029 ; /* nothing to do */
2032 /* First occurrence of this expression in this basic block. */
2033 antic_occr
= (struct occr
*) gcse_alloc (sizeof (struct occr
));
2034 bytes_used
+= sizeof (struct occr
);
2035 /* First occurrence of this expression in any block? */
2036 if (cur_expr
->antic_occr
== NULL
)
2037 cur_expr
->antic_occr
= antic_occr
;
2039 last_occr
->next
= antic_occr
;
2041 antic_occr
->insn
= insn
;
2042 antic_occr
->next
= NULL
;
2048 avail_occr
= cur_expr
->avail_occr
;
2050 /* Search for another occurrence in the same basic block. */
2051 while (avail_occr
&& BLOCK_NUM (avail_occr
->insn
) != BLOCK_NUM (insn
))
2053 /* If an occurrence isn't found, save a pointer to the end of
2055 last_occr
= avail_occr
;
2056 avail_occr
= avail_occr
->next
;
2060 /* Found another instance of the expression in the same basic block.
2061 Prefer this occurrence to the currently recorded one. We want
2062 the last one in the block and the block is scanned from start
2064 avail_occr
->insn
= insn
;
2067 /* First occurrence of this expression in this basic block. */
2068 avail_occr
= (struct occr
*) gcse_alloc (sizeof (struct occr
));
2069 bytes_used
+= sizeof (struct occr
);
2071 /* First occurrence of this expression in any block? */
2072 if (cur_expr
->avail_occr
== NULL
)
2073 cur_expr
->avail_occr
= avail_occr
;
2075 last_occr
->next
= avail_occr
;
2077 avail_occr
->insn
= insn
;
2078 avail_occr
->next
= NULL
;
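/* A minimal usage sketch (illustrative only, not part of the pass): after
   insert_expr_in_table has run over a function, each bucket of TABLE is a
   chain of distinct expressions, and each expression carries per-block
   occurrence lists.  Walking them looks like this; TABLE is assumed to be
   a computed expression hash table.  */
#if 0
  struct expr *e;
  struct occr *o;
  unsigned int hash;

  for (hash = 0; hash < table->size; hash++)
    for (e = table->table[hash]; e != NULL; e = e->next_same_hash)
      for (o = e->avail_occr; o != NULL; o = o->next)
        /* O->insn computes E->expr and the value is still available at
           the end of O's basic block.  */ ;
#endif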
2083 /* Insert pattern X in INSN in the hash table.
2084 X is a SET of a reg to either another reg or a constant.
2085 If it is already present, record it as the last occurrence in INSN's
2089 insert_set_in_table (x
, insn
, table
)
2092 struct hash_table
*table
;
2096 struct expr
*cur_expr
, *last_expr
= NULL
;
2097 struct occr
*cur_occr
, *last_occr
= NULL
;
2099 if (GET_CODE (x
) != SET
2100 || GET_CODE (SET_DEST (x
)) != REG
)
2103 hash
= hash_set (REGNO (SET_DEST (x
)), table
->size
);
2105 cur_expr
= table
->table
[hash
];
2108 while (cur_expr
&& 0 == (found
= expr_equiv_p (cur_expr
->expr
, x
)))
2110 /* If the expression isn't found, save a pointer to the end of
2112 last_expr
= cur_expr
;
2113 cur_expr
= cur_expr
->next_same_hash
;
2118 cur_expr
= (struct expr
*) gcse_alloc (sizeof (struct expr
));
2119 bytes_used
+= sizeof (struct expr
);
2120 if (table
->table
[hash
] == NULL
)
2121 /* This is the first pattern that hashed to this index. */
2122 table
->table
[hash
] = cur_expr
;
2124 /* Add EXPR to end of this hash chain. */
2125 last_expr
->next_same_hash
= cur_expr
;
2127 /* Set the fields of the expr element.
2128 We must copy X because it can be modified when copy propagation is
2129 performed on its operands. */
2130 cur_expr
->expr
= copy_rtx (x
);
2131 cur_expr
->bitmap_index
= table
->n_elems
++;
2132 cur_expr
->next_same_hash
= NULL
;
2133 cur_expr
->antic_occr
= NULL
;
2134 cur_expr
->avail_occr
= NULL
;
2137 /* Now record the occurrence. */
2138 cur_occr
= cur_expr
->avail_occr
;
2140 /* Search for another occurrence in the same basic block. */
2141 while (cur_occr
&& BLOCK_NUM (cur_occr
->insn
) != BLOCK_NUM (insn
))
2143 /* If an occurrence isn't found, save a pointer to the end of
2145 last_occr
= cur_occr
;
2146 cur_occr
= cur_occr
->next
;
2150 /* Found another instance of the expression in the same basic block.
2151 Prefer this occurrence to the currently recorded one. We want the
2152 last one in the block and the block is scanned from start to end. */
2153 cur_occr
->insn
= insn
;
2156 /* First occurrence of this expression in this basic block. */
2157 cur_occr
= (struct occr
*) gcse_alloc (sizeof (struct occr
));
2158 bytes_used
+= sizeof (struct occr
);
2160 /* First occurrence of this expression in any block? */
2161 if (cur_expr
->avail_occr
== NULL
)
2162 cur_expr
->avail_occr
= cur_occr
;
2164 last_occr
->next
= cur_occr
;
2166 cur_occr
->insn
= insn
;
2167 cur_occr
->next
= NULL
;
/* Determine whether the rtx X should be treated as a constant for
   the purposes of GCSE's constant propagation.  */

  /* Consider a COMPARE of two integers constant.  */
  if (GET_CODE (x) == COMPARE
      && GET_CODE (XEXP (x, 0)) == CONST_INT
      && GET_CODE (XEXP (x, 1)) == CONST_INT)

  if (GET_CODE (x) == CONSTANT_P_RTX)

  return CONSTANT_P (x);
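/* Hedged examples (not from the original source) of what the predicate
   above accepts: ordinary constants, and a COMPARE of two CONST_INTs.
   The register number below is invented for illustration.  */
#if 0
  gcse_constant_p (GEN_INT (42));                               /* nonzero */
  gcse_constant_p (gen_rtx_COMPARE (CCmode, GEN_INT (1), GEN_INT (2)));
                                                                /* nonzero */
  gcse_constant_p (gen_rtx_REG (SImode, 58));                   /* zero */
#endif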
/* Scan pattern PAT of INSN and add an entry to the hash TABLE (set or
   expression entry).  */

hash_scan_set (pat, insn, table)
     struct hash_table *table;

  rtx src = SET_SRC (pat);
  rtx dest = SET_DEST (pat);

  if (GET_CODE (src) == CALL)
    hash_scan_call (src, insn, table);

  else if (GET_CODE (dest) == REG)

      unsigned int regno = REGNO (dest);

      /* If this is a single set and we are doing constant propagation,
         see if a REG_NOTE shows this equivalent to a constant.  */
      if (table->set_p && (note = find_reg_equal_equiv_note (insn)) != 0
          && gcse_constant_p (XEXP (note, 0)))
        src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);

          /* Only record sets of pseudo-regs in the hash table.  */
          && regno >= FIRST_PSEUDO_REGISTER
          /* Don't GCSE something if we can't do a reg/reg copy.  */
          && can_copy_p (GET_MODE (dest))
          /* GCSE commonly inserts instruction after the insn.  We can't
             do that easily for EH_REGION notes so disable GCSE on these
             insns for now.  */
          && !find_reg_note (insn, REG_EH_REGION, NULL_RTX)
          /* Is SET_SRC something we want to gcse?  */
          && want_to_gcse_p (src)
          /* Don't CSE a nop.  */
          && ! set_noop_p (pat)
          /* Don't GCSE if it has attached REG_EQUIV note.
             At this point only function parameters should have
             REG_EQUIV notes and if the argument slot is used somewhere
             explicitly, it means the address of the parameter has been
             taken, so we should not extend the lifetime of the pseudo.  */
          && ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) == 0
              || GET_CODE (XEXP (note, 0)) != MEM))

          /* An expression is not anticipatable if its operands are
             modified before this insn or if this is not the only SET
             in this insn.  */
          int antic_p = oprs_anticipatable_p (src, insn) && single_set (insn);
          /* An expression is not available if its operands are
             subsequently modified, including this insn.  It's also not
             available if this is a branch, because we can't insert
             a set after the branch.  */
          int avail_p = (oprs_available_p (src, insn)
                         && ! JUMP_P (insn));

          insert_expr_in_table (src, GET_MODE (dest), insn, antic_p,
                                avail_p, table);

      /* Record sets for constant/copy propagation.  */
      else if (table->set_p
               && regno >= FIRST_PSEUDO_REGISTER
               && ((GET_CODE (src) == REG
                    && REGNO (src) >= FIRST_PSEUDO_REGISTER
                    && can_copy_p (GET_MODE (dest))
                    && REGNO (src) != regno)
                   || gcse_constant_p (src))
               /* A copy is not available if its src or dest is subsequently
                  modified.  Here we want to search from INSN+1 on, but
                  oprs_available_p searches from INSN on.  */
               && (insn == BLOCK_END (BLOCK_NUM (insn))
                   || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
                       && oprs_available_p (pat, tmp))))
        insert_set_in_table (pat, insn, table);
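/* Illustrative sketch (register numbers invented) of the two kinds of SETs
   recorded above, assuming the other conditions hold: the first would go to
   the expression hash table used by PRE, the second to the set hash table
   used by const/copy propagation.  */
#if 0
  /* (set (reg 60) (plus (reg 58) (reg 59)))  -> insert_expr_in_table  */
  rtx e = gen_rtx_SET (VOIDmode, gen_rtx_REG (SImode, 60),
                       gen_rtx_PLUS (SImode, gen_rtx_REG (SImode, 58),
                                     gen_rtx_REG (SImode, 59)));
  /* (set (reg 61) (const_int 5))             -> insert_set_in_table   */
  rtx s = gen_rtx_SET (VOIDmode, gen_rtx_REG (SImode, 61), GEN_INT (5));
#endif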
hash_scan_clobber (x, insn, table)
     rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
     struct hash_table *table ATTRIBUTE_UNUSED;

  /* Currently nothing to do.  */

hash_scan_call (x, insn, table)
     rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
     struct hash_table *table ATTRIBUTE_UNUSED;

  /* Currently nothing to do.  */

/* Process INSN and add hash table entries as appropriate.

   Only available expressions that set a single pseudo-reg are recorded.

   Single sets in a PARALLEL could be handled, but it's an extra complication
   that isn't dealt with right now.  The trick is handling the CLOBBERs that
   are also in the PARALLEL.  Later.

   If SET_P is nonzero, this is for the assignment hash table,
   otherwise it is for the expression hash table.
   If IN_LIBCALL_BLOCK is nonzero, we are in a libcall block, and should
   not record any expressions.  */

hash_scan_insn (insn, table, in_libcall_block)
     struct hash_table *table;
     int in_libcall_block;

  rtx pat = PATTERN (insn);

  if (in_libcall_block)

  /* Pick out the sets of INSN and for other forms of instructions record
     what's been modified.  */

  if (GET_CODE (pat) == SET)
    hash_scan_set (pat, insn, table);
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)

        rtx x = XVECEXP (pat, 0, i);

        if (GET_CODE (x) == SET)
          hash_scan_set (x, insn, table);
        else if (GET_CODE (x) == CLOBBER)
          hash_scan_clobber (x, insn, table);
        else if (GET_CODE (x) == CALL)
          hash_scan_call (x, insn, table);

  else if (GET_CODE (pat) == CLOBBER)
    hash_scan_clobber (pat, insn, table);
  else if (GET_CODE (pat) == CALL)
    hash_scan_call (pat, insn, table);
dump_hash_table (file, name, table)
     struct hash_table *table;

  /* Flattened out table, so it's printed in proper order.  */
  struct expr **flat_table;
  unsigned int *hash_val;

  flat_table
    = (struct expr **) xcalloc (table->n_elems, sizeof (struct expr *));
  hash_val = (unsigned int *) xmalloc (table->n_elems * sizeof (unsigned int));

  for (i = 0; i < (int) table->size; i++)
    for (expr = table->table[i]; expr != NULL; expr = expr->next_same_hash)

        flat_table[expr->bitmap_index] = expr;
        hash_val[expr->bitmap_index] = i;

  fprintf (file, "%s hash table (%d buckets, %d entries)\n",
           name, table->size, table->n_elems);

  for (i = 0; i < (int) table->n_elems; i++)
    if (flat_table[i] != 0)

        expr = flat_table[i];
        fprintf (file, "Index %d (hash value %d)\n ",
                 expr->bitmap_index, hash_val[i]);
        print_rtl (file, expr->expr);
        fprintf (file, "\n");

  fprintf (file, "\n");
/* Record register first/last/block set information for REGNO in INSN.

   first_set records the first place in the block where the register
   is set and is used to compute "anticipatability".

   last_set records the last place in the block where the register
   is set and is used to compute "availability".

   last_bb records the block for which first_set and last_set are
   valid, as a quick test to invalidate them.

   reg_set_in_block records whether the register is set in the block
   and is used to compute "transparency".  */

record_last_reg_set_info (insn, regno)

  struct reg_avail_info *info = &reg_avail_info[regno];
  int cuid = INSN_CUID (insn);

  info->last_set = cuid;
  if (info->last_bb != current_bb)

      info->last_bb = current_bb;
      info->first_set = cuid;
      SET_BIT (reg_set_in_block[current_bb->index], regno);
/* Record all of the canonicalized MEMs of record_last_mem_set_info's insn.
   Note we store a pair of elements in the list, so they have to be
   taken off pairwise.  */

canon_list_insert (dest, unused1, v_insn)
     rtx dest ATTRIBUTE_UNUSED;
     rtx unused1 ATTRIBUTE_UNUSED;

  rtx dest_addr, insn;

  while (GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == ZERO_EXTRACT
         || GET_CODE (dest) == SIGN_EXTRACT
         || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  /* If DEST is not a MEM, then it will not conflict with a load.  Note
     that function calls are assumed to clobber memory, but are handled
     elsewhere.  */
  if (GET_CODE (dest) != MEM)

  dest_addr = get_addr (XEXP (dest, 0));
  dest_addr = canon_rtx (dest_addr);
  insn = (rtx) v_insn;
  bb = BLOCK_NUM (insn);

  canon_modify_mem_list[bb] =
    alloc_EXPR_LIST (VOIDmode, dest_addr, canon_modify_mem_list[bb]);
  canon_modify_mem_list[bb] =
    alloc_EXPR_LIST (VOIDmode, dest, canon_modify_mem_list[bb]);
  bitmap_set_bit (canon_modify_mem_list_set, bb);
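/* Sketch of how the pairs stored above are consumed (this mirrors the
   traversal done later in compute_transp; it is shown here only for
   illustration).  The head element of each pair is the MEM itself, the
   second is its canonicalized address.  */
#if 0
  rtx list_entry = canon_modify_mem_list[bb];

  while (list_entry)
    {
      rtx mem, addr;

      if (GET_CODE (XEXP (list_entry, 0)) == CALL_INSN)
        break;                          /* a call clobbers all of memory */
      mem = XEXP (list_entry, 0);
      list_entry = XEXP (list_entry, 1);
      addr = XEXP (list_entry, 0);
      list_entry = XEXP (list_entry, 1);
    }
#endif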
/* Record memory modification information for INSN.  We do not actually care
   about the memory location(s) that are set, or even how they are set
   (consider a CALL_INSN).  We merely need to record which insns modify
   memory.  */

record_last_mem_set_info (insn)

  int bb = BLOCK_NUM (insn);

  /* load_killed_in_block_p will handle the case of calls clobbering
     everything.  */
  modify_mem_list[bb] = alloc_INSN_LIST (insn, modify_mem_list[bb]);
  bitmap_set_bit (modify_mem_list_set, bb);

  if (GET_CODE (insn) == CALL_INSN)

      /* Note that traversals of this loop (other than for free-ing)
         will break after encountering a CALL_INSN.  So, there's no
         need to insert a pair of items, as canon_list_insert does.  */
      canon_modify_mem_list[bb] =
        alloc_INSN_LIST (insn, canon_modify_mem_list[bb]);
      bitmap_set_bit (canon_modify_mem_list_set, bb);

    note_stores (PATTERN (insn), canon_list_insert, (void *) insn);
/* Called from compute_hash_table via note_stores to handle one
   SET or CLOBBER in an insn.  DATA is really the instruction in which
   the SET is taking place.  */

record_last_set_info (dest, setter, data)
     rtx dest, setter ATTRIBUTE_UNUSED;

  rtx last_set_insn = (rtx) data;

  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (GET_CODE (dest) == REG)
    record_last_reg_set_info (last_set_insn, REGNO (dest));
  else if (GET_CODE (dest) == MEM
           /* Ignore pushes, they clobber nothing.  */
           && ! push_operand (dest, GET_MODE (dest)))
    record_last_mem_set_info (last_set_insn);
2496 /* Top level function to create an expression or assignment hash table.
2498 Expression entries are placed in the hash table if
2499 - they are of the form (set (pseudo-reg) src),
2500 - src is something we want to perform GCSE on,
2501 - none of the operands are subsequently modified in the block
2503 Assignment entries are placed in the hash table if
2504 - they are of the form (set (pseudo-reg) src),
2505 - src is something we want to perform const/copy propagation on,
2506 - none of the operands or target are subsequently modified in the block
2508 Currently src must be a pseudo-reg or a const_int.
2510 TABLE is the table computed. */
2513 compute_hash_table_work (table
)
2514 struct hash_table
*table
;
2518 /* While we compute the hash table we also compute a bit array of which
2519 registers are set in which blocks.
2520 ??? This isn't needed during const/copy propagation, but it's cheap to
2522 sbitmap_vector_zero (reg_set_in_block
, last_basic_block
);
2524 /* re-Cache any INSN_LIST nodes we have allocated. */
2525 clear_modify_mem_tables ();
2526 /* Some working arrays used to track first and last set in each block. */
2527 reg_avail_info
= (struct reg_avail_info
*)
2528 gmalloc (max_gcse_regno
* sizeof (struct reg_avail_info
));
2530 for (i
= 0; i
< max_gcse_regno
; ++i
)
2531 reg_avail_info
[i
].last_bb
= NULL
;
2533 FOR_EACH_BB (current_bb
)
2537 int in_libcall_block
;
2539 /* First pass over the instructions records information used to
2540 determine when registers and memory are first and last set.
2541 ??? hard-reg reg_set_in_block computation
2542 could be moved to compute_sets since they currently don't change. */
2544 for (insn
= current_bb
->head
;
2545 insn
&& insn
!= NEXT_INSN (current_bb
->end
);
2546 insn
= NEXT_INSN (insn
))
2548 if (! INSN_P (insn
))
2551 if (GET_CODE (insn
) == CALL_INSN
)
2553 bool clobbers_all
= false;
2554 #ifdef NON_SAVING_SETJMP
2555 if (NON_SAVING_SETJMP
2556 && find_reg_note (insn
, REG_SETJMP
, NULL_RTX
))
2557 clobbers_all
= true;
2560 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
2562 || TEST_HARD_REG_BIT (regs_invalidated_by_call
, regno
))
2563 record_last_reg_set_info (insn
, regno
);
2568 note_stores (PATTERN (insn
), record_last_set_info
, insn
);
2571 /* Insert implicit sets in the hash table. */
2573 && implicit_sets
[current_bb
->index
] != NULL_RTX
)
2574 hash_scan_set (implicit_sets
[current_bb
->index
],
2575 current_bb
->head
, table
);
2577 /* The next pass builds the hash table. */
2579 for (insn
= current_bb
->head
, in_libcall_block
= 0;
2580 insn
&& insn
!= NEXT_INSN (current_bb
->end
);
2581 insn
= NEXT_INSN (insn
))
2584 if (find_reg_note (insn
, REG_LIBCALL
, NULL_RTX
))
2585 in_libcall_block
= 1;
2586 else if (table
->set_p
&& find_reg_note (insn
, REG_RETVAL
, NULL_RTX
))
2587 in_libcall_block
= 0;
2588 hash_scan_insn (insn
, table
, in_libcall_block
);
2589 if (!table
->set_p
&& find_reg_note (insn
, REG_RETVAL
, NULL_RTX
))
2590 in_libcall_block
= 0;
2594 free (reg_avail_info
);
2595 reg_avail_info
= NULL
;
/* Allocate space for the set/expr hash TABLE.
   N_INSNS is the number of instructions in the function.
   It is used to determine the number of buckets to use.
   SET_P determines whether the set or the expression hash table
   will be created.  */

alloc_hash_table (n_insns, table, set_p)
     struct hash_table *table;

  table->size = n_insns / 4;
  if (table->size < 11)

  /* Attempt to maintain efficient use of hash table.
     Making it an odd number is simplest for now.
     ??? Later take some measurements.  */

  n = table->size * sizeof (struct expr *);
  table->table = (struct expr **) gmalloc (n);
  table->set_p = set_p;

/* Free things allocated by alloc_hash_table.  */

free_hash_table (table)
     struct hash_table *table;

  free (table->table);

/* Compute the hash TABLE for doing copy/const propagation or the
   expression hash table.  */

compute_hash_table (table)
     struct hash_table *table;

  /* Initialize count of number of entries in hash table.  */
  table->n_elems = 0;
  memset ((char *) table->table, 0,
          table->size * sizeof (struct expr *));

  compute_hash_table_work (table);
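/* Minimal driver sketch (illustrative; it just mirrors the sequence used
   by one_classic_gcse_pass further below).  */
#if 0
  struct hash_table table;

  alloc_hash_table (max_cuid, &table, 0);       /* 0 = expression table */
  compute_hash_table (&table);
  if (gcse_file)
    dump_hash_table (gcse_file, "Expression", &table);
  free_hash_table (&table);
#endif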
/* Expression tracking support.  */

/* Lookup pattern PAT in the expression TABLE.
   The result is a pointer to the table entry, or NULL if not found.  */

static struct expr *
lookup_expr (pat, table)
     struct hash_table *table;

  int do_not_record_p;
  unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p,
                                 table->size);

  if (do_not_record_p)

  expr = table->table[hash];

  while (expr && ! expr_equiv_p (expr->expr, pat))
    expr = expr->next_same_hash;

/* Lookup REGNO in the set TABLE.  The result is a pointer to the
   table entry, or NULL if not found.  */

static struct expr *
lookup_set (regno, table)
     struct hash_table *table;

  unsigned int hash = hash_set (regno, table->size);

  expr = table->table[hash];

  while (expr && REGNO (SET_DEST (expr->expr)) != regno)
    expr = expr->next_same_hash;

/* Return the next entry for REGNO in list EXPR.  */

static struct expr *
next_set (regno, expr)

  do
    expr = expr->next_same_hash;
  while (expr && REGNO (SET_DEST (expr->expr)) != regno);
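/* Usage sketch (illustrative only): iterate over every recorded SET of a
   given pseudo REGNO by combining lookup_set and next_set.  */
#if 0
  struct expr *set;

  for (set = lookup_set (regno, &set_hash_table);
       set != NULL;
       set = next_set (regno, set))
    /* SET->expr is a (set (reg REGNO) ...) recorded by hash_scan_set.  */ ;
#endif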
/* Like free_INSN_LIST_list or free_EXPR_LIST_list, except that the node
   types may be mixed.  */

free_insn_expr_list_list (listp)

  for (list = *listp; list; list = next)

      next = XEXP (list, 1);
      if (GET_CODE (list) == EXPR_LIST)
        free_EXPR_LIST_node (list);
      else
        free_INSN_LIST_node (list);

/* Clear canon_modify_mem_list and modify_mem_list tables.  */

clear_modify_mem_tables ()

  EXECUTE_IF_SET_IN_BITMAP
    (modify_mem_list_set, 0, i, free_INSN_LIST_list (modify_mem_list + i));
  bitmap_clear (modify_mem_list_set);

  EXECUTE_IF_SET_IN_BITMAP
    (canon_modify_mem_list_set, 0, i,
     free_insn_expr_list_list (canon_modify_mem_list + i));
  bitmap_clear (canon_modify_mem_list_set);

/* Release memory used by modify_mem_list_set and canon_modify_mem_list_set.  */

free_modify_mem_tables ()

  clear_modify_mem_tables ();
  free (modify_mem_list);
  free (canon_modify_mem_list);
  modify_mem_list = 0;
  canon_modify_mem_list = 0;

/* Reset tables used to keep track of what's still available [since the
   start of the block].  */

reset_opr_set_tables ()

  /* Maintain a bitmap of which regs have been set since the beginning of
     the block.  */
  CLEAR_REG_SET (reg_set_bitmap);

  /* Also keep a record of the last instruction to modify memory.
     For now this is very trivial, we only record whether any memory
     location has been modified.  */
  clear_modify_mem_tables ();
2773 /* Return nonzero if the operands of X are not set before INSN in
2774 INSN's basic block. */
2777 oprs_not_set_p (x
, insn
)
2787 code
= GET_CODE (x
);
2803 if (load_killed_in_block_p (BLOCK_FOR_INSN (insn
),
2804 INSN_CUID (insn
), x
, 0))
2807 return oprs_not_set_p (XEXP (x
, 0), insn
);
2810 return ! REGNO_REG_SET_P (reg_set_bitmap
, REGNO (x
));
2816 for (i
= GET_RTX_LENGTH (code
) - 1, fmt
= GET_RTX_FORMAT (code
); i
>= 0; i
--)
2820 /* If we are about to do the last recursive call
2821 needed at this level, change it into iteration.
2822 This function is called enough to be worth it. */
2824 return oprs_not_set_p (XEXP (x
, i
), insn
);
2826 if (! oprs_not_set_p (XEXP (x
, i
), insn
))
2829 else if (fmt
[i
] == 'E')
2830 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2831 if (! oprs_not_set_p (XVECEXP (x
, i
, j
), insn
))
2838 /* Mark things set by a CALL. */
2844 if (! CONST_OR_PURE_CALL_P (insn
))
2845 record_last_mem_set_info (insn
);
2848 /* Mark things set by a SET. */
2851 mark_set (pat
, insn
)
2854 rtx dest
= SET_DEST (pat
);
2856 while (GET_CODE (dest
) == SUBREG
2857 || GET_CODE (dest
) == ZERO_EXTRACT
2858 || GET_CODE (dest
) == SIGN_EXTRACT
2859 || GET_CODE (dest
) == STRICT_LOW_PART
)
2860 dest
= XEXP (dest
, 0);
2862 if (GET_CODE (dest
) == REG
)
2863 SET_REGNO_REG_SET (reg_set_bitmap
, REGNO (dest
));
2864 else if (GET_CODE (dest
) == MEM
)
2865 record_last_mem_set_info (insn
);
2867 if (GET_CODE (SET_SRC (pat
)) == CALL
)
2871 /* Record things set by a CLOBBER. */
2874 mark_clobber (pat
, insn
)
2877 rtx clob
= XEXP (pat
, 0);
2879 while (GET_CODE (clob
) == SUBREG
|| GET_CODE (clob
) == STRICT_LOW_PART
)
2880 clob
= XEXP (clob
, 0);
2882 if (GET_CODE (clob
) == REG
)
2883 SET_REGNO_REG_SET (reg_set_bitmap
, REGNO (clob
));
2885 record_last_mem_set_info (insn
);
2888 /* Record things set by INSN.
2889 This data is used by oprs_not_set_p. */
2892 mark_oprs_set (insn
)
2895 rtx pat
= PATTERN (insn
);
2898 if (GET_CODE (pat
) == SET
)
2899 mark_set (pat
, insn
);
2900 else if (GET_CODE (pat
) == PARALLEL
)
2901 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
2903 rtx x
= XVECEXP (pat
, 0, i
);
2905 if (GET_CODE (x
) == SET
)
2907 else if (GET_CODE (x
) == CLOBBER
)
2908 mark_clobber (x
, insn
);
2909 else if (GET_CODE (x
) == CALL
)
2913 else if (GET_CODE (pat
) == CLOBBER
)
2914 mark_clobber (pat
, insn
);
2915 else if (GET_CODE (pat
) == CALL
)
/* Classic GCSE reaching definition support.  */

/* Allocate reaching def variables.  */

alloc_rd_mem (n_blocks, n_insns)
     int n_blocks, n_insns;

  rd_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
  sbitmap_vector_zero (rd_kill, n_blocks);

  rd_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
  sbitmap_vector_zero (rd_gen, n_blocks);

  reaching_defs = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
  sbitmap_vector_zero (reaching_defs, n_blocks);

  rd_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
  sbitmap_vector_zero (rd_out, n_blocks);

/* Free reaching def variables.  */

  sbitmap_vector_free (rd_kill);
  sbitmap_vector_free (rd_gen);
  sbitmap_vector_free (reaching_defs);
  sbitmap_vector_free (rd_out);
/* Add INSN to the kills of BB.  REGNO, set in BB, is killed by INSN.  */

handle_rd_kill_set (insn, regno, bb)

  struct reg_set *this_reg;

  for (this_reg = reg_set_table[regno]; this_reg; this_reg = this_reg->next)
    if (BLOCK_NUM (this_reg->insn) != BLOCK_NUM (insn))
      SET_BIT (rd_kill[bb->index], INSN_CUID (this_reg->insn));
2967 /* Compute the set of kill's for reaching definitions. */
2978 For each set bit in `gen' of the block (i.e each insn which
2979 generates a definition in the block)
2980 Call the reg set by the insn corresponding to that bit regx
2981 Look at the linked list starting at reg_set_table[regx]
2982 For each setting of regx in the linked list, which is not in
2984 Set the bit in `kill' corresponding to that insn. */
2986 for (cuid
= 0; cuid
< max_cuid
; cuid
++)
2987 if (TEST_BIT (rd_gen
[bb
->index
], cuid
))
2989 rtx insn
= CUID_INSN (cuid
);
2990 rtx pat
= PATTERN (insn
);
2992 if (GET_CODE (insn
) == CALL_INSN
)
2994 for (regno
= 0; regno
< FIRST_PSEUDO_REGISTER
; regno
++)
2995 if (TEST_HARD_REG_BIT (regs_invalidated_by_call
, regno
))
2996 handle_rd_kill_set (insn
, regno
, bb
);
2999 if (GET_CODE (pat
) == PARALLEL
)
3001 for (i
= XVECLEN (pat
, 0) - 1; i
>= 0; i
--)
3003 enum rtx_code code
= GET_CODE (XVECEXP (pat
, 0, i
));
3005 if ((code
== SET
|| code
== CLOBBER
)
3006 && GET_CODE (XEXP (XVECEXP (pat
, 0, i
), 0)) == REG
)
3007 handle_rd_kill_set (insn
,
3008 REGNO (XEXP (XVECEXP (pat
, 0, i
), 0)),
3012 else if (GET_CODE (pat
) == SET
&& GET_CODE (SET_DEST (pat
)) == REG
)
3013 /* Each setting of this register outside of this block
3014 must be marked in the set of kills in this block. */
3015 handle_rd_kill_set (insn
, REGNO (SET_DEST (pat
)), bb
);
/* Compute the reaching definitions as in
   Compilers Principles, Techniques, and Tools.  Aho, Sethi, Ullman,
   Chapter 10.  It is the same algorithm as used for computing available
   expressions but applied to the gens and kills of reaching definitions.  */

  int changed, passes;

      sbitmap_copy (rd_out[bb->index] /*dst*/, rd_gen[bb->index] /*src*/);

          sbitmap_union_of_preds (reaching_defs[bb->index], rd_out, bb->index);
          changed |= sbitmap_union_of_diff_cg (rd_out[bb->index],
                                               rd_gen[bb->index],
                                               reaching_defs[bb->index],
                                               rd_kill[bb->index]);

    fprintf (gcse_file, "reaching def computation: %d passes\n", passes);
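/* In equation form, the iteration above computes (standard formulation,
   restated here for clarity rather than quoted from the source):

     reaching_defs[b] = UNION over predecessors p of rd_out[p]
     rd_out[b]        = rd_gen[b] UNION (reaching_defs[b] - rd_kill[b])

   repeated over all blocks until no rd_out[b] changes.  */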
/* Classic GCSE available expression support.  */

/* Allocate memory for available expression computation.  */

alloc_avail_expr_mem (n_blocks, n_exprs)
     int n_blocks, n_exprs;

  ae_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
  sbitmap_vector_zero (ae_kill, n_blocks);

  ae_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
  sbitmap_vector_zero (ae_gen, n_blocks);

  ae_in = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
  sbitmap_vector_zero (ae_in, n_blocks);

  ae_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
  sbitmap_vector_zero (ae_out, n_blocks);

free_avail_expr_mem ()

  sbitmap_vector_free (ae_kill);
  sbitmap_vector_free (ae_gen);
  sbitmap_vector_free (ae_in);
  sbitmap_vector_free (ae_out);
/* Compute the set of available expressions generated in each basic block.  */

compute_ae_gen (expr_hash_table)
     struct hash_table *expr_hash_table;

  /* For each recorded occurrence of each expression, set ae_gen[bb][expr].
     This is all we have to do because an expression is not recorded if it
     is not available, and the only expressions we want to work with are the
     ones that are recorded.  */
  for (i = 0; i < expr_hash_table->size; i++)
    for (expr = expr_hash_table->table[i]; expr != 0; expr = expr->next_same_hash)
      for (occr = expr->avail_occr; occr != 0; occr = occr->next)
        SET_BIT (ae_gen[BLOCK_NUM (occr->insn)], expr->bitmap_index);
3101 /* Return nonzero if expression X is killed in BB. */
3104 expr_killed_p (x
, bb
)
3115 code
= GET_CODE (x
);
3119 return TEST_BIT (reg_set_in_block
[bb
->index
], REGNO (x
));
3122 if (load_killed_in_block_p (bb
, get_max_uid () + 1, x
, 0))
3125 return expr_killed_p (XEXP (x
, 0), bb
);
3143 for (i
= GET_RTX_LENGTH (code
) - 1, fmt
= GET_RTX_FORMAT (code
); i
>= 0; i
--)
3147 /* If we are about to do the last recursive call
3148 needed at this level, change it into iteration.
3149 This function is called enough to be worth it. */
3151 return expr_killed_p (XEXP (x
, i
), bb
);
3152 else if (expr_killed_p (XEXP (x
, i
), bb
))
3155 else if (fmt
[i
] == 'E')
3156 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3157 if (expr_killed_p (XVECEXP (x
, i
, j
), bb
))
3164 /* Compute the set of available expressions killed in each basic block. */
3167 compute_ae_kill (ae_gen
, ae_kill
, expr_hash_table
)
3168 sbitmap
*ae_gen
, *ae_kill
;
3169 struct hash_table
*expr_hash_table
;
3176 for (i
= 0; i
< expr_hash_table
->size
; i
++)
3177 for (expr
= expr_hash_table
->table
[i
]; expr
; expr
= expr
->next_same_hash
)
3179 /* Skip EXPR if generated in this block. */
3180 if (TEST_BIT (ae_gen
[bb
->index
], expr
->bitmap_index
))
3183 if (expr_killed_p (expr
->expr
, bb
))
3184 SET_BIT (ae_kill
[bb
->index
], expr
->bitmap_index
);
3188 /* Actually perform the Classic GCSE optimizations. */
3190 /* Return nonzero if occurrence OCCR of expression EXPR reaches block BB.
3192 CHECK_SELF_LOOP is nonzero if we should consider a block reaching itself
3193 as a positive reach. We want to do this when there are two computations
3194 of the expression in the block.
3196 VISITED is a pointer to a working buffer for tracking which BB's have
3197 been visited. It is NULL for the top-level call.
3199 We treat reaching expressions that go through blocks containing the same
3200 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
3201 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
3202 2 as not reaching. The intent is to improve the probability of finding
3203 only one reaching expression and to reduce register lifetimes by picking
3204 the closest such expression. */
3207 expr_reaches_here_p_work (occr
, expr
, bb
, check_self_loop
, visited
)
3211 int check_self_loop
;
3216 for (pred
= bb
->pred
; pred
!= NULL
; pred
= pred
->pred_next
)
3218 basic_block pred_bb
= pred
->src
;
3220 if (visited
[pred_bb
->index
])
3221 /* This predecessor has already been visited. Nothing to do. */
3223 else if (pred_bb
== bb
)
3225 /* BB loops on itself. */
3227 && TEST_BIT (ae_gen
[pred_bb
->index
], expr
->bitmap_index
)
3228 && BLOCK_NUM (occr
->insn
) == pred_bb
->index
)
3231 visited
[pred_bb
->index
] = 1;
3234 /* Ignore this predecessor if it kills the expression. */
3235 else if (TEST_BIT (ae_kill
[pred_bb
->index
], expr
->bitmap_index
))
3236 visited
[pred_bb
->index
] = 1;
3238 /* Does this predecessor generate this expression? */
3239 else if (TEST_BIT (ae_gen
[pred_bb
->index
], expr
->bitmap_index
))
3241 /* Is this the occurrence we're looking for?
3242 Note that there's only one generating occurrence per block
3243 so we just need to check the block number. */
3244 if (BLOCK_NUM (occr
->insn
) == pred_bb
->index
)
3247 visited
[pred_bb
->index
] = 1;
3250 /* Neither gen nor kill. */
3253 visited
[pred_bb
->index
] = 1;
3254 if (expr_reaches_here_p_work (occr
, expr
, pred_bb
, check_self_loop
,
3261 /* All paths have been checked. */
3265 /* This wrapper for expr_reaches_here_p_work() is to ensure that any
3266 memory allocated for that function is returned. */
3269 expr_reaches_here_p (occr
, expr
, bb
, check_self_loop
)
3273 int check_self_loop
;
3276 char *visited
= (char *) xcalloc (last_basic_block
, 1);
3278 rval
= expr_reaches_here_p_work (occr
, expr
, bb
, check_self_loop
, visited
);
3284 /* Return the instruction that computes EXPR that reaches INSN's basic block.
3285 If there is more than one such instruction, return NULL.
3287 Called only by handle_avail_expr. */
3290 computing_insn (expr
, insn
)
3294 basic_block bb
= BLOCK_FOR_INSN (insn
);
3296 if (expr
->avail_occr
->next
== NULL
)
3298 if (BLOCK_FOR_INSN (expr
->avail_occr
->insn
) == bb
)
3299 /* The available expression is actually itself
3300 (i.e. a loop in the flow graph) so do nothing. */
3303 /* (FIXME) Case that we found a pattern that was created by
3304 a substitution that took place. */
3305 return expr
->avail_occr
->insn
;
3309 /* Pattern is computed more than once.
3310 Search backwards from this insn to see how many of these
3311 computations actually reach this insn. */
3313 rtx insn_computes_expr
= NULL
;
3316 for (occr
= expr
->avail_occr
; occr
!= NULL
; occr
= occr
->next
)
3318 if (BLOCK_FOR_INSN (occr
->insn
) == bb
)
3320 /* The expression is generated in this block.
3321 The only time we care about this is when the expression
3322 is generated later in the block [and thus there's a loop].
3323 We let the normal cse pass handle the other cases. */
3324 if (INSN_CUID (insn
) < INSN_CUID (occr
->insn
)
3325 && expr_reaches_here_p (occr
, expr
, bb
, 1))
3331 insn_computes_expr
= occr
->insn
;
3334 else if (expr_reaches_here_p (occr
, expr
, bb
, 0))
3340 insn_computes_expr
= occr
->insn
;
3344 if (insn_computes_expr
== NULL
)
3347 return insn_computes_expr
;
3351 /* Return nonzero if the definition in DEF_INSN can reach INSN.
3352 Only called by can_disregard_other_sets. */
3355 def_reaches_here_p (insn
, def_insn
)
3360 if (TEST_BIT (reaching_defs
[BLOCK_NUM (insn
)], INSN_CUID (def_insn
)))
3363 if (BLOCK_NUM (insn
) == BLOCK_NUM (def_insn
))
3365 if (INSN_CUID (def_insn
) < INSN_CUID (insn
))
3367 if (GET_CODE (PATTERN (def_insn
)) == PARALLEL
)
3369 else if (GET_CODE (PATTERN (def_insn
)) == CLOBBER
)
3370 reg
= XEXP (PATTERN (def_insn
), 0);
3371 else if (GET_CODE (PATTERN (def_insn
)) == SET
)
3372 reg
= SET_DEST (PATTERN (def_insn
));
3376 return ! reg_set_between_p (reg
, NEXT_INSN (def_insn
), insn
);
3385 /* Return nonzero if *ADDR_THIS_REG can only have one value at INSN. The
3386 value returned is the number of definitions that reach INSN. Returning a
3387 value of zero means that [maybe] more than one definition reaches INSN and
3388 the caller can't perform whatever optimization it is trying. i.e. it is
3389 always safe to return zero. */
3392 can_disregard_other_sets (addr_this_reg
, insn
, for_combine
)
3393 struct reg_set
**addr_this_reg
;
3397 int number_of_reaching_defs
= 0;
3398 struct reg_set
*this_reg
;
3400 for (this_reg
= *addr_this_reg
; this_reg
!= 0; this_reg
= this_reg
->next
)
3401 if (def_reaches_here_p (insn
, this_reg
->insn
))
3403 number_of_reaching_defs
++;
3404 /* Ignore parallels for now. */
3405 if (GET_CODE (PATTERN (this_reg
->insn
)) == PARALLEL
)
3409 && (GET_CODE (PATTERN (this_reg
->insn
)) == CLOBBER
3410 || ! rtx_equal_p (SET_SRC (PATTERN (this_reg
->insn
)),
3411 SET_SRC (PATTERN (insn
)))))
3412 /* A setting of the reg to a different value reaches INSN. */
3415 if (number_of_reaching_defs
> 1)
3417 /* If in this setting the value the register is being set to is
3418 equal to the previous value the register was set to and this
3419 setting reaches the insn we are trying to do the substitution
3420 on then we are ok. */
3421 if (GET_CODE (PATTERN (this_reg
->insn
)) == CLOBBER
)
3423 else if (! rtx_equal_p (SET_SRC (PATTERN (this_reg
->insn
)),
3424 SET_SRC (PATTERN (insn
))))
3428 *addr_this_reg
= this_reg
;
3431 return number_of_reaching_defs
;
3434 /* Expression computed by insn is available and the substitution is legal,
3435 so try to perform the substitution.
3437 The result is nonzero if any changes were made. */
3440 handle_avail_expr (insn
, expr
)
3444 rtx pat
, insn_computes_expr
, expr_set
;
3446 struct reg_set
*this_reg
;
3447 int found_setting
, use_src
;
3450 /* We only handle the case where one computation of the expression
3451 reaches this instruction. */
3452 insn_computes_expr
= computing_insn (expr
, insn
);
3453 if (insn_computes_expr
== NULL
)
3455 expr_set
= single_set (insn_computes_expr
);
3462 /* At this point we know only one computation of EXPR outside of this
3463 block reaches this insn. Now try to find a register that the
3464 expression is computed into. */
3465 if (GET_CODE (SET_SRC (expr_set
)) == REG
)
3467 /* This is the case when the available expression that reaches
3468 here has already been handled as an available expression. */
3469 unsigned int regnum_for_replacing
3470 = REGNO (SET_SRC (expr_set
));
3472 /* If the register was created by GCSE we can't use `reg_set_table',
3473 however we know it's set only once. */
3474 if (regnum_for_replacing
>= max_gcse_regno
3475 /* If the register the expression is computed into is set only once,
3476 or only one set reaches this insn, we can use it. */
3477 || (((this_reg
= reg_set_table
[regnum_for_replacing
]),
3478 this_reg
->next
== NULL
)
3479 || can_disregard_other_sets (&this_reg
, insn
, 0)))
3488 unsigned int regnum_for_replacing
3489 = REGNO (SET_DEST (expr_set
));
3491 /* This shouldn't happen. */
3492 if (regnum_for_replacing
>= max_gcse_regno
)
3495 this_reg
= reg_set_table
[regnum_for_replacing
];
3497 /* If the register the expression is computed into is set only once,
3498 or only one set reaches this insn, use it. */
3499 if (this_reg
->next
== NULL
3500 || can_disregard_other_sets (&this_reg
, insn
, 0))
3506 pat
= PATTERN (insn
);
3508 to
= SET_SRC (expr_set
);
3510 to
= SET_DEST (expr_set
);
3511 changed
= validate_change (insn
, &SET_SRC (pat
), to
, 0);
3513 /* We should be able to ignore the return code from validate_change but
3514 to play it safe we check. */
3518 if (gcse_file
!= NULL
)
3520 fprintf (gcse_file
, "GCSE: Replacing the source in insn %d with",
3522 fprintf (gcse_file
, " reg %d %s insn %d\n",
3523 REGNO (to
), use_src
? "from" : "set in",
3524 INSN_UID (insn_computes_expr
));
3529 /* The register that the expr is computed into is set more than once. */
3530 else if (1 /*expensive_op(this_pattrn->op) && do_expensive_gcse)*/)
3532 /* Insert an insn after insnx that copies the reg set in insnx
3533 into a new pseudo register call this new register REGN.
3534 From insnb until end of basic block or until REGB is set
3535 replace all uses of REGB with REGN. */
3538 to
= gen_reg_rtx (GET_MODE (SET_DEST (expr_set
)));
3540 /* Generate the new insn. */
3541 /* ??? If the change fails, we return 0, even though we created
3542 an insn. I think this is ok. */
3544 = emit_insn_after (gen_rtx_SET (VOIDmode
, to
,
3545 SET_DEST (expr_set
)),
3546 insn_computes_expr
);
3548 /* Keep register set table up to date. */
3549 record_one_set (REGNO (to
), new_insn
);
3551 gcse_create_count
++;
3552 if (gcse_file
!= NULL
)
3554 fprintf (gcse_file
, "GCSE: Creating insn %d to copy value of reg %d",
3555 INSN_UID (NEXT_INSN (insn_computes_expr
)),
3556 REGNO (SET_SRC (PATTERN (NEXT_INSN (insn_computes_expr
)))));
3557 fprintf (gcse_file
, ", computed in insn %d,\n",
3558 INSN_UID (insn_computes_expr
));
3559 fprintf (gcse_file
, " into newly allocated reg %d\n",
3563 pat
= PATTERN (insn
);
3565 /* Do register replacement for INSN. */
3566 changed
= validate_change (insn
, &SET_SRC (pat
),
3568 (NEXT_INSN (insn_computes_expr
))),
3571 /* We should be able to ignore the return code from validate_change but
3572 to play it safe we check. */
3576 if (gcse_file
!= NULL
)
3579 "GCSE: Replacing the source in insn %d with reg %d ",
3581 REGNO (SET_DEST (PATTERN (NEXT_INSN
3582 (insn_computes_expr
)))));
3583 fprintf (gcse_file
, "set in insn %d\n",
3584 INSN_UID (insn_computes_expr
));
3592 /* Perform classic GCSE. This is called by one_classic_gcse_pass after all
3593 the dataflow analysis has been done.
3595 The result is nonzero if a change was made. */
3604 /* Note we start at block 1. */
3606 if (ENTRY_BLOCK_PTR
->next_bb
== EXIT_BLOCK_PTR
)
3610 FOR_BB_BETWEEN (bb
, ENTRY_BLOCK_PTR
->next_bb
->next_bb
, EXIT_BLOCK_PTR
, next_bb
)
3612 /* Reset tables used to keep track of what's still valid [since the
3613 start of the block]. */
3614 reset_opr_set_tables ();
3616 for (insn
= bb
->head
;
3617 insn
!= NULL
&& insn
!= NEXT_INSN (bb
->end
);
3618 insn
= NEXT_INSN (insn
))
3620 /* Is insn of form (set (pseudo-reg) ...)? */
3621 if (GET_CODE (insn
) == INSN
3622 && GET_CODE (PATTERN (insn
)) == SET
3623 && GET_CODE (SET_DEST (PATTERN (insn
))) == REG
3624 && REGNO (SET_DEST (PATTERN (insn
))) >= FIRST_PSEUDO_REGISTER
)
3626 rtx pat
= PATTERN (insn
);
3627 rtx src
= SET_SRC (pat
);
3630 if (want_to_gcse_p (src
)
3631 /* Is the expression recorded? */
3632 && ((expr
= lookup_expr (src
, &expr_hash_table
)) != NULL
)
3633 /* Is the expression available [at the start of the
3635 && TEST_BIT (ae_in
[bb
->index
], expr
->bitmap_index
)
3636 /* Are the operands unchanged since the start of the
3638 && oprs_not_set_p (src
, insn
))
3639 changed
|= handle_avail_expr (insn
, expr
);
3642 /* Keep track of everything modified by this insn. */
3643 /* ??? Need to be careful w.r.t. mods done to INSN. */
3645 mark_oprs_set (insn
);
3652 /* Top level routine to perform one classic GCSE pass.
3654 Return nonzero if a change was made. */
3657 one_classic_gcse_pass (pass
)
3662 gcse_subst_count
= 0;
3663 gcse_create_count
= 0;
3665 alloc_hash_table (max_cuid
, &expr_hash_table
, 0);
3666 alloc_rd_mem (last_basic_block
, max_cuid
);
3667 compute_hash_table (&expr_hash_table
);
3669 dump_hash_table (gcse_file
, "Expression", &expr_hash_table
);
3671 if (expr_hash_table
.n_elems
> 0)
3675 alloc_avail_expr_mem (last_basic_block
, expr_hash_table
.n_elems
);
3676 compute_ae_gen (&expr_hash_table
);
3677 compute_ae_kill (ae_gen
, ae_kill
, &expr_hash_table
);
3678 compute_available (ae_gen
, ae_kill
, ae_out
, ae_in
);
3679 changed
= classic_gcse ();
3680 free_avail_expr_mem ();
3684 free_hash_table (&expr_hash_table
);
3688 fprintf (gcse_file
, "\n");
3689 fprintf (gcse_file
, "GCSE of %s, pass %d: %d bytes needed, %d substs,",
3690 current_function_name
, pass
, bytes_used
, gcse_subst_count
);
3691 fprintf (gcse_file
, "%d insns created\n", gcse_create_count
);
/* Compute copy/constant propagation working variables.  */

/* Local properties of assignments.  */
static sbitmap *cprop_pavloc;
static sbitmap *cprop_absaltered;

/* Global properties of assignments (computed from the local properties).  */
static sbitmap *cprop_avin;
static sbitmap *cprop_avout;

/* Allocate vars used for copy/const propagation.  N_BLOCKS is the number of
   basic blocks.  N_SETS is the number of sets.  */

alloc_cprop_mem (n_blocks, n_sets)
     int n_blocks, n_sets;

  cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
  cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);

  cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
  cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);

/* Free vars used by copy/const propagation.  */

  sbitmap_vector_free (cprop_pavloc);
  sbitmap_vector_free (cprop_absaltered);
  sbitmap_vector_free (cprop_avin);
  sbitmap_vector_free (cprop_avout);
3732 /* For each block, compute whether X is transparent. X is either an
3733 expression or an assignment [though we don't care which, for this context
3734 an assignment is treated as an expression]. For each block where an
3735 element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX
3739 compute_transp (x
, indx
, bmap
, set_p
)
3751 /* repeat is used to turn tail-recursion into iteration since GCC
3752 can't do it when there's no return value. */
3758 code
= GET_CODE (x
);
3764 if (REGNO (x
) < FIRST_PSEUDO_REGISTER
)
3767 if (TEST_BIT (reg_set_in_block
[bb
->index
], REGNO (x
)))
3768 SET_BIT (bmap
[bb
->index
], indx
);
3772 for (r
= reg_set_table
[REGNO (x
)]; r
!= NULL
; r
= r
->next
)
3773 SET_BIT (bmap
[BLOCK_NUM (r
->insn
)], indx
);
3778 if (REGNO (x
) < FIRST_PSEUDO_REGISTER
)
3781 if (TEST_BIT (reg_set_in_block
[bb
->index
], REGNO (x
)))
3782 RESET_BIT (bmap
[bb
->index
], indx
);
3786 for (r
= reg_set_table
[REGNO (x
)]; r
!= NULL
; r
= r
->next
)
3787 RESET_BIT (bmap
[BLOCK_NUM (r
->insn
)], indx
);
3796 rtx list_entry
= canon_modify_mem_list
[bb
->index
];
3800 rtx dest
, dest_addr
;
3802 if (GET_CODE (XEXP (list_entry
, 0)) == CALL_INSN
)
3805 SET_BIT (bmap
[bb
->index
], indx
);
3807 RESET_BIT (bmap
[bb
->index
], indx
);
3810 /* LIST_ENTRY must be an INSN of some kind that sets memory.
3811 Examine each hunk of memory that is modified. */
3813 dest
= XEXP (list_entry
, 0);
3814 list_entry
= XEXP (list_entry
, 1);
3815 dest_addr
= XEXP (list_entry
, 0);
3817 if (canon_true_dependence (dest
, GET_MODE (dest
), dest_addr
,
3818 x
, rtx_addr_varies_p
))
3821 SET_BIT (bmap
[bb
->index
], indx
);
3823 RESET_BIT (bmap
[bb
->index
], indx
);
3826 list_entry
= XEXP (list_entry
, 1);
3849 for (i
= GET_RTX_LENGTH (code
) - 1, fmt
= GET_RTX_FORMAT (code
); i
>= 0; i
--)
3853 /* If we are about to do the last recursive call
3854 needed at this level, change it into iteration.
3855 This function is called enough to be worth it. */
3862 compute_transp (XEXP (x
, i
), indx
, bmap
, set_p
);
3864 else if (fmt
[i
] == 'E')
3865 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3866 compute_transp (XVECEXP (x
, i
, j
), indx
, bmap
, set_p
);
/* Top level routine to do the dataflow analysis needed by copy/const
   propagation.  */

compute_cprop_data ()

  compute_local_properties (cprop_absaltered, cprop_pavloc, NULL,
                            &set_hash_table);
  compute_available (cprop_pavloc, cprop_absaltered,
                     cprop_avout, cprop_avin);
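/* For reference, the availability system solved here has the usual shape
   (restated for clarity, not quoted from the source):

     cprop_avout[b] = cprop_pavloc[b] UNION (cprop_avin[b] - cprop_absaltered[b])
     cprop_avin[b]  = INTERSECTION over predecessors p of cprop_avout[p]

   with the entry block starting from the empty set.  */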
/* Copy/constant propagation.  */

/* Maximum number of register uses in an insn that we handle.  */

/* Table of uses found in an insn.
   Allocated statically to avoid alloc/free complexity and overhead.  */
static struct reg_use reg_use_table[MAX_USES];

/* Index into `reg_use_table' while building it.  */
static int reg_use_count;

/* Set up a list of register numbers used in INSN.  The found uses are stored
   in `reg_use_table'.  `reg_use_count' is initialized to zero before entry,
   and contains the number of uses in the table upon exit.

   ??? If a register appears multiple times we will record it multiple times.
   This doesn't hurt anything but it will slow things down.  */

find_used_regs (xptr, data)
     void *data ATTRIBUTE_UNUSED;

  /* repeat is used to turn tail-recursion into iteration since GCC
     can't do it when there's no return value.  */

  code = GET_CODE (x);

      if (reg_use_count == MAX_USES)

      reg_use_table[reg_use_count].reg_rtx = x;

  /* Recursively scan the operands of this expression.  */

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)

          /* If we are about to do the last recursive call
             needed at this level, change it into iteration.
             This function is called enough to be worth it.  */

          find_used_regs (&XEXP (x, i), data);

      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          find_used_regs (&XVECEXP (x, i, j), data);
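/* Usage sketch (illustrative; this is the same idiom cprop_insn uses
   below): collect the registers read by INSN and walk the table.  */
#if 0
  struct reg_use *reg_used;

  reg_use_count = 0;
  note_uses (&PATTERN (insn), find_used_regs, NULL);
  for (reg_used = &reg_use_table[0]; reg_use_count > 0;
       reg_used++, reg_use_count--)
    /* reg_used->reg_rtx is one REG operand read by INSN.  */ ;
#endif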
/* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
   Returns nonzero if successful.  */

try_replace_reg (from, to, insn)

  rtx note = find_reg_equal_equiv_note (insn);

  rtx set = single_set (insn);

  validate_replace_src_group (from, to, insn);
  if (num_changes_pending () && apply_change_group ())

  /* Try to simplify SET_SRC if we have substituted a constant.  */
  if (success && set && CONSTANT_P (to))

      src = simplify_rtx (SET_SRC (set));

        validate_change (insn, &SET_SRC (set), src, 0);

  if (!success && set && reg_mentioned_p (from, SET_SRC (set)))

      /* If above failed and this is a single set, try to simplify the source
         of the set given our substitution.  We could perhaps try this for
         multiple SETs, but it probably won't buy us anything.  */
      src = simplify_replace_rtx (SET_SRC (set), from, to);

      if (!rtx_equal_p (src, SET_SRC (set))
          && validate_change (insn, &SET_SRC (set), src, 0))

  /* If we've failed to do replacement, have a single SET, and don't already
     have a note, add a REG_EQUAL note to not lose information.  */
  if (!success && note == 0 && set != 0)
    note = set_unique_reg_note (insn, REG_EQUAL, copy_rtx (src));

  /* If there is already a NOTE, update the expression in it with our
     replacement.  */
    XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), from, to);

  /* REG_EQUAL may get simplified into register.
     We don't allow that.  Remove that note.  This code ought
     not to happen, because previous code ought to synthesize
     a reg-reg move, but be on the safe side.  */
  if (note && REG_P (XEXP (note, 0)))
    remove_note (insn, note);
4006 /* Find a set of REGNOs that are available on entry to INSN's block. Returns
4007 NULL no such set is found. */
4009 static struct expr
*
4010 find_avail_set (regno
, insn
)
4014 /* SET1 contains the last set found that can be returned to the caller for
4015 use in a substitution. */
4016 struct expr
*set1
= 0;
4018 /* Loops are not possible here. To get a loop we would need two sets
4019 available at the start of the block containing INSN. ie we would
4020 need two sets like this available at the start of the block:
4022 (set (reg X) (reg Y))
4023 (set (reg Y) (reg X))
4025 This can not happen since the set of (reg Y) would have killed the
4026 set of (reg X) making it unavailable at the start of this block. */
4030 struct expr
*set
= lookup_set (regno
, &set_hash_table
);
4032 /* Find a set that is available at the start of the block
4033 which contains INSN. */
4036 if (TEST_BIT (cprop_avin
[BLOCK_NUM (insn
)], set
->bitmap_index
))
4038 set
= next_set (regno
, set
);
4041 /* If no available set was found we've reached the end of the
4042 (possibly empty) copy chain. */
4046 if (GET_CODE (set
->expr
) != SET
)
4049 src
= SET_SRC (set
->expr
);
4051 /* We know the set is available.
4052 Now check that SRC is ANTLOC (i.e. none of the source operands
4053 have changed since the start of the block).
4055 If the source operand changed, we may still use it for the next
4056 iteration of this loop, but we may not use it for substitutions. */
4058 if (gcse_constant_p (src
) || oprs_not_set_p (src
, insn
))
4061 /* If the source of the set is anything except a register, then
4062 we have reached the end of the copy chain. */
4063 if (GET_CODE (src
) != REG
)
4066 /* Follow the copy chain, ie start another iteration of the loop
4067 and see if we have an available copy into SRC. */
4068 regno
= REGNO (src
);
4071 /* SET1 holds the last set that was available and anticipatable at
4076 /* Subroutine of cprop_insn that tries to propagate constants into
4077 JUMP_INSNS. JUMP must be a conditional jump. If SETCC is non-NULL
4078 it is the instruction that immediately precedes JUMP, and must be a
4079 single SET of a register. FROM is what we will try to replace,
4080 SRC is the constant we will try to substitute for it. Returns nonzero
4081 if a change was made. */
4084 cprop_jump (bb
, setcc
, jump
, from
, src
)
4091 rtx
new, set_src
, note_src
;
4092 rtx set
= pc_set (jump
);
4093 rtx note
= find_reg_equal_equiv_note (jump
);
4097 note_src
= XEXP (note
, 0);
4098 if (GET_CODE (note_src
) == EXPR_LIST
)
4099 note_src
= NULL_RTX
;
4101 else note_src
= NULL_RTX
;
4103 /* Prefer REG_EQUAL notes except those containing EXPR_LISTs. */
4104 set_src
= note_src
? note_src
: SET_SRC (set
);
4106 /* First substitute the SETCC condition into the JUMP instruction,
4107 then substitute that given values into this expanded JUMP. */
4108 if (setcc
!= NULL_RTX
4109 && !modified_between_p (from
, setcc
, jump
)
4110 && !modified_between_p (src
, setcc
, jump
))
4113 rtx setcc_set
= single_set (setcc
);
4114 rtx setcc_note
= find_reg_equal_equiv_note (setcc
);
4115 setcc_src
= (setcc_note
&& GET_CODE (XEXP (setcc_note
, 0)) != EXPR_LIST
)
4116 ? XEXP (setcc_note
, 0) : SET_SRC (setcc_set
);
4117 set_src
= simplify_replace_rtx (set_src
, SET_DEST (setcc_set
),
4123 new = simplify_replace_rtx (set_src
, from
, src
);
4125 /* If no simplification can be made, then try the next register. */
4126 if (rtx_equal_p (new, SET_SRC (set
)))
4129 /* If this is now a no-op delete it, otherwise this must be a valid insn. */
4134 /* Ensure the value computed inside the jump insn to be equivalent
4135 to one computed by setcc. */
4136 if (setcc
&& modified_in_p (new, setcc
))
4138 if (! validate_change (jump
, &SET_SRC (set
), new, 0))
4140 /* When (some) constants are not valid in a comparison, and there
4141 are two registers to be replaced by constants before the entire
4142 comparison can be folded into a constant, we need to keep
4143 intermediate information in REG_EQUAL notes. For targets with
4144 separate compare insns, such notes are added by try_replace_reg.
4145 When we have a combined compare-and-branch instruction, however,
4146 we need to attach a note to the branch itself to make this
4147 optimization work. */
4149 if (!rtx_equal_p (new, note_src
))
4150 set_unique_reg_note (jump
, REG_EQUAL
, copy_rtx (new));
4154 /* Remove REG_EQUAL note after simplification. */
4156 remove_note (jump
, note
);
4158 /* If this has turned into an unconditional jump,
4159 then put a barrier after it so that the unreachable
4160 code will be deleted. */
4161 if (GET_CODE (SET_SRC (set
)) == LABEL_REF
)
4162 emit_barrier_after (jump
);
4166 /* Delete the cc0 setter. */
4167 if (setcc
!= NULL
&& CC0_P (SET_DEST (single_set (setcc
))))
4168 delete_insn (setcc
);
4171 run_jump_opt_after_gcse
= 1;
4174 if (gcse_file
!= NULL
)
4177 "CONST-PROP: Replacing reg %d in jump_insn %d with constant ",
4178 REGNO (from
), INSN_UID (jump
));
4179 print_rtl (gcse_file
, src
);
4180 fprintf (gcse_file
, "\n");
4182 purge_dead_edges (bb
);
constprop_register (insn, from, to, alter_jumps)

  /* Check for reg or cc0 setting instructions followed by
     conditional branch instructions first.  */
      && (sset = single_set (insn)) != NULL
      && any_condjump_p (NEXT_INSN (insn)) && onlyjump_p (NEXT_INSN (insn)))

      rtx dest = SET_DEST (sset);
      if ((REG_P (dest) || CC0_P (dest))
          && cprop_jump (BLOCK_FOR_INSN (insn), insn, NEXT_INSN (insn),
                         from, to))

  /* Handle normal insns next.  */
  if (GET_CODE (insn) == INSN
      && try_replace_reg (from, to, insn))

  /* Try to propagate a CONST_INT into a conditional jump.
     We're pretty specific about what we will handle in this
     code, we can extend this as necessary over time.

     Right now the insn in question must look like
     (set (pc) (if_then_else ...))  */
  else if (alter_jumps && any_condjump_p (insn) && onlyjump_p (insn))
    return cprop_jump (BLOCK_FOR_INSN (insn), NULL, insn, from, to);
4225 /* Perform constant and copy propagation on INSN.
4226 The result is nonzero if a change was made. */
4229 cprop_insn (insn
, alter_jumps
)
4233 struct reg_use
*reg_used
;
4241 note_uses (&PATTERN (insn
), find_used_regs
, NULL
);
4243 note
= find_reg_equal_equiv_note (insn
);
4245 /* We may win even when propagating constants into notes. */
4247 find_used_regs (&XEXP (note
, 0), NULL
);
4249 for (reg_used
= ®_use_table
[0]; reg_use_count
> 0;
4250 reg_used
++, reg_use_count
--)
4252 unsigned int regno
= REGNO (reg_used
->reg_rtx
);
4256 /* Ignore registers created by GCSE.
4257 We do this because ... */
4258 if (regno
>= max_gcse_regno
)
4261 /* If the register has already been set in this block, there's
4262 nothing we can do. */
4263 if (! oprs_not_set_p (reg_used
->reg_rtx
, insn
))
4266 /* Find an assignment that sets reg_used and is available
4267 at the start of the block. */
4268 set
= find_avail_set (regno
, insn
);
4273 /* ??? We might be able to handle PARALLELs. Later. */
4274 if (GET_CODE (pat
) != SET
)
4277 src
= SET_SRC (pat
);
4279 /* Constant propagation. */
4280 if (gcse_constant_p (src
))
4282 if (constprop_register (insn
, reg_used
->reg_rtx
, src
, alter_jumps
))
4286 if (gcse_file
!= NULL
)
4288 fprintf (gcse_file
, "GLOBAL CONST-PROP: Replacing reg %d in ", regno
);
4289 fprintf (gcse_file
, "insn %d with constant ", INSN_UID (insn
));
4290 print_rtl (gcse_file
, src
);
4291 fprintf (gcse_file
, "\n");
4293 if (INSN_DELETED_P (insn
))
4297 else if (GET_CODE (src
) == REG
4298 && REGNO (src
) >= FIRST_PSEUDO_REGISTER
4299 && REGNO (src
) != regno
)
4301 if (try_replace_reg (reg_used
->reg_rtx
, src
, insn
))
4305 if (gcse_file
!= NULL
)
4307 fprintf (gcse_file
, "GLOBAL COPY-PROP: Replacing reg %d in insn %d",
4308 regno
, INSN_UID (insn
));
4309 fprintf (gcse_file
, " with reg %d\n", REGNO (src
));
4312 /* The original insn setting reg_used may or may not now be
4313 deletable. We leave the deletion to flow. */
4314 /* FIXME: If it turns out that the insn isn't deletable,
4315 then we may have unnecessarily extended register lifetimes
4316 and made things worse. */
4324 /* Like find_used_regs, but avoid recording uses that appear in
4325 input-output contexts such as zero_extract or pre_dec. This
4326 restricts the cases we consider to those for which local cprop
4327 can legitimately make replacements. */
4330 local_cprop_find_used_regs (xptr
, data
)
4339 switch (GET_CODE (x
))
4343 case STRICT_LOW_PART
:
4352 /* Can only legitimately appear this early in the context of
4353 stack pushes for function arguments, but handle all of the
4354 codes nonetheless. */
4358 /* Setting a subreg of a register larger than word_mode leaves
4359 the non-written words unchanged. */
4360 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x
))) > BITS_PER_WORD
)
4368 find_used_regs (xptr
, data
);
4371 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
4372 their REG_EQUAL notes need updating. */
4375 do_local_cprop (x
, insn
, alter_jumps
, libcall_sp
)
4381 rtx newreg
= NULL
, newcnst
= NULL
;
4383 /* Rule out USE instructions and ASM statements as we don't want to
4384 change the hard registers mentioned. */
4385 if (GET_CODE (x
) == REG
4386 && (REGNO (x
) >= FIRST_PSEUDO_REGISTER
4387 || (GET_CODE (PATTERN (insn
)) != USE
4388 && asm_noperands (PATTERN (insn
)) < 0)))
4390 cselib_val
*val
= cselib_lookup (x
, GET_MODE (x
), 0);
4391 struct elt_loc_list
*l
;
4395 for (l
= val
->locs
; l
; l
= l
->next
)
4397 rtx this_rtx
= l
->loc
;
4403 if (gcse_constant_p (this_rtx
))
4405 if (REG_P (this_rtx
) && REGNO (this_rtx
) >= FIRST_PSEUDO_REGISTER
4406 /* Don't copy propagate if it has an attached REG_EQUIV note.
4407 At this point only function parameters should have
4408 REG_EQUIV notes, and if the argument slot is used somewhere
4409 explicitly it means the address of the parameter has been taken,
4410 so we should not extend the lifetime of the pseudo. */
4411 && (!(note
= find_reg_note (l
->setting_insn
, REG_EQUIV
, NULL_RTX
))
4412 || GET_CODE (XEXP (note
, 0)) != MEM
))
4415 if (newcnst
&& constprop_register (insn
, x
, newcnst
, alter_jumps
))
4417 /* If we find a case where we can't fix the retval REG_EQUAL notes
4418 to match the new register, we either have to abandon this replacement
4419 or fix delete_trivially_dead_insns to preserve the setting insn,
4420 or make it delete the REG_EQUAL note, and fix up all passes that
4421 require the REG_EQUAL note there. */
4422 if (!adjust_libcall_notes (x
, newcnst
, insn
, libcall_sp
))
4424 if (gcse_file
!= NULL
)
4426 fprintf (gcse_file
, "LOCAL CONST-PROP: Replacing reg %d in ",
4428 fprintf (gcse_file
, "insn %d with constant ",
4430 print_rtl (gcse_file
, newcnst
);
4431 fprintf (gcse_file
, "\n");
4436 else if (newreg
&& newreg
!= x
&& try_replace_reg (x
, newreg
, insn
))
4438 adjust_libcall_notes (x
, newreg
, insn
, libcall_sp
);
4439 if (gcse_file
!= NULL
)
4442 "LOCAL COPY-PROP: Replacing reg %d in insn %d",
4443 REGNO (x
), INSN_UID (insn
));
4444 fprintf (gcse_file
, " with reg %d\n", REGNO (newreg
));
4453 /* LIBCALL_SP is a zero-terminated array of insns at the end of a libcall;
4454 their REG_EQUAL notes need updating to reflect that OLDREG has been
4455 replaced with NEWVAL in INSN. Return true if all substitutions could
4458 adjust_libcall_notes (oldreg
, newval
, insn
, libcall_sp
)
4459 rtx oldreg
, newval
, insn
, *libcall_sp
;
4463 while ((end
= *libcall_sp
++))
4465 rtx note
= find_reg_equal_equiv_note (end
);
4472 if (reg_set_between_p (newval
, PREV_INSN (insn
), end
))
4476 note
= find_reg_equal_equiv_note (end
);
4479 if (reg_mentioned_p (newval
, XEXP (note
, 0)))
4482 while ((end
= *libcall_sp
++));
4486 XEXP (note
, 0) = replace_rtx (XEXP (note
, 0), oldreg
, newval
);
4492 #define MAX_NESTED_LIBCALLS 9
4495 local_cprop_pass (alter_jumps
)
4499 struct reg_use
*reg_used
;
4500 rtx libcall_stack
[MAX_NESTED_LIBCALLS
+ 1], *libcall_sp
;
4501 bool changed
= false;
4504 libcall_sp
= &libcall_stack
[MAX_NESTED_LIBCALLS
];
4506 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
4510 rtx note
= find_reg_note (insn
, REG_LIBCALL
, NULL_RTX
);
4514 if (libcall_sp
== libcall_stack
)
4516 *--libcall_sp
= XEXP (note
, 0);
4518 note
= find_reg_note (insn
, REG_RETVAL
, NULL_RTX
);
4521 note
= find_reg_equal_equiv_note (insn
);
4525 note_uses (&PATTERN (insn
), local_cprop_find_used_regs
, NULL
);
4527 local_cprop_find_used_regs (&XEXP (note
, 0), NULL
);
4529 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
4530 reg_used++, reg_use_count--)
4531 if (do_local_cprop (reg_used
->reg_rtx
, insn
, alter_jumps
,
4537 if (INSN_DELETED_P (insn
))
4540 while (reg_use_count
);
4542 cselib_process_insn (insn
);
4545 /* Global analysis may get into infinite loops for unreachable blocks. */
4546 if (changed
&& alter_jumps
)
4548 delete_unreachable_blocks ();
4549 free_reg_set_mem ();
4550 alloc_reg_set_mem (max_reg_num ());
4551 compute_sets (get_insns ());
4555 /* Forward propagate copies. This includes copies and constants. Return
4556 nonzero if a change was made. */
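/* An illustrative sketch (hypothetical source, not from this file) of
   the propagation the loop below performs within each block:

     int q (void)
     {
       int a = 5;
       int b = a;        - copy propagation rewrites the use of B as A,
       return b + a;     - and constant propagation then replaces both
     }                   - uses with 5, leaving "return 5 + 5".
*/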
4566 /* Note we start at block 1. */
4567 if (ENTRY_BLOCK_PTR
->next_bb
== EXIT_BLOCK_PTR
)
4569 if (gcse_file
!= NULL
)
4570 fprintf (gcse_file
, "\n");
4575 FOR_BB_BETWEEN (bb
, ENTRY_BLOCK_PTR
->next_bb
->next_bb
, EXIT_BLOCK_PTR
, next_bb
)
4577 /* Reset tables used to keep track of what's still valid [since the
4578 start of the block]. */
4579 reset_opr_set_tables ();
4581 for (insn
= bb
->head
;
4582 insn
!= NULL
&& insn
!= NEXT_INSN (bb
->end
);
4583 insn
= NEXT_INSN (insn
))
4586 changed
|= cprop_insn (insn
, alter_jumps
);
4588 /* Keep track of everything modified by this insn. */
4589 /* ??? Need to be careful w.r.t. mods done to INSN. Don't
4590 call mark_oprs_set if we turned the insn into a NOTE. */
4591 if (GET_CODE (insn
) != NOTE
)
4592 mark_oprs_set (insn
);
4596 if (gcse_file
!= NULL
)
4597 fprintf (gcse_file
, "\n");
4602 /* Similar to get_condition, only the resulting condition must be
4603 valid at JUMP, instead of at EARLIEST.
4605 This differs from noce_get_condition in ifcvt.c in that we prefer not to
4606 settle for the condition variable in the jump instruction being integral.
4607 We prefer to be able to record the value of a user variable, rather than
4608 the value of a temporary used in a condition. This could be solved by
4609 recording the value of *every* register scanned by canonicalize_condition,
4610 but this would require some code reorganization. */
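/* For example (a hedged sketch, not from the sources): in

     int g (int x)
     {
       if (x == 0)
         return 1;
       return x;
     }

   we would rather record the condition in terms of the user variable X
   than in terms of a compiler temporary holding the comparison result,
   so that the implicit-set machinery below can later assert "x == 0" in
   the then-arm. */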
4613 fis_get_condition (jump
)
4616 rtx cond
, set
, tmp
, insn
, earliest
;
4619 if (! any_condjump_p (jump
))
4622 set
= pc_set (jump
);
4623 cond
= XEXP (SET_SRC (set
), 0);
4625 /* If this branches to JUMP_LABEL when the condition is false,
4626 reverse the condition. */
4627 reverse
= (GET_CODE (XEXP (SET_SRC (set
), 2)) == LABEL_REF
4628 && XEXP (XEXP (SET_SRC (set
), 2), 0) == JUMP_LABEL (jump
));
4630 /* Use canonicalize_condition to do the dirty work of manipulating
4631 MODE_CC values and COMPARE rtx codes. */
4632 tmp
= canonicalize_condition (jump
, cond
, reverse
, &earliest
, NULL_RTX
);
4636 /* Verify that the given condition is valid at JUMP by virtue of not
4637 having been modified since EARLIEST. */
4638 for (insn
= earliest
; insn
!= jump
; insn
= NEXT_INSN (insn
))
4639 if (INSN_P (insn
) && modified_in_p (tmp
, insn
))
4644 /* The condition was modified. See if we can get a partial result
4645 that doesn't follow all the reversals. Perhaps combine can fold
4646 them together later. */
4647 tmp
= XEXP (tmp
, 0);
4648 if (!REG_P (tmp
) || GET_MODE_CLASS (GET_MODE (tmp
)) != MODE_INT
)
4650 tmp
= canonicalize_condition (jump
, cond
, reverse
, &earliest
, tmp
);
4654 /* For sanity's sake, re-validate the new result. */
4655 for (insn
= earliest
; insn
!= jump
; insn
= NEXT_INSN (insn
))
4656 if (INSN_P (insn
) && modified_in_p (tmp
, insn
))
4662 /* Find the implicit sets of a function. An "implicit set" is a constraint
4663 on the value of a variable, implied by a conditional jump. For example,
4664 following "if (x == 2)", the then branch may be optimized as though the
4665 conditional performed an "explicit set", in this example, "x = 2". This
4666 function records the set patterns that are implicit at the start of each
4670 find_implicit_sets ()
4672 basic_block bb
, dest
;
4678 /* Check for more than one successor. */
4679 if (bb
->succ
&& bb
->succ
->succ_next
)
4681 cond
= fis_get_condition (bb
->end
);
4684 && (GET_CODE (cond
) == EQ
|| GET_CODE (cond
) == NE
)
4685 && GET_CODE (XEXP (cond
, 0)) == REG
4686 && REGNO (XEXP (cond
, 0)) >= FIRST_PSEUDO_REGISTER
4687 && gcse_constant_p (XEXP (cond
, 1)))
4689 dest
= GET_CODE (cond
) == EQ
? BRANCH_EDGE (bb
)->dest
4690 : FALLTHRU_EDGE (bb
)->dest
;
4692 if (dest
&& ! dest
->pred
->pred_next
4693 && dest
!= EXIT_BLOCK_PTR
)
4695 new = gen_rtx_SET (VOIDmode
, XEXP (cond
, 0),
4697 implicit_sets
[dest
->index
] = new;
4700 fprintf(gcse_file
, "Implicit set of reg %d in ",
4701 REGNO (XEXP (cond
, 0)));
4702 fprintf(gcse_file
, "basic block %d\n", dest
->index
);
4710 fprintf (gcse_file
, "Found %d implicit sets\n", count
);
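/* A small sketch (hypothetical source) of what the implicit sets
   recorded by the function above buy us:

     int h (int x)
     {
       if (x == 2)
         return x * 3;   - the then-block gets the implicit pattern
       return x;         - (set x 2), so ordinary constant propagation
     }                   - can fold the multiply to the constant 6.
*/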
4713 /* Perform one copy/constant propagation pass.
4714 PASS is the pass count. If CPROP_JUMPS is true, perform constant
4715 propagation into conditional jumps. If BYPASS_JUMPS is true,
4716 perform conditional jump bypassing optimizations. */
4719 one_cprop_pass (pass
, cprop_jumps
, bypass_jumps
)
4726 const_prop_count
= 0;
4727 copy_prop_count
= 0;
4729 local_cprop_pass (cprop_jumps
);
4731 /* Determine implicit sets. */
4732 implicit_sets
= (rtx
*) xcalloc (last_basic_block
, sizeof (rtx
));
4733 find_implicit_sets ();
4735 alloc_hash_table (max_cuid
, &set_hash_table
, 1);
4736 compute_hash_table (&set_hash_table
);
4738 /* Free implicit_sets before peak usage. */
4739 free (implicit_sets
);
4740 implicit_sets
= NULL
;
4743 dump_hash_table (gcse_file
, "SET", &set_hash_table
);
4744 if (set_hash_table
.n_elems
> 0)
4746 alloc_cprop_mem (last_basic_block
, set_hash_table
.n_elems
);
4747 compute_cprop_data ();
4748 changed
= cprop (cprop_jumps
);
4750 changed
|= bypass_conditional_jumps ();
4754 free_hash_table (&set_hash_table
);
4758 fprintf (gcse_file
, "CPROP of %s, pass %d: %d bytes needed, ",
4759 current_function_name
, pass
, bytes_used
);
4760 fprintf (gcse_file
, "%d const props, %d copy props\n\n",
4761 const_prop_count
, copy_prop_count
);
4763 /* Global analysis may get into infinite loops for unreachable blocks. */
4764 if (changed
&& cprop_jumps
)
4765 delete_unreachable_blocks ();
4770 /* Bypass conditional jumps. */
4772 /* The value of last_basic_block at the beginning of the jump_bypass
4773 pass. The use of redirect_edge_and_branch_force may introduce new
4774 basic blocks, but the data flow analysis is only valid for basic
4775 block indices less than bypass_last_basic_block. */
4777 static int bypass_last_basic_block
;
4779 /* Find a set of REGNO to a constant that is available at the end of basic
4780 block BB. Returns NULL if no such set is found. Based heavily upon
4783 static struct expr
*
4784 find_bypass_set (regno
, bb
)
4788 struct expr
*result
= 0;
4793 struct expr
*set
= lookup_set (regno
, &set_hash_table
);
4797 if (TEST_BIT (cprop_avout
[bb
], set
->bitmap_index
))
4799 set
= next_set (regno
, set
);
4805 if (GET_CODE (set
->expr
) != SET
)
4808 src
= SET_SRC (set
->expr
);
4809 if (gcse_constant_p (src
))
4812 if (GET_CODE (src
) != REG
)
4815 regno
= REGNO (src
);
4821 /* Subroutine of bypass_block that checks whether a pseudo is killed by
4822 any of the instructions inserted on an edge. Jump bypassing places
4823 condition code setters on CFG edges using insert_insn_on_edge. This
4824 function is required to check that our data flow analysis is still
4825 valid prior to commit_edge_insertions. */
4828 reg_killed_on_edge (reg
, e
)
4834 for (insn
= e
->insns
; insn
; insn
= NEXT_INSN (insn
))
4835 if (INSN_P (insn
) && reg_set_p (reg
, insn
))
4841 /* Subroutine of bypass_conditional_jumps that attempts to bypass the given
4842 basic block BB which has more than one predecessor. If not NULL, SETCC
4843 is the first instruction of BB, which is immediately followed by JUMP_INSN
4844 JUMP. Otherwise, SETCC is NULL, and JUMP is the first insn of BB.
4845 Returns nonzero if a change was made.
4847 During the jump bypassing pass, we may place copies of SETCC instructions
4848 on CFG edges. The following routine must be careful to pay attention to
4849 these inserted insns when performing its transformations. */
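/* An illustrative sketch (hypothetical source) of the redirection this
   routine attempts:

     int k (int p)
     {
       int c = p ? 4 : 7;
       if (c > 5)        - on the incoming edge where C was set to 4 the
         return 1;       - test is known to be false, so that edge can be
       return 0;         - redirected straight to the "return 0" block.
     }
*/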
4852 bypass_block (bb
, setcc
, jump
)
4857 edge e
, enext
, edest
;
4859 int may_be_loop_header
;
4861 insn
= (setcc
!= NULL
) ? setcc
: jump
;
4863 /* Determine set of register uses in INSN. */
4865 note_uses (&PATTERN (insn
), find_used_regs
, NULL
);
4866 note
= find_reg_equal_equiv_note (insn
);
4868 find_used_regs (&XEXP (note
, 0), NULL
);
4870 may_be_loop_header
= false;
4871 for (e
= bb
->pred
; e
; e
= e
->pred_next
)
4872 if (e
->flags
& EDGE_DFS_BACK
)
4874 may_be_loop_header
= true;
4879 for (e
= bb
->pred
; e
; e
= enext
)
4881 enext
= e
->pred_next
;
4882 if (e
->flags
& EDGE_COMPLEX
)
4885 /* We can't redirect edges from new basic blocks. */
4886 if (e
->src
->index
>= bypass_last_basic_block
)
4889 /* The irreducible loops created by redirecting edges entering the
4890 loop from outside would decrease the effectiveness of some of the
4891 following optimizations, so prevent this. */
4892 if (may_be_loop_header
4893 && !(e
->flags
& EDGE_DFS_BACK
))
4896 for (i
= 0; i
< reg_use_count
; i
++)
4898 struct reg_use *reg_used = &reg_use_table[i];
4899 unsigned int regno
= REGNO (reg_used
->reg_rtx
);
4900 basic_block dest
, old_dest
;
4904 if (regno
>= max_gcse_regno
)
4907 set
= find_bypass_set (regno
, e
->src
->index
);
4912 /* Check the data flow is valid after edge insertions. */
4913 if (e
->insns
&& reg_killed_on_edge (reg_used
->reg_rtx
, e
))
4916 src
= SET_SRC (pc_set (jump
));
4919 src
= simplify_replace_rtx (src
,
4920 SET_DEST (PATTERN (setcc
)),
4921 SET_SRC (PATTERN (setcc
)));
4923 new = simplify_replace_rtx (src
, reg_used
->reg_rtx
,
4924 SET_SRC (set
->expr
));
4926 /* Jump bypassing may have already placed instructions on
4927 edges of the CFG. We can't bypass an outgoing edge that
4928 has instructions associated with it, as these insns won't
4929 get executed if the incoming edge is redirected. */
4933 edest
= FALLTHRU_EDGE (bb
);
4934 dest
= edest
->insns
? NULL
: edest
->dest
;
4936 else if (GET_CODE (new) == LABEL_REF
)
4938 dest
= BLOCK_FOR_INSN (XEXP (new, 0));
4939 /* Don't bypass edges containing instructions. */
4940 for (edest
= bb
->succ
; edest
; edest
= edest
->succ_next
)
4941 if (edest
->dest
== dest
&& edest
->insns
)
4953 && dest
!= EXIT_BLOCK_PTR
)
4955 redirect_edge_and_branch_force (e
, dest
);
4957 /* Copy the register setter to the redirected edge.
4958 Don't copy CC0 setters, as CC0 is dead after jump. */
4961 rtx pat
= PATTERN (setcc
);
4962 if (!CC0_P (SET_DEST (pat
)))
4963 insert_insn_on_edge (copy_insn (pat
), e
);
4966 if (gcse_file
!= NULL
)
4968 fprintf (gcse_file
, "JUMP-BYPASS: Proved reg %d in jump_insn %d equals constant ",
4969 regno
, INSN_UID (jump
));
4970 print_rtl (gcse_file
, SET_SRC (set
->expr
));
4971 fprintf (gcse_file
, "\nBypass edge from %d->%d to %d\n",
4972 e
->src
->index
, old_dest
->index
, dest
->index
);
4982 /* Find basic blocks with more than one predecessor that only contain a
4983 single conditional jump. If the result of the comparison is known at
4984 compile-time from any incoming edge, redirect that edge to the
4985 appropriate target. Returns nonzero if a change was made.
4987 This function is now mis-named, because we also handle indirect jumps. */
4990 bypass_conditional_jumps ()
4998 /* Note we start at block 1. */
4999 if (ENTRY_BLOCK_PTR
->next_bb
== EXIT_BLOCK_PTR
)
5002 bypass_last_basic_block
= last_basic_block
;
5003 mark_dfs_back_edges ();
5006 FOR_BB_BETWEEN (bb
, ENTRY_BLOCK_PTR
->next_bb
->next_bb
,
5007 EXIT_BLOCK_PTR
, next_bb
)
5009 /* Check for more than one predecessor. */
5010 if (bb
->pred
&& bb
->pred
->pred_next
)
5013 for (insn
= bb
->head
;
5014 insn
!= NULL
&& insn
!= NEXT_INSN (bb
->end
);
5015 insn
= NEXT_INSN (insn
))
5016 if (GET_CODE (insn
) == INSN
)
5020 if (GET_CODE (PATTERN (insn
)) != SET
)
5023 dest
= SET_DEST (PATTERN (insn
));
5024 if (REG_P (dest
) || CC0_P (dest
))
5029 else if (GET_CODE (insn
) == JUMP_INSN
)
5031 if ((any_condjump_p (insn
) || computed_jump_p (insn
))
5032 && onlyjump_p (insn
))
5033 changed
|= bypass_block (bb
, setcc
, insn
);
5036 else if (INSN_P (insn
))
5041 /* If we bypassed any register setting insns, we inserted a
5042 copy on the redirected edge. These need to be committed. */
5044 commit_edge_insertions();
5049 /* Compute PRE+LCM working variables. */
5051 /* Local properties of expressions. */
5052 /* Nonzero for expressions that are transparent in the block. */
5053 static sbitmap
*transp
;
5055 /* Nonzero for expressions that are transparent at the end of the block.
5056 This is only zero for expressions killed by an abnormal critical edge
5057 created by a call. */
5058 static sbitmap
*transpout
;
5060 /* Nonzero for expressions that are computed (available) in the block. */
5061 static sbitmap
*comp
;
5063 /* Nonzero for expressions that are locally anticipatable in the block. */
5064 static sbitmap
*antloc
;
5066 /* Nonzero for expressions where this block is an optimal computation
5068 static sbitmap
*pre_optimal
;
5070 /* Nonzero for expressions which are redundant in a particular block. */
5071 static sbitmap
*pre_redundant
;
5073 /* Nonzero for expressions which should be inserted on a specific edge. */
5074 static sbitmap
*pre_insert_map
;
5076 /* Nonzero for expressions which should be deleted in a specific block. */
5077 static sbitmap
*pre_delete_map
;
5079 /* Contains the edge_list returned by pre_edge_lcm. */
5080 static struct edge_list
*edge_list
;
5082 /* Redundant insns. */
5083 static sbitmap pre_redundant_insns
;
5085 /* Allocate vars used for PRE analysis. */
5088 alloc_pre_mem (n_blocks
, n_exprs
)
5089 int n_blocks
, n_exprs
;
5091 transp
= sbitmap_vector_alloc (n_blocks
, n_exprs
);
5092 comp
= sbitmap_vector_alloc (n_blocks
, n_exprs
);
5093 antloc
= sbitmap_vector_alloc (n_blocks
, n_exprs
);
5096 pre_redundant
= NULL
;
5097 pre_insert_map
= NULL
;
5098 pre_delete_map
= NULL
;
5101 ae_kill
= sbitmap_vector_alloc (n_blocks
, n_exprs
);
5103 /* pre_insert and pre_delete are allocated later. */
5106 /* Free vars used for PRE analysis. */
5111 sbitmap_vector_free (transp
);
5112 sbitmap_vector_free (comp
);
5114 /* ANTLOC and AE_KILL are freed just after pre_lcm finishes. */
5117 sbitmap_vector_free (pre_optimal
);
5119 sbitmap_vector_free (pre_redundant
);
5121 sbitmap_vector_free (pre_insert_map
);
5123 sbitmap_vector_free (pre_delete_map
);
5125 sbitmap_vector_free (ae_in
);
5127 sbitmap_vector_free (ae_out
);
5129 transp
= comp
= NULL
;
5130 pre_optimal
= pre_redundant
= pre_insert_map
= pre_delete_map
= NULL
;
5131 ae_in
= ae_out
= NULL
;
5134 /* Top level routine to do the dataflow analysis needed by PRE. */
5139 sbitmap trapping_expr
;
5143 compute_local_properties (transp
, comp
, antloc
, &expr_hash_table
);
5144 sbitmap_vector_zero (ae_kill
, last_basic_block
);
5146 /* Collect expressions which might trap. */
5147 trapping_expr
= sbitmap_alloc (expr_hash_table
.n_elems
);
5148 sbitmap_zero (trapping_expr
);
5149 for (ui
= 0; ui
< expr_hash_table
.size
; ui
++)
5152 for (e
= expr_hash_table
.table
[ui
]; e
!= NULL
; e
= e
->next_same_hash
)
5153 if (may_trap_p (e
->expr
))
5154 SET_BIT (trapping_expr
, e
->bitmap_index
);
5157 /* Compute ae_kill for each basic block using ~(TRANSP | COMP).
5161 This is significantly faster than compute_ae_kill. */
5167 /* If the current block is the destination of an abnormal edge, we
5168 kill all trapping expressions because we won't be able to properly
5169 place the instruction on the edge. So make them neither
5170 anticipatable nor transparent. This is fairly conservative. */
5171 for (e
= bb
->pred
; e
; e
= e
->pred_next
)
5172 if (e
->flags
& EDGE_ABNORMAL
)
5174 sbitmap_difference (antloc
[bb
->index
], antloc
[bb
->index
], trapping_expr
);
5175 sbitmap_difference (transp
[bb
->index
], transp
[bb
->index
], trapping_expr
);
5179 sbitmap_a_or_b (ae_kill
[bb
->index
], transp
[bb
->index
], comp
[bb
->index
]);
5180 sbitmap_not (ae_kill
[bb
->index
], ae_kill
[bb
->index
]);
5183 edge_list
= pre_edge_lcm (gcse_file
, expr_hash_table
.n_elems
, transp
, comp
, antloc
,
5184 ae_kill
, &pre_insert_map
, &pre_delete_map
);
5185 sbitmap_vector_free (antloc
);
5187 sbitmap_vector_free (ae_kill
);
5189 sbitmap_free (trapping_expr
);
5194 /* Return nonzero if an occurrence of expression EXPR in OCCR_BB would reach
5197 VISITED is a pointer to a working buffer for tracking which BB's have
5198 been visited. It is NULL for the top-level call.
5200 We treat reaching expressions that go through blocks containing the same
5201 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
5202 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
5203 2 as not reaching. The intent is to improve the probability of finding
5204 only one reaching expression and to reduce register lifetimes by picking
5205 the closest such expression. */
5208 pre_expr_reaches_here_p_work (occr_bb
, expr
, bb
, visited
)
5209 basic_block occr_bb
;
5216 for (pred
= bb
->pred
; pred
!= NULL
; pred
= pred
->pred_next
)
5218 basic_block pred_bb
= pred
->src
;
5220 if (pred
->src
== ENTRY_BLOCK_PTR
5221 /* Has this predecessor already been visited? */
5222 || visited
[pred_bb
->index
])
5223 ;/* Nothing to do. */
5225 /* Does this predecessor generate this expression? */
5226 else if (TEST_BIT (comp
[pred_bb
->index
], expr
->bitmap_index
))
5228 /* Is this the occurrence we're looking for?
5229 Note that there's only one generating occurrence per block
5230 so we just need to check the block number. */
5231 if (occr_bb
== pred_bb
)
5234 visited
[pred_bb
->index
] = 1;
5236 /* Ignore this predecessor if it kills the expression. */
5237 else if (! TEST_BIT (transp
[pred_bb
->index
], expr
->bitmap_index
))
5238 visited
[pred_bb
->index
] = 1;
5240 /* Neither gen nor kill. */
5243 visited
[pred_bb
->index
] = 1;
5244 if (pre_expr_reaches_here_p_work (occr_bb
, expr
, pred_bb
, visited
))
5249 /* All paths have been checked. */
5253 /* The wrapper for pre_expr_reaches_here_work that ensures that any
5254 memory allocated for that function is returned. */
5257 pre_expr_reaches_here_p (occr_bb
, expr
, bb
)
5258 basic_block occr_bb
;
5263 char *visited
= (char *) xcalloc (last_basic_block
, 1);
5265 rval
= pre_expr_reaches_here_p_work (occr_bb
, expr
, bb
, visited
);
5272 /* Given an expr, generate RTL which we can insert at the end of a BB,
5273 or on an edge. Set the block number of any insns generated to
5277 process_insert_insn (expr
)
5280 rtx reg
= expr
->reaching_reg
;
5281 rtx exp
= copy_rtx (expr
->expr
);
5286 /* If the expression is something that's an operand, like a constant,
5287 just copy it to a register. */
5288 if (general_operand (exp
, GET_MODE (reg
)))
5289 emit_move_insn (reg
, exp
);
5291 /* Otherwise, make a new insn to compute this expression and make sure the
5292 insn will be recognized (this also adds any needed CLOBBERs). Copy the
5293 expression to make sure we don't have any sharing issues. */
5294 else if (insn_invalid_p (emit_insn (gen_rtx_SET (VOIDmode
, reg
, exp
))))
5303 /* Add EXPR to the end of basic block BB.
5305 This is used by both PRE and code hoisting.
5307 For PRE, we want to verify that the expr is either transparent
5308 or locally anticipatable in the target block. This check makes
5309 no sense for code hoisting. */
5312 insert_insn_end_bb (expr
, bb
, pre
)
5319 rtx reg
= expr
->reaching_reg
;
5320 int regno
= REGNO (reg
);
5323 pat
= process_insert_insn (expr
);
5324 if (pat
== NULL_RTX
|| ! INSN_P (pat
))
5328 while (NEXT_INSN (pat_end
) != NULL_RTX
)
5329 pat_end
= NEXT_INSN (pat_end
);
5331 /* If the last insn is a jump, insert EXPR in front [taking care to
5332 handle cc0, etc. properly]. Similarly we need to take care of trapping
5333 instructions in the presence of non-call exceptions. */
5335 if (GET_CODE (insn
) == JUMP_INSN
5336 || (GET_CODE (insn
) == INSN
5337 && (bb
->succ
->succ_next
|| (bb
->succ
->flags
& EDGE_ABNORMAL
))))
5342 /* It should always be the case that we can put these instructions
5343 anywhere in the basic block when performing PRE optimizations.
5345 if (GET_CODE (insn
) == INSN
&& pre
5346 && !TEST_BIT (antloc
[bb
->index
], expr
->bitmap_index
)
5347 && !TEST_BIT (transp
[bb
->index
], expr
->bitmap_index
))
5350 /* If this is a jump table, then we can't insert stuff here. Since
5351 we know the previous real insn must be the tablejump, we insert
5352 the new instruction just before the tablejump. */
5353 if (GET_CODE (PATTERN (insn
)) == ADDR_VEC
5354 || GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
)
5355 insn
= prev_real_insn (insn
);
5358 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
5359 if cc0 isn't set. */
5360 note
= find_reg_note (insn
, REG_CC_SETTER
, NULL_RTX
);
5362 insn
= XEXP (note
, 0);
5365 rtx maybe_cc0_setter
= prev_nonnote_insn (insn
);
5366 if (maybe_cc0_setter
5367 && INSN_P (maybe_cc0_setter
)
5368 && sets_cc0_p (PATTERN (maybe_cc0_setter
)))
5369 insn
= maybe_cc0_setter
;
5372 /* FIXME: What if something in cc0/jump uses value set in new insn? */
5373 new_insn
= emit_insn_before (pat
, insn
);
5376 /* Likewise if the last insn is a call, as will happen in the presence
5377 of exception handling. */
5378 else if (GET_CODE (insn
) == CALL_INSN
5379 && (bb
->succ
->succ_next
|| (bb
->succ
->flags
& EDGE_ABNORMAL
)))
5381 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
5382 we search backward and place the instructions before the first
5383 parameter is loaded. Do this for everyone for consistency and a
5384 presumption that we'll get better code elsewhere as well.
5386 It should always be the case that we can put these instructions
5387 anywhere in the basic block when performing PRE optimizations.
5391 && !TEST_BIT (antloc
[bb
->index
], expr
->bitmap_index
)
5392 && !TEST_BIT (transp
[bb
->index
], expr
->bitmap_index
))
5395 /* Since different machines initialize their parameter registers
5396 in different orders, assume nothing. Collect the set of all
5397 parameter registers. */
5398 insn
= find_first_parameter_load (insn
, bb
->head
);
5400 /* If we found all the parameter loads, then we want to insert
5401 before the first parameter load.
5403 If we did not find all the parameter loads, then we might have
5404 stopped on the head of the block, which could be a CODE_LABEL.
5405 If we inserted before the CODE_LABEL, then we would be putting
5406 the insn in the wrong basic block. In that case, put the insn
5407 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
5408 while (GET_CODE (insn
) == CODE_LABEL
5409 || NOTE_INSN_BASIC_BLOCK_P (insn
))
5410 insn
= NEXT_INSN (insn
);
5412 new_insn
= emit_insn_before (pat
, insn
);
5415 new_insn
= emit_insn_after (pat
, insn
);
5421 add_label_notes (PATTERN (pat
), new_insn
);
5422 note_stores (PATTERN (pat
), record_set_info
, pat
);
5426 pat
= NEXT_INSN (pat
);
5429 gcse_create_count
++;
5433 fprintf (gcse_file
, "PRE/HOIST: end of bb %d, insn %d, ",
5434 bb
->index
, INSN_UID (new_insn
));
5435 fprintf (gcse_file
, "copying expression %d to reg %d\n",
5436 expr
->bitmap_index
, regno
);
5440 /* Insert partially redundant expressions on edges in the CFG to make
5441 the expressions fully redundant. */
5444 pre_edge_insert (edge_list
, index_map
)
5445 struct edge_list
*edge_list
;
5446 struct expr
**index_map
;
5448 int e
, i
, j
, num_edges
, set_size
, did_insert
= 0;
5451 /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
5452 if it reaches any of the deleted expressions. */
5454 set_size
= pre_insert_map
[0]->size
;
5455 num_edges
= NUM_EDGES (edge_list
);
5456 inserted
= sbitmap_vector_alloc (num_edges
, expr_hash_table
.n_elems
);
5457 sbitmap_vector_zero (inserted
, num_edges
);
5459 for (e
= 0; e
< num_edges
; e
++)
5462 basic_block bb
= INDEX_EDGE_PRED_BB (edge_list
, e
);
5464 for (i
= indx
= 0; i
< set_size
; i
++, indx
+= SBITMAP_ELT_BITS
)
5466 SBITMAP_ELT_TYPE insert
= pre_insert_map
[e
]->elms
[i
];
5468 for (j
= indx
; insert
&& j
< (int) expr_hash_table
.n_elems
; j
++, insert
>>= 1)
5469 if ((insert
& 1) != 0 && index_map
[j
]->reaching_reg
!= NULL_RTX
)
5471 struct expr
*expr
= index_map
[j
];
5474 /* Now look at each deleted occurrence of this expression. */
5475 for (occr
= expr
->antic_occr
; occr
!= NULL
; occr
= occr
->next
)
5477 if (! occr
->deleted_p
)
5480 /* Insert this expression on this edge if it would
5481 reach the deleted occurrence in BB. */
5482 if (!TEST_BIT (inserted
[e
], j
))
5485 edge eg
= INDEX_EDGE (edge_list
, e
);
5487 /* We can't insert anything on an abnormal and
5488 critical edge, so we insert the insn at the end of
5489 the previous block. There are several alternatives
5490 detailed in Morgan's book, p. 277 (sec. 10.5), for
5491 handling this situation. This one is easiest for
5494 if ((eg
->flags
& EDGE_ABNORMAL
) == EDGE_ABNORMAL
)
5495 insert_insn_end_bb (index_map
[j
], bb
, 0);
5498 insn
= process_insert_insn (index_map
[j
]);
5499 insert_insn_on_edge (insn
, eg
);
5504 fprintf (gcse_file
, "PRE/HOIST: edge (%d,%d), ",
5506 INDEX_EDGE_SUCC_BB (edge_list
, e
)->index
);
5507 fprintf (gcse_file
, "copy expression %d\n",
5508 expr
->bitmap_index
);
5511 update_ld_motion_stores (expr
);
5512 SET_BIT (inserted
[e
], j
);
5514 gcse_create_count
++;
5521 sbitmap_vector_free (inserted
);
5525 /* Copy the result of INSN to REG. INDX is the expression number. */
5528 pre_insert_copy_insn (expr
, insn
)
5532 rtx reg
= expr
->reaching_reg
;
5533 int regno
= REGNO (reg
);
5534 int indx
= expr
->bitmap_index
;
5535 rtx set
= single_set (insn
);
5541 new_insn
= emit_insn_after (gen_move_insn (reg
, copy_rtx (SET_DEST (set
))), insn
);
5543 /* Keep register set table up to date. */
5544 record_one_set (regno
, new_insn
);
5546 gcse_create_count
++;
5550 "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
5551 BLOCK_NUM (insn
), INSN_UID (new_insn
), indx
,
5552 INSN_UID (insn
), regno
);
5553 update_ld_motion_stores (expr
);
5556 /* Copy available expressions that reach the redundant expression
5557 to `reaching_reg'. */
5560 pre_insert_copies ()
5567 /* For each available expression in the table, copy the result to
5568 `reaching_reg' if the expression reaches a deleted one.
5570 ??? The current algorithm is rather brute force.
5571 Need to do some profiling. */
5573 for (i
= 0; i
< expr_hash_table
.size
; i
++)
5574 for (expr
= expr_hash_table
.table
[i
]; expr
!= NULL
; expr
= expr
->next_same_hash
)
5576 /* If the basic block isn't reachable, PPOUT will be TRUE. However,
5577 we don't want to insert a copy here because the expression may not
5578 really be redundant. So only insert an insn if the expression was
5579 deleted. This test also avoids further processing if the
5580 expression wasn't deleted anywhere. */
5581 if (expr
->reaching_reg
== NULL
)
5584 for (occr
= expr
->antic_occr
; occr
!= NULL
; occr
= occr
->next
)
5586 if (! occr
->deleted_p
)
5589 for (avail
= expr
->avail_occr
; avail
!= NULL
; avail
= avail
->next
)
5591 rtx insn
= avail
->insn
;
5593 /* No need to handle this one if handled already. */
5594 if (avail
->copied_p
)
5597 /* Don't handle this one if it's a redundant one. */
5598 if (TEST_BIT (pre_redundant_insns
, INSN_CUID (insn
)))
5601 /* Or if the expression doesn't reach the deleted one. */
5602 if (! pre_expr_reaches_here_p (BLOCK_FOR_INSN (avail
->insn
),
5604 BLOCK_FOR_INSN (occr
->insn
)))
5607 /* Copy the result of avail to reaching_reg. */
5608 pre_insert_copy_insn (expr
, insn
);
5609 avail
->copied_p
= 1;
5615 /* Emit move from SRC to DEST noting the equivalence with expression computed
5618 gcse_emit_move_after (src
, dest
, insn
)
5619 rtx src
, dest
, insn
;
5622 rtx set
= single_set (insn
), set2
;
5626 /* This should never fail since we're creating a reg->reg copy
5627 we've verified to be valid. */
5629 new = emit_insn_after (gen_move_insn (dest
, src
), insn
);
5631 /* Note the equivalence for local CSE pass. */
5632 set2
= single_set (new);
5633 if (!set2
|| !rtx_equal_p (SET_DEST (set2
), dest
))
5635 if ((note
= find_reg_equal_equiv_note (insn
)))
5636 eqv
= XEXP (note
, 0);
5638 eqv
= SET_SRC (set
);
5640 set_unique_reg_note (new, REG_EQUAL
, copy_insn_1 (eqv
));
5645 /* Delete redundant computations.
5646 Deletion is done by changing the insn to copy the `reaching_reg' of
5647 the expression into the result of the SET. It is left to later passes
5648 (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.
5650 Returns nonzero if a change is made. */
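/* A hedged sketch (hypothetical source, not from this file) of the
   partial redundancy the routines below remove:

     int p (int a, int b, int t)
     {
       int r = 0;
       if (t)
         r = a + b;          - "a + b" is available on this path only;
       return r + (a + b);   - PRE inserts the computation on the other
     }                       - edge and the second evaluation becomes a
                               copy from the reaching register.
*/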
5661 for (i
= 0; i
< expr_hash_table
.size
; i
++)
5662 for (expr
= expr_hash_table
.table
[i
]; expr
!= NULL
; expr
= expr
->next_same_hash
)
5664 int indx
= expr
->bitmap_index
;
5666 /* We only need to search antic_occr since we require
5669 for (occr
= expr
->antic_occr
; occr
!= NULL
; occr
= occr
->next
)
5671 rtx insn
= occr
->insn
;
5673 basic_block bb
= BLOCK_FOR_INSN (insn
);
5675 if (TEST_BIT (pre_delete_map
[bb
->index
], indx
))
5677 set
= single_set (insn
);
5681 /* Create a pseudo-reg to store the result of reaching
5682 expressions into. Get the mode for the new pseudo from
5683 the mode of the original destination pseudo. */
5684 if (expr
->reaching_reg
== NULL
)
5686 = gen_reg_rtx (GET_MODE (SET_DEST (set
)));
5688 gcse_emit_move_after (expr
->reaching_reg
, SET_DEST (set
), insn
);
5690 occr
->deleted_p
= 1;
5691 SET_BIT (pre_redundant_insns
, INSN_CUID (insn
));
5698 "PRE: redundant insn %d (expression %d) in ",
5699 INSN_UID (insn
), indx
);
5700 fprintf (gcse_file
, "bb %d, reaching reg is %d\n",
5701 bb
->index
, REGNO (expr
->reaching_reg
));
5710 /* Perform GCSE optimizations using PRE.
5711 This is called by one_pre_gcse_pass after all the dataflow analysis
5714 This is based on the original Morel-Renvoise paper, Fred Chow's thesis, and
5715 lazy code motion from Knoop, Ruthing and Steffen as described in Advanced
5716 Compiler Design and Implementation.
5718 ??? A new pseudo reg is created to hold the reaching expression. The nice
5719 thing about the classical approach is that it would try to use an existing
5720 reg. If the register can't be adequately optimized [i.e. we introduce
5721 reload problems], one could add a pass here to propagate the new register
5724 ??? We don't handle single sets in PARALLELs because we're [currently] not
5725 able to copy the rest of the parallel when we insert copies to create full
5726 redundancies from partial redundancies. However, there's no reason why we
5727 can't handle PARALLELs in the cases where there are no partial
5734 int did_insert
, changed
;
5735 struct expr
**index_map
;
5738 /* Compute a mapping from expression number (`bitmap_index') to
5739 hash table entry. */
5741 index_map
= (struct expr
**) xcalloc (expr_hash_table
.n_elems
, sizeof (struct expr
*));
5742 for (i
= 0; i
< expr_hash_table
.size
; i
++)
5743 for (expr
= expr_hash_table
.table
[i
]; expr
!= NULL
; expr
= expr
->next_same_hash
)
5744 index_map
[expr
->bitmap_index
] = expr
;
5746 /* Reset bitmap used to track which insns are redundant. */
5747 pre_redundant_insns
= sbitmap_alloc (max_cuid
);
5748 sbitmap_zero (pre_redundant_insns
);
5750 /* Delete the redundant insns first so that
5751 - we know what register to use for the new insns and for the other
5752 ones with reaching expressions
5753 - we know which insns are redundant when we go to create copies */
5755 changed
= pre_delete ();
5757 did_insert
= pre_edge_insert (edge_list
, index_map
);
5759 /* In other places with reaching expressions, copy the expression to the
5760 specially allocated pseudo-reg that reaches the redundant expr. */
5761 pre_insert_copies ();
5764 commit_edge_insertions ();
5769 sbitmap_free (pre_redundant_insns
);
5773 /* Top level routine to perform one PRE GCSE pass.
5775 Return nonzero if a change was made. */
5778 one_pre_gcse_pass (pass
)
5783 gcse_subst_count
= 0;
5784 gcse_create_count
= 0;
5786 alloc_hash_table (max_cuid
, &expr_hash_table
, 0);
5787 add_noreturn_fake_exit_edges ();
5789 compute_ld_motion_mems ();
5791 compute_hash_table (&expr_hash_table
);
5792 trim_ld_motion_mems ();
5794 dump_hash_table (gcse_file
, "Expression", &expr_hash_table
);
5796 if (expr_hash_table
.n_elems
> 0)
5798 alloc_pre_mem (last_basic_block
, expr_hash_table
.n_elems
);
5799 compute_pre_data ();
5800 changed
|= pre_gcse ();
5801 free_edge_list (edge_list
);
5806 remove_fake_edges ();
5807 free_hash_table (&expr_hash_table
);
5811 fprintf (gcse_file
, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
5812 current_function_name
, pass
, bytes_used
);
5813 fprintf (gcse_file
, "%d substs, %d insns created\n",
5814 gcse_subst_count
, gcse_create_count
);
5820 /* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
5821 If notes are added to an insn which references a CODE_LABEL, the
5822 LABEL_NUSES count is incremented. We have to add REG_LABEL notes,
5823 because the following loop optimization pass requires them. */
5825 /* ??? This is very similar to the loop.c add_label_notes function. We
5826 could probably share code here. */
5828 /* ??? If there was a jump optimization pass after gcse and before loop,
5829 then we would not need to do this here, because jump would add the
5830 necessary REG_LABEL notes. */
5833 add_label_notes (x
, insn
)
5837 enum rtx_code code
= GET_CODE (x
);
5841 if (code
== LABEL_REF
&& !LABEL_REF_NONLOCAL_P (x
))
5843 /* This code used to ignore labels that referred to dispatch tables to
5844 avoid flow generating (slightly) worse code.
5846 We no longer ignore such label references (see LABEL_REF handling in
5847 mark_jump_label for additional information). */
5849 REG_NOTES (insn
) = gen_rtx_INSN_LIST (REG_LABEL
, XEXP (x
, 0),
5851 if (LABEL_P (XEXP (x
, 0)))
5852 LABEL_NUSES (XEXP (x
, 0))++;
5856 for (i
= GET_RTX_LENGTH (code
) - 1, fmt
= GET_RTX_FORMAT (code
); i
>= 0; i
--)
5859 add_label_notes (XEXP (x
, i
), insn
);
5860 else if (fmt
[i
] == 'E')
5861 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
5862 add_label_notes (XVECEXP (x
, i
, j
), insn
);
5866 /* Compute transparent outgoing information for each block.
5868 An expression is transparent to an edge unless it is killed by
5869 the edge itself. This can only happen with abnormal control flow,
5870 when the edge is traversed through a call. This happens with
5871 non-local labels and exceptions.
5873 This would not be necessary if we split the edge. While this is
5874 normally impossible for abnormal critical edges, with some effort
5875 it should be possible with exception handling, since we still have
5876 control over which handler should be invoked. But due to increased
5877 EH table sizes, this may not be worthwhile. */
5880 compute_transpout ()
5886 sbitmap_vector_ones (transpout
, last_basic_block
);
5890 /* Note that flow inserted a nop at the end of basic blocks that
5891 end in call instructions for reasons other than abnormal
5893 if (GET_CODE (bb
->end
) != CALL_INSN
)
5896 for (i
= 0; i
< expr_hash_table
.size
; i
++)
5897 for (expr
= expr_hash_table
.table
[i
]; expr
; expr
= expr
->next_same_hash
)
5898 if (GET_CODE (expr
->expr
) == MEM
)
5900 if (GET_CODE (XEXP (expr
->expr
, 0)) == SYMBOL_REF
5901 && CONSTANT_POOL_ADDRESS_P (XEXP (expr
->expr
, 0)))
5904 /* ??? Optimally, we would use interprocedural alias
5905 analysis to determine if this mem is actually killed
5907 RESET_BIT (transpout
[bb
->index
], expr
->bitmap_index
);
5912 /* Removal of useless null pointer checks */
5914 /* Called via note_stores. X is set by SETTER. If X is a register we must
5915 invalidate nonnull_local and set nonnull_killed. DATA is really a
5916 `null_pointer_info *'.
5918 We ignore hard registers. */
5921 invalidate_nonnull_info (x
, setter
, data
)
5923 rtx setter ATTRIBUTE_UNUSED
;
5927 struct null_pointer_info
*npi
= (struct null_pointer_info
*) data
;
5929 while (GET_CODE (x
) == SUBREG
)
5932 /* Ignore anything that is not a register or is a hard register. */
5933 if (GET_CODE (x
) != REG
5934 || REGNO (x
) < npi
->min_reg
5935 || REGNO (x
) >= npi
->max_reg
)
5938 regno
= REGNO (x
) - npi
->min_reg
;
5940 RESET_BIT (npi
->nonnull_local
[npi
->current_block
->index
], regno
);
5941 SET_BIT (npi
->nonnull_killed
[npi
->current_block
->index
], regno
);
5944 /* Do null-pointer check elimination for the registers indicated in
5945 NPI. NONNULL_AVIN and NONNULL_AVOUT are pre-allocated sbitmaps;
5946 they are not our responsibility to free. */
5949 delete_null_pointer_checks_1 (block_reg
, nonnull_avin
,
5951 unsigned int *block_reg
;
5952 sbitmap
*nonnull_avin
;
5953 sbitmap
*nonnull_avout
;
5954 struct null_pointer_info
*npi
;
5956 basic_block bb
, current_block
;
5957 sbitmap
*nonnull_local
= npi
->nonnull_local
;
5958 sbitmap
*nonnull_killed
= npi
->nonnull_killed
;
5959 int something_changed
= 0;
5961 /* Compute local properties, nonnull and killed. A register will have
5962 the nonnull property if at the end of the current block its value is
5963 known to be nonnull. The killed property indicates that somewhere in
5964 the block any information we had about the register is killed.
5966 Note that a register can have both properties in a single block. That
5967 indicates that it's killed, then later in the block a new value is
5969 sbitmap_vector_zero (nonnull_local
, last_basic_block
);
5970 sbitmap_vector_zero (nonnull_killed
, last_basic_block
);
5972 FOR_EACH_BB (current_block
)
5974 rtx insn
, stop_insn
;
5976 /* Set the current block for invalidate_nonnull_info. */
5977 npi
->current_block
= current_block
;
5979 /* Scan each insn in the basic block looking for memory references and
5981 stop_insn
= NEXT_INSN (current_block
->end
);
5982 for (insn
= current_block
->head
;
5984 insn
= NEXT_INSN (insn
))
5989 /* Ignore anything that is not a normal insn. */
5990 if (! INSN_P (insn
))
5993 /* Basically ignore anything that is not a simple SET. We do have
5994 to make sure to invalidate nonnull_local and set nonnull_killed
5995 for such insns though. */
5996 set
= single_set (insn
);
5999 note_stores (PATTERN (insn
), invalidate_nonnull_info
, npi
);
6003 /* See if we've got a usable memory load. We handle it first
6004 in case it uses its address register as a dest (which kills
6005 the nonnull property). */
6006 if (GET_CODE (SET_SRC (set
)) == MEM
6007 && GET_CODE ((reg
= XEXP (SET_SRC (set
), 0))) == REG
6008 && REGNO (reg
) >= npi
->min_reg
6009 && REGNO (reg
) < npi
->max_reg
)
6010 SET_BIT (nonnull_local
[current_block
->index
],
6011 REGNO (reg
) - npi
->min_reg
);
6013 /* Now invalidate stuff clobbered by this insn. */
6014 note_stores (PATTERN (insn
), invalidate_nonnull_info
, npi
);
6016 /* And handle stores, we do these last since any sets in INSN can
6017 not kill the nonnull property if it is derived from a MEM
6018 appearing in a SET_DEST. */
6019 if (GET_CODE (SET_DEST (set
)) == MEM
6020 && GET_CODE ((reg
= XEXP (SET_DEST (set
), 0))) == REG
6021 && REGNO (reg
) >= npi
->min_reg
6022 && REGNO (reg
) < npi
->max_reg
)
6023 SET_BIT (nonnull_local
[current_block
->index
],
6024 REGNO (reg
) - npi
->min_reg
);
6028 /* Now compute global properties based on the local properties. This
6029 is a classic global availability algorithm. */
6030 compute_available (nonnull_local
, nonnull_killed
,
6031 nonnull_avout
, nonnull_avin
);
6033 /* Now look at each bb and see if it ends with a compare of a value
6037 rtx last_insn
= bb
->end
;
6038 rtx condition
, earliest
;
6039 int compare_and_branch
;
6041 /* Since MIN_REG is always at least FIRST_PSEUDO_REGISTER, and
6042 since BLOCK_REG[BB] is zero if this block did not end with a
6043 comparison against zero, this condition works. */
6044 if (block_reg
[bb
->index
] < npi
->min_reg
6045 || block_reg
[bb
->index
] >= npi
->max_reg
)
6048 /* LAST_INSN is a conditional jump. Get its condition. */
6049 condition
= get_condition (last_insn
, &earliest
);
6051 /* If we can't determine the condition then skip. */
6055 /* Is the register known to have a nonzero value? */
6056 if (!TEST_BIT (nonnull_avout
[bb
->index
], block_reg
[bb
->index
] - npi
->min_reg
))
6059 /* Try to compute whether the compare/branch at the loop end is one or
6060 two instructions. */
6061 if (earliest
== last_insn
)
6062 compare_and_branch
= 1;
6063 else if (earliest
== prev_nonnote_insn (last_insn
))
6064 compare_and_branch
= 2;
6068 /* We know the register in this comparison is nonnull at exit from
6069 this block. We can optimize this comparison. */
6070 if (GET_CODE (condition
) == NE
)
6074 new_jump
= emit_jump_insn_after (gen_jump (JUMP_LABEL (last_insn
)),
6076 JUMP_LABEL (new_jump
) = JUMP_LABEL (last_insn
);
6077 LABEL_NUSES (JUMP_LABEL (new_jump
))++;
6078 emit_barrier_after (new_jump
);
6081 something_changed
= 1;
6082 delete_insn (last_insn
);
6083 if (compare_and_branch
== 2)
6084 delete_insn (earliest
);
6085 purge_dead_edges (bb
);
6087 /* Don't check this block again. (Note that BLOCK_END is
6088 invalid here; we deleted the last instruction in the
6090 block_reg
[bb
->index
] = 0;
6093 return something_changed
;
6096 /* Find EQ/NE comparisons against zero which can be (indirectly) evaluated
6099 This is conceptually similar to global constant/copy propagation and
6100 classic global CSE (it even uses the same dataflow equations as cprop).
6102 If a register is used as a memory address with the form (mem (reg)), then we
6103 know that REG cannot be zero at that point in the program. Any instruction
6104 which sets REG "kills" this property.
6106 So, if every path leading to a conditional branch has an available memory
6107 reference of that form, then we know the register can not have the value
6108 zero at the conditional branch.
6110 So we merely need to compute the local properties and propagate that data
6111 around the cfg, then optimize where possible.
6113 We run this pass twice: once before CSE, then again after CSE. This
6114 has proven to be the most profitable approach. It is rare for new
6115 optimization opportunities of this nature to appear after the first CSE
6118 This could probably be integrated with global cprop with a little work. */
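/* An illustrative sketch (hypothetical source) of the check this pass
   removes:

     int load (int *p)
     {
       int v = *p;       - the dereference makes (mem (reg p)) available,
       if (p == 0)       - so P is known nonnull here and the comparison
         return -1;      - against zero (and the dead branch) can go away.
       return v;
     }
*/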
6121 delete_null_pointer_checks (f
)
6122 rtx f ATTRIBUTE_UNUSED
;
6124 sbitmap
*nonnull_avin
, *nonnull_avout
;
6125 unsigned int *block_reg
;
6130 struct null_pointer_info npi
;
6131 int something_changed
= 0;
6133 /* If we have only a single block, then there's nothing to do. */
6134 if (n_basic_blocks
<= 1)
6137 /* Trying to perform global optimizations on flow graphs which have
6138 a high connectivity will take a long time and is unlikely to be
6139 particularly useful.
6141 In normal circumstances a cfg should have about twice as many edges
6142 as blocks. But we do not want to punish small functions which have
6143 a couple switch statements. So we require a relatively large number
6144 of basic blocks and the ratio of edges to blocks to be high. */
6145 if (n_basic_blocks
> 1000 && n_edges
/ n_basic_blocks
>= 20)
6148 /* We need four bitmaps, each with a bit for each register in each
6150 max_reg
= max_reg_num ();
6151 regs_per_pass
= get_bitmap_width (4, last_basic_block
, max_reg
);
6153 /* Allocate bitmaps to hold local and global properties. */
6154 npi
.nonnull_local
= sbitmap_vector_alloc (last_basic_block
, regs_per_pass
);
6155 npi
.nonnull_killed
= sbitmap_vector_alloc (last_basic_block
, regs_per_pass
);
6156 nonnull_avin
= sbitmap_vector_alloc (last_basic_block
, regs_per_pass
);
6157 nonnull_avout
= sbitmap_vector_alloc (last_basic_block
, regs_per_pass
);
6159 /* Go through the basic blocks, seeing whether or not each block
6160 ends with a conditional branch whose condition is a comparison
6161 against zero. Record the register compared in BLOCK_REG. */
6162 block_reg
= (unsigned int *) xcalloc (last_basic_block
, sizeof (int));
6165 rtx last_insn
= bb
->end
;
6166 rtx condition
, earliest
, reg
;
6168 /* We only want conditional branches. */
6169 if (GET_CODE (last_insn
) != JUMP_INSN
6170 || !any_condjump_p (last_insn
)
6171 || !onlyjump_p (last_insn
))
6174 /* LAST_INSN is a conditional jump. Get its condition. */
6175 condition
= get_condition (last_insn
, &earliest
);
6177 /* If we were unable to get the condition, or it is not an equality
6178 comparison against zero then there's nothing we can do. */
6180 || (GET_CODE (condition
) != NE
&& GET_CODE (condition
) != EQ
)
6181 || GET_CODE (XEXP (condition
, 1)) != CONST_INT
6182 || (XEXP (condition
, 1)
6183 != CONST0_RTX (GET_MODE (XEXP (condition
, 0)))))
6186 /* We must be checking a register against zero. */
6187 reg
= XEXP (condition
, 0);
6188 if (GET_CODE (reg
) != REG
)
6191 block_reg
[bb
->index
] = REGNO (reg
);
6194 /* Go through the algorithm for each block of registers. */
6195 for (reg
= FIRST_PSEUDO_REGISTER
; reg
< max_reg
; reg
+= regs_per_pass
)
6198 npi
.max_reg
= MIN (reg
+ regs_per_pass
, max_reg
);
6199 something_changed
|= delete_null_pointer_checks_1 (block_reg
,
6205 /* Free the table of registers compared at the end of every block. */
6209 sbitmap_vector_free (npi
.nonnull_local
);
6210 sbitmap_vector_free (npi
.nonnull_killed
);
6211 sbitmap_vector_free (nonnull_avin
);
6212 sbitmap_vector_free (nonnull_avout
);
6214 return something_changed
;
6217 /* Code Hoisting variables and subroutines. */
6219 /* Very busy expressions. */
6220 static sbitmap
*hoist_vbein
;
6221 static sbitmap
*hoist_vbeout
;
6223 /* Hoistable expressions. */
6224 static sbitmap
*hoist_exprs
;
6226 /* Dominator bitmaps. */
6227 dominance_info dominators
;
6229 /* ??? We could compute post dominators and run this algorithm in
6230 reverse to perform tail merging; doing so would probably be
6231 more effective than the tail merging code in jump.c.
6233 It's unclear if tail merging could be run in parallel with
6234 code hoisting. It would be nice. */
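/* An illustrative sketch (hypothetical source, not from this file) of
   the unification performed by this pass:

     int m (int a, int b, int t)
     {
       if (t)
         return a + b;         - "a + b" is computed on every path out of
       else                    - the dominating block, i.e. it is very
         return (a + b) * 2;   - busy there, so a single computation can
     }                         - be hoisted above the branch.
*/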
6236 /* Allocate vars used for code hoisting analysis. */
6239 alloc_code_hoist_mem (n_blocks
, n_exprs
)
6240 int n_blocks
, n_exprs
;
6242 antloc
= sbitmap_vector_alloc (n_blocks
, n_exprs
);
6243 transp
= sbitmap_vector_alloc (n_blocks
, n_exprs
);
6244 comp
= sbitmap_vector_alloc (n_blocks
, n_exprs
);
6246 hoist_vbein
= sbitmap_vector_alloc (n_blocks
, n_exprs
);
6247 hoist_vbeout
= sbitmap_vector_alloc (n_blocks
, n_exprs
);
6248 hoist_exprs
= sbitmap_vector_alloc (n_blocks
, n_exprs
);
6249 transpout
= sbitmap_vector_alloc (n_blocks
, n_exprs
);
6252 /* Free vars used for code hoisting analysis. */
6255 free_code_hoist_mem ()
6257 sbitmap_vector_free (antloc
);
6258 sbitmap_vector_free (transp
);
6259 sbitmap_vector_free (comp
);
6261 sbitmap_vector_free (hoist_vbein
);
6262 sbitmap_vector_free (hoist_vbeout
);
6263 sbitmap_vector_free (hoist_exprs
);
6264 sbitmap_vector_free (transpout
);
6266 free_dominance_info (dominators
);
6269 /* Compute the very busy expressions at entry/exit from each block.
6271 An expression is very busy if all paths from a given point
6272 compute the expression. */
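/* Restated in terms of the bitmaps used below (standard LCM notation,
   assuming the usual meanings of ANTLOC and TRANSP):

     VBEOUT[b] = intersection of VBEIN over the successors of b
                 (left as zero for the block laid out just before
                 the exit block)
     VBEIN[b]  = ANTLOC[b] | (VBEOUT[b] & TRANSP[b])  */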
6275 compute_code_hoist_vbeinout ()
6277 int changed
, passes
;
6280 sbitmap_vector_zero (hoist_vbeout
, last_basic_block
);
6281 sbitmap_vector_zero (hoist_vbein
, last_basic_block
);
6290 /* We scan the blocks in the reverse order to speed up convergence. */
6292 FOR_EACH_BB_REVERSE (bb
)
6294 changed
|= sbitmap_a_or_b_and_c_cg (hoist_vbein
[bb
->index
], antloc
[bb
->index
],
6295 hoist_vbeout
[bb
->index
], transp
[bb
->index
]);
6296 if (bb
->next_bb
!= EXIT_BLOCK_PTR
)
6297 sbitmap_intersection_of_succs (hoist_vbeout
[bb
->index
], hoist_vbein
, bb
->index
);
6304 fprintf (gcse_file
, "hoisting vbeinout computation: %d passes\n", passes
);
6307 /* Top level routine to do the dataflow analysis needed by code hoisting. */
6310 compute_code_hoist_data ()
6312 compute_local_properties (transp
, comp
, antloc
, &expr_hash_table
);
6313 compute_transpout ();
6314 compute_code_hoist_vbeinout ();
6315 dominators
= calculate_dominance_info (CDI_DOMINATORS
);
6317 fprintf (gcse_file
, "\n");
6320 /* Determine if the expression identified by EXPR_INDEX would
6321 reach BB unimpaired if it was placed at the end of EXPR_BB.
6323 It's unclear exactly what Muchnick meant by "unimpaired". It seems
6324 to me that the expression must either be computed or transparent in
6325 *every* block in the path(s) from EXPR_BB to BB. Any other definition
6326 would allow the expression to be hoisted out of loops, even if
6327 the expression wasn't a loop invariant.
6329 Contrast this to reachability for PRE where an expression is
6330 considered reachable if *any* path reaches instead of *all*
6334 hoist_expr_reaches_here_p (expr_bb
, expr_index
, bb
, visited
)
6335 basic_block expr_bb
;
6341 int visited_allocated_locally
= 0;
6344 if (visited
== NULL
)
6346 visited_allocated_locally
= 1;
6347 visited
= xcalloc (last_basic_block
, 1);
6350 for (pred
= bb
->pred
; pred
!= NULL
; pred
= pred
->pred_next
)
6352 basic_block pred_bb
= pred
->src
;
6354 if (pred
->src
== ENTRY_BLOCK_PTR
)
6356 else if (pred_bb
== expr_bb
)
6358 else if (visited
[pred_bb
->index
])
6361 /* Does this predecessor generate this expression? */
6362 else if (TEST_BIT (comp
[pred_bb
->index
], expr_index
))
6364 else if (! TEST_BIT (transp
[pred_bb
->index
], expr_index
))
6370 visited
[pred_bb
->index
] = 1;
6371 if (! hoist_expr_reaches_here_p (expr_bb
, expr_index
,
6376 if (visited_allocated_locally
)
6379 return (pred
== NULL
);
6382 /* Actually perform code hoisting. */
6387 basic_block bb
, dominated
;
6389 unsigned int domby_len
;
6391 struct expr
**index_map
;
6394 sbitmap_vector_zero (hoist_exprs
, last_basic_block
);
6396 /* Compute a mapping from expression number (`bitmap_index') to
6397 hash table entry. */
6399 index_map
= (struct expr
**) xcalloc (expr_hash_table
.n_elems
, sizeof (struct expr
*));
6400 for (i
= 0; i
< expr_hash_table
.size
; i
++)
6401 for (expr
= expr_hash_table
.table
[i
]; expr
!= NULL
; expr
= expr
->next_same_hash
)
6402 index_map
[expr
->bitmap_index
] = expr
;
6404 /* Walk over each basic block looking for potentially hoistable
6405 expressions; nothing gets hoisted from the entry block. */
6409 int insn_inserted_p
;
6411 domby_len
= get_dominated_by (dominators
, bb
, &domby
);
6412 /* Examine each expression that is very busy at the exit of this
6413 block. These are the potentially hoistable expressions. */
6414 for (i
= 0; i
< hoist_vbeout
[bb
->index
]->n_bits
; i
++)
6418 if (TEST_BIT (hoist_vbeout
[bb
->index
], i
)
6419 && TEST_BIT (transpout
[bb
->index
], i
))
6421 /* We've found a potentially hoistable expression, now
6422 we look at every block BB dominates to see if it
6423 computes the expression. */
6424 for (j
= 0; j
< domby_len
; j
++)
6426 dominated
= domby
[j
];
6427 /* Ignore self dominance. */
6428 if (bb
== dominated
)
6430 /* We've found a dominated block, now see if it computes
6431 the busy expression and whether or not moving that
6432 expression to the "beginning" of that block is safe. */
6433 if (!TEST_BIT (antloc
[dominated
->index
], i
))
6436 /* Note if the expression would reach the dominated block
6437 unimpaired if it was placed at the end of BB.
6439 Keep track of how many times this expression is hoistable
6440 from a dominated block into BB. */
6441 if (hoist_expr_reaches_here_p (bb
, i
, dominated
, NULL
))
6445 /* If we found more than one hoistable occurrence of this
6446 expression, then note it in the bitmap of expressions to
6447 hoist. It makes no sense to hoist things which are computed
6448 in only one BB, and doing so tends to pessimize register
6449 allocation. One could increase this value to try harder
6450 to avoid any possible code expansion due to register
6451 allocation issues; however experiments have shown that
6452 the vast majority of hoistable expressions are only movable
6453 from two successors, so raising this threshhold is likely
6454 to nullify any benefit we get from code hoisting. */
6457 SET_BIT (hoist_exprs
[bb
->index
], i
);
6462 /* If we found nothing to hoist, then quit now. */
6469 /* Loop over all the hoistable expressions. */
6470 for (i
= 0; i
< hoist_exprs
[bb
->index
]->n_bits
; i
++)
6472 /* We want to insert the expression into BB only once, so
6473 note when we've inserted it. */
6474 insn_inserted_p
= 0;
6476 /* These tests should be the same as the tests above. */
6477 if (TEST_BIT (hoist_vbeout
[bb
->index
], i
))
6479 /* We've found a potentially hoistable expression, now
6480 we look at every block BB dominates to see if it
6481 computes the expression. */
6482 for (j
= 0; j
< domby_len
; j
++)
6484 dominated
= domby
[j
];
6485 /* Ignore self dominance. */
6486 if (bb
== dominated
)
6489 /* We've found a dominated block, now see if it computes
6490 the busy expression and whether or not moving that
6491 expression to the "beginning" of that block is safe. */
6492 if (!TEST_BIT (antloc
[dominated
->index
], i
))
6495 /* The expression is computed in the dominated block and
6496 it would be safe to compute it at the start of the
6497 dominated block. Now we have to determine if the
6498 expression would reach the dominated block if it was
6499 placed at the end of BB. */
6500 if (hoist_expr_reaches_here_p (bb
, i
, dominated
, NULL
))
6502 struct expr
*expr
= index_map
[i
];
6503 struct occr
*occr
= expr
->antic_occr
;
6507 /* Find the right occurrence of this expression. */
6508 while (BLOCK_FOR_INSN (occr
->insn
) != dominated
&& occr
)
6511 /* Should never happen. */
6517 set
= single_set (insn
);
6521 /* Create a pseudo-reg to store the result of reaching
6522 expressions into. Get the mode for the new pseudo
6523 from the mode of the original destination pseudo. */
6524 if (expr
->reaching_reg
== NULL
)
6526 = gen_reg_rtx (GET_MODE (SET_DEST (set
)));
6528 gcse_emit_move_after (expr
->reaching_reg
, SET_DEST (set
), insn
);
6530 occr
->deleted_p
= 1;
6531 if (!insn_inserted_p
)
6533 insert_insn_end_bb (index_map
[i
], bb
, 0);
6534 insn_inserted_p
= 1;
/* Top level routine to perform one code hoisting (aka unification) pass.

   Return nonzero if a change was made.  */

static int
one_code_hoisting_pass ()
{
  int changed = 0;

  alloc_hash_table (max_cuid, &expr_hash_table, 0);
  compute_hash_table (&expr_hash_table);
  if (gcse_file)
    dump_hash_table (gcse_file, "Code Hoisting Expressions", &expr_hash_table);

  if (expr_hash_table.n_elems > 0)
    {
      alloc_code_hoist_mem (last_basic_block, expr_hash_table.n_elems);
      compute_code_hoist_data ();
      hoist_code ();
      free_code_hoist_mem ();
    }

  free_hash_table (&expr_hash_table);

  return changed;
}
/* Here we provide the things required to do store motion towards
   the exit.  In order for this to be effective, gcse also needed to
   be taught how to move a load when it is killed only by a store to
   itself.

            int i;
            float a[10];

            void foo(float scale)
            {
              for (i=0; i<10; i++)
                a[i] *= scale;
            }

   'i' is both loaded and stored to in the loop.  Normally, gcse cannot move
   the load out since it's live around the loop, and stored at the bottom
   of the loop.

     The 'Load Motion' referred to and implemented in this file is
   an enhancement to gcse which, when using edge based lcm, recognizes
   this situation and allows gcse to move the load out of the loop.

     Once gcse has hoisted the load, store motion can then push the
   store towards the exit, and we end up with no loads or stores of 'i'
   in the loop.  */
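
/* Illustratively (this sketch is not part of the compiler, and the
   temporary `t' is made up), the net effect on the example above is
   roughly as if the source had been written with `i' kept in a register
   for the duration of the loop and stored once at the exit:

            void foo(float scale)
            {
              int t;

              for (t=0; t<10; t++)
                a[t] *= scale;
              i = t;
            }
*/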
/* This will search the ldst list for a matching expression.  If it
   doesn't find one, we create one and initialize it.  */

static struct ls_expr *
ldst_entry (x)
     rtx x;
{
  struct ls_expr * ptr;

  for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
    if (expr_equiv_p (ptr->pattern, x))
      return ptr;

  ptr = (struct ls_expr *) xmalloc (sizeof (struct ls_expr));

  ptr->next         = pre_ldst_mems;
  ptr->expr         = NULL;
  ptr->pattern      = x;
  ptr->pattern_regs = NULL_RTX;
  ptr->loads        = NULL_RTX;
  ptr->stores       = NULL_RTX;
  ptr->reaching_reg = NULL_RTX;
  ptr->invalid      = 0;
  ptr->index        = 0;
  ptr->hash_index   = 0;
  pre_ldst_mems     = ptr;

  return ptr;
}
/* Free up an individual ldst entry.  */

static void
free_ldst_entry (ptr)
     struct ls_expr * ptr;
{
  free_INSN_LIST_list (& ptr->loads);
  free_INSN_LIST_list (& ptr->stores);

  free (ptr);
}
/* Free up all memory associated with the ldst list.  */

static void
free_ldst_mems ()
{
  while (pre_ldst_mems)
    {
      struct ls_expr * tmp = pre_ldst_mems;

      pre_ldst_mems = pre_ldst_mems->next;

      free_ldst_entry (tmp);
    }

  pre_ldst_mems = NULL;
}
/* Dump debugging info about the ldst list.  */

static void
print_ldst_list (file)
     FILE * file;
{
  struct ls_expr * ptr;

  fprintf (file, "LDST list: \n");

  for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
    {
      fprintf (file, "  Pattern (%3d): ", ptr->index);

      print_rtl (file, ptr->pattern);

      fprintf (file, "\n Loads : ");

      if (ptr->loads)
        print_rtl (file, ptr->loads);
      else
        fprintf (file, "(nil)");

      fprintf (file, "\n Stores : ");

      if (ptr->stores)
        print_rtl (file, ptr->stores);
      else
        fprintf (file, "(nil)");

      fprintf (file, "\n\n");
    }

  fprintf (file, "\n");
}
/* Return the entry in the list of ldst only expressions whose pattern
   matches X and is still valid, or NULL if there is none.  */

static struct ls_expr *
find_rtx_in_ldst (x)
     rtx x;
{
  struct ls_expr * ptr;

  for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
    if (expr_equiv_p (ptr->pattern, x) && ! ptr->invalid)
      return ptr;

  return NULL;
}
/* Assign each element of the list of mems a monotonically increasing value.  */

static int
enumerate_ldsts ()
{
  struct ls_expr * ptr;
  int n = 0;

  for (ptr = pre_ldst_mems; ptr != NULL; ptr = ptr->next)
    ptr->index = n++;

  return n;
}
/* Return first item in the list.  */

static inline struct ls_expr *
first_ls_expr ()
{
  return pre_ldst_mems;
}

/* Return the next item in the list after the specified one.  */

static inline struct ls_expr *
next_ls_expr (ptr)
     struct ls_expr * ptr;
{
  return ptr->next;
}
/* Load Motion for loads which only kill themselves.  */

/* Return true if x is a simple MEM operation, with no registers or
   side effects.  These are the types of loads we consider for the
   ld_motion list, otherwise we let the usual aliasing take care of it.  */

static int
simple_mem (x)
     rtx x;
{
  if (GET_CODE (x) != MEM)
    return 0;

  if (MEM_VOLATILE_P (x))
    return 0;

  if (GET_MODE (x) == BLKmode)
    return 0;

  /* If we are handling exceptions, we must be careful with memory references
     that may trap.  If we are not, the behavior is undefined, so we may just
     continue.  */
  if (flag_non_call_exceptions && may_trap_p (x))
    return 0;

  if (side_effects_p (x))
    return 0;

  /* Do not consider function arguments passed on stack.  */
  if (reg_mentioned_p (stack_pointer_rtx, x))
    return 0;

  if (flag_float_store && FLOAT_MODE_P (GET_MODE (x)))
    return 0;

  return 1;
}
/* Make sure there isn't a buried reference in this pattern anywhere.
   If there is, invalidate the entry for it since we're not capable
   of fixing it up just yet.  We have to be sure we know about ALL
   loads since the aliasing code will allow all entries in the
   ld_motion list to not-alias each other.  If we miss a load, we will get
   the wrong value since gcse might common it and we won't know to
   fix it up.  */

static void
invalidate_any_buried_refs (x)
     rtx x;
{
  const char * fmt;
  int i, j;
  struct ls_expr * ptr;

  /* Invalidate it in the list.  */
  if (GET_CODE (x) == MEM && simple_mem (x))
    {
      ptr = ldst_entry (x);
      ptr->invalid = 1;
    }

  /* Recursively process the insn.  */
  fmt = GET_RTX_FORMAT (GET_CODE (x));

  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        invalidate_any_buried_refs (XEXP (x, i));
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          invalidate_any_buried_refs (XVECEXP (x, i, j));
    }
}
/* Find all the 'simple' MEMs which are used in LOADs and STORES.  Simple
   being defined as MEM loads and stores to symbols, with no side effects
   and no registers in the expression.  For a MEM destination, we also
   check that the insn is still valid if we replace the destination with a
   REG, as is done in update_ld_motion_stores.  If there are any uses/defs
   which don't match these criteria, they are invalidated and trimmed out
   later.  */

static void
compute_ld_motion_mems ()
{
  struct ls_expr * ptr;
  basic_block bb;
  rtx insn;

  pre_ldst_mems = NULL;

  FOR_EACH_BB (bb)
    {
      for (insn = bb->head;
           insn && insn != NEXT_INSN (bb->end);
           insn = NEXT_INSN (insn))
        {
          if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
            {
              if (GET_CODE (PATTERN (insn)) == SET)
                {
                  rtx src = SET_SRC (PATTERN (insn));
                  rtx dest = SET_DEST (PATTERN (insn));

                  /* Check for a simple LOAD...  */
                  if (GET_CODE (src) == MEM && simple_mem (src))
                    {
                      ptr = ldst_entry (src);
                      if (GET_CODE (dest) == REG)
                        ptr->loads = alloc_INSN_LIST (insn, ptr->loads);
                      else
                        ptr->invalid = 1;
                    }
                  else
                    {
                      /* Make sure there isn't a buried load somewhere.  */
                      invalidate_any_buried_refs (src);
                    }

                  /* Check for stores.  Don't worry about aliased ones, they
                     will block any movement we might do later.  We only care
                     about this exact pattern since those are the only
                     circumstances in which we will ignore the aliasing info.  */
                  if (GET_CODE (dest) == MEM && simple_mem (dest))
                    {
                      ptr = ldst_entry (dest);

                      if (GET_CODE (src) != MEM
                          && GET_CODE (src) != ASM_OPERANDS
                          /* Check for REG manually since want_to_gcse_p
                             returns 0 for all REGs.  */
                          && (REG_P (src) || want_to_gcse_p (src)))
                        ptr->stores = alloc_INSN_LIST (insn, ptr->stores);
                      else
                        ptr->invalid = 1;
                    }
                }
              else
                invalidate_any_buried_refs (PATTERN (insn));
            }
        }
    }
}
/* Remove any references that have been either invalidated or are not in the
   expression list for pre gcse.  */

static void
trim_ld_motion_mems ()
{
  struct ls_expr * last = NULL;
  struct ls_expr * ptr = first_ls_expr ();

  while (ptr != NULL)
    {
      int del = ptr->invalid;
      struct expr * expr = NULL;

      /* Delete if entry has been made invalid.  */
      if (!del)
        {
          unsigned int i;

          del = 1;
          /* Delete if we cannot find this mem in the expression list.  */
          for (i = 0; i < expr_hash_table.size && del; i++)
            {
              for (expr = expr_hash_table.table[i];
                   expr != NULL;
                   expr = expr->next_same_hash)
                if (expr_equiv_p (expr->expr, ptr->pattern))
                  {
                    del = 0;
                    break;
                  }
            }
        }

      if (del)
        {
          if (last != NULL)
            {
              last->next = ptr->next;
              free_ldst_entry (ptr);
              ptr = last->next;
            }
          else
            {
              pre_ldst_mems = pre_ldst_mems->next;
              free_ldst_entry (ptr);
              ptr = pre_ldst_mems;
            }
        }
      else
        {
          /* Set the expression field if we are keeping it.  */
          ptr->expr = expr;
          last = ptr;
          ptr = ptr->next;
        }
    }

  /* Show the world what we've found.  */
  if (gcse_file && pre_ldst_mems != NULL)
    print_ldst_list (gcse_file);
}
/* This routine will take an expression which we are replacing with
   a reaching register, and update any stores that are needed if
   that expression is in the ld_motion list.  Stores are updated by
   copying their SRC to the reaching register, and then storing
   the reaching register into the store location.  This keeps the
   correct value in the reaching register for the loads.  */

static void
update_ld_motion_stores (expr)
     struct expr * expr;
{
  struct ls_expr * mem_ptr;

  if ((mem_ptr = find_rtx_in_ldst (expr->expr)))
    {
      /* We can try to find just the REACHED stores, but it shouldn't
         matter to set the reaching reg everywhere...  some might be
         dead and should be eliminated later.  */

      /* We replace (set mem expr) with (set reg expr) (set mem reg)
         where reg is the reaching reg used in the load.  We checked in
         compute_ld_motion_mems that we can replace (set mem expr) with
         (set reg expr) in that insn.  */
      rtx list = mem_ptr->stores;

      for ( ; list != NULL_RTX; list = XEXP (list, 1))
        {
          rtx insn = XEXP (list, 0);
          rtx pat = PATTERN (insn);
          rtx src = SET_SRC (pat);
          rtx reg = expr->reaching_reg;
          rtx copy, new;

          /* If we've already copied it, continue.  */
          if (expr->reaching_reg == src)
            continue;

          if (gcse_file)
            {
              fprintf (gcse_file, "PRE:  store updated with reaching reg ");
              print_rtl (gcse_file, expr->reaching_reg);
              fprintf (gcse_file, ":\n  ");
              print_inline_rtx (gcse_file, insn, 8);
              fprintf (gcse_file, "\n");
            }

          copy = gen_move_insn ( reg, copy_rtx (SET_SRC (pat)));
          new = emit_insn_before (copy, insn);
          record_one_set (REGNO (reg), new);
          SET_SRC (pat) = reg;

          /* Un-recognize this pattern since it's probably different now.  */
          INSN_CODE (insn) = -1;
          gcse_create_count++;
        }
    }
}
/* Store motion code.  */

#define ANTIC_STORE_LIST(x)		((x)->loads)
#define AVAIL_STORE_LIST(x)		((x)->stores)
#define LAST_AVAIL_CHECK_FAILURE(x)	((x)->reaching_reg)

/* This is used to communicate the target bitvector we want to use in the
   reg_set_info routine when called via the note_stores mechanism.  */
static int * regvec;

/* And current insn, for the same routine.  */
static rtx compute_store_table_current_insn;

/* Used in computing the reverse edge graph bit vectors.  */
static sbitmap * st_antloc;

/* Global holding the number of store expressions we are dealing with.  */
static int num_stores;
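
/* A rough, source-level illustration (hypothetical code, not part of the
   compiler; the names g, bar, n, k and t are made up) of the transformation
   the store motion code aims for: a store executed on every iteration of a
   loop is replaced by a set of a new pseudo, and a single store is inserted
   towards the exit.

        int g;

        void bar (int n, int k)
        {
          do
            g = k + n;
          while (--n > 0);
        }

   becomes, roughly,

        void bar (int n, int k)
        {
          int t;

          do
            t = k + n;
          while (--n > 0);
          g = t;
        }
*/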
/* Checks to set if we need to mark a register set.  Called from
   note_stores.  */

static void
reg_set_info (dest, setter, data)
     rtx dest, setter ATTRIBUTE_UNUSED;
     void * data ATTRIBUTE_UNUSED;
{
  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (GET_CODE (dest) == REG)
    regvec[REGNO (dest)] = INSN_UID (compute_store_table_current_insn);
}
/* Return zero if some of the registers in list X are killed
   due to set of registers in bitmap REGS_SET.  */

static bool
store_ops_ok (x, regs_set)
     rtx x;
     int *regs_set;
{
  rtx reg;

  for (; x; x = XEXP (x, 1))
    {
      reg = XEXP (x, 0);
      if (regs_set[REGNO (reg)])
        return false;
    }

  return true;
}
7062 extract_mentioned_regs (x
)
7065 return extract_mentioned_regs_helper (x
, NULL_RTX
);
7068 /* Helper for extract_mentioned_regs; ACCUM is used to accumulate used
7071 extract_mentioned_regs_helper (x
, accum
)
7079 /* Repeat is used to turn tail-recursion into iteration. */
7085 code
= GET_CODE (x
);
7089 return alloc_EXPR_LIST (0, x
, accum
);
7099 /* We do not run this function with arguments having side effects. */
7118 i
= GET_RTX_LENGTH (code
) - 1;
7119 fmt
= GET_RTX_FORMAT (code
);
7125 rtx tem
= XEXP (x
, i
);
7127 /* If we are about to do the last recursive call
7128 needed at this level, change it into iteration. */
7135 accum
= extract_mentioned_regs_helper (tem
, accum
);
7137 else if (fmt
[i
] == 'E')
7141 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
7142 accum
= extract_mentioned_regs_helper (XVECEXP (x
, i
, j
), accum
);
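
/* For instance (illustrative only), for a store to
   (mem (plus (reg 58) (const_int 4))) the list returned by
   extract_mentioned_regs contains the single entry (reg 58); these are
   the registers whose later modification can change the address the
   store refers to.  */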
/* Determine whether INSN is MEM store pattern that we will consider moving.
   REGS_SET_BEFORE is bitmap of registers set before (and including) the
   current insn, REGS_SET_AFTER is bitmap of registers set after (and
   including) the insn in this basic block.  We must be passing through BB from
   head to end, as we are using this fact to speed things up.

   The results are stored this way:

   -- the first anticipatable expression is added into ANTIC_STORE_LIST
   -- if the processed expression is not anticipatable, NULL_RTX is added
      there instead, so that we can use it as indicator that no further
      expression of this type may be anticipatable
   -- if the expression is available, it is added as head of AVAIL_STORE_LIST;
      consequently, all of them but this head are dead and may be deleted.
   -- if the expression is not available, the insn that causes it to fail to
      be available is stored in reaching_reg.

   Things are complicated a bit by the fact that there may already be stores
   to the same MEM from other blocks; also, the caller must take care of the
   necessary cleanup of the temporary markers after the end of the basic
   block.  */
static void
find_moveable_store (insn, regs_set_before, regs_set_after)
     rtx insn;
     int *regs_set_before;
     int *regs_set_after;
{
  struct ls_expr * ptr;
  rtx dest, set, tmp;
  int check_anticipatable, check_available;
  basic_block bb = BLOCK_FOR_INSN (insn);

  set = single_set (insn);
  if (!set)
    return;

  dest = SET_DEST (set);

  if (GET_CODE (dest) != MEM || MEM_VOLATILE_P (dest)
      || GET_MODE (dest) == BLKmode)
    return;

  if (side_effects_p (dest))
    return;

  /* If we are handling exceptions, we must be careful with memory references
     that may trap.  If we are not, the behavior is undefined, so we may just
     continue.  */
  if (flag_non_call_exceptions && may_trap_p (dest))
    return;

  ptr = ldst_entry (dest);
  if (!ptr->pattern_regs)
    ptr->pattern_regs = extract_mentioned_regs (dest);

  /* Do not check for anticipatability if we either found one anticipatable
     store already, or tested for one and found out that it was killed.  */
  check_anticipatable = 0;
  if (!ANTIC_STORE_LIST (ptr))
    check_anticipatable = 1;
  else
    {
      tmp = XEXP (ANTIC_STORE_LIST (ptr), 0);
      if (tmp != NULL_RTX
          && BLOCK_FOR_INSN (tmp) != bb)
        check_anticipatable = 1;
    }
  if (check_anticipatable)
    {
      if (store_killed_before (dest, ptr->pattern_regs, insn, bb, regs_set_before))
        tmp = NULL_RTX;
      else
        tmp = insn;
      ANTIC_STORE_LIST (ptr) = alloc_INSN_LIST (tmp,
                                                ANTIC_STORE_LIST (ptr));
    }

  /* It is not necessary to check whether store is available if we did
     it successfully before; if we failed before, do not bother to check
     until we reach the insn that caused us to fail.  */
  check_available = 0;
  if (!AVAIL_STORE_LIST (ptr))
    check_available = 1;
  else
    {
      tmp = XEXP (AVAIL_STORE_LIST (ptr), 0);
      if (BLOCK_FOR_INSN (tmp) != bb)
        check_available = 1;
    }
  if (check_available)
    {
      /* Check that we have already reached the insn at which the check
         failed last time.  */
      if (LAST_AVAIL_CHECK_FAILURE (ptr))
        {
          for (tmp = bb->end;
               tmp != insn && tmp != LAST_AVAIL_CHECK_FAILURE (ptr);
               tmp = PREV_INSN (tmp))
            continue;
          if (tmp == insn)
            check_available = 0;
        }
      else
        check_available = store_killed_after (dest, ptr->pattern_regs, insn,
                                              bb, regs_set_after,
                                              &LAST_AVAIL_CHECK_FAILURE (ptr));
    }
  if (!check_available)
    AVAIL_STORE_LIST (ptr) = alloc_INSN_LIST (insn, AVAIL_STORE_LIST (ptr));
}
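
/* For example (illustrative only; `x' and `use' are made-up names), in a
   block containing

        x = 1;
        use (x);
        x = 2;

   where `x' is a global and `use' is a call, the store expression for `x'
   is anticipatable at the block entry thanks to the first store (nothing
   before it may read `x'), and it is available at the block exit thanks to
   the second store (nothing after it may read or clobber `x').  The call in
   between is what keeps the first store from being the available one and
   the second from being the anticipatable one.  */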
/* Find available and anticipatable stores.  */

static int
compute_store_table ()
{
  int ret;
  basic_block bb;
  unsigned regno;
  rtx insn, pat, tmp;
  int *last_set_in, *already_set;
  struct ls_expr * ptr, **prev_next_ptr_ptr;

  max_gcse_regno = max_reg_num ();

  reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (last_basic_block,
                                                       max_gcse_regno);
  sbitmap_vector_zero (reg_set_in_block, last_basic_block);
  pre_ldst_mems = NULL;
  last_set_in = xmalloc (sizeof (int) * max_gcse_regno);
  already_set = xmalloc (sizeof (int) * max_gcse_regno);

  /* Find all the stores we care about.  */
  FOR_EACH_BB (bb)
    {
      /* First compute the registers set in this block.  */
      memset (last_set_in, 0, sizeof (int) * max_gcse_regno);
      regvec = last_set_in;

      for (insn = bb->head;
           insn != NEXT_INSN (bb->end);
           insn = NEXT_INSN (insn))
        {
          if (! INSN_P (insn))
            continue;

          if (GET_CODE (insn) == CALL_INSN)
            {
              bool clobbers_all = false;
#ifdef NON_SAVING_SETJMP
              if (NON_SAVING_SETJMP
                  && find_reg_note (insn, REG_SETJMP, NULL_RTX))
                clobbers_all = true;
#endif

              for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
                if (clobbers_all
                    || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
                  last_set_in[regno] = INSN_UID (insn);
            }

          pat = PATTERN (insn);
          compute_store_table_current_insn = insn;
          note_stores (pat, reg_set_info, NULL);
        }

      /* Record the set registers.  */
      for (regno = 0; regno < max_gcse_regno; regno++)
        if (last_set_in[regno])
          SET_BIT (reg_set_in_block[bb->index], regno);

      /* Now find the stores.  */
      memset (already_set, 0, sizeof (int) * max_gcse_regno);
      regvec = already_set;
      for (insn = bb->head;
           insn != NEXT_INSN (bb->end);
           insn = NEXT_INSN (insn))
        {
          if (! INSN_P (insn))
            continue;

          if (GET_CODE (insn) == CALL_INSN)
            {
              bool clobbers_all = false;
#ifdef NON_SAVING_SETJMP
              if (NON_SAVING_SETJMP
                  && find_reg_note (insn, REG_SETJMP, NULL_RTX))
                clobbers_all = true;
#endif

              for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
                if (clobbers_all
                    || TEST_HARD_REG_BIT (regs_invalidated_by_call, regno))
                  already_set[regno] = 1;
            }

          pat = PATTERN (insn);
          note_stores (pat, reg_set_info, NULL);

          /* Now that we've marked regs, look for stores.  */
          find_moveable_store (insn, already_set, last_set_in);

          /* Unmark regs that are no longer set.  */
          for (regno = 0; regno < max_gcse_regno; regno++)
            if (last_set_in[regno] == INSN_UID (insn))
              last_set_in[regno] = 0;
        }

      /* Clear temporary marks.  */
      for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
        {
          LAST_AVAIL_CHECK_FAILURE(ptr) = NULL_RTX;
          if (ANTIC_STORE_LIST (ptr)
              && (tmp = XEXP (ANTIC_STORE_LIST (ptr), 0)) == NULL_RTX)
            ANTIC_STORE_LIST (ptr) = XEXP (ANTIC_STORE_LIST (ptr), 1);
        }
    }

  /* Remove the stores that are not available anywhere, as there will
     be no opportunity to optimize them.  */
  for (ptr = pre_ldst_mems, prev_next_ptr_ptr = &pre_ldst_mems;
       ptr != NULL;
       ptr = *prev_next_ptr_ptr)
    {
      if (!AVAIL_STORE_LIST (ptr))
        {
          *prev_next_ptr_ptr = ptr->next;
          free_ldst_entry (ptr);
        }
      else
        prev_next_ptr_ptr = &ptr->next;
    }

  ret = enumerate_ldsts ();

  if (gcse_file)
    {
      fprintf (gcse_file, "ST_avail and ST_antic (shown under loads..)\n");
      print_ldst_list (gcse_file);
    }

  free (last_set_in);
  free (already_set);
  return ret;
}
/* Check to see if the load X is aliased with STORE_PATTERN.  */

static bool
load_kills_store (x, store_pattern)
     rtx x, store_pattern;
{
  if (true_dependence (x, GET_MODE (x), store_pattern, rtx_addr_varies_p))
    return true;
  return false;
}
/* Go through the entire insn X, looking for any loads which might alias
   STORE_PATTERN.  Return true if found.  */

static bool
find_loads (x, store_pattern)
     rtx x, store_pattern;
{
  const char * fmt;
  int i, j;
  bool ret = false;

  if (!x)
    return false;

  if (GET_CODE (x) == SET)
    x = SET_SRC (x);

  if (GET_CODE (x) == MEM)
    {
      if (load_kills_store (x, store_pattern))
        return true;
    }

  /* Recursively process the insn.  */
  fmt = GET_RTX_FORMAT (GET_CODE (x));

  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0 && !ret; i--)
    {
      if (fmt[i] == 'e')
        ret |= find_loads (XEXP (x, i), store_pattern);
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          ret |= find_loads (XVECEXP (x, i, j), store_pattern);
    }
  return ret;
}
/* Check if INSN kills the store pattern X (is aliased with it).
   Return true if it does.  */

static bool
store_killed_in_insn (x, x_regs, insn)
     rtx x, x_regs, insn;
{
  rtx reg, base;

  if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
    return false;

  if (GET_CODE (insn) == CALL_INSN)
    {
      /* A normal or pure call might read from pattern,
         but a const call will not.  */
      if (! CONST_OR_PURE_CALL_P (insn) || pure_call_p (insn))
        return true;

      /* But even a const call reads its parameters.  Check whether the
         base of some of the registers used in mem is the stack pointer.  */
      for (reg = x_regs; reg; reg = XEXP (reg, 1))
        {
          base = find_base_term (XEXP (reg, 0));
          if (!base
              || (GET_CODE (base) == ADDRESS
                  && GET_MODE (base) == Pmode
                  && XEXP (base, 0) == stack_pointer_rtx))
            return true;
        }

      return false;
    }

  if (GET_CODE (PATTERN (insn)) == SET)
    {
      rtx pat = PATTERN (insn);
      /* Check for memory stores to aliased objects.  */
      if (GET_CODE (SET_DEST (pat)) == MEM && !expr_equiv_p (SET_DEST (pat), x))
        /* Pretend it's a load and check for aliasing.  */
        if (find_loads (SET_DEST (pat), x))
          return true;
      return find_loads (SET_SRC (pat), x);
    }
  else
    return find_loads (PATTERN (insn), x);
}
/* Returns true if the expression X is loaded or clobbered on or after INSN
   within basic block BB.  REGS_SET_AFTER is bitmap of registers set in
   or after the insn.  X_REGS is list of registers mentioned in X.  If the
   store is killed, return in FAIL_INSN the last insn that kills it.  */

static bool
store_killed_after (x, x_regs, insn, bb, regs_set_after, fail_insn)
     rtx x, x_regs, insn;
     basic_block bb;
     int *regs_set_after;
     rtx *fail_insn;
{
  rtx last = bb->end, act;

  if (!store_ops_ok (x_regs, regs_set_after))
    {
      /* We do not know where it will happen.  */
      if (fail_insn)
        *fail_insn = NULL_RTX;
      return true;
    }

  /* Scan from the end, so that fail_insn is determined correctly.  */
  for (act = last; act != PREV_INSN (insn); act = PREV_INSN (act))
    if (store_killed_in_insn (x, x_regs, act))
      {
        if (fail_insn)
          *fail_insn = act;
        return true;
      }

  return false;
}
/* Returns true if the expression X is loaded or clobbered on or before INSN
   within basic block BB.  X_REGS is list of registers mentioned in X.
   REGS_SET_BEFORE is bitmap of registers set before or in this insn.  */

static bool
store_killed_before (x, x_regs, insn, bb, regs_set_before)
     rtx x, x_regs, insn;
     basic_block bb;
     int *regs_set_before;
{
  rtx first = bb->head;

  if (!store_ops_ok (x_regs, regs_set_before))
    return true;

  for ( ; insn != PREV_INSN (first); insn = PREV_INSN (insn))
    if (store_killed_in_insn (x, x_regs, insn))
      return true;

  return false;
}
/* Fill in available, anticipatable, transparent and kill vectors in
   STORE_DATA, based on lists of available and anticipatable stores.  */

static void
build_store_vectors ()
{
  basic_block bb;
  int *regs_set_in_block;
  rtx insn, st;
  struct ls_expr * ptr;
  unsigned regno;

  /* Build the gen_vector.  This is any store in the table which is not killed
     by aliasing later in its block.  */
  ae_gen = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (ae_gen, last_basic_block);

  st_antloc = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (st_antloc, last_basic_block);

  for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
    {
      for (st = AVAIL_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
        {
          insn = XEXP (st, 0);
          bb = BLOCK_FOR_INSN (insn);

          /* If we've already seen an available expression in this block,
             we can delete this one (It occurs earlier in the block).  We'll
             copy the SRC expression to an unused register in case there
             are any side effects.  */
          if (TEST_BIT (ae_gen[bb->index], ptr->index))
            {
              rtx r = gen_reg_rtx (GET_MODE (ptr->pattern));
              if (gcse_file)
                fprintf (gcse_file, "Removing redundant store:\n");
              replace_store_insn (r, XEXP (st, 0), bb);
              continue;
            }
          SET_BIT (ae_gen[bb->index], ptr->index);
        }

      for (st = ANTIC_STORE_LIST (ptr); st != NULL; st = XEXP (st, 1))
        {
          insn = XEXP (st, 0);
          bb = BLOCK_FOR_INSN (insn);
          SET_BIT (st_antloc[bb->index], ptr->index);
        }
    }

  ae_kill = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (ae_kill, last_basic_block);

  transp = (sbitmap *) sbitmap_vector_alloc (last_basic_block, num_stores);
  sbitmap_vector_zero (transp, last_basic_block);
  regs_set_in_block = xmalloc (sizeof (int) * max_gcse_regno);

  FOR_EACH_BB (bb)
    {
      for (regno = 0; regno < max_gcse_regno; regno++)
        regs_set_in_block[regno] = TEST_BIT (reg_set_in_block[bb->index], regno);

      for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
        {
          if (store_killed_after (ptr->pattern, ptr->pattern_regs, bb->head,
                                  bb, regs_set_in_block, NULL))
            {
              /* It should not be necessary to consider the expression
                 killed if it is both anticipatable and available.  */
              if (!TEST_BIT (st_antloc[bb->index], ptr->index)
                  || !TEST_BIT (ae_gen[bb->index], ptr->index))
                SET_BIT (ae_kill[bb->index], ptr->index);
            }
          else
            SET_BIT (transp[bb->index], ptr->index);
        }
    }

  free (regs_set_in_block);

  if (gcse_file)
    {
      dump_sbitmap_vector (gcse_file, "st_antloc", "", st_antloc, last_basic_block);
      dump_sbitmap_vector (gcse_file, "st_kill", "", ae_kill, last_basic_block);
      dump_sbitmap_vector (gcse_file, "Transpt", "", transp, last_basic_block);
      dump_sbitmap_vector (gcse_file, "st_avloc", "", ae_gen, last_basic_block);
    }
}
/* Insert an instruction at the beginning of a basic block, and update
   the BLOCK_HEAD if needed.  */

static void
insert_insn_start_bb (insn, bb)
     rtx insn;
     basic_block bb;
{
  /* Insert at start of successor block.  */
  rtx prev = PREV_INSN (bb->head);
  rtx before = bb->head;
  while (before != 0)
    {
      if (GET_CODE (before) != CODE_LABEL
          && (GET_CODE (before) != NOTE
              || NOTE_LINE_NUMBER (before) != NOTE_INSN_BASIC_BLOCK))
        break;
      prev = before;
      if (prev == bb->end)
        break;
      before = NEXT_INSN (before);
    }

  insn = emit_insn_after (insn, prev);

  if (gcse_file)
    {
      fprintf (gcse_file, "STORE_MOTION  insert store at start of BB %d:\n",
               bb->index);
      print_inline_rtx (gcse_file, insn, 6);
      fprintf (gcse_file, "\n");
    }
}
/* This routine will insert a store on an edge.  EXPR is the ldst entry for
   the memory reference, and E is the edge to insert it on.  Returns nonzero
   if an edge insertion was performed.  */

static int
insert_store (expr, e)
     struct ls_expr * expr;
     edge e;
{
  rtx reg, insn;
  basic_block bb;
  edge tmp;

  /* We did all the deletions before this insert, so if we didn't delete a
     store, then we haven't set the reaching reg yet either.  */
  if (expr->reaching_reg == NULL_RTX)
    return 0;

  reg = expr->reaching_reg;
  insn = gen_move_insn (copy_rtx (expr->pattern), reg);

  /* If we are inserting this expression on ALL predecessor edges of a BB,
     insert it at the start of the BB, and reset the insert bits on the other
     edges so we don't try to insert it on the other edges.  */
  bb = e->dest;
  for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
    {
      int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
      if (index == EDGE_INDEX_NO_EDGE)
        abort ();
      if (! TEST_BIT (pre_insert_map[index], expr->index))
        break;
    }

  /* If tmp is NULL, we found an insertion on every edge, blank the
     insertion vector for these edges, and insert at the start of the BB.  */
  if (!tmp && bb != EXIT_BLOCK_PTR)
    {
      for (tmp = e->dest->pred; tmp ; tmp = tmp->pred_next)
        {
          int index = EDGE_INDEX (edge_list, tmp->src, tmp->dest);
          RESET_BIT (pre_insert_map[index], expr->index);
        }
      insert_insn_start_bb (insn, bb);
      return 0;
    }

  /* We can't insert on this edge, so we'll insert at the head of the
     successor's block.  See Morgan, sec 10.5.  */
  if ((e->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
    {
      insert_insn_start_bb (insn, bb);
      return 0;
    }

  insert_insn_on_edge (insn, e);

  if (gcse_file)
    {
      fprintf (gcse_file, "STORE_MOTION  insert insn on edge (%d, %d):\n",
               e->src->index, e->dest->index);
      print_inline_rtx (gcse_file, insn, 6);
      fprintf (gcse_file, "\n");
    }

  return 1;
}
/* This routine will replace a store with a SET to a specified register.  */

static void
replace_store_insn (reg, del, bb)
     rtx reg, del;
     basic_block bb;
{
  rtx insn;

  insn = gen_move_insn (reg, SET_SRC (single_set (del)));
  insn = emit_insn_after (insn, del);

  if (gcse_file)
    {
      fprintf (gcse_file,
               "STORE_MOTION  delete insn in BB %d:\n      ", bb->index);
      print_inline_rtx (gcse_file, del, 6);
      fprintf (gcse_file, "\nSTORE MOTION  replaced with insn:\n      ");
      print_inline_rtx (gcse_file, insn, 6);
      fprintf (gcse_file, "\n");
    }

  delete_insn (del);
}
/* Delete a store, but copy the value that would have been stored into
   the reaching_reg for later storing.  */

static void
delete_store (expr, bb)
     struct ls_expr * expr;
     basic_block bb;
{
  rtx reg, i, del;

  if (expr->reaching_reg == NULL_RTX)
    expr->reaching_reg = gen_reg_rtx (GET_MODE (expr->pattern));

  reg = expr->reaching_reg;

  for (i = AVAIL_STORE_LIST (expr); i; i = XEXP (i, 1))
    {
      del = XEXP (i, 0);
      if (BLOCK_FOR_INSN (del) == bb)
        {
          /* We know there is only one since we deleted redundant
             ones during the available computation.  */
          replace_store_insn (reg, del, bb);
          break;
        }
    }
}
/* Free memory used by store motion.  */

static void
free_store_memory ()
{
  free_ldst_mems ();

  if (ae_gen)
    sbitmap_vector_free (ae_gen);
  if (ae_kill)
    sbitmap_vector_free (ae_kill);
  if (transp)
    sbitmap_vector_free (transp);
  if (st_antloc)
    sbitmap_vector_free (st_antloc);
  if (pre_insert_map)
    sbitmap_vector_free (pre_insert_map);
  if (pre_delete_map)
    sbitmap_vector_free (pre_delete_map);
  if (reg_set_in_block)
    sbitmap_vector_free (reg_set_in_block);

  ae_gen = ae_kill = transp = st_antloc = NULL;
  pre_insert_map = pre_delete_map = reg_set_in_block = NULL;
}
/* Perform store motion.  Much like gcse, except we move expressions the
   other way by looking at the flowgraph in reverse.  */

static void
store_motion ()
{
  basic_block bb;
  int x;
  struct ls_expr * ptr;
  int update_flow = 0;

  if (gcse_file)
    {
      fprintf (gcse_file, "before store motion\n");
      print_rtl (gcse_file, get_insns ());
    }

  init_alias_analysis ();

  /* Find all the available and anticipatable stores.  */
  num_stores = compute_store_table ();
  if (num_stores == 0)
    {
      sbitmap_vector_free (reg_set_in_block);
      end_alias_analysis ();
      return;
    }

  /* Now compute kill & transp vectors.  */
  build_store_vectors ();
  add_noreturn_fake_exit_edges ();

  edge_list = pre_edge_rev_lcm (gcse_file, num_stores, transp, ae_gen,
                                st_antloc, ae_kill, &pre_insert_map,
                                &pre_delete_map);

  /* Now we want to insert the new stores which are going to be needed.  */
  for (ptr = first_ls_expr (); ptr != NULL; ptr = next_ls_expr (ptr))
    {
      FOR_EACH_BB (bb)
        if (TEST_BIT (pre_delete_map[bb->index], ptr->index))
          delete_store (ptr, bb);

      for (x = 0; x < NUM_EDGES (edge_list); x++)
        if (TEST_BIT (pre_insert_map[x], ptr->index))
          update_flow |= insert_store (ptr, INDEX_EDGE (edge_list, x));
    }

  if (update_flow)
    commit_edge_insertions ();

  free_store_memory ();
  free_edge_list (edge_list);
  remove_fake_edges ();
  end_alias_analysis ();
}
/* Entry point for jump bypassing optimization pass.  */

int
bypass_jumps (file)
     FILE *file;
{
  int changed;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* For calling dump_foo fns from gdb.  */
  debug_stderr = stderr;
  gcse_file = file;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  if (file)
    dump_flow_info (file);

  /* Return if there's nothing to do.  */
  if (n_basic_blocks <= 1)
    return 0;

  /* Trying to perform global optimizations on flow graphs which have
     a high connectivity will take a long time and is unlikely to be
     particularly useful.

     In normal circumstances a cfg should have about twice as many edges
     as blocks.  But we do not want to punish small functions which have
     a couple switch statements.  So we require a relatively large number
     of basic blocks and the ratio of edges to blocks to be high.  */
  if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
    {
      if (warn_disabled_optimization)
        warning ("BYPASS disabled: %d > 1000 basic blocks and %d >= 20 edges/basic block",
                 n_basic_blocks, n_edges / n_basic_blocks);
      return 0;
    }

  /* If allocating memory for the cprop bitmap would take up too much
     storage it's better just to disable the optimization.  */
  if ((n_basic_blocks
       * SBITMAP_SET_SIZE (max_gcse_regno)
       * sizeof (SBITMAP_ELT_TYPE)) > MAX_GCSE_MEMORY)
    {
      if (warn_disabled_optimization)
        warning ("GCSE disabled: %d basic blocks and %d registers",
                 n_basic_blocks, max_gcse_regno);

      return 0;
    }

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* We need alias.  */
  init_alias_analysis ();

  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets (get_insns ());

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem (get_insns ());
  changed = one_cprop_pass (1, 1, 1);
  free_gcse_mem ();

  if (file)
    {
      fprintf (file, "BYPASS of %s: %d basic blocks, ",
               current_function_name, n_basic_blocks);
      fprintf (file, "%d bytes\n\n", bytes_used);
    }

  obstack_free (&gcse_obstack, NULL);
  free_reg_set_mem ();

  /* We are finished with alias.  */
  end_alias_analysis ();
  allocate_reg_info (max_reg_num (), FALSE, FALSE);

  return changed;
}