/* Global common subexpression elimination/Partial redundancy elimination
   and global constant/copy propagation for GNU compiler.
   Copyright (C) 1997, 1998, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* TODO
   - reordering of memory allocation and freeing to be more space efficient
   - do rough calc of how many regs are needed in each block, and a rough
     calc of how many regs are available in each class and use that to
     throttle back the code in cases where RTX_COST is minimal.
   - dead store elimination
   - a store to the same address as a load does not kill the load if the
     source of the store is also the destination of the load.  Handling this
     allows more load motion, particularly out of loops.
   - ability to realloc sbitmap vectors would allow one initial computation
     of reg_set_in_block with only subsequent additions, rather than
     recomputing it for each pass
*/
/* References searched while implementing this.

   Compilers Principles, Techniques and Tools
   Aho, Sethi, Ullman
   Addison-Wesley, 1988

   Global Optimization by Suppression of Partial Redundancies
   E. Morel, C. Renvoise
   communications of the acm, Vol. 22, Num. 2, Feb. 1979

   A Portable Machine-Independent Global Optimizer - Design and Measurements
   Frederick Chow
   Stanford Ph.D. thesis, Dec. 1983

   A Fast Algorithm for Code Movement Optimization
   D.M. Dhamdhere
   SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988

   A Solution to a Problem with Morel and Renvoise's
   Global Optimization by Suppression of Partial Redundancies
   K-H Drechsler, M.P. Stadel
   ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988

   Practical Adaptation of the Global Optimization
   Algorithm of Morel and Renvoise
   D.M. Dhamdhere
   ACM TOPLAS, Vol. 13, Num. 2, Apr. 1991

   Efficiently Computing Static Single Assignment Form and the Control
   Dependence Graph
   R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
   ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991

   Lazy Code Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   What's In a Region?  Or Computing Control Dependence Regions in Near-Linear
   Time for Reducible Flow Control
   Thomas Ball
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   An Efficient Representation for Sparse Sets
   Preston Briggs, Linda Torczon
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
   K-H Drechsler, M.P. Stadel
   ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993

   Partial Dead Code Elimination
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Effective Partial Redundancy Elimination
   P. Briggs, K.D. Cooper
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   The Program Structure Tree: Computing Control Regions in Linear Time
   R. Johnson, D. Pearson, K. Pingali
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Optimal Code Motion: Theory and Practice
   J. Knoop, O. Ruthing, B. Steffen
   ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994

   The power of assignment motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Global code motion / global value numbering
   C. Click
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Value Driven Redundancy Elimination
   L.T. Simpson
   Rice University Ph.D. thesis, Apr. 1996

   Value Numbering
   L.T. Simpson
   Massively Scalar Compiler Project, Rice University, Sep. 1996

   High Performance Compilers for Parallel Computing
   Michael Wolfe
   Addison-Wesley, 1996

   Advanced Compiler Design and Implementation
   Steven Muchnick
   Morgan Kaufmann, 1997

   Building an Optimizing Compiler
   Robert Morgan
   Digital Press, 1998

   People wishing to speed up the code here should read:
     Elimination Algorithms for Data Flow Analysis
     B.G. Ryder, M.C. Paull
     ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986

     How to Analyze Large Programs Efficiently and Informatively
     D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
     ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   People wishing to do something different can find various possibilities
   in the above papers and elsewhere.
*/
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "function.h"

#include "obstack.h"
#define obstack_chunk_alloc gmalloc
#define obstack_chunk_free free
/* Maximum number of passes to perform.  */
#define MAX_PASSES 1
/* Propagate flow information through back edges and thus enable PRE's
   moving loop invariant calculations out of loops.

   Originally this tended to create worse overall code, but several
   improvements during the development of PRE seem to have made following
   back edges generally a win.

   Note much of the loop invariant code motion done here would normally
   be done by loop.c, which has more heuristics for when to move invariants
   out of loops.  At some point we might need to move some of those
   heuristics into gcse.c.  */
#define FOLLOW_BACK_EDGES 1
/* We support GCSE via Partial Redundancy Elimination.  PRE optimizations
   are a superset of those done by GCSE.

   We perform the following steps:

   1) Compute basic block information.

   2) Compute table of places where registers are set.

   3) Perform copy/constant propagation.

   4) Perform global cse.

   5) Perform another pass of copy/constant propagation.

   Two passes of copy/constant propagation are done because the first one
   enables more GCSE and the second one helps to clean up the copies that
   GCSE creates.  This is needed more for PRE than for Classic because Classic
   GCSE will try to use an existing register containing the common
   subexpression rather than create a new one.  This is harder to do for PRE
   because of the code motion (which Classic GCSE doesn't do).

   Expressions we are interested in GCSE-ing are of the form
   (set (pseudo-reg) (expression)).
   Function want_to_gcse_p says what these are.
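
   For example (purely illustrative, with made-up register numbers), a
   typical candidate is

     (set (reg:SI 105) (plus:SI (reg:SI 103) (reg:SI 104)))

   whereas a set whose source is itself a register or constant is instead
   a candidate for the copy/constant propagation pass.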
   PRE handles moving invariant expressions out of loops (by treating them as
   partially redundant).

   Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
   assignment) based GVN (global value numbering).  L. T. Simpson's paper
   (Rice University) on value numbering is a useful reference for this.

   **********************

   We used to support multiple passes but there are diminishing returns in
   doing so.  The first pass usually makes 90% of the changes that are doable.
   A second pass can make a few more changes made possible by the first pass.
   Experiments show any further passes don't make enough changes to justify
   the expense.

   A study of spec92 using an unlimited number of passes:
   [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
   [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
   [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1

   It was found doing copy propagation between each pass enables further
   substitutions.

   PRE is quite expensive in complicated functions because the DFA can take
   a while to converge.  Hence we only perform one pass.  The macro MAX_PASSES
   can be modified if one wants to experiment.

   **********************

   The steps for PRE are:

   1) Build the hash table of expressions we wish to GCSE (expr_hash_table).

   2) Perform the data flow analysis for PRE.

   3) Delete the redundant instructions.

   4) Insert the required copies [if any] that make the partially
      redundant instructions fully redundant.

   5) For other reaching expressions, insert an instruction to copy the value
      to a newly created pseudo that will reach the redundant instruction.

   The deletion is done first so that when we do insertions we
   know which pseudo reg to use.
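
   As a purely illustrative example (made-up pseudo numbers), consider a
   diamond where one arm computes an expression and the join recomputes it:

     bb1: r5 = r1 + r2      bb2: (no computation)
            \                 /
             bb3: r6 = r1 + r2    <- partially redundant

   The computation in bb3 is deleted, "r7 = r1 + r2" is inserted at the end
   of bb2, a copy "r7 = r5" is added after the computation in bb1, and bb3
   becomes the simple copy "r6 = r7", r7 being the newly created pseudo.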
   Various papers have argued that PRE DFA is expensive (O(n^2)) and others
   argue it is not.  The number of iterations for the algorithm to converge
   is typically 2-4 so I don't view it as that expensive (relatively speaking).

   PRE GCSE depends heavily on the second CSE pass to clean up the copies
   we create.  To make an expression reach the place where it's redundant,
   the result of the expression is copied to a new register, and the redundant
   expression is deleted by replacing it with this new register.  Classic GCSE
   doesn't have this problem as much as it computes the reaching defs of
   each register in each block and thus can try to use an existing register.

   **********************

   A fair bit of simplicity is created by creating small functions for simple
   tasks, even when the function is only called in one place.  This may
   measurably slow things down [or may not] by creating more function call
   overhead than is necessary.  The source is laid out so that it's trivial
   to make the affected functions inline so that one can measure what speed
   up, if any, can be achieved, and maybe later when things settle down the
   code can be rearranged.

   Help stamp out big monolithic functions!  */
/* GCSE global vars.  */

static FILE *gcse_file;

/* Note whether or not we should run jump optimization after gcse.  We
   want to do this for two cases.

    * If we changed any jumps via cprop.

    * If we added any labels via edge splitting.  */

static int run_jump_opt_after_gcse;

/* Bitmaps are normally not included in debugging dumps.
   However it's useful to be able to print them from GDB.
   We could create special functions for this, but it's simpler to
   just allow passing stderr to the dump_foo fns.  Since stderr can
   be a macro, we store a copy here.  */

static FILE *debug_stderr;

/* An obstack for our working variables.  */

static struct obstack gcse_obstack;

/* Non-zero for each mode that supports (set (reg) (reg)).
   This is trivially true for integer and floating point values.
   It may or may not be true for condition codes.  */

static char can_copy_p[(int) NUM_MACHINE_MODES];

/* Non-zero if can_copy_p has been initialized.  */

static int can_copy_init_p;
/* Hash table of expressions.  */

struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;
  /* Index in the available expression bitmaps.  */
  int bitmap_index;
  /* Next entry with the same hash.  */
  struct expr *next_same_hash;
  /* List of anticipatable occurrences in basic blocks in the function.
     An "anticipatable occurrence" is one that is the first occurrence in the
     basic block, the operands are not modified in the basic block prior
     to the occurrence and the output is not used between the start of
     the block and the occurrence.  */
  struct occr *antic_occr;
  /* List of available occurrences in basic blocks in the function.
     An "available occurrence" is one that is the last occurrence in the
     basic block and the operands are not modified by following statements in
     the basic block [including this insn].  */
  struct occr *avail_occr;
  /* Non-null if the computation is PRE redundant.
     The value is the newly created pseudo-reg to record a copy of the
     expression in all the places that reach the redundant copy.  */
  rtx reaching_reg;
};

/* Occurrence of an expression.
   There is one per basic block.  If a pattern appears more than once the
   last appearance is used [or first for anticipatable expressions].  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Non-zero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
  /* Non-zero if this [available] occurrence has been copied to
     reaching_reg.  */
  /* ??? This is mutually exclusive with deleted_p, so they could share
     the same byte.  */
  char copied_p;
};
/* Expression and copy propagation hash tables.
   Each hash table is an array of buckets.
   ??? It is known that if it were an array of entries, structure elements
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
   not clear whether in the final analysis a sufficient amount of memory would
   be saved as the size of the available expression bitmaps would be larger
   [one could build a mapping table without holes afterwards though].
   Someday I'll perform the computation and figure it out.  */

/* Total size of the expression hash table, in elements.  */
static int expr_hash_table_size;

/* The table itself.
   This is an array of `expr_hash_table_size' elements.  */
static struct expr **expr_hash_table;

/* Total size of the copy propagation hash table, in elements.  */
static int set_hash_table_size;

/* The table itself.
   This is an array of `set_hash_table_size' elements.  */
static struct expr **set_hash_table;
/* Mapping of uids to cuids.
   Only real insns get cuids.  */
static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Number of cuids.  */
static int max_cuid;

/* Mapping of cuids to insns.  */
static rtx *cuid_insn;

/* Get insn from cuid.  */
#define CUID_INSN(CUID) (cuid_insn[CUID])
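
/* Purely illustrative (not used by this file): since cuids are dense over
   real insns, the two mappings invert each other, so for any real insn
   CUID_INSN (INSN_CUID (insn)) == insn, and the next real insn in cuid
   order (when one exists) is simply CUID_INSN (INSN_CUID (insn) + 1).  */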
/* Maximum register number in function prior to doing gcse + 1.
   Registers created during this pass have regno >= max_gcse_regno.
   This is named with "gcse" to not collide with global of same name.  */
static int max_gcse_regno;

/* Maximum number of cse-able expressions found.  */
static int n_exprs;

/* Maximum number of assignments for copy propagation found.  */
static int n_sets;
/* Table of registers that are modified.

   For each register, each element is a list of places where the pseudo-reg
   is set.

   For simplicity, GCSE is done on sets of pseudo-regs only.  PRE GCSE only
   requires knowledge of which blocks kill which regs [and thus could use
   a bitmap instead of the lists `reg_set_table' uses].

   ??? `reg_set_table' could be turned into an array of bitmaps
   [however perhaps it may be useful to keep the data as is].
   One advantage of recording things this way is that `reg_set_table' is
   fairly sparse with respect to pseudo regs but for hard regs could be
   fairly dense [relatively speaking].
   And recording sets of pseudo-regs in lists speeds
   up functions like compute_transp since in the case of pseudo-regs we only
   need to iterate over the number of times a pseudo-reg is set, not over the
   number of basic blocks [clearly there is a bit of a slow down in the cases
   where a pseudo is set more than once in a block, however it is believed
   that the net effect is to speed things up].  This isn't done for hard-regs
   because recording call-clobbered hard-regs in `reg_set_table' at each
   function call can consume a fair bit of memory, and iterating over
   hard-regs stored this way in compute_transp will be more expensive.  */

typedef struct reg_set
{
  /* The next setting of this register.  */
  struct reg_set *next;
  /* The insn where it was set.  */
  rtx insn;
} reg_set;

static reg_set **reg_set_table;

/* Size of `reg_set_table'.
   The table starts out at max_gcse_regno + slop, and is enlarged as
   necessary.  */
static int reg_set_table_size;

/* Amount to grow `reg_set_table' by when it's full.  */
#define REG_SET_TABLE_SLOP 100
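
/* A minimal usage sketch (purely illustrative, not part of this file):
   visiting every place a pseudo REGNO is set amounts to walking its chain:

     struct reg_set *r;
     for (r = reg_set_table[regno]; r != NULL; r = r->next)
       ... examine r->insn ...
*/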
/* Bitmap containing one bit for each register in the program.
   Used when performing GCSE to track which registers have been set since
   the start of the basic block.  */

static sbitmap reg_set_bitmap;

/* For each block, a bitmap of registers set in the block.
   This is used by expr_killed_p and compute_transp.
   It is computed during hash table computation and not by compute_sets
   as it includes registers added since the last pass (or between cprop and
   gcse) and it's currently not easy to realloc sbitmap vectors.  */

static sbitmap *reg_set_in_block;

/* For each block, non-zero if memory is set in that block.
   This is computed during hash table computation and is used by
   expr_killed_p and compute_transp.
   ??? Handling of memory is very simple, we don't make any attempt
   to optimize things (later).
   ??? This can be computed by compute_sets since the information
   doesn't change.  */

static char *mem_set_in_block;
/* Various variables for statistics gathering.  */

/* Memory used in a pass.
   This isn't intended to be absolutely precise.  Its intent is only
   to keep an eye on memory usage.  */
static int bytes_used;

/* GCSE substitutions made.  */
static int gcse_subst_count;

/* Number of copy instructions created.  */
static int gcse_create_count;

/* Number of constants propagated.  */
static int const_prop_count;

/* Number of copies propagated.  */
static int copy_prop_count;
/* These variables are used by classic GCSE.
   Normally they'd be defined a bit later, but `rd_gen' needs to
   be declared sooner.  */

/* A bitmap of all ones for implementing the algorithm for available
   expressions and reaching definitions.  */
/* ??? Available expression bitmaps have a different size than reaching
   definition bitmaps.  This should be the larger of the two, however, it
   is not currently used for reaching definitions.  */
static sbitmap u_bitmap;

/* Each block has a bitmap of each type.
   The length of each block's bitmap is:

       max_cuid  - for reaching definitions
       n_exprs   - for available expressions

   Thus we view the bitmaps as 2 dimensional arrays.  i.e.
   rd_kill[block_num][cuid_num]
   ae_kill[block_num][expr_num]  */
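
/* Purely illustrative (not part of this file): with that layout, marking
   expression EXPR as killed in block BB is just

     SET_BIT (ae_kill[bb], expr->bitmap_index);
*/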
/* For reaching defs.  */
static sbitmap *rd_kill, *rd_gen, *reaching_defs, *rd_out;

/* For available exprs.  */
static sbitmap *ae_kill, *ae_gen, *ae_in, *ae_out;
static void compute_can_copy	PROTO ((void));

static char *gmalloc		PROTO ((unsigned int));
static char *grealloc		PROTO ((char *, unsigned int));
static char *gcse_alloc		PROTO ((unsigned long));
static void alloc_gcse_mem	PROTO ((rtx));
static void free_gcse_mem	PROTO ((void));
static void alloc_reg_set_mem	PROTO ((int));
static void free_reg_set_mem	PROTO ((void));
static void record_one_set	PROTO ((int, rtx));
static void record_set_info	PROTO ((rtx, rtx));
static void compute_sets	PROTO ((rtx));

static void hash_scan_insn	PROTO ((rtx, int, int));
static void hash_scan_set	PROTO ((rtx, rtx, int));
static void hash_scan_clobber	PROTO ((rtx, rtx));
static void hash_scan_call	PROTO ((rtx, rtx));
static int want_to_gcse_p	PROTO ((rtx));
static int oprs_unchanged_p	PROTO ((rtx, rtx, int));
static int oprs_anticipatable_p	PROTO ((rtx, rtx));
static int oprs_available_p	PROTO ((rtx, rtx));
static void insert_expr_in_table PROTO ((rtx, enum machine_mode, rtx,
					 int, int));
static void insert_set_in_table PROTO ((rtx, rtx));
static unsigned int hash_expr	PROTO ((rtx, enum machine_mode, int *,
					int));
static unsigned int hash_expr_1	PROTO ((rtx, enum machine_mode, int *));
static unsigned int hash_set	PROTO ((int, int));
static int expr_equiv_p		PROTO ((rtx, rtx));
static void record_last_reg_set_info	PROTO ((rtx, int));
static void record_last_mem_set_info	PROTO ((rtx));
static void record_last_set_info	PROTO ((rtx, rtx));
static void compute_hash_table	PROTO ((int));
static void alloc_set_hash_table PROTO ((int));
static void free_set_hash_table PROTO ((void));
static void compute_set_hash_table	PROTO ((void));
static void alloc_expr_hash_table	PROTO ((int));
static void free_expr_hash_table PROTO ((void));
static void compute_expr_hash_table	PROTO ((void));
static void dump_hash_table	PROTO ((FILE *, const char *, struct expr **,
					int, int));
static struct expr *lookup_expr	PROTO ((rtx));
static struct expr *lookup_set	PROTO ((int, rtx));
static struct expr *next_set	PROTO ((int, struct expr *));
static void reset_opr_set_tables PROTO ((void));
static int oprs_not_set_p	PROTO ((rtx, rtx));
static void mark_call		PROTO ((rtx));
static void mark_set		PROTO ((rtx, rtx));
static void mark_clobber	PROTO ((rtx, rtx));
static void mark_oprs_set	PROTO ((rtx));

static void alloc_cprop_mem	PROTO ((int, int));
static void free_cprop_mem	PROTO ((void));
static void compute_transp	PROTO ((rtx, int, sbitmap *, int));
static void compute_transpout	PROTO ((void));
static void compute_local_properties	PROTO ((sbitmap *, sbitmap *,
						sbitmap *, int));
static void compute_cprop_avinout	PROTO ((void));
static void compute_cprop_data	PROTO ((void));
static void find_used_regs	PROTO ((rtx));
static int try_replace_reg	PROTO ((rtx, rtx, rtx));
static struct expr *find_avail_set	PROTO ((int, rtx));
static int cprop_jump		PROTO ((rtx, rtx, struct reg_use *, rtx));
static int cprop_cc0_jump	PROTO ((rtx, struct reg_use *, rtx));
static int cprop_insn		PROTO ((rtx, int));
static int cprop		PROTO ((int));
static int one_cprop_pass	PROTO ((int, int));

static void alloc_pre_mem	PROTO ((int, int));
static void free_pre_mem	PROTO ((void));
static void compute_pre_data	PROTO ((void));
static int pre_expr_reaches_here_p	PROTO ((int, struct expr *,
						int, int, char *));
static void insert_insn_end_bb	PROTO ((struct expr *, int, int));
static void pre_insert_copy_insn PROTO ((struct expr *, rtx));
static void pre_insert_copies	PROTO ((void));
static int pre_delete		PROTO ((void));
static int pre_gcse		PROTO ((void));
static int one_pre_gcse_pass	PROTO ((int));

static void add_label_notes	PROTO ((rtx, rtx));

static void alloc_code_hoist_mem	PROTO ((int, int));
static void free_code_hoist_mem	PROTO ((void));
static void compute_code_hoist_vbeinout	PROTO ((void));
static void compute_code_hoist_data	PROTO ((void));
static int hoist_expr_reaches_here_p	PROTO ((int, int, int, char *));
static void hoist_code		PROTO ((void));
static int one_code_hoisting_pass	PROTO ((void));

static void alloc_rd_mem	PROTO ((int, int));
static void free_rd_mem		PROTO ((void));
static void handle_rd_kill_set	PROTO ((rtx, int, int));
static void compute_kill_rd	PROTO ((void));
static void compute_rd		PROTO ((void));
static void alloc_avail_expr_mem PROTO ((int, int));
static void free_avail_expr_mem	PROTO ((void));
static void compute_ae_gen	PROTO ((void));
static int expr_killed_p	PROTO ((rtx, int));
static void compute_ae_kill	PROTO ((sbitmap *, sbitmap *));
static int expr_reaches_here_p	PROTO ((struct occr *, struct expr *,
					int, int));
static rtx computing_insn	PROTO ((struct expr *, rtx));
static int def_reaches_here_p	PROTO ((rtx, rtx));
static int can_disregard_other_sets	PROTO ((struct reg_set **, rtx, int));
static int handle_avail_expr	PROTO ((rtx, struct expr *));
static int classic_gcse		PROTO ((void));
static int one_classic_gcse_pass	PROTO ((int));
static void invalidate_nonnull_info	PROTO ((rtx, rtx));

static rtx process_insert_insn	PROTO ((struct expr *));
static int pre_edge_insert	PROTO ((struct edge_list *, struct expr **));
/* Entry point for global common subexpression elimination.
   F is the first instruction in the function.  */

int
gcse_main (f, file)
     rtx f;
     FILE *file;
{
  int changed, pass;
  /* Bytes used at start of pass.  */
  int initial_bytes_used;
  /* Maximum number of bytes used by a pass.  */
  int max_pass_bytes;
  /* Point to release obstack data from for each pass.  */
  char *gcse_obstack_bottom;
  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* Assume that we do not need to run jump optimizations after gcse.  */
  run_jump_opt_after_gcse = 0;

  /* For calling dump_foo fns from gdb.  */
  debug_stderr = stderr;
  gcse_file = file;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();
  find_basic_blocks (f, max_gcse_regno, file, 1);
  if (file)
    dump_flow_info (file);

  /* Return if there's nothing to do.  */
  if (n_basic_blocks <= 1)
    {
      /* Free storage allocated by find_basic_blocks.  */
      free_basic_block_vars (0);
      return 0;
    }
  /* Trying to perform global optimizations on flow graphs which have
     a high connectivity will take a long time and is unlikely to be
     particularly useful.

     In normal circumstances a cfg should have about twice as many edges
     as blocks.  But we do not want to punish small functions which have
     a couple switch statements.  So we require a relatively large number
     of basic blocks and the ratio of edges to blocks to be high.  */
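  /* Illustrative numbers only (not from real measurements): a function with
     1500 blocks and 35000 edges (ratio 23) trips the test below, while one
     with 1500 blocks and 15000 edges (ratio 10) is optimized normally.  */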
  if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
    {
      /* Free storage allocated by find_basic_blocks.  */
      free_basic_block_vars (0);
      return 0;
    }
  /* See what modes support reg/reg copy operations.  */
  if (! can_copy_init_p)
    {
      compute_can_copy ();
      can_copy_init_p = 1;
    }

  gcc_obstack_init (&gcse_obstack);

  /* Record where pseudo-registers are set.
     This data is kept accurate during each pass.
     ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during
     hash table computation.

     It may be tempting to compute MEM set information here too, but MEM
     sets will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets (f);

  pass = 0;
  initial_bytes_used = bytes_used;
  max_pass_bytes = 0;
  gcse_obstack_bottom = gcse_alloc (1);
  changed = 1;
  while (changed && pass < MAX_PASSES)
    {
      changed = 0;
      if (file)
	fprintf (file, "GCSE pass %d\n\n", pass + 1);

      /* Initialize bytes_used to the space for the pred/succ lists,
	 and the reg_set_table data.  */
      bytes_used = initial_bytes_used;

      /* Each pass may create new registers, so recalculate each time.  */
      max_gcse_regno = max_reg_num ();

      alloc_gcse_mem (f);

      /* Don't allow constant propagation to modify jumps
	 during this pass.  */
      changed = one_cprop_pass (pass + 1, 0);

      if (optimize_size)
	changed |= one_classic_gcse_pass (pass + 1);
      else
	{
	  changed |= one_pre_gcse_pass (pass + 1);
	  free_reg_set_mem ();
	  alloc_reg_set_mem (max_reg_num ());
	  compute_sets (f);
	  run_jump_opt_after_gcse = 1;
	}

      if (max_pass_bytes < bytes_used)
	max_pass_bytes = bytes_used;

      /* Free up memory, then reallocate for code hoisting.  We can
	 not re-use the existing allocated memory because the tables
	 will not have info for the insns or registers created by
	 partial redundancy elimination.  */
      free_gcse_mem ();

      /* It does not make sense to run code hoisting unless we are optimizing
	 for code size -- it rarely makes programs faster, and can make
	 them bigger if we did partial redundancy elimination (when optimizing
	 for space, we use a classic gcse algorithm instead of partial
	 redundancy algorithms).  */
      if (optimize_size)
	{
	  max_gcse_regno = max_reg_num ();
	  alloc_gcse_mem (f);
	  changed |= one_code_hoisting_pass ();
	  free_gcse_mem ();

	  if (max_pass_bytes < bytes_used)
	    max_pass_bytes = bytes_used;
	}

      if (file)
	fprintf (file, "\n");

      obstack_free (&gcse_obstack, gcse_obstack_bottom);
      pass++;
    }
  /* Do one last pass of copy propagation, including cprop into
     conditional jumps.  */

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem (f);
  /* This time, go ahead and allow cprop to alter jumps.  */
  one_cprop_pass (pass + 1, 1);
  free_gcse_mem ();

  if (file)
    {
      fprintf (file, "GCSE of %s: %d basic blocks, ",
	       current_function_name, n_basic_blocks);
      fprintf (file, "%d pass%s, %d bytes\n\n",
	       pass, pass > 1 ? "es" : "", max_pass_bytes);
    }

  /* Free our obstack.  */
  obstack_free (&gcse_obstack, NULL_PTR);
  /* Free reg_set_table.  */
  free_reg_set_mem ();
  /* Free storage used to record predecessor/successor data.  */
  free_bb_mem ();
  /* Free storage allocated by find_basic_blocks.  */
  free_basic_block_vars (0);
  return run_jump_opt_after_gcse;
}
/* Misc. utilities.  */

/* Compute which modes support reg/reg copy operations.  */

static void
compute_can_copy ()
{
  int i;
#ifndef AVOID_CCMODE_COPIES
  rtx reg, insn;
  char *free_point = (char *) oballoc (1);
#endif

  bzero (can_copy_p, NUM_MACHINE_MODES);

  start_sequence ();
  for (i = 0; i < NUM_MACHINE_MODES; i++)
    {
      switch (GET_MODE_CLASS (i))
	{
	case MODE_CC:
#ifdef AVOID_CCMODE_COPIES
	  can_copy_p[i] = 0;
#else
	  reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
	  insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
	  if (recog (PATTERN (insn), insn, NULL_PTR) >= 0)
	    can_copy_p[i] = 1;
#endif
	  break;
	default:
	  can_copy_p[i] = 1;
	  break;
	}
    }
  end_sequence ();

#ifndef AVOID_CCMODE_COPIES
  /* Free the objects we just allocated.  */
  obfree (free_point);
#endif
}
/* Cover function to xmalloc to record bytes allocated.  */

static char *
gmalloc (size)
     unsigned int size;
{
  bytes_used += size;
  return xmalloc (size);
}
/* Cover function to xrealloc.
   We don't record the additional size since we don't know it.
   It won't affect memory usage stats much anyway.  */

static char *
grealloc (ptr, size)
     char *ptr;
     unsigned int size;
{
  return xrealloc (ptr, size);
}
/* Cover function to obstack_alloc.
   We don't need to record the bytes allocated here since
   obstack_chunk_alloc is set to gmalloc.  */

static char *
gcse_alloc (size)
     unsigned long size;
{
  return (char *) obstack_alloc (&gcse_obstack, size);
}
/* Allocate memory for the cuid mapping array,
   and reg/memory set tracking tables.

   This is called at the start of each pass.  */

static void
alloc_gcse_mem (f)
     rtx f;
{
  int i, n;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.
     CUIDs are like UIDs except they increase monotonically, have no gaps,
     and only apply to real insns.  */

  max_uid = get_max_uid ();
  n = (max_uid + 1) * sizeof (int);
  uid_cuid = (int *) gmalloc (n);
  bzero ((char *) uid_cuid, n);
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	INSN_CUID (insn) = i++;
      else
	INSN_CUID (insn) = i;
    }

  /* Create a table mapping cuids to insns.  */

  max_cuid = i;
  n = (max_cuid + 1) * sizeof (rtx);
  cuid_insn = (rtx *) gmalloc (n);
  bzero ((char *) cuid_insn, n);
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  CUID_INSN (i) = insn;
	  i++;
	}
    }

  /* Allocate vars to track sets of regs.  */

  reg_set_bitmap = (sbitmap) sbitmap_alloc (max_gcse_regno);

  /* Allocate vars to track sets of regs, memory per block.  */

  reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks,
						       max_gcse_regno);
  mem_set_in_block = (char *) gmalloc (n_basic_blocks);
}
/* Free memory allocated by alloc_gcse_mem.  */

static void
free_gcse_mem ()
{
  free (uid_cuid);
  free (cuid_insn);

  free (reg_set_bitmap);

  free (reg_set_in_block);
  free (mem_set_in_block);
}
/* Compute the local properties of each recorded expression.

   Local properties are those that are defined by the block, irrespective
   of other blocks.

   An expression is transparent in a block if its operands are not modified
   in the block.

   An expression is computed (locally available) in a block if it is computed
   at least once and the expression would contain the same value if the
   computation was moved to the end of the block.

   An expression is locally anticipatable in a block if it is computed at
   least once and the expression would contain the same value if the
   computation was moved to the beginning of the block.

   We call this routine for cprop, pre and code hoisting.  They all
   compute basically the same information and thus can easily share
   the code.

   TRANSP, COMP, and ANTLOC are destination sbitmaps for recording
   local properties.  If NULL, then it is not necessary to compute
   or record that particular property.

   SETP controls which hash table to look at.  If zero, this routine
   looks at the expr hash table; if nonzero this routine looks at
   the set hash table.  Additionally, TRANSP is computed as ~TRANSP,
   since this is really cprop's ABSALTERED.  */
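
/* Purely illustrative (a made-up block):

     r5 = r1 + r2    <- r1+r2 is locally anticipatable (first computation,
			operands unmodified before it)
     r1 = r3         <- this set of r1 makes r1+r2 non-transparent here
     r6 = r1 + r2    <- r1+r2 is locally available (last computation,
			operands unmodified through the end of the block)
*/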
static void
compute_local_properties (transp, comp, antloc, setp)
     sbitmap *transp;
     sbitmap *comp;
     sbitmap *antloc;
     int setp;
{
  int i, hash_table_size;
  struct expr **hash_table;

  /* Initialize any bitmaps that were passed in.  */
  if (transp)
    {
      if (setp)
	sbitmap_vector_zero (transp, n_basic_blocks);
      else
	sbitmap_vector_ones (transp, n_basic_blocks);
    }
  if (comp)
    sbitmap_vector_zero (comp, n_basic_blocks);
  if (antloc)
    sbitmap_vector_zero (antloc, n_basic_blocks);

  /* We use the same code for cprop, pre and hoisting.  For cprop
     we care about the set hash table, for pre and hoisting we
     care about the expr hash table.  */
  hash_table_size = setp ? set_hash_table_size : expr_hash_table_size;
  hash_table = setp ? set_hash_table : expr_hash_table;

  for (i = 0; i < hash_table_size; i++)
    {
      struct expr *expr;

      for (expr = hash_table[i]; expr != NULL; expr = expr->next_same_hash)
	{
	  struct occr *occr;
	  int indx = expr->bitmap_index;

	  /* The expression is transparent in this block if it is not killed.
	     We start by assuming all are transparent [none are killed], and
	     then reset the bits for those that are.  */
	  if (transp)
	    compute_transp (expr->expr, indx, transp, setp);

	  /* The occurrences recorded in antic_occr are exactly those that
	     we want to set to non-zero in ANTLOC.  */
	  if (antloc)
	    {
	      for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
		{
		  int bb = BLOCK_NUM (occr->insn);
		  SET_BIT (antloc[bb], indx);

		  /* While we're scanning the table, this is a good place to
		     initialize this.  */
		  occr->deleted_p = 0;
		}
	    }

	  /* The occurrences recorded in avail_occr are exactly those that
	     we want to set to non-zero in COMP.  */
	  if (comp)
	    {
	      for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
		{
		  int bb = BLOCK_NUM (occr->insn);
		  SET_BIT (comp[bb], indx);

		  /* While we're scanning the table, this is a good place to
		     initialize this.  */
		  occr->copied_p = 0;
		}
	    }

	  /* While we're scanning the table, this is a good place to
	     initialize this.  */
	  expr->reaching_reg = 0;
	}
    }
}
/* Register set information.

   `reg_set_table' records where each register is set or otherwise
   modified.  */

static struct obstack reg_set_obstack;
static void
alloc_reg_set_mem (n_regs)
     int n_regs;
{
  int n;

  reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
  n = reg_set_table_size * sizeof (struct reg_set *);
  reg_set_table = (struct reg_set **) gmalloc (n);
  bzero ((char *) reg_set_table, n);

  gcc_obstack_init (&reg_set_obstack);
}
static void
free_reg_set_mem ()
{
  free (reg_set_table);
  obstack_free (&reg_set_obstack, NULL_PTR);
}
/* Record REGNO in the reg_set table.  */

static void
record_one_set (regno, insn)
     int regno;
     rtx insn;
{
  /* Allocate a new reg_set element and link it onto the list.  */
  struct reg_set *new_reg_info, *reg_info_ptr1, *reg_info_ptr2;

  /* If the table isn't big enough, enlarge it.  */
  if (regno >= reg_set_table_size)
    {
      int new_size = regno + REG_SET_TABLE_SLOP;
      reg_set_table = (struct reg_set **)
	grealloc ((char *) reg_set_table,
		  new_size * sizeof (struct reg_set *));
      bzero ((char *) (reg_set_table + reg_set_table_size),
	     (new_size - reg_set_table_size) * sizeof (struct reg_set *));
      reg_set_table_size = new_size;
    }

  new_reg_info = (struct reg_set *) obstack_alloc (&reg_set_obstack,
						   sizeof (struct reg_set));
  bytes_used += sizeof (struct reg_set);
  new_reg_info->insn = insn;
  new_reg_info->next = NULL;
  if (reg_set_table[regno] == NULL)
    reg_set_table[regno] = new_reg_info;
  else
    {
      reg_info_ptr1 = reg_info_ptr2 = reg_set_table[regno];
      /* ??? One could keep a "last" pointer to speed this up.  */
      while (reg_info_ptr1 != NULL)
	{
	  reg_info_ptr2 = reg_info_ptr1;
	  reg_info_ptr1 = reg_info_ptr1->next;
	}
      reg_info_ptr2->next = new_reg_info;
    }
}
/* For communication between next two functions (via note_stores).  */
static rtx record_set_insn;

/* Called from compute_sets via note_stores to handle one
   SET or CLOBBER in an insn.  */

static void
record_set_info (dest, setter)
     rtx dest, setter ATTRIBUTE_UNUSED;
{
  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (GET_CODE (dest) == REG)
    {
      if (REGNO (dest) >= FIRST_PSEUDO_REGISTER)
	record_one_set (REGNO (dest), record_set_insn);
    }
}
/* Scan the function and record each set of each pseudo-register.

   This is called once, at the start of the gcse pass.
   See the comments for `reg_set_table' for further docs.  */

static void
compute_sets (f)
     rtx f;
{
  rtx insn = f;

  while (insn)
    {
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  record_set_insn = insn;
	  note_stores (PATTERN (insn), record_set_info);
	}
      insn = NEXT_INSN (insn);
    }
}
/* Hash table support.  */

#define NEVER_SET -1

/* For each register, the cuid of the first/last insn in the block to set it,
   or -1 if not set.  */
static int *reg_first_set;
static int *reg_last_set;

/* While computing "first/last set" info, this is the CUID of first/last insn
   to set memory or -1 if not set.  `mem_last_set' is also used when
   performing GCSE to record whether memory has been set since the beginning
   of the block.
   Note that handling of memory is very simple, we don't make any attempt
   to optimize things (later).  */
static int mem_first_set;
static int mem_last_set;
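
/* Purely illustrative (not part of this file): with these tables, "register
   R is unchanged from the start of the block up to INSN" is the test

     reg_first_set[R] == NEVER_SET || reg_first_set[R] >= INSN_CUID (insn)

   and the mirror-image test on reg_last_set answers whether R is unchanged
   from INSN to the end of the block.  */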
/* Perform a quick check whether X, the source of a set, is something
   we want to consider for GCSE.  */

static int
want_to_gcse_p (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    /* Copies and constants are handled by const/copy propagation;
       calls are never GCSE'd.  */
    case REG:
    case SUBREG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CALL:
      return 0;
    default:
      break;
    }

  return 1;
}
/* Return non-zero if the operands of expression X are unchanged from the
   start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
   or from INSN to the end of INSN's basic block (if AVAIL_P != 0).  */

static int
oprs_unchanged_p (x, insn, avail_p)
     rtx x, insn;
     int avail_p;
{
  int i;
  enum rtx_code code;
  const char *fmt;

  /* repeat is used to turn tail-recursion into iteration.  */
 repeat:

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      if (avail_p)
	return (reg_last_set[REGNO (x)] == NEVER_SET
		|| reg_last_set[REGNO (x)] < INSN_CUID (insn));
      else
	return (reg_first_set[REGNO (x)] == NEVER_SET
		|| reg_first_set[REGNO (x)] >= INSN_CUID (insn));

    case MEM:
      if (avail_p)
	{
	  if (mem_last_set != NEVER_SET
	      && mem_last_set >= INSN_CUID (insn))
	    return 0;
	}
      else
	{
	  if (mem_first_set != NEVER_SET
	      && mem_first_set < INSN_CUID (insn))
	    return 0;
	}
      x = XEXP (x, 0);
      goto repeat;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
      return 0;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    default:
      break;
    }

  i = GET_RTX_LENGTH (code) - 1;
  fmt = GET_RTX_FORMAT (code);
  for (; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  rtx tem = XEXP (x, i);

	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = tem;
	      goto repeat;
	    }
	  if (! oprs_unchanged_p (tem, insn, avail_p))
	    return 0;
	}
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    {
	      if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
		return 0;
	    }
	}
    }

  return 1;
}
/* Return non-zero if the operands of expression X are unchanged from
   the start of INSN's basic block up to but not including INSN.  */

static int
oprs_anticipatable_p (x, insn)
     rtx x, insn;
{
  return oprs_unchanged_p (x, insn, 0);
}
/* Return non-zero if the operands of expression X are unchanged from
   INSN to the end of INSN's basic block.  */

static int
oprs_available_p (x, insn)
     rtx x, insn;
{
  return oprs_unchanged_p (x, insn, 1);
}
/* Hash expression X.
   MODE is only used if X is a CONST_INT.
   A boolean indicating if a volatile operand is found or if the expression
   contains something we don't want to insert in the table is stored in
   DO_NOT_RECORD_P.

   ??? One might want to merge this with canon_hash.  Later.  */

static unsigned int
hash_expr (x, mode, do_not_record_p, hash_table_size)
     rtx x;
     enum machine_mode mode;
     int *do_not_record_p;
     int hash_table_size;
{
  unsigned int hash;

  *do_not_record_p = 0;

  hash = hash_expr_1 (x, mode, do_not_record_p);
  return hash % hash_table_size;
}
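
/* Minimal usage sketch (illustrative only, mirroring the call in
   insert_expr_in_table below):

     int do_not_record_p;
     unsigned int hash = hash_expr (x, mode, &do_not_record_p,
				    expr_hash_table_size);
     if (! do_not_record_p)
       ... walk the chain at expr_hash_table[hash] ...
*/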
/* Subroutine of hash_expr to do the actual work.  */

static unsigned int
hash_expr_1 (x, mode, do_not_record_p)
     rtx x;
     enum machine_mode mode;
     int *do_not_record_p;
{
  int i, j;
  unsigned hash = 0;
  enum rtx_code code;
  const char *fmt;

  /* repeat is used to turn tail-recursion into iteration.  */
 repeat:

  if (x == 0)
    return hash;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
	register int regno = REGNO (x);
	hash += ((unsigned) REG << 7) + regno;
	return hash;
      }

    case CONST_INT:
      {
	unsigned HOST_WIDE_INT tem = INTVAL (x);
	hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
	return hash;
      }

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
	 the integers representing the constant.  */
      hash += (unsigned) code + (unsigned) GET_MODE (x);
      if (GET_MODE (x) != VOIDmode)
	for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
	  {
	    unsigned tem = XWINT (x, i);
	    hash += tem;
	  }
      else
	hash += ((unsigned) CONST_DOUBLE_LOW (x)
		 + (unsigned) CONST_DOUBLE_HIGH (x));
      return hash;

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
	 differences and differences between each stage's debugging dumps.  */
      hash += ((unsigned) LABEL_REF << 7) + CODE_LABEL_NUMBER (XEXP (x, 0));
      return hash;

    case SYMBOL_REF:
      {
	/* Don't hash on the symbol's address to avoid bootstrap differences.
	   Different hash values may cause expressions to be recorded in
	   different orders and thus different registers to be used in the
	   final assembler.  This also avoids differences in the dump files
	   between various stages.  */
	unsigned int h = 0;
	unsigned char *p = (unsigned char *) XSTR (x, 0);
	while (*p)
	  h += (h << 7) + *p++; /* ??? revisit */
	hash += ((unsigned) SYMBOL_REF << 7) + h;
	return hash;
      }

    case MEM:
      if (MEM_VOLATILE_P (x))
	{
	  *do_not_record_p = 1;
	  return 0;
	}
      hash += (unsigned) MEM;
      hash += MEM_ALIAS_SET (x);
      x = XEXP (x, 0);
      goto repeat;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PC:
    case CC0:
    case CALL:
    case UNSPEC_VOLATILE:
      *do_not_record_p = 1;
      return 0;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
	{
	  *do_not_record_p = 1;
	  return 0;
	}
      break;

    default:
      break;
    }

  i = GET_RTX_LENGTH (code) - 1;
  hash += (unsigned) code + (unsigned) GET_MODE (x);
  fmt = GET_RTX_FORMAT (code);
  for (; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  rtx tem = XEXP (x, i);

	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = tem;
	      goto repeat;
	    }
	  hash += hash_expr_1 (tem, 0, do_not_record_p);
	  if (*do_not_record_p)
	    return 0;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  {
	    hash += hash_expr_1 (XVECEXP (x, i, j), 0, do_not_record_p);
	    if (*do_not_record_p)
	      return 0;
	  }
      else if (fmt[i] == 's')
	{
	  register unsigned char *p = (unsigned char *) XSTR (x, i);
	  if (p)
	    while (*p)
	      hash += *p++;
	}
      else if (fmt[i] == 'i')
	{
	  register unsigned tem = XINT (x, i);
	  hash += tem;
	}
    }

  return hash;
}
/* Hash a set of register REGNO.

   Sets are hashed on the register that is set.
   This simplifies the PRE copy propagation code.

   ??? May need to make things more elaborate.  Later, as necessary.  */

static unsigned int
hash_set (regno, hash_table_size)
     int regno;
     int hash_table_size;
{
  unsigned int hash;

  hash = regno;
  return hash % hash_table_size;
}
/* Return non-zero if exp1 is equivalent to exp2.
   ??? Borrowed from cse.c.  Might want to remerge with cse.c.  Later.  */

static int
expr_equiv_p (x, y)
     rtx x, y;
{
  register int i, j;
  register enum rtx_code code;
  register const char *fmt;

  if (x == y)
    return 1;
  if (x == 0 || y == 0)
    return x == y;

  code = GET_CODE (x);
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
      return x == y;

    case CONST_INT:
      return INTVAL (x) == INTVAL (y);

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case REG:
      return REGNO (x) == REGNO (y);

    case MEM:
      /* Can't merge two expressions in different alias sets, since we can
	 decide that the expression is transparent in a block when it isn't,
	 due to it being set with the different alias set.  */
      if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
	return 0;
      break;

    /* For commutative operations, check both orders.  */
    case PLUS:
    case MULT:
    case AND:
    case IOR:
    case XOR:
    case NE:
    case EQ:
      return ((expr_equiv_p (XEXP (x, 0), XEXP (y, 0))
	       && expr_equiv_p (XEXP (x, 1), XEXP (y, 1)))
	      || (expr_equiv_p (XEXP (x, 0), XEXP (y, 1))
		  && expr_equiv_p (XEXP (x, 1), XEXP (y, 0))));

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'e':
	  if (! expr_equiv_p (XEXP (x, i), XEXP (y, i)))
	    return 0;
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (! expr_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
	      return 0;
	  break;

	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case '0':
	  break;

	default:
	  abort ();
	}
    }

  return 1;
}
/* Insert expression X in INSN in the hash table.
   If it is already present, record it as the last occurrence in INSN's
   basic block.

   MODE is the mode of the value X is being stored into.
   It is only used if X is a CONST_INT.

   ANTIC_P is non-zero if X is an anticipatable expression.
   AVAIL_P is non-zero if X is an available expression.  */
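
/* Minimal usage sketch (illustrative only, mirroring the call in
   hash_scan_set below):

     insert_expr_in_table (src, GET_MODE (dest), insn,
			   oprs_anticipatable_p (src, insn),
			   oprs_available_p (src, insn));
*/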
static void
insert_expr_in_table (x, mode, insn, antic_p, avail_p)
     rtx x;
     enum machine_mode mode;
     rtx insn;
     int antic_p, avail_p;
{
  int found, do_not_record_p;
  unsigned int hash;
  struct expr *cur_expr, *last_expr = NULL;
  struct occr *antic_occr, *avail_occr;
  struct occr *last_occr = NULL;

  hash = hash_expr (x, mode, &do_not_record_p, expr_hash_table_size);

  /* Do not insert expression in table if it contains volatile operands,
     or if hash_expr determines the expression is something we don't want
     to or can't handle.  */
  if (do_not_record_p)
    return;

  cur_expr = expr_hash_table[hash];
  found = 0;

  while (cur_expr && ! (found = expr_equiv_p (cur_expr->expr, x)))
    {
      /* If the expression isn't found, save a pointer to the end of
	 the list.  */
      last_expr = cur_expr;
      cur_expr = cur_expr->next_same_hash;
    }

  if (! found)
    {
      cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
      bytes_used += sizeof (struct expr);
      if (expr_hash_table[hash] == NULL)
	{
	  /* This is the first pattern that hashed to this index.  */
	  expr_hash_table[hash] = cur_expr;
	}
      else
	{
	  /* Add EXPR to end of this hash chain.  */
	  last_expr->next_same_hash = cur_expr;
	}
      /* Set the fields of the expr element.  */
      cur_expr->expr = x;
      cur_expr->bitmap_index = n_exprs++;
      cur_expr->next_same_hash = NULL;
      cur_expr->antic_occr = NULL;
      cur_expr->avail_occr = NULL;
    }

  /* Now record the occurrence(s).  */

  if (antic_p)
    {
      antic_occr = cur_expr->antic_occr;

      /* Search for another occurrence in the same basic block.  */
      while (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
	{
	  /* If an occurrence isn't found, save a pointer to the end of
	     the list.  */
	  last_occr = antic_occr;
	  antic_occr = antic_occr->next;
	}

      if (antic_occr)
	{
	  /* Found another instance of the expression in the same basic block.
	     Prefer the currently recorded one.  We want the first one in the
	     block and the block is scanned from start to end.  */
	  ; /* nothing to do */
	}
      else
	{
	  /* First occurrence of this expression in this basic block.  */
	  antic_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
	  bytes_used += sizeof (struct occr);
	  /* First occurrence of this expression in any block?  */
	  if (cur_expr->antic_occr == NULL)
	    cur_expr->antic_occr = antic_occr;
	  else
	    last_occr->next = antic_occr;
	  antic_occr->insn = insn;
	  antic_occr->next = NULL;
	}
    }

  if (avail_p)
    {
      avail_occr = cur_expr->avail_occr;

      /* Search for another occurrence in the same basic block.  */
      while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
	{
	  /* If an occurrence isn't found, save a pointer to the end of
	     the list.  */
	  last_occr = avail_occr;
	  avail_occr = avail_occr->next;
	}

      if (avail_occr)
	{
	  /* Found another instance of the expression in the same basic block.
	     Prefer this occurrence to the currently recorded one.  We want
	     the last one in the block and the block is scanned from start
	     to end.  */
	  avail_occr->insn = insn;
	}
      else
	{
	  /* First occurrence of this expression in this basic block.  */
	  avail_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
	  bytes_used += sizeof (struct occr);
	  /* First occurrence of this expression in any block?  */
	  if (cur_expr->avail_occr == NULL)
	    cur_expr->avail_occr = avail_occr;
	  else
	    last_occr->next = avail_occr;
	  avail_occr->insn = insn;
	  avail_occr->next = NULL;
	}
    }
}
/* Insert pattern X in INSN in the hash table.
   X is a SET of a reg to either another reg or a constant.
   If it is already present, record it as the last occurrence in INSN's
   basic block.  */
static void
insert_set_in_table (x, insn)
     rtx x;
     rtx insn;
{
  int found;
  unsigned int hash;
  struct expr *cur_expr, *last_expr = NULL;
  struct occr *cur_occr, *last_occr = NULL;

  if (GET_CODE (x) != SET
      || GET_CODE (SET_DEST (x)) != REG)
    abort ();

  hash = hash_set (REGNO (SET_DEST (x)), set_hash_table_size);

  cur_expr = set_hash_table[hash];
  found = 0;

  while (cur_expr && ! (found = expr_equiv_p (cur_expr->expr, x)))
    {
      /* If the expression isn't found, save a pointer to the end of
	 the list.  */
      last_expr = cur_expr;
      cur_expr = cur_expr->next_same_hash;
    }

  if (! found)
    {
      cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
      bytes_used += sizeof (struct expr);
      if (set_hash_table[hash] == NULL)
	{
	  /* This is the first pattern that hashed to this index.  */
	  set_hash_table[hash] = cur_expr;
	}
      else
	{
	  /* Add EXPR to end of this hash chain.  */
	  last_expr->next_same_hash = cur_expr;
	}
      /* Set the fields of the expr element.
	 We must copy X because it can be modified when copy propagation is
	 performed on its operands.  */
      /* ??? Should this go in a different obstack?  */
      cur_expr->expr = copy_rtx (x);
      cur_expr->bitmap_index = n_sets++;
      cur_expr->next_same_hash = NULL;
      cur_expr->antic_occr = NULL;
      cur_expr->avail_occr = NULL;
    }

  /* Now record the occurrence.  */

  cur_occr = cur_expr->avail_occr;

  /* Search for another occurrence in the same basic block.  */
  while (cur_occr && BLOCK_NUM (cur_occr->insn) != BLOCK_NUM (insn))
    {
      /* If an occurrence isn't found, save a pointer to the end of
	 the list.  */
      last_occr = cur_occr;
      cur_occr = cur_occr->next;
    }

  if (cur_occr)
    {
      /* Found another instance of the expression in the same basic block.
	 Prefer this occurrence to the currently recorded one.  We want
	 the last one in the block and the block is scanned from start
	 to end.  */
      cur_occr->insn = insn;
    }
  else
    {
      /* First occurrence of this expression in this basic block.  */
      cur_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
      bytes_used += sizeof (struct occr);
      /* First occurrence of this expression in any block?  */
      if (cur_expr->avail_occr == NULL)
	cur_expr->avail_occr = cur_occr;
      else
	last_occr->next = cur_occr;
      cur_occr->insn = insn;
      cur_occr->next = NULL;
    }
}
/* Scan pattern PAT of INSN and add an entry to the hash table.
   If SET_P is non-zero, this is for the assignment hash table,
   otherwise it is for the expression hash table.  */

static void
hash_scan_set (pat, insn, set_p)
     rtx pat, insn;
     int set_p;
{
  rtx src = SET_SRC (pat);
  rtx dest = SET_DEST (pat);

  if (GET_CODE (src) == CALL)
    hash_scan_call (src, insn);

  if (GET_CODE (dest) == REG)
    {
      int regno = REGNO (dest);
      rtx tmp;

      if (! set_p
	  /* Only record sets of pseudo-regs in the hash table.  */
	  && regno >= FIRST_PSEUDO_REGISTER
	  /* Don't GCSE something if we can't do a reg/reg copy.  */
	  && can_copy_p[GET_MODE (dest)]
	  /* Is SET_SRC something we want to gcse?  */
	  && want_to_gcse_p (src))
	{
	  /* An expression is not anticipatable if its operands are
	     modified before this insn.  */
	  int antic_p = oprs_anticipatable_p (src, insn);
	  /* An expression is not available if its operands are
	     subsequently modified, including this insn.  */
	  int avail_p = oprs_available_p (src, insn);
	  insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p);
	}
      /* Record sets for constant/copy propagation.  */
      else if (set_p
	       && regno >= FIRST_PSEUDO_REGISTER
	       && ((GET_CODE (src) == REG
		    && REGNO (src) >= FIRST_PSEUDO_REGISTER
		    && can_copy_p[GET_MODE (dest)])
		   || GET_CODE (src) == CONST_INT
		   || GET_CODE (src) == SYMBOL_REF
		   || GET_CODE (src) == CONST_DOUBLE)
	       /* A copy is not available if its src or dest is subsequently
		  modified.  Here we want to search from INSN+1 on, but
		  oprs_available_p searches from INSN on.  */
	       && (insn == BLOCK_END (BLOCK_NUM (insn))
		   || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
		       && oprs_available_p (pat, tmp))))
	insert_set_in_table (pat, insn);
    }
}
static void
hash_scan_clobber (x, insn)
     rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
{
  /* Currently nothing to do.  */
}

static void
hash_scan_call (x, insn)
     rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
{
  /* Currently nothing to do.  */
}
/* Process INSN and add hash table entries as appropriate.

   Only available expressions that set a single pseudo-reg are recorded.

   Single sets in a PARALLEL could be handled, but it's an extra complication
   that isn't dealt with right now.  The trick is handling the CLOBBERs that
   are also in the PARALLEL.  Later.

   If SET_P is non-zero, this is for the assignment hash table,
   otherwise it is for the expression hash table.
   If IN_LIBCALL_BLOCK nonzero, we are in a libcall block, and should
   not record any expressions.  */

static void
hash_scan_insn (insn, set_p, in_libcall_block)
     rtx insn;
     int set_p;
     int in_libcall_block;
{
  rtx pat = PATTERN (insn);

  /* Pick out the sets of INSN and for other forms of instructions record
     what's been modified.  */

  if (GET_CODE (pat) == SET && ! in_libcall_block)
    {
      /* Ignore obvious no-ops.  */
      if (SET_SRC (pat) != SET_DEST (pat))
	hash_scan_set (pat, insn, set_p);
    }
  else if (GET_CODE (pat) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (pat, 0); i++)
	{
	  rtx x = XVECEXP (pat, 0, i);

	  if (GET_CODE (x) == SET)
	    {
	      if (GET_CODE (SET_SRC (x)) == CALL)
		hash_scan_call (SET_SRC (x), insn);
	    }
	  else if (GET_CODE (x) == CLOBBER)
	    hash_scan_clobber (x, insn);
	  else if (GET_CODE (x) == CALL)
	    hash_scan_call (x, insn);
	}
    }
  else if (GET_CODE (pat) == CLOBBER)
    hash_scan_clobber (pat, insn);
  else if (GET_CODE (pat) == CALL)
    hash_scan_call (pat, insn);
}
static void
dump_hash_table (file, name, table, table_size, total_size)
     FILE *file;
     const char *name;
     struct expr **table;
     int table_size, total_size;
{
  int i;
  /* Flattened out table, so it's printed in proper order.  */
  struct expr **flat_table
    = (struct expr **) alloca (total_size * sizeof (struct expr *));
  unsigned int *hash_val
    = (unsigned int *) alloca (total_size * sizeof (unsigned int));

  bzero ((char *) flat_table, total_size * sizeof (struct expr *));
  for (i = 0; i < table_size; i++)
    {
      struct expr *expr;

      for (expr = table[i]; expr != NULL; expr = expr->next_same_hash)
	{
	  flat_table[expr->bitmap_index] = expr;
	  hash_val[expr->bitmap_index] = i;
	}
    }

  fprintf (file, "%s hash table (%d buckets, %d entries)\n",
	   name, table_size, total_size);

  for (i = 0; i < total_size; i++)
    {
      struct expr *expr = flat_table[i];

      fprintf (file, "Index %d (hash value %d)\n  ",
	       expr->bitmap_index, hash_val[i]);
      print_rtl (file, expr->expr);
      fprintf (file, "\n");
    }

  fprintf (file, "\n");
}
/* Record register first/last/block set information for REGNO in INSN.
   reg_first_set records the first place in the block where the register
   is set and is used to compute "anticipatability".
   reg_last_set records the last place in the block where the register
   is set and is used to compute "availability".
   reg_set_in_block records whether the register is set in the block
   and is used to compute "transparency".  */

static void
record_last_reg_set_info (insn, regno)
     rtx insn;
     int regno;
{
  if (reg_first_set[regno] == NEVER_SET)
    reg_first_set[regno] = INSN_CUID (insn);
  reg_last_set[regno] = INSN_CUID (insn);
  SET_BIT (reg_set_in_block[BLOCK_NUM (insn)], regno);
}
/* Record memory first/last/block set information for INSN.  */

static void
record_last_mem_set_info (insn)
     rtx insn;
{
  if (mem_first_set == NEVER_SET)
    mem_first_set = INSN_CUID (insn);
  mem_last_set = INSN_CUID (insn);
  mem_set_in_block[BLOCK_NUM (insn)] = 1;
}
/* Used for communicating between the next two routines.  */
static rtx last_set_insn;

/* Called from compute_hash_table via note_stores to handle one
   SET or CLOBBER in an insn.  */

static void
record_last_set_info (dest, setter)
     rtx dest, setter ATTRIBUTE_UNUSED;
{
  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (GET_CODE (dest) == REG)
    record_last_reg_set_info (last_set_insn, REGNO (dest));
  else if (GET_CODE (dest) == MEM
	   /* Ignore pushes, they clobber nothing.  */
	   && ! push_operand (dest, GET_MODE (dest)))
    record_last_mem_set_info (last_set_insn);
}
/* Top level function to create an expression or assignment hash table.

   Expression entries are placed in the hash table if
   - they are of the form (set (pseudo-reg) src),
   - src is something we want to perform GCSE on,
   - none of the operands are subsequently modified in the block.

   Assignment entries are placed in the hash table if
   - they are of the form (set (pseudo-reg) src),
   - src is something we want to perform const/copy propagation on,
   - none of the operands or target are subsequently modified in the block.
   Currently src must be a pseudo-reg or a const_int.

   F is the first insn.
   SET_P is non-zero for computing the assignment hash table.  */
static void
compute_hash_table (set_p)
     int set_p;
{
  int bb;

  /* While we compute the hash table we also compute a bit array of which
     registers are set in which blocks.
     We also compute which blocks set memory, in the absence of aliasing
     support [which is TODO].
     ??? This isn't needed during const/copy propagation, but it's cheap to
     compute.  Later.  */
  sbitmap_vector_zero (reg_set_in_block, n_basic_blocks);
  bzero ((char *) mem_set_in_block, n_basic_blocks);

  /* Some working arrays used to track first and last set in each block.  */
  /* ??? One could use alloca here, but at some size a threshold is crossed
     beyond which one should use malloc.  Are we at that threshold here?  */
  reg_first_set = (int *) gmalloc (max_gcse_regno * sizeof (int));
  reg_last_set = (int *) gmalloc (max_gcse_regno * sizeof (int));

  for (bb = 0; bb < n_basic_blocks; bb++)
    {
      rtx insn;
      int regno;
      int in_libcall_block;
      int i;

      /* First pass over the instructions records information used to
	 determine when registers and memory are first and last set.
	 ??? The mem_set_in_block and hard-reg reg_set_in_block computation
	 could be moved to compute_sets since they currently don't change.  */

      for (i = 0; i < max_gcse_regno; i++)
	reg_first_set[i] = reg_last_set[i] = NEVER_SET;
      mem_first_set = NEVER_SET;
      mem_last_set = NEVER_SET;

      for (insn = BLOCK_HEAD (bb);
	   insn && insn != NEXT_INSN (BLOCK_END (bb));
	   insn = NEXT_INSN (insn))
	{
#ifdef NON_SAVING_SETJMP
	  if (NON_SAVING_SETJMP && GET_CODE (insn) == NOTE
	      && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
	    {
	      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
		record_last_reg_set_info (insn, regno);
	      continue;
	    }
#endif

	  if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
	    continue;

	  if (GET_CODE (insn) == CALL_INSN)
	    {
	      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
		if ((call_used_regs[regno]
		     && regno != STACK_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
		     && regno != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
		     && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
#if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
		     && ! (regno == PIC_OFFSET_TABLE_REGNUM && flag_pic)
#endif
		     && regno != FRAME_POINTER_REGNUM)
		    || global_regs[regno])
		  record_last_reg_set_info (insn, regno);

	      if (! CONST_CALL_P (insn))
		record_last_mem_set_info (insn);
	    }

	  last_set_insn = insn;
	  note_stores (PATTERN (insn), record_last_set_info);
	}

      /* The next pass builds the hash table.  */

      for (insn = BLOCK_HEAD (bb), in_libcall_block = 0;
	   insn && insn != NEXT_INSN (BLOCK_END (bb));
	   insn = NEXT_INSN (insn))
	{
	  if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	    {
	      if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
		in_libcall_block = 1;
	      else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
		in_libcall_block = 0;

	      hash_scan_insn (insn, set_p, in_libcall_block);
	    }
	}
    }

  free (reg_first_set);
  free (reg_last_set);
  /* Catch bugs early.  */
  reg_first_set = reg_last_set = 0;
}

/* Allocate space for the set hash table.
   N_INSNS is the number of instructions in the function.
   It is used to determine the number of buckets to use.  */

static void
alloc_set_hash_table (n_insns)
     int n_insns;
{
  int n;

  set_hash_table_size = n_insns / 4;
  if (set_hash_table_size < 11)
    set_hash_table_size = 11;
  /* Attempt to maintain efficient use of hash table.
     Making it an odd number is simplest for now.
     ??? Later take some measurements.  */
  set_hash_table_size |= 1;
  n = set_hash_table_size * sizeof (struct expr *);
  set_hash_table = (struct expr **) gmalloc (n);
}
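
/* Illustrative arithmetic, not from the original source: with
   n_insns == 1000 the table gets 1000 / 4 = 250 buckets, and the
   `|= 1' above bumps that to 251 so the bucket count is odd; for
   n_insns < 44 the floor of 11 buckets applies.  */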

/* Free things allocated by alloc_set_hash_table.  */

static void
free_set_hash_table ()
{
  free (set_hash_table);
}

/* Compute the hash table for doing copy/const propagation.  */

static void
compute_set_hash_table ()
{
  /* Initialize count of number of entries in hash table.  */
  n_sets = 0;
  bzero ((char *) set_hash_table, set_hash_table_size * sizeof (struct expr *));

  compute_hash_table (1);
}

/* Allocate space for the expression hash table.
   N_INSNS is the number of instructions in the function.
   It is used to determine the number of buckets to use.  */

static void
alloc_expr_hash_table (n_insns)
     int n_insns;
{
  int n;

  expr_hash_table_size = n_insns / 2;
  /* Make sure the amount is usable.  */
  if (expr_hash_table_size < 11)
    expr_hash_table_size = 11;
  /* Attempt to maintain efficient use of hash table.
     Making it an odd number is simplest for now.
     ??? Later take some measurements.  */
  expr_hash_table_size |= 1;
  n = expr_hash_table_size * sizeof (struct expr *);
  expr_hash_table = (struct expr **) gmalloc (n);
}

/* Free things allocated by alloc_expr_hash_table.  */

static void
free_expr_hash_table ()
{
  free (expr_hash_table);
}

/* Compute the hash table for doing GCSE.  */

static void
compute_expr_hash_table ()
{
  /* Initialize count of number of entries in hash table.  */
  n_exprs = 0;
  bzero ((char *) expr_hash_table, expr_hash_table_size * sizeof (struct expr *));

  compute_hash_table (0);
}

/* Expression tracking support.  */

/* Lookup pattern PAT in the expression table.
   The result is a pointer to the table entry, or NULL if not found.  */

static struct expr *
lookup_expr (pat)
     rtx pat;
{
  struct expr *expr;
  int do_not_record_p;
  unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p,
                                 expr_hash_table_size);

  if (do_not_record_p)
    return NULL;

  expr = expr_hash_table[hash];

  while (expr && ! expr_equiv_p (expr->expr, pat))
    expr = expr->next_same_hash;

  return expr;
}

/* Lookup REGNO in the set table.
   If PAT is non-NULL look for the entry that matches it, otherwise return
   the first entry for REGNO.
   The result is a pointer to the table entry, or NULL if not found.  */

static struct expr *
lookup_set (regno, pat)
     int regno;
     rtx pat;
{
  unsigned int hash = hash_set (regno, set_hash_table_size);
  struct expr *expr;

  expr = set_hash_table[hash];

  if (pat)
    {
      while (expr && ! expr_equiv_p (expr->expr, pat))
        expr = expr->next_same_hash;
    }
  else
    {
      while (expr && REGNO (SET_DEST (expr->expr)) != regno)
        expr = expr->next_same_hash;
    }

  return expr;
}

/* Return the next entry for REGNO in list EXPR.  */

static struct expr *
next_set (regno, expr)
     int regno;
     struct expr *expr;
{
  do
    expr = expr->next_same_hash;
  while (expr && REGNO (SET_DEST (expr->expr)) != regno);

  return expr;
}
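
/* A minimal usage sketch, not from the original source: the pair
   lookup_set/next_set visits every recorded set of a register, where
   each set->expr is a (set (reg REGNO) src) pattern and examine_set
   is a hypothetical consumer.

     struct expr *set;

     for (set = lookup_set (regno, NULL_RTX);
          set != NULL;
          set = next_set (regno, set))
       examine_set (SET_SRC (set->expr));
*/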

/* Reset tables used to keep track of what's still available [since the
   start of the block].  */

static void
reset_opr_set_tables ()
{
  /* Maintain a bitmap of which regs have been set since beginning of
     the block.  */
  sbitmap_zero (reg_set_bitmap);
  /* Also keep a record of the last instruction to modify memory.
     For now this is very trivial, we only record whether any memory
     location has been modified.  */
  mem_last_set = 0;
}

/* Return non-zero if the operands of X are not set before INSN in
   INSN's basic block.  */

static int
oprs_not_set_p (x, insn)
     rtx x, insn;
{
  int i;
  enum rtx_code code;
  char *fmt;

  /* repeat is used to turn tail-recursion into iteration.  */
 repeat:

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    case MEM:
      if (mem_last_set != 0)
        return 0;
      x = XEXP (x, 0);
      goto repeat;

    case REG:
      return ! TEST_BIT (reg_set_bitmap, REGNO (x));

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          int not_set_p;

          /* If we are about to do the last recursive call
             needed at this level, change it into iteration.
             This function is called enough to be worth it.  */
          if (i == 0)
            {
              x = XEXP (x, 0);
              goto repeat;
            }
          not_set_p = oprs_not_set_p (XEXP (x, i), insn);
          if (! not_set_p)
            return 0;
        }
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = 0; j < XVECLEN (x, i); j++)
            {
              int not_set_p = oprs_not_set_p (XVECEXP (x, i, j), insn);
              if (! not_set_p)
                return 0;
            }
        }
    }

  return 1;
}

/* Mark things set by a CALL.  */

static void
mark_call (insn)
     rtx insn;
{
  mem_last_set = INSN_CUID (insn);
}

/* Mark things set by a SET.  */

static void
mark_set (pat, insn)
     rtx pat, insn;
{
  rtx dest = SET_DEST (pat);

  while (GET_CODE (dest) == SUBREG
         || GET_CODE (dest) == ZERO_EXTRACT
         || GET_CODE (dest) == SIGN_EXTRACT
         || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  if (GET_CODE (dest) == REG)
    SET_BIT (reg_set_bitmap, REGNO (dest));
  else if (GET_CODE (dest) == MEM)
    mem_last_set = INSN_CUID (insn);

  if (GET_CODE (SET_SRC (pat)) == CALL)
    mark_call (insn);
}

/* Record things set by a CLOBBER.  */

static void
mark_clobber (pat, insn)
     rtx pat, insn;
{
  rtx clob = XEXP (pat, 0);

  while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
    clob = XEXP (clob, 0);

  if (GET_CODE (clob) == REG)
    SET_BIT (reg_set_bitmap, REGNO (clob));
  else
    mem_last_set = INSN_CUID (insn);
}

/* Record things set by INSN.
   This data is used by oprs_not_set_p.  */

static void
mark_oprs_set (insn)
     rtx insn;
{
  rtx pat = PATTERN (insn);

  if (GET_CODE (pat) == SET)
    mark_set (pat, insn);
  else if (GET_CODE (pat) == PARALLEL)
    {
      int i;

      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx x = XVECEXP (pat, 0, i);

          if (GET_CODE (x) == SET)
            mark_set (x, insn);
          else if (GET_CODE (x) == CLOBBER)
            mark_clobber (x, insn);
          else if (GET_CODE (x) == CALL)
            mark_call (insn);
        }
    }
  else if (GET_CODE (pat) == CLOBBER)
    mark_clobber (pat, insn);
  else if (GET_CODE (pat) == CALL)
    mark_call (insn);
}

/* Classic GCSE reaching definition support.  */

/* Allocate reaching def variables.  */

static void
alloc_rd_mem (n_blocks, n_insns)
     int n_blocks, n_insns;
{
  rd_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
  sbitmap_vector_zero (rd_kill, n_basic_blocks);

  rd_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
  sbitmap_vector_zero (rd_gen, n_basic_blocks);

  reaching_defs = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
  sbitmap_vector_zero (reaching_defs, n_basic_blocks);

  rd_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
  sbitmap_vector_zero (rd_out, n_basic_blocks);
}

/* Free reaching def variables.  */

static void
free_rd_mem ()
{
  free (rd_kill);
  free (rd_gen);
  free (reaching_defs);
  free (rd_out);
}

/* Add INSN to the kills of BB.
   REGNO, set in BB, is killed by INSN.  */

static void
handle_rd_kill_set (insn, regno, bb)
     rtx insn;
     int regno, bb;
{
  struct reg_set *this_reg = reg_set_table[regno];

  while (this_reg)
    {
      if (BLOCK_NUM (this_reg->insn) != BLOCK_NUM (insn))
        SET_BIT (rd_kill[bb], INSN_CUID (this_reg->insn));
      this_reg = this_reg->next;
    }
}

/* Compute the set of kills for reaching definitions.  */

static void
compute_kill_rd ()
{
  int bb, cuid;

  /* For each block
       For each set bit in `gen' of the block (i.e. each insn which
           generates a definition in the block)
         Call the reg set by the insn corresponding to that bit regx
         Look at the linked list starting at reg_set_table[regx]
         For each setting of regx in the linked list, which is not in
             this block
           Set the bit in `kill' corresponding to that insn.  */

  for (bb = 0; bb < n_basic_blocks; bb++)
    {
      for (cuid = 0; cuid < max_cuid; cuid++)
        {
          if (TEST_BIT (rd_gen[bb], cuid))
            {
              rtx insn = CUID_INSN (cuid);
              rtx pat = PATTERN (insn);

              if (GET_CODE (insn) == CALL_INSN)
                {
                  int regno;

                  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
                    {
                      if ((call_used_regs[regno]
                           && regno != STACK_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
                           && regno != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
                           && ! (regno == ARG_POINTER_REGNUM
                                 && fixed_regs[regno])
#endif
#if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
                           && ! (regno == PIC_OFFSET_TABLE_REGNUM && flag_pic)
#endif
                           && regno != FRAME_POINTER_REGNUM)
                          || global_regs[regno])
                        handle_rd_kill_set (insn, regno, bb);
                    }
                }

              if (GET_CODE (pat) == PARALLEL)
                {
                  int i;

                  /* We work backwards because ... */
                  for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
                    {
                      enum rtx_code code = GET_CODE (XVECEXP (pat, 0, i));
                      if ((code == SET || code == CLOBBER)
                          && GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) == REG)
                        handle_rd_kill_set (insn,
                                            REGNO (XEXP (XVECEXP (pat, 0, i), 0)),
                                            bb);
                    }
                }
              else if (GET_CODE (pat) == SET)
                {
                  if (GET_CODE (SET_DEST (pat)) == REG)
                    {
                      /* Each setting of this register outside of this block
                         must be marked in the set of kills in this block.  */
                      handle_rd_kill_set (insn, REGNO (SET_DEST (pat)), bb);
                    }
                }
              /* FIXME: CLOBBER? */
            }
        }
    }
}

/* Compute the reaching definitions as in
   Compilers Principles, Techniques, and Tools. Aho, Sethi, Ullman,
   Chapter 10.  It is the same algorithm as used for computing available
   expressions but applied to the gens and kills of reaching definitions.  */

static void
compute_rd ()
{
  int bb, changed, passes;

  for (bb = 0; bb < n_basic_blocks; bb++)
    sbitmap_copy (rd_out[bb] /*dst*/, rd_gen[bb] /*src*/);

  passes = 0;
  changed = 1;
  while (changed)
    {
      changed = 0;
      for (bb = 0; bb < n_basic_blocks; bb++)
        {
          sbitmap_union_of_preds (reaching_defs[bb], rd_out, bb);
          changed |= sbitmap_union_of_diff (rd_out[bb], rd_gen[bb],
                                            reaching_defs[bb], rd_kill[bb]);
        }
      passes++;
    }

  if (gcse_file)
    fprintf (gcse_file, "reaching def computation: %d passes\n", passes);
}
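
/* For reference (commentary added here, not in the original source),
   the loop above iterates the standard reaching-definitions equations
   from the Aho/Sethi/Ullman chapter cited:

     reaching_defs[bb] = union over preds p of bb of rd_out[p]
     rd_out[bb]        = rd_gen[bb] U (reaching_defs[bb] - rd_kill[bb])

   sbitmap_union_of_preds computes the first equation and
   sbitmap_union_of_diff the second, repeating until no rd_out bit
   changes.  */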

/* Classic GCSE available expression support.  */

/* Allocate memory for available expression computation.  */

static void
alloc_avail_expr_mem (n_blocks, n_exprs)
     int n_blocks, n_exprs;
{
  ae_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
  sbitmap_vector_zero (ae_kill, n_basic_blocks);

  ae_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
  sbitmap_vector_zero (ae_gen, n_basic_blocks);

  ae_in = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
  sbitmap_vector_zero (ae_in, n_basic_blocks);

  ae_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
  sbitmap_vector_zero (ae_out, n_basic_blocks);

  u_bitmap = (sbitmap) sbitmap_alloc (n_exprs);
  sbitmap_ones (u_bitmap);
}

/* Free memory allocated by alloc_avail_expr_mem.  */

static void
free_avail_expr_mem ()
{
  free (ae_kill);
  free (ae_gen);
  free (ae_in);
  free (ae_out);
  free (u_bitmap);
}

/* Compute the set of available expressions generated in each basic block.  */

static void
compute_ae_gen ()
{
  int i;

  /* For each recorded occurrence of each expression, set ae_gen[bb][expr].
     This is all we have to do because an expression is not recorded if it
     is not available, and the only expressions we want to work with are the
     ones that are recorded.  */

  for (i = 0; i < expr_hash_table_size; i++)
    {
      struct expr *expr = expr_hash_table[i];
      while (expr != NULL)
        {
          struct occr *occr = expr->avail_occr;
          while (occr != NULL)
            {
              SET_BIT (ae_gen[BLOCK_NUM (occr->insn)], expr->bitmap_index);
              occr = occr->next;
            }
          expr = expr->next_same_hash;
        }
    }
}

/* Return non-zero if expression X is killed in BB.  */

static int
expr_killed_p (x, bb)
     rtx x;
     int bb;
{
  int i;
  enum rtx_code code;
  char *fmt;

  /* repeat is used to turn tail-recursion into iteration.  */
 repeat:

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      return TEST_BIT (reg_set_in_block[bb], REGNO (x));

    case MEM:
      if (mem_set_in_block[bb])
        return 1;
      x = XEXP (x, 0);
      goto repeat;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 0;

    default:
      break;
    }

  i = GET_RTX_LENGTH (code) - 1;
  fmt = GET_RTX_FORMAT (code);
  for ( ; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          rtx tem = XEXP (x, i);

          /* If we are about to do the last recursive call
             needed at this level, change it into iteration.
             This function is called enough to be worth it.  */
          if (i == 0)
            {
              x = tem;
              goto repeat;
            }
          if (expr_killed_p (tem, bb))
            return 1;
        }
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = 0; j < XVECLEN (x, i); j++)
            {
              if (expr_killed_p (XVECEXP (x, i, j), bb))
                return 1;
            }
        }
    }

  return 0;
}

/* Compute the set of available expressions killed in each basic block.  */

static void
compute_ae_kill (ae_gen, ae_kill)
     sbitmap *ae_gen, *ae_kill;
{
  int bb, i;

  for (bb = 0; bb < n_basic_blocks; bb++)
    {
      for (i = 0; i < expr_hash_table_size; i++)
        {
          struct expr *expr = expr_hash_table[i];

          for ( ; expr != NULL; expr = expr->next_same_hash)
            {
              /* Skip EXPR if generated in this block.  */
              if (TEST_BIT (ae_gen[bb], expr->bitmap_index))
                continue;

              if (expr_killed_p (expr->expr, bb))
                SET_BIT (ae_kill[bb], expr->bitmap_index);
            }
        }
    }
}

/* Actually perform the Classic GCSE optimizations.  */

/* Return non-zero if occurrence OCCR of expression EXPR reaches block BB.

   CHECK_SELF_LOOP is non-zero if we should consider a block reaching itself
   as a positive reach.  We want to do this when there are two computations
   of the expression in the block.

   VISITED is a pointer to a working buffer for tracking which BB's have
   been visited.  It is NULL for the top-level call.

   We treat reaching expressions that go through blocks containing the same
   reaching expression as "not reaching".  E.g. if EXPR is generated in blocks
   2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
   2 as not reaching.  The intent is to improve the probability of finding
   only one reaching expression and to reduce register lifetimes by picking
   the closest such expression.  */

static int
expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited)
     struct occr *occr;
     struct expr *expr;
     int bb;
     int check_self_loop;
     char *visited;
{
  edge pred;

  for (pred = BASIC_BLOCK(bb)->pred; pred != NULL; pred = pred->pred_next)
    {
      int pred_bb = pred->src->index;

      if (visited[pred_bb])
        {
          /* This predecessor has already been visited.
             Nothing to do.  */
          ;
        }
      else if (pred_bb == bb)
        {
          /* BB loops on itself.  */
          if (check_self_loop
              && TEST_BIT (ae_gen[pred_bb], expr->bitmap_index)
              && BLOCK_NUM (occr->insn) == pred_bb)
            return 1;
          visited[pred_bb] = 1;
        }
      /* Ignore this predecessor if it kills the expression.  */
      else if (TEST_BIT (ae_kill[pred_bb], expr->bitmap_index))
        visited[pred_bb] = 1;
      /* Does this predecessor generate this expression?  */
      else if (TEST_BIT (ae_gen[pred_bb], expr->bitmap_index))
        {
          /* Is this the occurrence we're looking for?
             Note that there's only one generating occurrence per block
             so we just need to check the block number.  */
          if (BLOCK_NUM (occr->insn) == pred_bb)
            return 1;
          visited[pred_bb] = 1;
        }
      /* Neither gen nor kill.  */
      else
        {
          visited[pred_bb] = 1;
          if (expr_reaches_here_p_work (occr, expr, pred_bb, check_self_loop,
                                        visited))
            return 1;
        }
    }

  /* All paths have been checked.  */
  return 0;
}

/* This wrapper for expr_reaches_here_p_work() is to ensure that any
   memory allocated for that function is returned.  */

static int
expr_reaches_here_p (occr, expr, bb, check_self_loop)
     struct occr *occr;
     struct expr *expr;
     int bb;
     int check_self_loop;
{
  int rval;
  char * visited = (char *) xcalloc (n_basic_blocks, 1);

  rval = expr_reaches_here_p_work(occr, expr, bb, check_self_loop, visited);

  free (visited);

  return (rval);
}

/* Return the instruction that computes EXPR that reaches INSN's basic block.
   If there is more than one such instruction, return NULL.

   Called only by handle_avail_expr.  */

static rtx
computing_insn (expr, insn)
     struct expr *expr;
     rtx insn;
{
  int bb = BLOCK_NUM (insn);

  if (expr->avail_occr->next == NULL)
    {
      if (BLOCK_NUM (expr->avail_occr->insn) == bb)
        {
          /* The available expression is actually itself
             (i.e. a loop in the flow graph) so do nothing.  */
          return NULL;
        }
      /* (FIXME) Case that we found a pattern that was created by
         a substitution that took place.  */
      return expr->avail_occr->insn;
    }
  else
    {
      /* Pattern is computed more than once.
         Search backwards from this insn to see how many of these
         computations actually reach this insn.  */
      struct occr *occr;
      rtx insn_computes_expr = NULL;
      int can_reach = 0;

      for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
        {
          if (BLOCK_NUM (occr->insn) == bb)
            {
              /* The expression is generated in this block.
                 The only time we care about this is when the expression
                 is generated later in the block [and thus there's a loop].
                 We let the normal cse pass handle the other cases.  */
              if (INSN_CUID (insn) < INSN_CUID (occr->insn))
                {
                  if (expr_reaches_here_p (occr, expr, bb, 1))
                    {
                      can_reach++;
                      if (can_reach > 1)
                        return NULL;
                      insn_computes_expr = occr->insn;
                    }
                }
            }
          else /* Computation of the pattern outside this block.  */
            {
              if (expr_reaches_here_p (occr, expr, bb, 0))
                {
                  can_reach++;
                  if (can_reach > 1)
                    return NULL;
                  insn_computes_expr = occr->insn;
                }
            }
        }

      if (insn_computes_expr == NULL)
        abort ();
      return insn_computes_expr;
    }
}

/* Return non-zero if the definition in DEF_INSN can reach INSN.
   Only called by can_disregard_other_sets.  */

static int
def_reaches_here_p (insn, def_insn)
     rtx insn, def_insn;
{
  rtx reg;

  if (TEST_BIT (reaching_defs[BLOCK_NUM (insn)], INSN_CUID (def_insn)))
    return 1;

  if (BLOCK_NUM (insn) == BLOCK_NUM (def_insn))
    {
      if (INSN_CUID (def_insn) < INSN_CUID (insn))
        {
          if (GET_CODE (PATTERN (def_insn)) == PARALLEL)
            return 1;
          if (GET_CODE (PATTERN (def_insn)) == CLOBBER)
            reg = XEXP (PATTERN (def_insn), 0);
          else if (GET_CODE (PATTERN (def_insn)) == SET)
            reg = SET_DEST (PATTERN (def_insn));
          else
            abort ();
          return ! reg_set_between_p (reg, NEXT_INSN (def_insn), insn);
        }
      else
        return 0;
    }

  return 0;
}

/* Return non-zero if *ADDR_THIS_REG can only have one value at INSN.
   The value returned is the number of definitions that reach INSN.
   Returning a value of zero means that [maybe] more than one definition
   reaches INSN and the caller can't perform whatever optimization it is
   trying.  i.e. it is always safe to return zero.  */

static int
can_disregard_other_sets (addr_this_reg, insn, for_combine)
     struct reg_set **addr_this_reg;
     rtx insn;
     int for_combine;
{
  int number_of_reaching_defs = 0;
  struct reg_set *this_reg = *addr_this_reg;

  while (this_reg)
    {
      if (def_reaches_here_p (insn, this_reg->insn))
        {
          number_of_reaching_defs++;
          /* Ignore parallels for now.  */
          if (GET_CODE (PATTERN (this_reg->insn)) == PARALLEL)
            return 0;
          if (!for_combine
              && (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER
                  || ! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
                                    SET_SRC (PATTERN (insn)))))
            {
              /* A setting of the reg to a different value reaches INSN.  */
              return 0;
            }
          if (number_of_reaching_defs > 1)
            {
              /* If in this setting the value the register is being
                 set to is equal to the previous value the register
                 was set to and this setting reaches the insn we are
                 trying to do the substitution on then we are ok.  */
              if (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER)
                return 0;
              if (! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
                                 SET_SRC (PATTERN (insn))))
                return 0;
            }
          *addr_this_reg = this_reg;
        }

      /* prev_this_reg = this_reg; */
      this_reg = this_reg->next;
    }

  return number_of_reaching_defs;
}

/* Expression computed by insn is available and the substitution is legal,
   so try to perform the substitution.

   The result is non-zero if any changes were made.  */

static int
handle_avail_expr (insn, expr)
     rtx insn;
     struct expr *expr;
{
  rtx pat, insn_computes_expr;
  rtx to;
  struct reg_set *this_reg;
  int found_setting, use_src;
  int changed = 0;

  /* We only handle the case where one computation of the expression
     reaches this instruction.  */
  insn_computes_expr = computing_insn (expr, insn);
  if (insn_computes_expr == NULL)
    return 0;

  found_setting = 0;
  use_src = 0;

  /* At this point we know only one computation of EXPR outside of this
     block reaches this insn.  Now try to find a register that the
     expression is computed into.  */

  if (GET_CODE (SET_SRC (PATTERN (insn_computes_expr))) == REG)
    {
      /* This is the case when the available expression that reaches
         here has already been handled as an available expression.  */
      int regnum_for_replacing = REGNO (SET_SRC (PATTERN (insn_computes_expr)));
      /* If the register was created by GCSE we can't use `reg_set_table',
         however we know it's set only once.  */
      if (regnum_for_replacing >= max_gcse_regno
          /* If the register the expression is computed into is set only once,
             or only one set reaches this insn, we can use it.  */
          || (((this_reg = reg_set_table[regnum_for_replacing]),
               this_reg->next == NULL)
              || can_disregard_other_sets (&this_reg, insn, 0)))
        {
          use_src = 1;
          found_setting = 1;
        }
    }

  if (!found_setting)
    {
      int regnum_for_replacing = REGNO (SET_DEST (PATTERN (insn_computes_expr)));
      /* This shouldn't happen.  */
      if (regnum_for_replacing >= max_gcse_regno)
        abort ();
      this_reg = reg_set_table[regnum_for_replacing];
      /* If the register the expression is computed into is set only once,
         or only one set reaches this insn, use it.  */
      if (this_reg->next == NULL
          || can_disregard_other_sets (&this_reg, insn, 0))
        found_setting = 1;
    }

  if (found_setting)
    {
      pat = PATTERN (insn);
      if (use_src)
        to = SET_SRC (PATTERN (insn_computes_expr));
      else
        to = SET_DEST (PATTERN (insn_computes_expr));
      changed = validate_change (insn, &SET_SRC (pat), to, 0);

      /* We should be able to ignore the return code from validate_change but
         to play it safe we check.  */
      if (changed)
        {
          gcse_subst_count++;
          if (gcse_file != NULL)
            {
              fprintf (gcse_file, "GCSE: Replacing the source in insn %d with reg %d %s insn %d\n",
                       INSN_UID (insn), REGNO (to),
                       use_src ? "from" : "set in",
                       INSN_UID (insn_computes_expr));
            }
        }
    }
  /* The register that the expr is computed into is set more than once.  */
  else if (1 /*expensive_op(this_pattrn->op) && do_expensive_gcse)*/)
    {
      /* Insert an insn after insnx that copies the reg set in insnx
         into a new pseudo register; call this new register REGN.
         From insnb until end of basic block or until REGB is set
         replace all uses of REGB with REGN.  */
      rtx new_insn;

      to = gen_reg_rtx (GET_MODE (SET_DEST (PATTERN (insn_computes_expr))));

      /* Generate the new insn.  */
      /* ??? If the change fails, we return 0, even though we created
         an insn.  I think this is ok.  */
      new_insn
        = emit_insn_after (gen_rtx_SET (VOIDmode, to,
                                        SET_DEST (PATTERN (insn_computes_expr))),
                           insn_computes_expr);
      /* Keep block number table up to date.  */
      set_block_num (new_insn, BLOCK_NUM (insn_computes_expr));
      /* Keep register set table up to date.  */
      record_one_set (REGNO (to), new_insn);

      gcse_create_count++;
      if (gcse_file != NULL)
        {
          fprintf (gcse_file, "GCSE: Creating insn %d to copy value of reg %d, computed in insn %d,\n",
                   INSN_UID (NEXT_INSN (insn_computes_expr)),
                   REGNO (SET_SRC (PATTERN (NEXT_INSN (insn_computes_expr)))),
                   INSN_UID (insn_computes_expr));
          fprintf (gcse_file, "      into newly allocated reg %d\n", REGNO (to));
        }

      pat = PATTERN (insn);

      /* Do register replacement for INSN.  */
      changed = validate_change (insn, &SET_SRC (pat),
                                 SET_DEST (PATTERN (NEXT_INSN (insn_computes_expr))),
                                 0);

      /* We should be able to ignore the return code from validate_change but
         to play it safe we check.  */
      if (changed)
        {
          gcse_subst_count++;
          if (gcse_file != NULL)
            {
              fprintf (gcse_file, "GCSE: Replacing the source in insn %d with reg %d set in insn %d\n",
                       INSN_UID (insn),
                       REGNO (SET_DEST (PATTERN (NEXT_INSN (insn_computes_expr)))),
                       INSN_UID (insn_computes_expr));
            }
        }
    }

  return changed;
}

/* Perform classic GCSE.
   This is called by one_classic_gcse_pass after all the dataflow analysis
   has been done.

   The result is non-zero if a change was made.  */

static int
classic_gcse ()
{
  int bb, changed;
  rtx insn;

  /* Note we start at block 1.  */

  changed = 0;
  for (bb = 1; bb < n_basic_blocks; bb++)
    {
      /* Reset tables used to keep track of what's still valid [since the
         start of the block].  */
      reset_opr_set_tables ();

      for (insn = BLOCK_HEAD (bb);
           insn != NULL && insn != NEXT_INSN (BLOCK_END (bb));
           insn = NEXT_INSN (insn))
        {
          /* Is insn of form (set (pseudo-reg) ...)?  */
          if (GET_CODE (insn) == INSN
              && GET_CODE (PATTERN (insn)) == SET
              && GET_CODE (SET_DEST (PATTERN (insn))) == REG
              && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_PSEUDO_REGISTER)
            {
              rtx pat = PATTERN (insn);
              rtx src = SET_SRC (pat);
              struct expr *expr;

              if (want_to_gcse_p (src)
                  /* Is the expression recorded?  */
                  && ((expr = lookup_expr (src)) != NULL)
                  /* Is the expression available [at the start of the
                     block]?  */
                  && TEST_BIT (ae_in[bb], expr->bitmap_index)
                  /* Are the operands unchanged since the start of the
                     block?  */
                  && oprs_not_set_p (src, insn))
                changed |= handle_avail_expr (insn, expr);
            }

          /* Keep track of everything modified by this insn.  */
          /* ??? Need to be careful w.r.t. mods done to INSN.  */
          if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
            mark_oprs_set (insn);
        }
    }

  return changed;
}

/* Top level routine to perform one classic GCSE pass.

   Return non-zero if a change was made.  */

static int
one_classic_gcse_pass (pass)
     int pass;
{
  int changed = 0;

  gcse_subst_count = 0;
  gcse_create_count = 0;

  alloc_expr_hash_table (max_cuid);
  alloc_rd_mem (n_basic_blocks, max_cuid);
  compute_expr_hash_table ();
  if (gcse_file)
    dump_hash_table (gcse_file, "Expression", expr_hash_table,
                     expr_hash_table_size, n_exprs);
  if (n_exprs > 0)
    {
      int passes;

      compute_kill_rd ();
      compute_rd ();
      alloc_avail_expr_mem (n_basic_blocks, n_exprs);
      compute_ae_gen ();
      compute_ae_kill (ae_gen, ae_kill);
      passes = compute_available (ae_gen, ae_kill, ae_out, ae_in);
      if (gcse_file)
        fprintf (gcse_file, "avail expr computation: %d passes\n", passes);
      changed = classic_gcse ();
      free_avail_expr_mem ();
    }
  free_rd_mem ();
  free_expr_hash_table ();

  if (gcse_file)
    {
      fprintf (gcse_file, "\n");
      fprintf (gcse_file, "GCSE of %s, pass %d: %d bytes needed, %d substs, %d insns created\n",
               current_function_name, pass,
               bytes_used, gcse_subst_count, gcse_create_count);
    }

  return changed;
}

/* Compute copy/constant propagation working variables.  */

/* Local properties of assignments.  */

static sbitmap *cprop_pavloc;
static sbitmap *cprop_absaltered;

/* Global properties of assignments (computed from the local properties).  */

static sbitmap *cprop_avin;
static sbitmap *cprop_avout;

/* Allocate vars used for copy/const propagation.
   N_BLOCKS is the number of basic blocks.
   N_SETS is the number of sets.  */

static void
alloc_cprop_mem (n_blocks, n_sets)
     int n_blocks, n_sets;
{
  cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
  cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);

  cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
  cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
}

/* Free vars used by copy/const propagation.  */

static void
free_cprop_mem ()
{
  free (cprop_pavloc);
  free (cprop_absaltered);
  free (cprop_avin);
  free (cprop_avout);
}

/* For each block, compute whether X is transparent.
   X is either an expression or an assignment [though we don't care which,
   for this context an assignment is treated as an expression].
   For each block where an element of X is modified, set (SET_P == 1) or reset
   (SET_P == 0) the INDX bit in BMAP.  */

static void
compute_transp (x, indx, bmap, set_p)
     rtx x;
     int indx;
     sbitmap *bmap;
     int set_p;
{
  int bb, i;
  enum rtx_code code;
  char *fmt;

  /* repeat is used to turn tail-recursion into iteration.  */
 repeat:

  if (x == 0)
    return;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
        int regno = REGNO (x);

        if (set_p)
          {
            if (regno < FIRST_PSEUDO_REGISTER)
              {
                for (bb = 0; bb < n_basic_blocks; bb++)
                  if (TEST_BIT (reg_set_in_block[bb], regno))
                    SET_BIT (bmap[bb], indx);
              }
            else
              {
                struct reg_set *r;

                for (r = reg_set_table[regno]; r != NULL; r = r->next)
                  {
                    bb = BLOCK_NUM (r->insn);
                    SET_BIT (bmap[bb], indx);
                  }
              }
          }
        else
          {
            if (regno < FIRST_PSEUDO_REGISTER)
              {
                for (bb = 0; bb < n_basic_blocks; bb++)
                  if (TEST_BIT (reg_set_in_block[bb], regno))
                    RESET_BIT (bmap[bb], indx);
              }
            else
              {
                struct reg_set *r;

                for (r = reg_set_table[regno]; r != NULL; r = r->next)
                  {
                    bb = BLOCK_NUM (r->insn);
                    RESET_BIT (bmap[bb], indx);
                  }
              }
          }
        return;
      }

    case MEM:
      if (set_p)
        {
          for (bb = 0; bb < n_basic_blocks; bb++)
            if (mem_set_in_block[bb])
              SET_BIT (bmap[bb], indx);
        }
      else
        {
          for (bb = 0; bb < n_basic_blocks; bb++)
            if (mem_set_in_block[bb])
              RESET_BIT (bmap[bb], indx);
        }
      x = XEXP (x, 0);
      goto repeat;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    default:
      break;
    }

  i = GET_RTX_LENGTH (code) - 1;
  fmt = GET_RTX_FORMAT (code);
  for ( ; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          rtx tem = XEXP (x, i);

          /* If we are about to do the last recursive call
             needed at this level, change it into iteration.
             This function is called enough to be worth it.  */
          if (i == 0)
            {
              x = tem;
              goto repeat;
            }
          compute_transp (tem, indx, bmap, set_p);
        }
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = 0; j < XVECLEN (x, i); j++)
            compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
        }
    }
}

/* Compute the available expressions at the start and end of each
   basic block for cprop.  This particular dataflow equation is
   used often enough that we might want to generalize it and make it
   a subroutine for other global optimizations that need available
   in/out information.  */

static void
compute_cprop_avinout ()
{
  int bb, changed, passes;

  sbitmap_zero (cprop_avin[0]);
  sbitmap_vector_ones (cprop_avout, n_basic_blocks);

  passes = 0;
  changed = 1;
  while (changed)
    {
      changed = 0;
      for (bb = 0; bb < n_basic_blocks; bb++)
        {
          if (bb != 0)
            sbitmap_intersection_of_preds (cprop_avin[bb], cprop_avout, bb);
          changed |= sbitmap_union_of_diff (cprop_avout[bb],
                                            cprop_pavloc[bb],
                                            cprop_avin[bb],
                                            cprop_absaltered[bb]);
        }
      passes++;
    }

  if (gcse_file)
    fprintf (gcse_file, "cprop avail expr computation: %d passes\n", passes);
}
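
/* For reference (commentary added here, not in the original source),
   the fixed point computed above is the usual forward availability
   system, with the entry block's avin forced empty:

     cprop_avin[bb]  = intersection over preds p of bb of cprop_avout[p]
     cprop_avout[bb] = cprop_pavloc[bb] U (cprop_avin[bb] - cprop_absaltered[bb])

   i.e. an assignment is available out of a block if it is generated
   locally, or arrives from every predecessor and is not altered.  */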

/* Top level routine to do the dataflow analysis needed by copy/const
   propagation.  */

static void
compute_cprop_data ()
{
  compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, 1);
  compute_cprop_avinout ();
}

/* Copy/constant propagation.  */

/* Maximum number of register uses in an insn that we handle.  */
#define MAX_USES 8

/* Table of uses found in an insn.
   Allocated statically to avoid alloc/free complexity and overhead.  */
static struct reg_use reg_use_table[MAX_USES];

/* Index into `reg_use_table' while building it.  */
static int reg_use_count;

/* Set up a list of register numbers used in INSN.
   The found uses are stored in `reg_use_table'.
   `reg_use_count' is initialized to zero before entry, and
   contains the number of uses in the table upon exit.

   ??? If a register appears multiple times we will record it multiple
   times.  This doesn't hurt anything but it will slow things down.  */

static void
find_used_regs (x)
     rtx x;
{
  int i;
  enum rtx_code code;
  char *fmt;

  /* repeat is used to turn tail-recursion into iteration.  */
 repeat:

  if (x == 0)
    return;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      if (reg_use_count == MAX_USES)
        return;
      reg_use_table[reg_use_count].reg_rtx = x;
      reg_use_count++;
      return;

    case MEM:
      x = XEXP (x, 0);
      goto repeat;

    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case LABEL_REF:
    case CLOBBER:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case ASM_INPUT: /*FIXME*/
      return;

    case SET:
      if (GET_CODE (SET_DEST (x)) == MEM)
        find_used_regs (SET_DEST (x));
      x = SET_SRC (x);
      goto repeat;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          /* If we are about to do the last recursive call
             needed at this level, change it into iteration.
             This function is called enough to be worth it.  */
          if (i == 0)
            {
              x = XEXP (x, 0);
              goto repeat;
            }
          find_used_regs (XEXP (x, i));
        }
      else if (fmt[i] == 'E')
        {
          int j;

          for (j = 0; j < XVECLEN (x, i); j++)
            find_used_regs (XVECEXP (x, i, j));
        }
    }
}

/* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
   Returns non-zero if successful.  */

static int
try_replace_reg (from, to, insn)
     rtx from, to, insn;
{
  /* If this fails we could try to simplify the result of the
     replacement and attempt to recognize the simplified insn.

     But we need a general simplify_rtx that doesn't have pass
     specific state variables.  I'm not aware of one at the moment.  */
  return validate_replace_src (from, to, insn);
}

/* Find a set of REGNO that is available on entry to INSN's block.
   Returns NULL if not found.  */

static struct expr *
find_avail_set (regno, insn)
     int regno;
     rtx insn;
{
  /* SET1 contains the last set found that can be returned to the caller for
     use in a substitution.  */
  struct expr *set1 = 0;

  /* Loops are not possible here.  To get a loop we would need two sets
     available at the start of the block containing INSN.  i.e. we would
     need two sets like this available at the start of the block:

       (set (reg X) (reg Y))
       (set (reg Y) (reg X))

   This cannot happen since the set of (reg Y) would have killed the
   set of (reg X) making it unavailable at the start of this block.  */
  while (1)
    {
      rtx src;
      struct expr *set = lookup_set (regno, NULL_RTX);

      /* Find a set that is available at the start of the block
         which contains INSN.  */
      while (set)
        {
          if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
            break;
          set = next_set (regno, set);
        }

      /* If no available set was found we've reached the end of the
         (possibly empty) copy chain.  */
      if (set == 0)
        break;

      if (GET_CODE (set->expr) != SET)
        abort ();

      src = SET_SRC (set->expr);

      /* We know the set is available.
         Now check that SRC is ANTLOC (i.e. none of the source operands
         have changed since the start of the block).

         If the source operand changed, we may still use it for the next
         iteration of this loop, but we may not use it for substitutions.  */
      if (CONSTANT_P (src) || oprs_not_set_p (src, insn))
        set1 = set;

      /* If the source of the set is anything except a register, then
         we have reached the end of the copy chain.  */
      if (GET_CODE (src) != REG)
        break;

      /* Follow the copy chain, i.e. start another iteration of the loop
         and see if we have an available copy into SRC.  */
      regno = REGNO (src);
    }

  /* SET1 holds the last set that was available and anticipatable at
     INSN.  */
  return set1;
}

/* Subroutine of cprop_insn that tries to propagate constants into
   JUMP_INSNS.  INSN must be a conditional jump; COPY is a copy of it
   that we can use for substitutions.
   REG_USED is the use we will try to replace, SRC is the constant we
   will try to substitute for it.
   Returns nonzero if a change was made.  */

static int
cprop_jump (insn, copy, reg_used, src)
     rtx insn, copy;
     struct reg_use *reg_used;
     rtx src;
{
  rtx set = PATTERN (copy);
  rtx temp;

  /* Replace the register with the appropriate constant.  */
  replace_rtx (SET_SRC (set), reg_used->reg_rtx, src);

  temp = simplify_ternary_operation (GET_CODE (SET_SRC (set)),
                                     GET_MODE (SET_SRC (set)),
                                     GET_MODE (XEXP (SET_SRC (set), 0)),
                                     XEXP (SET_SRC (set), 0),
                                     XEXP (SET_SRC (set), 1),
                                     XEXP (SET_SRC (set), 2));

  /* If no simplification can be made, then try the next
     register.  */
  if (temp == 0)
    return 0;

  SET_SRC (set) = temp;

  /* That may have changed the structure of TEMP, so
     force it to be rerecognized if it has not turned
     into a nop or unconditional jump.  */

  INSN_CODE (copy) = -1;
  if ((SET_DEST (set) == pc_rtx
       && (SET_SRC (set) == pc_rtx
           || GET_CODE (SET_SRC (set)) == LABEL_REF))
      || recog (PATTERN (copy), copy, NULL) >= 0)
    {
      /* This has either become an unconditional jump
         or a nop-jump.  We'd like to delete nop jumps
         here, but doing so confuses gcse.  So we just
         make the replacement and let later passes
         sort things out.  */
      PATTERN (insn) = set;
      INSN_CODE (insn) = -1;

      /* One less use of the label this insn used to jump to
         if we turned this into a NOP jump.  */
      if (SET_SRC (set) == pc_rtx && JUMP_LABEL (insn) != 0)
        --LABEL_NUSES (JUMP_LABEL (insn));

      /* If this has turned into an unconditional jump,
         then put a barrier after it so that the unreachable
         code will be deleted.  */
      if (GET_CODE (SET_SRC (set)) == LABEL_REF)
        emit_barrier_after (insn);

      run_jump_opt_after_gcse = 1;

      const_prop_count++;
      if (gcse_file != NULL)
        {
          int regno = REGNO (reg_used->reg_rtx);
          fprintf (gcse_file, "CONST-PROP: Replacing reg %d in insn %d with constant ",
                   regno, INSN_UID (insn));
          print_rtl (gcse_file, src);
          fprintf (gcse_file, "\n");
        }
      return 1;
    }

  return 0;
}

#ifdef HAVE_cc0
/* Subroutine of cprop_insn that tries to propagate constants into
   JUMP_INSNS for machines that have CC0.  INSN is a single set that
   stores into CC0; the insn following it is a conditional jump.
   REG_USED is the use we will try to replace, SRC is the constant we
   will try to substitute for it.
   Returns nonzero if a change was made.  */

static int
cprop_cc0_jump (insn, reg_used, src)
     rtx insn;
     struct reg_use *reg_used;
     rtx src;
{
  rtx jump = NEXT_INSN (insn);
  rtx copy = copy_rtx (jump);
  rtx set = PATTERN (copy);

  /* We need to copy the source of the cc0 setter, as cprop_jump is going to
     substitute into it.  */
  replace_rtx (SET_SRC (set), cc0_rtx, copy_rtx (SET_SRC (PATTERN (insn))));
  if (! cprop_jump (jump, copy, reg_used, src))
    return 0;

  /* If we succeeded, delete the cc0 setter.  */
  PUT_CODE (insn, NOTE);
  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
  NOTE_SOURCE_FILE (insn) = 0;
  return 1;
}
#endif

/* Perform constant and copy propagation on INSN.
   The result is non-zero if a change was made.  */

static int
cprop_insn (insn, alter_jumps)
     rtx insn;
     int alter_jumps;
{
  struct reg_use *reg_used;
  int changed = 0;

  /* Only propagate into SETs.  Note that a conditional jump is a
     SET with pc_rtx as the destination.  */
  if ((GET_CODE (insn) != INSN
       && GET_CODE (insn) != JUMP_INSN)
      || GET_CODE (PATTERN (insn)) != SET)
    return 0;

  reg_use_count = 0;
  find_used_regs (PATTERN (insn));

  reg_used = &reg_use_table[0];
  for ( ; reg_use_count > 0; reg_used++, reg_use_count--)
    {
      rtx pat, src;
      struct expr *set;
      int regno = REGNO (reg_used->reg_rtx);

      /* Ignore registers created by GCSE.
         We do this because ...  */
      if (regno >= max_gcse_regno)
        continue;

      /* If the register has already been set in this block, there's
         nothing we can do.  */
      if (! oprs_not_set_p (reg_used->reg_rtx, insn))
        continue;

      /* Find an assignment that sets reg_used and is available
         at the start of the block.  */
      set = find_avail_set (regno, insn);
      if (! set)
        continue;

      pat = set->expr;
      /* ??? We might be able to handle PARALLELs.  Later.  */
      if (GET_CODE (pat) != SET)
        abort ();
      src = SET_SRC (pat);

      /* Constant propagation.  */
      if (GET_CODE (src) == CONST_INT || GET_CODE (src) == CONST_DOUBLE
          || GET_CODE (src) == SYMBOL_REF)
        {
          /* Handle normal insns first.  */
          if (GET_CODE (insn) == INSN
              && try_replace_reg (reg_used->reg_rtx, src, insn))
            {
              changed = 1;
              const_prop_count++;
              if (gcse_file != NULL)
                {
                  fprintf (gcse_file, "CONST-PROP: Replacing reg %d in insn %d with constant ",
                           regno, INSN_UID (insn));
                  print_rtl (gcse_file, src);
                  fprintf (gcse_file, "\n");
                }

              /* The original insn setting reg_used may or may not now be
                 deletable.  We leave the deletion to flow.  */
            }

          /* Try to propagate a CONST_INT into a conditional jump.
             We're pretty specific about what we will handle in this
             code, we can extend this as necessary over time.

             Right now the insn in question must look like
             (set (pc) (if_then_else ...))  */
          else if (alter_jumps
                   && GET_CODE (insn) == JUMP_INSN
                   && condjump_p (insn)
                   && ! simplejump_p (insn))
            changed |= cprop_jump (insn, copy_rtx (insn), reg_used, src);
#ifdef HAVE_cc0
          /* Similar code for machines that use a pair of CC0 setter and
             conditional jump insn.  */
          else if (alter_jumps
                   && GET_CODE (PATTERN (insn)) == SET
                   && SET_DEST (PATTERN (insn)) == cc0_rtx
                   && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
                   && condjump_p (NEXT_INSN (insn))
                   && ! simplejump_p (NEXT_INSN (insn)))
            changed |= cprop_cc0_jump (insn, reg_used, src);
#endif
        }
      else if (GET_CODE (src) == REG
               && REGNO (src) >= FIRST_PSEUDO_REGISTER
               && REGNO (src) != regno)
        {
          if (try_replace_reg (reg_used->reg_rtx, src, insn))
            {
              changed = 1;
              copy_prop_count++;
              if (gcse_file != NULL)
                {
                  fprintf (gcse_file, "COPY-PROP: Replacing reg %d in insn %d with reg %d\n",
                           regno, INSN_UID (insn), REGNO (src));
                }

              /* The original insn setting reg_used may or may not now be
                 deletable.  We leave the deletion to flow.  */
              /* FIXME: If it turns out that the insn isn't deletable,
                 then we may have unnecessarily extended register lifetimes
                 and made things worse.  */
            }
        }
    }

  return changed;
}

/* Forward propagate copies.
   This includes copies and constants.
   Return non-zero if a change was made.  */

static int
cprop (alter_jumps)
     int alter_jumps;
{
  int bb, changed;
  rtx insn;

  /* Note we start at block 1.  */

  changed = 0;
  for (bb = 1; bb < n_basic_blocks; bb++)
    {
      /* Reset tables used to keep track of what's still valid [since the
         start of the block].  */
      reset_opr_set_tables ();

      for (insn = BLOCK_HEAD (bb);
           insn != NULL && insn != NEXT_INSN (BLOCK_END (bb));
           insn = NEXT_INSN (insn))
        {
          if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
            {
              changed |= cprop_insn (insn, alter_jumps);

              /* Keep track of everything modified by this insn.  */
              /* ??? Need to be careful w.r.t. mods done to INSN.  Don't
                 call mark_oprs_set if we turned the insn into a NOTE.  */
              if (GET_CODE (insn) != NOTE)
                mark_oprs_set (insn);
            }
        }
    }

  if (gcse_file != NULL)
    fprintf (gcse_file, "\n");

  return changed;
}

/* Perform one copy/constant propagation pass.
   PASS is the pass count.  */

static int
one_cprop_pass (pass, alter_jumps)
     int pass;
     int alter_jumps;
{
  int changed = 0;

  const_prop_count = 0;
  copy_prop_count = 0;

  alloc_set_hash_table (max_cuid);
  compute_set_hash_table ();
  if (gcse_file)
    dump_hash_table (gcse_file, "SET", set_hash_table, set_hash_table_size,
                     n_sets);
  if (n_sets > 0)
    {
      alloc_cprop_mem (n_basic_blocks, n_sets);
      compute_cprop_data ();
      changed = cprop (alter_jumps);
      free_cprop_mem ();
    }
  free_set_hash_table ();

  if (gcse_file)
    {
      fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, %d const props, %d copy props\n",
               current_function_name, pass,
               bytes_used, const_prop_count, copy_prop_count);
      fprintf (gcse_file, "\n");
    }

  return changed;
}

/* Compute PRE+LCM working variables.  */

/* Local properties of expressions.  */
/* Nonzero for expressions that are transparent in the block.  */
static sbitmap *transp;

/* Nonzero for expressions that are transparent at the end of the block.
   This is only zero for expressions killed by abnormal critical edges
   created by calls.  */
static sbitmap *transpout;

/* Nonzero for expressions that are computed (available) in the block.  */
static sbitmap *comp;

/* Nonzero for expressions that are locally anticipatable in the block.  */
static sbitmap *antloc;

/* Nonzero for expressions where this block is an optimal computation
   point.  */
static sbitmap *pre_optimal;

/* Nonzero for expressions which are redundant in a particular block.  */
static sbitmap *pre_redundant;

/* Nonzero for expressions which should be inserted on a specific edge.  */
static sbitmap *pre_insert_map;

/* Nonzero for expressions which should be deleted in a specific block.  */
static sbitmap *pre_delete_map;

/* Contains the edge_list returned by pre_edge_lcm.  */
static struct edge_list *edge_list;

static sbitmap *temp_bitmap;

/* Redundant insns.  */
static sbitmap pre_redundant_insns;

/* Allocate vars used for PRE analysis.  */

static void
alloc_pre_mem (n_blocks, n_exprs)
     int n_blocks, n_exprs;
{
  transp = sbitmap_vector_alloc (n_blocks, n_exprs);
  comp = sbitmap_vector_alloc (n_blocks, n_exprs);
  antloc = sbitmap_vector_alloc (n_blocks, n_exprs);

  temp_bitmap = sbitmap_vector_alloc (n_blocks, n_exprs);

  pre_optimal = NULL;
  pre_redundant = NULL;
  pre_insert_map = NULL;
  pre_delete_map = NULL;
  ae_in = NULL;
  ae_out = NULL;
  u_bitmap = NULL;
  transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
  ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
  /* pre_insert and pre_delete are allocated later.  */
}

/* Free vars used for PRE analysis.  */

static void
free_pre_mem ()
{
  free (transp);
  free (comp);
  free (antloc);
  free (temp_bitmap);

  if (pre_optimal)
    free (pre_optimal);
  if (pre_redundant)
    free (pre_redundant);
  if (pre_insert_map)
    free (pre_insert_map);
  if (pre_delete_map)
    free (pre_delete_map);
  if (transpout)
    free (transpout);
  if (ae_in)
    free (ae_in);
  if (ae_out)
    free (ae_out);
  if (ae_kill)
    free (ae_kill);
  if (u_bitmap)
    free (u_bitmap);

  transp = comp = antloc = NULL;
  pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
  transpout = ae_in = ae_out = ae_kill = NULL;
  u_bitmap = NULL;
}

/* Top level routine to do the dataflow analysis needed by PRE.  */

static void
compute_pre_data ()
{
  compute_local_properties (transp, comp, antloc, 0);
  compute_transpout ();
  sbitmap_vector_zero (ae_kill, n_basic_blocks);
  compute_ae_kill (comp, ae_kill);
  edge_list = pre_edge_lcm (gcse_file, n_exprs, transp, comp, antloc,
                            ae_kill, &pre_insert_map, &pre_delete_map);
}
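
/* For orientation (a note added here, not in the original source):
   pre_edge_lcm consumes the local properties computed above and hands
   back the two maps consumed below -- pre_insert_map[e] has one bit
   per expression for each CFG edge e where a computation must be
   added, and pre_delete_map[b] has one bit per expression for each
   block b whose now fully redundant computation can be replaced by a
   copy from `reaching_reg'.  */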

/* Return non-zero if an occurrence of expression EXPR in OCCR_BB would reach
   block BB.

   VISITED is a pointer to a working buffer for tracking which BB's have
   been visited.  It is NULL for the top-level call.

   CHECK_PRE_COMP controls whether or not we check for a computation of
   EXPR in OCCR_BB.

   We treat reaching expressions that go through blocks containing the same
   reaching expression as "not reaching".  E.g. if EXPR is generated in blocks
   2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
   2 as not reaching.  The intent is to improve the probability of finding
   only one reaching expression and to reduce register lifetimes by picking
   the closest such expression.  */

static int
pre_expr_reaches_here_p_work (occr_bb, expr, bb, check_pre_comp, visited)
     int occr_bb;
     struct expr *expr;
     int bb;
     int check_pre_comp;
     char *visited;
{
  edge pred;

  for (pred = BASIC_BLOCK (bb)->pred; pred != NULL; pred = pred->pred_next)
    {
      int pred_bb = pred->src->index;

      if (pred->src == ENTRY_BLOCK_PTR
          /* Has this predecessor already been visited?  */
          || visited[pred_bb])
        {
          /* Nothing to do.  */
          ;
        }
      /* Does this predecessor generate this expression?  */
      else if ((!check_pre_comp && occr_bb == pred_bb)
               || TEST_BIT (comp[pred_bb], expr->bitmap_index))
        {
          /* Is this the occurrence we're looking for?
             Note that there's only one generating occurrence per block
             so we just need to check the block number.  */
          if (occr_bb == pred_bb)
            return 1;
          visited[pred_bb] = 1;
        }
      /* Ignore this predecessor if it kills the expression.  */
      else if (! TEST_BIT (transp[pred_bb], expr->bitmap_index))
        visited[pred_bb] = 1;
      /* Neither gen nor kill.  */
      else
        {
          visited[pred_bb] = 1;
          if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb,
                                            check_pre_comp, visited))
            return 1;
        }
    }

  /* All paths have been checked.  */
  return 0;
}

/* The wrapper for pre_expr_reaches_here_p_work that ensures that any
   memory allocated for that function is returned.  */

static int
pre_expr_reaches_here_p (occr_bb, expr, bb, check_pre_comp)
     int occr_bb;
     struct expr *expr;
     int bb;
     int check_pre_comp;
{
  int rval;
  char * visited = (char *) xcalloc (n_basic_blocks, 1);

  rval = pre_expr_reaches_here_p_work(occr_bb, expr, bb, check_pre_comp,
                                      visited);

  free (visited);

  return (rval);
}

/* Given an expr, generate RTL which we can insert at the end of a BB,
   or on an edge.  Set the block number of any insns generated to
   the value of BB.  */

static rtx
process_insert_insn (expr)
     struct expr *expr;
{
  rtx reg = expr->reaching_reg;
  rtx pat, copied_expr;
  rtx first_new_insn;

  start_sequence ();
  copied_expr = copy_rtx (expr->expr);
  emit_move_insn (reg, copied_expr);
  first_new_insn = get_insns ();
  pat = gen_sequence ();
  end_sequence ();

  return pat;
}

/* Add EXPR to the end of basic block BB.

   This is used by both PRE and code hoisting.

   For PRE, we want to verify that the expr is either transparent
   or locally anticipatable in the target block.  This check makes
   no sense for code hoisting.  */

static void
insert_insn_end_bb (expr, bb, pre)
     struct expr *expr;
     int bb;
     int pre;
{
  rtx insn = BLOCK_END (bb);
  rtx new_insn;
  rtx reg = expr->reaching_reg;
  int regno = REGNO (reg);
  rtx pat;

  pat = process_insert_insn (expr);

  /* If the last insn is a jump, insert EXPR in front [taking care to
     handle cc0, etc. properly].  */

  if (GET_CODE (insn) == JUMP_INSN)
    {
#ifdef HAVE_cc0
      rtx note;
#endif

      /* If this is a jump table, then we can't insert stuff here.  Since
         we know the previous real insn must be the tablejump, we insert
         the new instruction just before the tablejump.  */
      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
          || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
        insn = prev_real_insn (insn);

#ifdef HAVE_cc0
      /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
         if cc0 isn't set.  */
      note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
      if (note)
        insn = XEXP (note, 0);
      else
        {
          rtx maybe_cc0_setter = prev_nonnote_insn (insn);
          if (maybe_cc0_setter
              && GET_RTX_CLASS (GET_CODE (maybe_cc0_setter)) == 'i'
              && sets_cc0_p (PATTERN (maybe_cc0_setter)))
            insn = maybe_cc0_setter;
        }
#endif
      /* FIXME: What if something in cc0/jump uses value set in new insn?  */
      new_insn = emit_insn_before (pat, insn);
      if (BLOCK_HEAD (bb) == insn)
        BLOCK_HEAD (bb) = new_insn;
    }
  /* Likewise if the last insn is a call, as will happen in the presence
     of exception handling.  */
  else if (GET_CODE (insn) == CALL_INSN)
    {
      HARD_REG_SET parm_regs;
      int nparm_regs;
      rtx p;

      /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
         we search backward and place the instructions before the first
         parameter is loaded.  Do this for everyone for consistency and a
         presumption that we'll get better code elsewhere as well.  */

      /* It should always be the case that we can put these instructions
         anywhere in the basic block when performing PRE optimizations.
         Check this.  */
      if (pre
          && !TEST_BIT (antloc[bb], expr->bitmap_index)
          && !TEST_BIT (transp[bb], expr->bitmap_index))
        abort ();

      /* Since different machines initialize their parameter registers
         in different orders, assume nothing.  Collect the set of all
         parameter registers.  */
      CLEAR_HARD_REG_SET (parm_regs);
      nparm_regs = 0;
      for (p = CALL_INSN_FUNCTION_USAGE (insn); p; p = XEXP (p, 1))
        if (GET_CODE (XEXP (p, 0)) == USE
            && GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
          {
            int regno = REGNO (XEXP (XEXP (p, 0), 0));
            if (regno >= FIRST_PSEUDO_REGISTER)
              abort ();
            SET_HARD_REG_BIT (parm_regs, regno);
            nparm_regs++;
          }

      /* Search backward for the first set of a register in this set.  */
      while (nparm_regs && BLOCK_HEAD (bb) != insn)
        {
          insn = PREV_INSN (insn);
          p = single_set (insn);
          if (p && GET_CODE (SET_DEST (p)) == REG
              && REGNO (SET_DEST (p)) < FIRST_PSEUDO_REGISTER
              && TEST_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p))))
            {
              CLEAR_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p)));
              nparm_regs--;
            }
        }

      /* If we found all the parameter loads, then we want to insert
         before the first parameter load.

         If we did not find all the parameter loads, then we might have
         stopped on the head of the block, which could be a CODE_LABEL.
         If we inserted before the CODE_LABEL, then we would be putting
         the insn in the wrong basic block.  In that case, put the insn
         after the CODE_LABEL.

         ?!? Do we need to account for NOTE_INSN_BASIC_BLOCK here?  */
      if (GET_CODE (insn) != CODE_LABEL)
        {
          new_insn = emit_insn_before (pat, insn);
          if (BLOCK_HEAD (bb) == insn)
            BLOCK_HEAD (bb) = new_insn;
        }
      else
        new_insn = emit_insn_after (pat, insn);
    }
  else
    {
      new_insn = emit_insn_after (pat, insn);
      BLOCK_END (bb) = new_insn;
    }

  /* Keep block number table up to date.
     Note, PAT could be a multiple insn sequence, we have to make
     sure that each insn in the sequence is handled.  */
  if (GET_CODE (pat) == SEQUENCE)
    {
      int i;

      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx insn = XVECEXP (pat, 0, i);
          set_block_num (insn, bb);
          if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
            add_label_notes (PATTERN (insn), new_insn);
          record_set_insn = insn;
          note_stores (PATTERN (insn), record_set_info);
        }
    }
  else
    {
      add_label_notes (SET_SRC (pat), new_insn);
      set_block_num (new_insn, bb);
      /* Keep register set table up to date.  */
      record_one_set (regno, new_insn);
    }

  gcse_create_count++;

  if (gcse_file)
    {
      fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, copying expression %d to reg %d\n",
               bb, INSN_UID (new_insn), expr->bitmap_index, regno);
    }
}

/* Insert partially redundant expressions on edges in the CFG to make
   the expressions fully redundant.  */

static int
pre_edge_insert (edge_list, index_map)
     struct edge_list *edge_list;
     struct expr **index_map;
{
  int e, i, num_edges, set_size, did_insert = 0;
  sbitmap *inserted;

  /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
     if it reaches any of the deleted expressions.  */

  set_size = pre_insert_map[0]->size;
  num_edges = NUM_EDGES (edge_list);
  inserted = sbitmap_vector_alloc (num_edges, n_exprs);
  sbitmap_vector_zero (inserted, num_edges);

  for (e = 0; e < num_edges; e++)
    {
      int indx;
      basic_block pred = INDEX_EDGE_PRED_BB (edge_list, e);
      int bb = pred->index;

      for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
        {
          SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];
          int j;

          for (j = indx; insert && j < n_exprs; j++, insert >>= 1)
            {
              if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
                {
                  struct expr *expr = index_map[j];
                  struct occr *occr;

                  /* Now look at each deleted occurrence of this expression.  */
                  for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
                    {
                      if (! occr->deleted_p)
                        continue;

                      /* Insert this expression on this edge if it would
                         reach the deleted occurrence in BB.  */
                      if (!TEST_BIT (inserted[e], j)
                          && (bb == ENTRY_BLOCK
                              || pre_expr_reaches_here_p (bb, expr,
                                                          BLOCK_NUM (occr->insn), 0)))
                        {
                          rtx insn;
                          edge eg = INDEX_EDGE (edge_list, e);

                          /* We can't insert anything on an abnormal
                             and critical edge, so we insert the
                             insn at the end of the previous block.  There
                             are several alternatives detailed in
                             Morgan's book P277 (sec 10.5) for handling
                             this situation.  This one is easiest for now.  */

                          if ((eg->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
                            insert_insn_end_bb (index_map[j], bb, 0);
                          else
                            {
                              insn = process_insert_insn (index_map[j]);
                              insert_insn_on_edge (insn, eg);
                            }

                          if (gcse_file)
                            {
                              fprintf (gcse_file,
                                       "PRE/HOIST: edge (%d,%d), copy expression %d\n",
                                       bb,
                                       INDEX_EDGE_SUCC_BB (edge_list, e)->index, expr->bitmap_index);
                            }

                          SET_BIT (inserted[e], j);
                          did_insert = 1;
                          gcse_create_count++;
                        }
                    }
                }
            }
        }
    }

  free (inserted);
  return did_insert;
}
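
/* A note on the bit walk above (commentary added here, not in the
   original source): pre_insert_map[e] is scanned one SBITMAP_ELT_TYPE
   word at a time; INDX tracks the expression index of each word's low
   bit, and the inner loop shifts INSERT right so that (insert & 1)
   tests expression J.  Because the word test `insert && ...'
   short-circuits, entirely-zero words cost one comparison each.  */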

/* Copy the result of INSN to REG.
   INDX is the expression number.  */

static void
pre_insert_copy_insn (expr, insn)
     struct expr *expr;
     rtx insn;
{
  rtx reg = expr->reaching_reg;
  int regno = REGNO (reg);
  int indx = expr->bitmap_index;
  rtx set = single_set (insn);
  rtx new_insn;
  int bb = BLOCK_NUM (insn);

  if (!set)
    abort ();
  new_insn = emit_insn_after (gen_rtx_SET (VOIDmode, reg, SET_DEST (set)),
                              insn);
  /* Keep block number table up to date.  */
  set_block_num (new_insn, bb);
  /* Keep register set table up to date.  */
  record_one_set (regno, new_insn);
  if (insn == BLOCK_END (bb))
    BLOCK_END (bb) = new_insn;

  gcse_create_count++;

  if (gcse_file)
    fprintf (gcse_file,
             "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
             BLOCK_NUM (insn), INSN_UID (new_insn), indx,
             INSN_UID (insn), regno);
}

/* Copy available expressions that reach the redundant expression
   to `reaching_reg'.  */

static void
pre_insert_copies ()
{
  int i;

  /* For each available expression in the table, copy the result to
     `reaching_reg' if the expression reaches a deleted one.

     ??? The current algorithm is rather brute force.
     Need to do some profiling.  */

  for (i = 0; i < expr_hash_table_size; i++)
    {
      struct expr *expr;

      for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
        {
          struct occr *occr;

          /* If the basic block isn't reachable, PPOUT will be TRUE.
             However, we don't want to insert a copy here because the
             expression may not really be redundant.  So only insert
             an insn if the expression was deleted.
             This test also avoids further processing if the expression
             wasn't deleted anywhere.  */
          if (expr->reaching_reg == NULL)
            continue;

          for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
            {
              struct occr *avail;

              if (! occr->deleted_p)
                continue;

              for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
                {
                  rtx insn = avail->insn;

                  /* No need to handle this one if handled already.  */
                  if (avail->copied_p)
                    continue;
                  /* Don't handle this one if it's a redundant one.  */
                  if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
                    continue;
                  /* Or if the expression doesn't reach the deleted one.  */
                  if (! pre_expr_reaches_here_p (BLOCK_NUM (avail->insn), expr,
                                                 BLOCK_NUM (occr->insn), 1))
                    continue;

                  /* Copy the result of avail to reaching_reg.  */
                  pre_insert_copy_insn (expr, insn);
                  avail->copied_p = 1;
                }
            }
        }
    }
}
/* Delete redundant computations.
   Deletion is done by changing the insn to copy the `reaching_reg' of
   the expression into the result of the SET.  It is left to later passes
   (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.

   Returns non-zero if a change is made.  */

static int
pre_delete ()
{
  int i, bb, changed;
  struct expr *expr;
  struct occr *occr;

  /* Compute the expressions which are redundant and need to be replaced by
     copies from the reaching reg to the target reg.  */
  for (bb = 0; bb < n_basic_blocks; bb++)
    sbitmap_copy (temp_bitmap[bb], pre_delete_map[bb]);

  changed = 0;
  for (i = 0; i < expr_hash_table_size; i++)
    for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
      {
        int indx = expr->bitmap_index;

        /* We only need to search antic_occr since we require
           ANTLOC != 0.  */

        for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
          {
            rtx insn = occr->insn;
            rtx set;
            int bb = BLOCK_NUM (insn);

            if (TEST_BIT (temp_bitmap[bb], indx))
              {
                set = single_set (insn);
                if (! set)
                  abort ();

                /* Create a pseudo-reg to store the result of reaching
                   expressions into.  Get the mode for the new pseudo
                   from the mode of the original destination pseudo.  */
                if (expr->reaching_reg == NULL)
                  expr->reaching_reg
                    = gen_reg_rtx (GET_MODE (SET_DEST (set)));

                /* In theory this should never fail since we're creating
                   a reg->reg copy.

                   However, on the x86 some of the movXX patterns actually
                   contain clobbers of scratch regs.  This may cause the
                   insn created by validate_change to not match any pattern
                   and thus cause validate_change to fail.  */
                if (validate_change (insn, &SET_SRC (set),
                                     expr->reaching_reg, 0))
                  {
                    occr->deleted_p = 1;
                    SET_BIT (pre_redundant_insns, INSN_CUID (insn));
                    changed = 1;
                    gcse_subst_count++;
                  }

                if (gcse_file)
                  fprintf (gcse_file,
                           "PRE: redundant insn %d (expression %d) in bb %d, reaching reg is %d\n",
                           INSN_UID (insn), indx, bb,
                           REGNO (expr->reaching_reg));
              }
          }
      }

  return changed;
}
/* Perform GCSE optimizations using PRE.
   This is called by one_pre_gcse_pass after all the dataflow analysis
   has been done.

   This is based on the original Morel-Renvoise paper, Fred Chow's thesis,
   and lazy code motion from Knoop, Ruthing and Steffen as described in
   Advanced Compiler Design and Implementation.

   ??? A new pseudo reg is created to hold the reaching expression.
   The nice thing about the classical approach is that it would try to
   use an existing reg.  If the register can't be adequately optimized
   [i.e. we introduce reload problems], one could add a pass here to
   propagate the new register through the block.

   ??? We don't handle single sets in PARALLELs because we're [currently]
   not able to copy the rest of the parallel when we insert copies to create
   full redundancies from partial redundancies.  However, there's no reason
   why we can't handle PARALLELs in the cases where there are no partial
   redundancies.  */
static int
pre_gcse ()
{
  int i;
  int did_insert, changed;
  struct expr *expr;
  struct expr **index_map;

  /* Compute a mapping from expression number (`bitmap_index') to
     hash table entry.  */

  index_map = xcalloc (n_exprs, sizeof (struct expr *));
  for (i = 0; i < expr_hash_table_size; i++)
    for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
      index_map[expr->bitmap_index] = expr;

  /* Reset bitmap used to track which insns are redundant.  */
  pre_redundant_insns = sbitmap_alloc (max_cuid);
  sbitmap_zero (pre_redundant_insns);

  /* Delete the redundant insns first so that
     - we know what register to use for the new insns and for the other
       ones with reaching expressions
     - we know which insns are redundant when we go to create copies  */
  changed = pre_delete ();

  did_insert = pre_edge_insert (edge_list, index_map);
  /* In other places with reaching expressions, copy the expression to the
     specially allocated pseudo-reg that reaches the redundant expr.  */
  pre_insert_copies ();
  if (did_insert)
    {
      commit_edge_insertions ();
      changed = 1;
    }

  free (index_map);
  free (pre_redundant_insns);

  return changed;
}
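/* A small worked example of the three phases above (hypothetical
   pseudo-source, not from the compiler):

	bb1: x = a + b;		bb2: (no computation)
	          \		   /
	       bb3: y = a + b;		-- partially redundant

   pre_delete rewrites bb3 as "y = reg", pre_edge_insert places
   "reg = a + b" on the edge bb2->bb3, and pre_insert_copies appends
   "reg = x" after the computation in bb1, so the value reaches bb3 on
   every path and the expression becomes fully redundant.  */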
/* Top level routine to perform one PRE GCSE pass.

   Return non-zero if a change was made.  */

static int
one_pre_gcse_pass (pass)
     int pass;
{
  int changed = 0;

  gcse_subst_count = 0;
  gcse_create_count = 0;

  alloc_expr_hash_table (max_cuid);
  add_noreturn_fake_exit_edges ();
  compute_expr_hash_table ();
  if (gcse_file)
    dump_hash_table (gcse_file, "Expression", expr_hash_table,
                     expr_hash_table_size, n_exprs);
  if (n_exprs > 0)
    {
      alloc_pre_mem (n_basic_blocks, n_exprs);
      compute_pre_data ();
      changed |= pre_gcse ();
      free_edge_list (edge_list);
      free_pre_mem ();
    }
  remove_fake_edges ();
  free_expr_hash_table ();

  if (gcse_file)
    {
      fprintf (gcse_file, "\n");
      fprintf (gcse_file,
               "PRE GCSE of %s, pass %d: %d bytes needed, %d substs, %d insns created\n",
               current_function_name, pass,
               bytes_used, gcse_subst_count, gcse_create_count);
    }

  return changed;
}
/* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
   We have to add REG_LABEL notes, because the following loop optimization
   pass requires them.  */

/* ??? This is very similar to the loop.c add_label_notes function.  We
   could probably share code here.  */

/* ??? If there was a jump optimization pass after gcse and before loop,
   then we would not need to do this here, because jump would add the
   necessary REG_LABEL notes.  */

static void
add_label_notes (x, insn)
     rtx x;
     rtx insn;
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  char *fmt;

  if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
    {
      /* This code used to ignore labels that referred to dispatch tables to
         avoid flow generating (slightly) worse code.

         We no longer ignore such label references (see LABEL_REF handling in
         mark_jump_label for additional information).  */
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_LABEL, XEXP (x, 0),
                                            REG_NOTES (insn));
      return;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        add_label_notes (XEXP (x, i), insn);
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          add_label_notes (XVECEXP (x, i, j), insn);
    }
}
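/* For instance, given a made-up insn (set (reg 70) (label_ref L42)),
   the recursive walk above reaches the LABEL_REF operand, and the
   first arm attaches an EXPR_LIST note (REG_LABEL L42) to INSN so that
   the later loop pass can see the label reference.  */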
/* Compute transparent outgoing information for each block.

   An expression is transparent to an edge unless it is killed by
   the edge itself.  This can only happen with abnormal control flow,
   when the edge is traversed through a call.  This happens with
   non-local labels and exceptions.

   This would not be necessary if we split the edge.  While this is
   normally impossible for abnormal critical edges, with some effort
   it should be possible with exception handling, since we still have
   control over which handler should be invoked.  But due to increased
   EH table sizes, this may not be worthwhile.  */

static void
compute_transpout ()
{
  int bb;
  int i;
  struct expr *expr;

  sbitmap_vector_ones (transpout, n_basic_blocks);

  for (bb = 0; bb < n_basic_blocks; ++bb)
    {
      /* Note that flow inserted a nop at the end of basic blocks that
         end in call instructions for reasons other than abnormal
         control flow.  */
      if (GET_CODE (BLOCK_END (bb)) != CALL_INSN)
        continue;

      for (i = 0; i < expr_hash_table_size; i++)
        for (expr = expr_hash_table[i]; expr ; expr = expr->next_same_hash)
          if (GET_CODE (expr->expr) == MEM)
            {
              rtx addr = XEXP (expr->expr, 0);

              if (GET_CODE (addr) == SYMBOL_REF
                  && CONSTANT_POOL_ADDRESS_P (addr))
                continue;

              /* ??? Optimally, we would use interprocedural alias
                 analysis to determine if this mem is actually killed
                 by this call.  */
              RESET_BIT (transpout[bb], expr->bitmap_index);
            }
    }
}
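/* Example (hypothetical): in a block ending in a CALL_INSN, an
   expression such as (mem (reg 65)) is cleared from TRANSPOUT above
   because the callee may store through the pointer, whereas
   (mem (symbol_ref LC0)) with CONSTANT_POOL_ADDRESS_P set stays
   transparent, since nothing can legitimately modify the constant
   pool.  */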
/* Removal of useless null pointer checks */

/* These need to be file static for communication between
   invalidate_nonnull_info and delete_null_pointer_checks.  */
static int current_block;
static sbitmap *nonnull_local;
static sbitmap *nonnull_killed;

/* Called via note_stores.  X is set by SETTER.  If X is a register we must
   invalidate nonnull_local and set nonnull_killed.

   We ignore hard registers.  */

static void
invalidate_nonnull_info (x, setter)
     rtx x;
     rtx setter ATTRIBUTE_UNUSED;
{
  int regno;

  while (GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);

  /* Ignore anything that is not a register or is a hard register.  */
  if (GET_CODE (x) != REG
      || REGNO (x) < FIRST_PSEUDO_REGISTER)
    return;

  regno = REGNO (x);

  RESET_BIT (nonnull_local[current_block], regno);
  SET_BIT (nonnull_killed[current_block], regno);
}
/* Find EQ/NE comparisons against zero which can be (indirectly) evaluated
   at compile time.

   This is conceptually similar to global constant/copy propagation and
   classic global CSE (it even uses the same dataflow equations as cprop).

   If a register is used as memory address with the form (mem (reg)), then we
   know that REG can not be zero at that point in the program.  Any
   instruction which sets REG "kills" this property.

   So, if every path leading to a conditional branch has an available memory
   reference of that form, then we know the register can not have the value
   zero at the conditional branch.

   So we merely need to compute the local properties and propagate that data
   around the cfg, then optimize where possible.

   We run this pass two times.  Once before CSE, then again after CSE.  This
   has proven to be the most profitable approach.  It is rare for new
   optimization opportunities of this nature to appear after the first CSE
   pass.

   This could probably be integrated with global cprop with a little work.  */
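/* A sketch of the transformation (hypothetical RTL, register numbers
   made up):

	(set (reg 65) (mem (reg 64)))	-- (mem (reg 64)) proves reg 64
					   is nonnull past this point
	...reg 64 is not set again...
	(set (pc) (if_then_else (eq (reg 64) (const_int 0)) ...))

   Once the availability computation below shows the nonnull property
   for reg 64 reaching the comparison on every path, the EQ branch can
   never be taken and the compare/branch is deleted; for an NE
   comparison the branch is instead made unconditional.  */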
static void
delete_null_pointer_checks (f)
     rtx f;
{
  int_list_ptr *s_preds, *s_succs;
  int *num_preds, *num_succs;
  int changed, bb;
  sbitmap *nonnull_avin, *nonnull_avout;

  /* First break the program into basic blocks.  */
  find_basic_blocks (f, max_reg_num (), NULL, 1);

  /* If we have only a single block, then there's nothing to do.  */
  if (n_basic_blocks <= 1)
    {
      /* Free storage allocated by find_basic_blocks.  */
      free_basic_block_vars (0);
      return;
    }

  /* Trying to perform global optimizations on flow graphs which have
     a high connectivity will take a long time and is unlikely to be
     particularly useful.

     In normal circumstances a cfg should have about twice as many edges
     as blocks.  But we do not want to punish small functions which have
     a couple switch statements.  So we require a relatively large number
     of basic blocks and the ratio of edges to blocks to be high.  */
  if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
    {
      /* Free storage allocated by find_basic_blocks.  */
      free_basic_block_vars (0);
      return;
    }
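  /* For scale (illustrative arithmetic): with this heuristic a flow
     graph of 1500 blocks is only abandoned once it has 30000 or more
     edges; a typical cfg has roughly 2 * n_basic_blocks edges and is
     unaffected.  */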
  /* We need predecessor/successor lists as well as pred/succ counts for
     each basic block.  */
  s_preds = (int_list_ptr *) gmalloc (n_basic_blocks * sizeof (int_list_ptr));
  s_succs = (int_list_ptr *) gmalloc (n_basic_blocks * sizeof (int_list_ptr));
  num_preds = (int *) gmalloc (n_basic_blocks * sizeof (int));
  num_succs = (int *) gmalloc (n_basic_blocks * sizeof (int));
  compute_preds_succs (s_preds, s_succs, num_preds, num_succs);

  /* Allocate bitmaps to hold local and global properties.  */
  nonnull_local = sbitmap_vector_alloc (n_basic_blocks, max_reg_num ());
  nonnull_killed = sbitmap_vector_alloc (n_basic_blocks, max_reg_num ());
  nonnull_avin = sbitmap_vector_alloc (n_basic_blocks, max_reg_num ());
  nonnull_avout = sbitmap_vector_alloc (n_basic_blocks, max_reg_num ());
  /* Compute local properties, nonnull and killed.  A register will have
     the nonnull property if at the end of the current block its value is
     known to be nonnull.  The killed property indicates that somewhere in
     the block any information we had about the register is killed.

     Note that a register can have both properties in a single block.  That
     indicates that it's killed, then later in the block a new value is
     computed.  */
  sbitmap_vector_zero (nonnull_local, n_basic_blocks);
  sbitmap_vector_zero (nonnull_killed, n_basic_blocks);
  for (current_block = 0; current_block < n_basic_blocks; current_block++)
    {
      rtx insn, stop_insn;

      /* Scan each insn in the basic block looking for memory references and
         register sets.  */
      stop_insn = NEXT_INSN (BLOCK_END (current_block));
      for (insn = BLOCK_HEAD (current_block);
           insn != stop_insn;
           insn = NEXT_INSN (insn))
        {
          rtx set;

          /* Ignore anything that is not a normal insn.  */
          if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
            continue;

          /* Basically ignore anything that is not a simple SET.  We do have
             to make sure to invalidate nonnull_local and set nonnull_killed
             for such insns though.  */
          set = single_set (insn);
          if (!set)
            {
              note_stores (PATTERN (insn), invalidate_nonnull_info);
              continue;
            }

          /* See if we've got a usable memory load.  We handle it first
             in case it uses its address register as a dest (which kills
             the nonnull property).  */
          if (GET_CODE (SET_SRC (set)) == MEM
              && GET_CODE (XEXP (SET_SRC (set), 0)) == REG
              && REGNO (XEXP (SET_SRC (set), 0)) >= FIRST_PSEUDO_REGISTER)
            SET_BIT (nonnull_local[current_block],
                     REGNO (XEXP (SET_SRC (set), 0)));

          /* Now invalidate stuff clobbered by this insn.  */
          note_stores (PATTERN (insn), invalidate_nonnull_info);

          /* And handle stores, we do these last since any sets in INSN can
             not kill the nonnull property if it is derived from a MEM
             appearing in a SET_DEST.  */
          if (GET_CODE (SET_DEST (set)) == MEM
              && GET_CODE (XEXP (SET_DEST (set), 0)) == REG)
            SET_BIT (nonnull_local[current_block],
                     REGNO (XEXP (SET_DEST (set), 0)));
        }
    }
  /* Now compute global properties based on the local properties.  This
     is a classic global availability algorithm.  */
  sbitmap_zero (nonnull_avin[0]);
  sbitmap_vector_ones (nonnull_avout, n_basic_blocks);

  changed = 1;
  while (changed)
    {
      changed = 0;

      for (bb = 0; bb < n_basic_blocks; bb++)
        {
          if (bb != 0)
            sbitmap_intersect_of_predecessors (nonnull_avin[bb],
                                               nonnull_avout, bb, s_preds);

          changed |= sbitmap_union_of_diff (nonnull_avout[bb],
                                            nonnull_local[bb],
                                            nonnull_avin[bb],
                                            nonnull_killed[bb]);
        }
    }
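/* In dataflow terms the iteration above solves, for each block B,

	AVIN(B)  = intersection over predecessors P of AVOUT(P)
	AVOUT(B) = LOCAL(B) | (AVIN(B) & ~KILLED(B))

   with AVIN of the entry block forced to the empty set, iterated until
   no AVOUT changes; sbitmap_union_of_diff evaluates the second
   equation for one block per call.  */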
  /* Now look at each bb and see if it ends with a compare of a value
     against zero.  */
  for (bb = 0; bb < n_basic_blocks; bb++)
    {
      rtx last_insn = BLOCK_END (bb);
      rtx condition, earliest, reg;
      int compare_and_branch;

      /* We only want conditional branches.  */
      if (GET_CODE (last_insn) != JUMP_INSN
          || !condjump_p (last_insn)
          || simplejump_p (last_insn))
        continue;

      /* LAST_INSN is a conditional jump.  Get its condition.  */
      condition = get_condition (last_insn, &earliest);

      /* If we were unable to get the condition, or it is not an equality
         comparison against zero then there's nothing we can do.  */
      if (!condition
          || (GET_CODE (condition) != NE && GET_CODE (condition) != EQ)
          || GET_CODE (XEXP (condition, 1)) != CONST_INT
          || XEXP (condition, 1) != CONST0_RTX (GET_MODE (XEXP (condition, 0))))
        continue;

      /* We must be checking a register against zero.  */
      reg = XEXP (condition, 0);
      if (GET_CODE (reg) != REG)
        continue;

      /* Is the register known to have a nonzero value?  */
      if (!TEST_BIT (nonnull_avout[bb], REGNO (reg)))
        continue;

      /* Try to compute whether the compare/branch at the loop end is one or
         two instructions.  */
      if (earliest == last_insn)
        compare_and_branch = 1;
      else if (earliest == prev_nonnote_insn (last_insn))
        compare_and_branch = 2;
      else
        continue;

      /* We know the register in this comparison is nonnull at exit from
         this block.  We can optimize this comparison.  */
      if (GET_CODE (condition) == NE)
        {
          rtx new_jump;

          new_jump = emit_jump_insn_before (gen_jump (JUMP_LABEL (last_insn)),
                                            last_insn);
          JUMP_LABEL (new_jump) = JUMP_LABEL (last_insn);
          LABEL_NUSES (JUMP_LABEL (new_jump))++;
          emit_barrier_after (new_jump);
        }
      delete_insn (last_insn);
      if (compare_and_branch == 2)
        delete_insn (earliest);
    }
  /* Free storage allocated by find_basic_blocks.  */
  free_basic_block_vars (0);

  /* Free our local predecessor/successor lists.  */
  free (s_preds);
  free (s_succs);
  free (num_preds);
  free (num_succs);

  /* Free bitmaps.  */
  free (nonnull_local);
  free (nonnull_killed);
  free (nonnull_avin);
  free (nonnull_avout);
}
/* Code Hoisting variables and subroutines.  */

/* Very busy expressions.  */
static sbitmap *hoist_vbein;
static sbitmap *hoist_vbeout;

/* Hoistable expressions.  */
static sbitmap *hoist_exprs;

/* Dominator bitmaps.  */
static sbitmap *dominators;
static sbitmap *post_dominators;

/* ??? We could compute post dominators and run this algorithm in
   reverse to perform tail merging, doing so would probably be
   more effective than the tail merging code in jump.c.

   It's unclear if tail merging could be run in parallel with
   code hoisting.  It would be nice.  */
/* Allocate vars used for code hoisting analysis.  */

static void
alloc_code_hoist_mem (n_blocks, n_exprs)
     int n_blocks, n_exprs;
{
  antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
  transp = sbitmap_vector_alloc (n_blocks, n_exprs);
  comp = sbitmap_vector_alloc (n_blocks, n_exprs);

  hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
  hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
  hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
  transpout = sbitmap_vector_alloc (n_blocks, n_exprs);

  dominators = sbitmap_vector_alloc (n_blocks, n_blocks);
  post_dominators = sbitmap_vector_alloc (n_blocks, n_blocks);
}
/* Free vars used for code hoisting analysis.  */

static void
free_code_hoist_mem ()
{
  free (antloc);
  free (transp);
  free (comp);

  free (hoist_vbein);
  free (hoist_vbeout);
  free (hoist_exprs);
  free (transpout);

  free (dominators);
  free (post_dominators);
}
/* Compute the very busy expressions at entry/exit from each block.

   An expression is very busy if all paths from a given point
   compute the expression.  */

static void
compute_code_hoist_vbeinout ()
{
  int bb, changed, passes;

  sbitmap_vector_zero (hoist_vbeout, n_basic_blocks);
  sbitmap_vector_zero (hoist_vbein, n_basic_blocks);

  passes = 0;
  changed = 1;
  while (changed)
    {
      changed = 0;

      /* We scan the blocks in the reverse order to speed up
         the convergence.  */
      for (bb = n_basic_blocks - 1; bb >= 0; bb--)
        {
          changed |= sbitmap_a_or_b_and_c (hoist_vbein[bb], antloc[bb],
                                           hoist_vbeout[bb], transp[bb]);
          if (bb != n_basic_blocks - 1)
            sbitmap_intersection_of_succs (hoist_vbeout[bb],
                                           hoist_vbein, bb);
        }
      passes++;
    }

  if (gcse_file)
    fprintf (gcse_file, "hoisting vbeinout computation: %d passes\n", passes);
}
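/* The system solved above is the classic "very busy" (anticipatable)
   one, the backwards mirror image of availability:

	VBEIN(B)  = ANTLOC(B) | (VBEOUT(B) & TRANSP(B))
	VBEOUT(B) = intersection over successors S of VBEIN(S)

   with VBEOUT of the last block left empty; sbitmap_a_or_b_and_c
   evaluates the first equation for one block per call.  */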
/* Top level routine to do the dataflow analysis needed by code hoisting.  */

static void
compute_code_hoist_data ()
{
  compute_local_properties (transp, comp, antloc, 0);
  compute_transpout ();
  compute_code_hoist_vbeinout ();
  compute_flow_dominators (dominators, post_dominators);
  if (gcse_file)
    fprintf (gcse_file, "\n");
}
/* Determine if the expression identified by EXPR_INDEX would
   reach BB unimpaired if it was placed at the end of EXPR_BB.

   It's unclear exactly what Muchnick meant by "unimpaired".  It seems
   to me that the expression must either be computed or transparent in
   *every* block in the path(s) from EXPR_BB to BB.  Any other definition
   would allow the expression to be hoisted out of loops, even if
   the expression wasn't a loop invariant.

   Contrast this to reachability for PRE where an expression is
   considered reachable if *any* path reaches instead of *all*
   paths.  */

static int
hoist_expr_reaches_here_p (expr_bb, expr_index, bb, visited)
     int expr_bb;
     int expr_index;
     int bb;
     char *visited;
{
  edge pred;
  int visited_allocated_locally = 0;

  if (visited == NULL)
    {
      visited_allocated_locally = 1;
      visited = xcalloc (n_basic_blocks, 1);
    }

  visited[expr_bb] = 1;
  for (pred = BASIC_BLOCK (bb)->pred; pred != NULL; pred = pred->pred_next)
    {
      int pred_bb = pred->src->index;

      if (pred->src == ENTRY_BLOCK_PTR)
        break;
      else if (visited[pred_bb])
        continue;
      /* Does this predecessor generate this expression?  */
      else if (TEST_BIT (comp[pred_bb], expr_index))
        continue;
      else if (! TEST_BIT (transp[pred_bb], expr_index))
        break;
      /* Not killed; continue the search through this predecessor.  */
      else
        {
          visited[pred_bb] = 1;
          if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
                                           pred_bb, visited))
            break;
        }
    }
  if (visited_allocated_locally)
    free (visited);

  return (pred == NULL);
}
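/* Illustration (hypothetical CFG): suppose EXPR_BB reaches BB along two
   paths, EXPR_BB -> B1 -> BB and EXPR_BB -> B2 -> BB, and the
   expression is transparent in B1 but killed in B2.  The walk above
   breaks out at the transp test for B2, leaving PRED non-null, so the
   function returns zero -- whereas PRE's any-path notion of
   reachability would have accepted the B1 path alone.  */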
/* Actually perform code hoisting.  */

static void
hoist_code ()
{
  int bb, dominated, i;
  struct expr **index_map;
  struct expr *expr;

  sbitmap_vector_zero (hoist_exprs, n_basic_blocks);

  /* Compute a mapping from expression number (`bitmap_index') to
     hash table entry.  */

  index_map = xcalloc (n_exprs, sizeof (struct expr *));
  for (i = 0; i < expr_hash_table_size; i++)
    for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
      index_map[expr->bitmap_index] = expr;

  /* Walk over each basic block looking for potentially hoistable
     expressions, nothing gets hoisted from the entry block.  */
  for (bb = 0; bb < n_basic_blocks; bb++)
    {
      int found = 0;
      int insn_inserted_p;

      /* Examine each expression that is very busy at the exit of this
         block.  These are the potentially hoistable expressions.  */
      for (i = 0; i < hoist_vbeout[bb]->n_bits; i++)
        {
          int hoistable = 0;

          if (TEST_BIT (hoist_vbeout[bb], i)
              && TEST_BIT (transpout[bb], i))
            {
              /* We've found a potentially hoistable expression, now
                 we look at every block BB dominates to see if it
                 computes the expression.  */
              for (dominated = 0; dominated < n_basic_blocks; dominated++)
                {
                  /* Ignore self dominance.  */
                  if (bb == dominated
                      || ! TEST_BIT (dominators[dominated], bb))
                    continue;

                  /* We've found a dominated block, now see if it computes
                     the busy expression and whether or not moving that
                     expression to the "beginning" of that block is safe.  */
                  if (!TEST_BIT (antloc[dominated], i))
                    continue;

                  /* Note if the expression would reach the dominated block
                     unimpaired if it was placed at the end of BB.

                     Keep track of how many times this expression is
                     hoistable from a dominated block into BB.  */
                  if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
                    hoistable++;
                }

              /* If we found more than one hoistable occurrence of this
                 expression, then note it in the bitmap of expressions to
                 hoist.  It makes no sense to hoist things which are computed
                 in only one BB, and doing so tends to pessimize register
                 allocation.  One could increase this value to try harder
                 to avoid any possible code expansion due to register
                 allocation issues; however experiments have shown that
                 the vast majority of hoistable expressions are only movable
                 from two successors, so raising this threshold is likely
                 to nullify any benefit we get from code hoisting.  */
              if (hoistable > 1)
                {
                  SET_BIT (hoist_exprs[bb], i);
                  found = 1;
                }
            }
        }

      /* If we found nothing to hoist, then quit now.  */
      if (! found)
        continue;

      /* Loop over all the hoistable expressions.  */
      for (i = 0; i < hoist_exprs[bb]->n_bits; i++)
        {
          /* We want to insert the expression into BB only once, so
             note when we've inserted it.  */
          insn_inserted_p = 0;

          /* These tests should be the same as the tests above.  */
          if (TEST_BIT (hoist_vbeout[bb], i))
            {
              /* We've found a potentially hoistable expression, now
                 we look at every block BB dominates to see if it
                 computes the expression.  */
              for (dominated = 0; dominated < n_basic_blocks; dominated++)
                {
                  /* Ignore self dominance.  */
                  if (bb == dominated
                      || ! TEST_BIT (dominators[dominated], bb))
                    continue;

                  /* We've found a dominated block, now see if it computes
                     the busy expression and whether or not moving that
                     expression to the "beginning" of that block is safe.  */
                  if (!TEST_BIT (antloc[dominated], i))
                    continue;

                  /* The expression is computed in the dominated block and
                     it would be safe to compute it at the start of the
                     dominated block.  Now we have to determine if the
                     expression would reach the dominated block if it was
                     placed at the end of BB.  */
                  if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
                    {
                      struct expr *expr = index_map[i];
                      struct occr *occr = expr->antic_occr;
                      rtx insn;
                      rtx set;

                      /* Find the right occurrence of this expression.  */
                      while (occr && BLOCK_NUM (occr->insn) != dominated)
                        occr = occr->next;

                      /* Should never happen.  */
                      if (!occr)
                        abort ();

                      insn = occr->insn;

                      set = single_set (insn);
                      if (! set)
                        abort ();

                      /* Create a pseudo-reg to store the result of reaching
                         expressions into.  Get the mode for the new pseudo
                         from the mode of the original destination pseudo.  */
                      if (expr->reaching_reg == NULL)
                        expr->reaching_reg
                          = gen_reg_rtx (GET_MODE (SET_DEST (set)));

                      /* In theory this should never fail since we're creating
                         a reg->reg copy.

                         However, on the x86 some of the movXX patterns
                         actually contain clobbers of scratch regs.  This may
                         cause the insn created by validate_change to not
                         match any pattern and thus cause validate_change to
                         fail.  */
                      if (validate_change (insn, &SET_SRC (set),
                                           expr->reaching_reg, 0))
                        {
                          occr->deleted_p = 1;
                          if (!insn_inserted_p)
                            {
                              insert_insn_end_bb (index_map[i], bb, 0);
                              insn_inserted_p = 1;
                            }
                        }
                    }
                }
            }
        }
    }

  free (index_map);
}
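/* A minimal hoisting example (hypothetical source):

	bb1: if (...) goto bb3;
	bb2: x = a * b; goto bb4;
	bb3: y = a * b;

   "a * b" is very busy at the exit of bb1 and hoistable from both bb2
   and bb3 (hoistable == 2), so a single computation into a new pseudo
   is inserted at the end of bb1 and the two original computations are
   turned into copies from that pseudo.  */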
/* Top level routine to perform one code hoisting (aka unification) pass.

   Return non-zero if a change was made.  */

static int
one_code_hoisting_pass ()
{
  int changed = 0;

  alloc_expr_hash_table (max_cuid);
  compute_expr_hash_table ();
  if (gcse_file)
    dump_hash_table (gcse_file, "Code Hoisting Expressions", expr_hash_table,
                     expr_hash_table_size, n_exprs);
  if (n_exprs > 0)
    {
      alloc_code_hoist_mem (n_basic_blocks, n_exprs);
      compute_code_hoist_data ();
      hoist_code ();
      free_code_hoist_mem ();
    }
  free_expr_hash_table ();

  return changed;
}