/* Global common subexpression elimination/Partial redundancy elimination
   and global constant/copy propagation for GNU compiler.
   Copyright (C) 1997, 1998, 1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

/* TODO
   - reordering of memory allocation and freeing to be more space efficient
   - do rough calc of how many regs are needed in each block, and a rough
     calc of how many regs are available in each class and use that to
     throttle back the code in cases where RTX_COST is minimal.
   - dead store elimination
   - a store to the same address as a load does not kill the load if the
     source of the store is also the destination of the load.  Handling this
     allows more load motion, particularly out of loops.
   - ability to realloc sbitmap vectors would allow one initial computation
     of reg_set_in_block with only subsequent additions, rather than
     recomputing it for each pass
*/

/* References searched while implementing this.

   Compilers Principles, Techniques and Tools
   Aho, Sethi, Ullman
   Addison-Wesley, 1988

   Global Optimization by Suppression of Partial Redundancies
   E. Morel, C. Renvoise
   Communications of the ACM, Vol. 22, Num. 2, Feb. 1979

   A Portable Machine-Independent Global Optimizer - Design and Measurements
   Frederick Chow
   Stanford Ph.D. thesis, Dec. 1983

   A Fast Algorithm for Code Movement Optimization
   D.M. Dhamdhere
   SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988

   A Solution to a Problem with Morel and Renvoise's
   Global Optimization by Suppression of Partial Redundancies
   K-H Drechsler, M.P. Stadel
   ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988

   Practical Adaptation of the Global Optimization
   Algorithm of Morel and Renvoise
   D.M. Dhamdhere
   ACM TOPLAS, Vol. 13, Num. 2, Apr. 1991

   Efficiently Computing Static Single Assignment Form and the Control
   Dependence Graph
   R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
   ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991

   Lazy Code Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   What's In a Region?  Or Computing Control Dependence Regions in Near-Linear
   Time for Reducible Flow Control
   Thomas Ball
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   An Efficient Representation for Sparse Sets
   Preston Briggs, Linda Torczon
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
   K-H Drechsler, M.P. Stadel
   ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993

   Partial Dead Code Elimination
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Effective Partial Redundancy Elimination
   P. Briggs, K.D. Cooper
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   The Program Structure Tree: Computing Control Regions in Linear Time
   R. Johnson, D. Pearson, K. Pingali
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Optimal Code Motion: Theory and Practice
   J. Knoop, O. Ruthing, B. Steffen
   ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994

   The power of assignment motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Global code motion / global value numbering
   C. Click
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Value Driven Redundancy Elimination
   L.T. Simpson
   Rice University Ph.D. thesis, Apr. 1996

   Value Numbering
   L.T. Simpson
   Massively Scalar Compiler Project, Rice University, Sep. 1996

   High Performance Compilers for Parallel Computing
   Michael Wolfe
   Addison-Wesley, 1996

   Advanced Compiler Design and Implementation
   Steven Muchnick
   Morgan Kaufmann, 1997

   Building an Optimizing Compiler
   Robert Morgan
   Digital Press, 1998

   People wishing to speed up the code here should read:
     Elimination Algorithms for Data Flow Analysis
     B.G. Ryder, M.C. Paull
     ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986

     How to Analyze Large Programs Efficiently and Informatively
     D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
     ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   People wishing to do something different can find various possibilities
   in the above papers and elsewhere.
*/

#include "hard-reg-set.h"
#include "insn-config.h"
#include "basic-block.h"
#include "function.h"

#define obstack_chunk_alloc gmalloc
#define obstack_chunk_free free

/* Maximum number of passes to perform.  */
#define MAX_PASSES 1

/* Propagate flow information through back edges and thus enable PRE's
   moving loop invariant calculations out of loops.

   Originally this tended to create worse overall code, but several
   improvements during the development of PRE seem to have made following
   back edges generally a win.

   Note much of the loop invariant code motion done here would normally
   be done by loop.c, which has more heuristics for when to move invariants
   out of loops.  At some point we might need to move some of those
   heuristics into gcse.c.  */

#define FOLLOW_BACK_EDGES 1

/* We support GCSE via Partial Redundancy Elimination.  PRE optimizations
   are a superset of those done by GCSE.

   We perform the following steps:

   1) Compute basic block information.

   2) Compute table of places where registers are set.

   3) Perform copy/constant propagation.

   4) Perform global cse.

   5) Perform another pass of copy/constant propagation.

   Two passes of copy/constant propagation are done because the first one
   enables more GCSE and the second one helps to clean up the copies that
   GCSE creates.  This is needed more for PRE than for Classic because Classic
   GCSE will try to use an existing register containing the common
   subexpression rather than create a new one.  This is harder to do for PRE
   because of the code motion (which Classic GCSE doesn't do).

   Expressions we are interested in GCSE-ing are of the form
   (set (pseudo-reg) (expression)).
   Function want_to_gcse_p says what these are.
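
   For example (an illustrative sketch, not from the original text): an
   insn such as

       (set (reg:SI 105) (plus:SI (reg:SI 100) (reg:SI 101)))

   is a candidate, while a set of a hard register or of a MEM destination
   is not.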

   PRE handles moving invariant expressions out of loops (by treating them as
   partially redundant).
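
   As an illustration (a sketch only): a computation such as `x + y' inside
   a loop, with `x' and `y' unchanged by the loop, looks partially redundant
   once back edges are followed; PRE then inserts `t = x + y' before the
   loop and the in-loop computation reuses `t'.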

   Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
   assignment) based GVN (global value numbering).  L. T. Simpson's paper
   (Rice University) on value numbering is a useful reference for this.

   **********************

   We used to support multiple passes but there are diminishing returns in
   doing so.  The first pass usually makes 90% of the changes that are doable.
   A second pass can make a few more changes made possible by the first pass.
   Experiments show any further passes don't make enough changes to justify
   the expense.

   A study of spec92 using an unlimited number of passes:
   [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
   [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
   [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1

   It was found doing copy propagation between each pass enables further
   substitutions.

   PRE is quite expensive in complicated functions because the DFA can take
   a while to converge.  Hence we only perform one pass.  Macro MAX_PASSES
   can be modified if one wants to experiment.

   **********************

   The steps for PRE are:

   1) Build the hash table of expressions we wish to GCSE (expr_hash_table).

   2) Perform the data flow analysis for PRE.

   3) Delete the redundant instructions.

   4) Insert the required copies [if any] that make the partially
      redundant instructions fully redundant.

   5) For other reaching expressions, insert an instruction to copy the value
      to a newly created pseudo that will reach the redundant instruction.

   The deletion is done first so that when we do insertions we
   know which pseudo reg to use.
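
   A sketch of the transformation (illustrative only): if `x + y' is
   computed on just one path into a join block,

       if (p)
         a = x + y;
       ...
       b = x + y;   <- partially redundant

   an insertion of `x + y' on the path that lacked it makes the second
   computation fully redundant; its value then reaches `b' through a
   newly created pseudo.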

   Various papers have argued that PRE DFA is expensive (O(n^2)) and others
   argue it is not.  The number of iterations for the algorithm to converge
   is typically 2-4 so I don't view it as that expensive (relatively speaking).

   PRE GCSE depends heavily on the second CSE pass to clean up the copies
   we create.  To make an expression reach the place where it's redundant,
   the result of the expression is copied to a new register, and the redundant
   expression is deleted by replacing it with this new register.  Classic GCSE
   doesn't have this problem as much as it computes the reaching defs of
   each register in each block and thus can try to use an existing register.

   **********************

   A fair bit of simplicity is created by creating small functions for simple
   tasks, even when the function is only called in one place.  This may
   measurably slow things down [or may not] by creating more function call
   overhead than is necessary.  The source is laid out so that it's trivial
   to make the affected functions inline so that one can measure what speed
   up, if any, can be achieved, and maybe later when things settle things can
   be rearranged.

   Help stamp out big monolithic functions!  */

/* GCSE global vars.  */

static FILE *gcse_file;

/* Note whether or not we should run jump optimization after gcse.  We
   want to do this for two cases.

    * If we changed any jumps via cprop.

    * If we added any labels via edge splitting.  */

static int run_jump_opt_after_gcse;

/* Bitmaps are normally not included in debugging dumps.
   However it's useful to be able to print them from GDB.
   We could create special functions for this, but it's simpler to
   just allow passing stderr to the dump_foo fns.  Since stderr can
   be a macro, we store a copy here.  */
static FILE *debug_stderr;

/* An obstack for our working variables.  */
static struct obstack gcse_obstack;

/* Non-zero for each mode that supports (set (reg) (reg)).
   This is trivially true for integer and floating point values.
   It may or may not be true for condition codes.  */
static char can_copy_p[(int) NUM_MACHINE_MODES];

/* Non-zero if can_copy_p has been initialized.  */
static int can_copy_init_p;

struct reg_use {rtx reg_rtx; };

/* Hash table of expressions.  */

struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;
  /* Index in the available expression bitmaps.  */
  int bitmap_index;
  /* Next entry with the same hash.  */
  struct expr *next_same_hash;
  /* List of anticipatable occurrences in basic blocks in the function.
     An "anticipatable occurrence" is one that is the first occurrence in the
     basic block, the operands are not modified in the basic block prior
     to the occurrence and the output is not used between the start of
     the block and the occurrence.  */
  struct occr *antic_occr;
  /* List of available occurrences in basic blocks in the function.
     An "available occurrence" is one that is the last occurrence in the
     basic block and the operands are not modified by following statements in
     the basic block [including this insn].  */
  struct occr *avail_occr;
  /* Non-null if the computation is PRE redundant.
     The value is the newly created pseudo-reg to record a copy of the
     expression in all the places that reach the redundant copy.  */
  rtx reaching_reg;
};

/* Occurrence of an expression.
   There is one per basic block.  If a pattern appears more than once the
   last appearance is used [or first for anticipatable expressions].  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Non-zero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
  /* Non-zero if this [available] occurrence has been copied to
     reaching_reg.  */
  /* ??? This is mutually exclusive with deleted_p, so they could share
     the same byte.  */
  char copied_p;
};

/* Expression and copy propagation hash tables.
   Each hash table is an array of buckets.
   ??? It is known that if it were an array of entries, structure elements
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
   not clear whether in the final analysis a sufficient amount of memory would
   be saved as the size of the available expression bitmaps would be larger
   [one could build a mapping table without holes afterwards though].
   Someday I'll perform the computation and figure it out.  */

/* Total size of the expression hash table, in elements.  */
static int expr_hash_table_size;

/* The table itself.
   This is an array of `expr_hash_table_size' elements.  */
static struct expr **expr_hash_table;

/* Total size of the copy propagation hash table, in elements.  */
static int set_hash_table_size;

/* The table itself.
   This is an array of `set_hash_table_size' elements.  */
static struct expr **set_hash_table;

/* Mapping of uids to cuids.
   Only real insns get cuids.  */
static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Number of cuids.  */
static int max_cuid;

/* Mapping of cuids to insns.  */
static rtx *cuid_insn;

/* Get insn from cuid.  */
#define CUID_INSN(CUID) (cuid_insn[CUID])
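
/* Illustrative sketch (not from the original sources): because cuids are
   dense and monotonic, visiting all real insns in order is a simple
   counted loop,

       int i;
       for (i = 0; i < max_cuid; i++)
         process (CUID_INSN (i));

   whereas INSN_UIDs may have gaps and also number notes and barriers.
   `process' is a hypothetical callback used only for illustration.  */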

/* Maximum register number in function prior to doing gcse + 1.
   Registers created during this pass have regno >= max_gcse_regno.
   This is named with "gcse" to not collide with global of same name.  */
static unsigned int max_gcse_regno;

/* Maximum number of cse-able expressions found.  */
static int n_exprs;

/* Maximum number of assignments for copy propagation found.  */
static int n_sets;

/* Table of registers that are modified.

   For each register, each element is a list of places where the pseudo-reg
   is set.

   For simplicity, GCSE is done on sets of pseudo-regs only.  PRE GCSE only
   requires knowledge of which blocks kill which regs [and thus could use
   a bitmap instead of the lists `reg_set_table' uses].

   `reg_set_table' could be turned into an array of bitmaps (num-bbs x
   num-regs) [however perhaps it may be useful to keep the data as is].  One
   advantage of recording things this way is that `reg_set_table' is fairly
   sparse with respect to pseudo regs but for hard regs could be fairly dense
   [relatively speaking].  And recording sets of pseudo-regs in lists speeds
   up functions like compute_transp since in the case of pseudo-regs we only
   need to iterate over the number of times a pseudo-reg is set, not over the
   number of basic blocks [clearly there is a bit of a slow down in the cases
   where a pseudo is set more than once in a block, however it is believed
   that the net effect is to speed things up].  This isn't done for hard-regs
   because recording call-clobbered hard-regs in `reg_set_table' at each
   function call can consume a fair bit of memory, and iterating over
   hard-regs stored this way in compute_transp will be more expensive.  */

typedef struct reg_set
{
  /* The next setting of this register.  */
  struct reg_set *next;
  /* The insn where it was set.  */
  rtx insn;
} reg_set;

static reg_set **reg_set_table;
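
/* Illustrative sketch (not part of the pass): visiting every place
   pseudo REGNO is set is a walk of its chain,

       reg_set *r;
       for (r = reg_set_table[regno]; r != NULL; r = r->next)
         visit (r->insn);

   `visit' is a hypothetical callback used only for illustration.  */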

/* Size of `reg_set_table'.
   The table starts out at max_gcse_regno + slop, and is enlarged as
   necessary.  */
static int reg_set_table_size;

/* Amount to grow `reg_set_table' by when it's full.  */
#define REG_SET_TABLE_SLOP 100

/* Bitmap containing one bit for each register in the program.
   Used when performing GCSE to track which registers have been set since
   the start of the basic block.  */
static sbitmap reg_set_bitmap;

/* For each block, a bitmap of registers set in the block.
   This is used by expr_killed_p and compute_transp.
   It is computed during hash table computation and not by compute_sets
   as it includes registers added since the last pass (or between cprop and
   gcse) and it's currently not easy to realloc sbitmap vectors.  */
static sbitmap *reg_set_in_block;

/* For each block, non-zero if memory is set in that block.
   This is computed during hash table computation and is used by
   expr_killed_p and compute_transp.
   ??? Handling of memory is very simple, we don't make any attempt
   to optimize things (later).
   ??? This can be computed by compute_sets since the information
   doesn't change.  */
static char *mem_set_in_block;

/* Various variables for statistics gathering.  */

/* Memory used in a pass.
   This isn't intended to be absolutely precise.  Its intent is only
   to keep an eye on memory usage.  */
static int bytes_used;

/* GCSE substitutions made.  */
static int gcse_subst_count;
/* Number of copy instructions created.  */
static int gcse_create_count;
/* Number of constants propagated.  */
static int const_prop_count;
/* Number of copies propagated.  */
static int copy_prop_count;

/* These variables are used by classic GCSE.
   Normally they'd be defined a bit later, but `rd_gen' needs to
   be declared sooner.  */

/* A bitmap of all ones for implementing the algorithm for available
   expressions and reaching definitions.  */
/* ??? Available expression bitmaps have a different size than reaching
   definition bitmaps.  This should be the larger of the two, however, it
   is not currently used for reaching definitions.  */
static sbitmap u_bitmap;

/* Each block has a bitmap of each type.
   The length of each block's bitmap is:

       max_cuid  - for reaching definitions
       n_exprs   - for available expressions

   Thus we view the bitmaps as 2 dimensional arrays.  i.e.
   rd_kill[block_num][cuid_num]
   ae_kill[block_num][expr_num]  */
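
/* For example (illustrative): testing whether the expression with index
   EXPR_NUM is killed in block BB is TEST_BIT (ae_kill[bb], expr_num),
   using the ordinary sbitmap accessors.  */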

/* For reaching defs */
static sbitmap *rd_kill, *rd_gen, *reaching_defs, *rd_out;

/* for available exprs */
static sbitmap *ae_kill, *ae_gen, *ae_in, *ae_out;

/* Objects of this type are passed around by the null-pointer check
   removal routines.  */
struct null_pointer_info
{
  /* The basic block being processed.  */
  int current_block;
  /* The first register to be handled in this pass.  */
  unsigned int min_reg;
  /* One greater than the last register to be handled in this pass.  */
  unsigned int max_reg;
  sbitmap *nonnull_local;
  sbitmap *nonnull_killed;
};

static void compute_can_copy PARAMS ((void));
static char *gmalloc PARAMS ((unsigned int));
static char *grealloc PARAMS ((char *, unsigned int));
static char *gcse_alloc PARAMS ((unsigned long));
static void alloc_gcse_mem PARAMS ((rtx));
static void free_gcse_mem PARAMS ((void));
static void alloc_reg_set_mem PARAMS ((int));
static void free_reg_set_mem PARAMS ((void));
static int get_bitmap_width PARAMS ((int, int, int));
static void record_one_set PARAMS ((int, rtx));
static void record_set_info PARAMS ((rtx, rtx, void *));
static void compute_sets PARAMS ((rtx));
static void hash_scan_insn PARAMS ((rtx, int, int));
static void hash_scan_set PARAMS ((rtx, rtx, int));
static void hash_scan_clobber PARAMS ((rtx, rtx));
static void hash_scan_call PARAMS ((rtx, rtx));
static int want_to_gcse_p PARAMS ((rtx));
static int oprs_unchanged_p PARAMS ((rtx, rtx, int));
static int oprs_anticipatable_p PARAMS ((rtx, rtx));
static int oprs_available_p PARAMS ((rtx, rtx));
static void insert_expr_in_table PARAMS ((rtx, enum machine_mode, rtx,
					  int, int));
static void insert_set_in_table PARAMS ((rtx, rtx));
static unsigned int hash_expr PARAMS ((rtx, enum machine_mode, int *, int));
static unsigned int hash_expr_1 PARAMS ((rtx, enum machine_mode, int *));
static unsigned int hash_set PARAMS ((int, int));
static int expr_equiv_p PARAMS ((rtx, rtx));
static void record_last_reg_set_info PARAMS ((rtx, int));
static void record_last_mem_set_info PARAMS ((rtx));
static void record_last_set_info PARAMS ((rtx, rtx, void *));
static void compute_hash_table PARAMS ((int));
static void alloc_set_hash_table PARAMS ((int));
static void free_set_hash_table PARAMS ((void));
static void compute_set_hash_table PARAMS ((void));
static void alloc_expr_hash_table PARAMS ((int));
static void free_expr_hash_table PARAMS ((void));
static void compute_expr_hash_table PARAMS ((void));
static void dump_hash_table PARAMS ((FILE *, const char *, struct expr **,
				     int, int));
static struct expr *lookup_expr PARAMS ((rtx));
static struct expr *lookup_set PARAMS ((unsigned int, rtx));
static struct expr *next_set PARAMS ((unsigned int, struct expr *));
static void reset_opr_set_tables PARAMS ((void));
static int oprs_not_set_p PARAMS ((rtx, rtx));
static void mark_call PARAMS ((rtx));
static void mark_set PARAMS ((rtx, rtx));
static void mark_clobber PARAMS ((rtx, rtx));
static void mark_oprs_set PARAMS ((rtx));
static void alloc_cprop_mem PARAMS ((int, int));
static void free_cprop_mem PARAMS ((void));
static void compute_transp PARAMS ((rtx, int, sbitmap *, int));
static void compute_transpout PARAMS ((void));
static void compute_local_properties PARAMS ((sbitmap *, sbitmap *, sbitmap *,
					      int));
static void compute_cprop_data PARAMS ((void));
static void find_used_regs PARAMS ((rtx));
static int try_replace_reg PARAMS ((rtx, rtx, rtx));
static struct expr *find_avail_set PARAMS ((int, rtx));
static int cprop_jump PARAMS ((rtx, rtx, struct reg_use *, rtx));
static int cprop_cc0_jump PARAMS ((rtx, struct reg_use *, rtx));
static int cprop_insn PARAMS ((rtx, int));
static int cprop PARAMS ((int));
static int one_cprop_pass PARAMS ((int, int));
static void alloc_pre_mem PARAMS ((int, int));
static void free_pre_mem PARAMS ((void));
static void compute_pre_data PARAMS ((void));
static int pre_expr_reaches_here_p PARAMS ((int, struct expr *, int));
static void insert_insn_end_bb PARAMS ((struct expr *, int, int));
static void pre_insert_copy_insn PARAMS ((struct expr *, rtx));
static void pre_insert_copies PARAMS ((void));
static int pre_delete PARAMS ((void));
static int pre_gcse PARAMS ((void));
static int one_pre_gcse_pass PARAMS ((int));
static void add_label_notes PARAMS ((rtx, rtx));
static void alloc_code_hoist_mem PARAMS ((int, int));
static void free_code_hoist_mem PARAMS ((void));
static void compute_code_hoist_vbeinout PARAMS ((void));
static void compute_code_hoist_data PARAMS ((void));
static int hoist_expr_reaches_here_p PARAMS ((int, int, int, char *));
static void hoist_code PARAMS ((void));
static int one_code_hoisting_pass PARAMS ((void));
static void alloc_rd_mem PARAMS ((int, int));
static void free_rd_mem PARAMS ((void));
static void handle_rd_kill_set PARAMS ((rtx, int, int));
static void compute_kill_rd PARAMS ((void));
static void compute_rd PARAMS ((void));
static void alloc_avail_expr_mem PARAMS ((int, int));
static void free_avail_expr_mem PARAMS ((void));
static void compute_ae_gen PARAMS ((void));
static int expr_killed_p PARAMS ((rtx, int));
static void compute_ae_kill PARAMS ((sbitmap *, sbitmap *));
static int expr_reaches_here_p PARAMS ((struct occr *, struct expr *,
					int, int));
static rtx computing_insn PARAMS ((struct expr *, rtx));
static int def_reaches_here_p PARAMS ((rtx, rtx));
static int can_disregard_other_sets PARAMS ((struct reg_set **, rtx, int));
static int handle_avail_expr PARAMS ((rtx, struct expr *));
static int classic_gcse PARAMS ((void));
static int one_classic_gcse_pass PARAMS ((int));
static void invalidate_nonnull_info PARAMS ((rtx, rtx, void *));
static void delete_null_pointer_checks_1 PARAMS ((unsigned int *, sbitmap *,
						  sbitmap *,
						  struct null_pointer_info *));
static rtx process_insert_insn PARAMS ((struct expr *));
static int pre_edge_insert PARAMS ((struct edge_list *, struct expr **));
static int expr_reaches_here_p_work PARAMS ((struct occr *, struct expr *,
					     int, int, char *));
static int pre_expr_reaches_here_p_work PARAMS ((int, struct expr *,
						 int, char *));

/* Entry point for global common subexpression elimination.
   F is the first instruction in the function.  */

int
gcse_main (f, file)
     rtx f;
     FILE *file;
{
  int changed, pass;
  /* Bytes used at start of pass.  */
  int initial_bytes_used;
  /* Maximum number of bytes used by a pass.  */
  int max_pass_bytes;
  /* Point to release obstack data from for each pass.  */
  char *gcse_obstack_bottom;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* Assume that we do not need to run jump optimizations after gcse.  */
  run_jump_opt_after_gcse = 0;

  /* For calling dump_foo fns from gdb.  */
  debug_stderr = stderr;
  gcse_file = file;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  if (file)
    dump_flow_info (file);

  /* Return if there's nothing to do.  */
  if (n_basic_blocks <= 1)
    return 0;

  /* Trying to perform global optimizations on flow graphs which have
     a high connectivity will take a long time and is unlikely to be
     particularly useful.

     In normal circumstances a cfg should have about twice as many edges
     as blocks.  But we do not want to punish small functions which have
     a couple switch statements.  So we require a relatively large number
     of basic blocks and the ratio of edges to blocks to be high.  */
  if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
    return 0;

  /* See what modes support reg/reg copy operations.  */
  if (! can_copy_init_p)
    {
      compute_can_copy ();
      can_copy_init_p = 1;
    }

  gcc_obstack_init (&gcse_obstack);

  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets (f);

  pass = 0;
  initial_bytes_used = bytes_used;
  max_pass_bytes = 0;
  gcse_obstack_bottom = gcse_alloc (1);
  changed = 1;
  while (changed && pass < MAX_PASSES)
    {
      changed = 0;
      if (file)
	fprintf (file, "GCSE pass %d\n\n", pass + 1);

      /* Initialize bytes_used to the space for the pred/succ lists,
	 and the reg_set_table data.  */
      bytes_used = initial_bytes_used;

      /* Each pass may create new registers, so recalculate each time.  */
      max_gcse_regno = max_reg_num ();

      alloc_gcse_mem (f);

      /* Don't allow constant propagation to modify jumps
	 during this pass.  */
      changed = one_cprop_pass (pass + 1, 0);

      if (optimize_size)
	changed |= one_classic_gcse_pass (pass + 1);
      else
	{
	  changed |= one_pre_gcse_pass (pass + 1);
	  free_reg_set_mem ();
	  alloc_reg_set_mem (max_reg_num ());
	  compute_sets (f);
	  run_jump_opt_after_gcse = 1;
	}

      if (max_pass_bytes < bytes_used)
	max_pass_bytes = bytes_used;

      /* Free up memory, then reallocate for code hoisting.  We can
	 not re-use the existing allocated memory because the tables
	 will not have info for the insns or registers created by
	 partial redundancy elimination.  */
      free_gcse_mem ();

      /* It does not make sense to run code hoisting unless we are optimizing
	 for code size -- it rarely makes programs faster, and can make
	 them bigger if we did partial redundancy elimination (when optimizing
	 for space, we use a classic gcse algorithm instead of partial
	 redundancy algorithms).  */
      if (optimize_size)
	{
	  max_gcse_regno = max_reg_num ();
	  alloc_gcse_mem (f);
	  changed |= one_code_hoisting_pass ();
	  free_gcse_mem ();

	  if (max_pass_bytes < bytes_used)
	    max_pass_bytes = bytes_used;
	}

      if (file)
	{
	  fprintf (file, "\n");
	  fflush (file);
	}

      obstack_free (&gcse_obstack, gcse_obstack_bottom);
      pass++;
    }

  /* Do one last pass of copy propagation, including cprop into
     conditional jumps.  */

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem (f);

  /* This time, go ahead and allow cprop to alter jumps.  */
  one_cprop_pass (pass + 1, 1);
  free_gcse_mem ();

  if (file)
    {
      fprintf (file, "GCSE of %s: %d basic blocks, ",
	       current_function_name, n_basic_blocks);
      fprintf (file, "%d pass%s, %d bytes\n\n",
	       pass, pass > 1 ? "es" : "", max_pass_bytes);
    }

  obstack_free (&gcse_obstack, NULL_PTR);
  free_reg_set_mem ();
  return run_jump_opt_after_gcse;
}

/* Misc. utilities.  */

/* Compute which modes support reg/reg copy operations.  */

static void
compute_can_copy ()
{
  int i;
#ifndef AVOID_CCMODE_COPIES
  rtx reg, insn;
  char *free_point = (char *) oballoc (1);
#endif

  bzero (can_copy_p, NUM_MACHINE_MODES);

  start_sequence ();
  for (i = 0; i < NUM_MACHINE_MODES; i++)
    if (GET_MODE_CLASS (i) == MODE_CC)
      {
#ifdef AVOID_CCMODE_COPIES
	can_copy_p[i] = 0;
#else
	reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
	insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
	if (recog (PATTERN (insn), insn, NULL_PTR) >= 0)
	  can_copy_p[i] = 1;
#endif
      }
    else
      can_copy_p[i] = 1;

  end_sequence ();

  /* Free the objects we just allocated.  */
#ifndef AVOID_CCMODE_COPIES
  obfree (free_point);
#endif
}

/* Cover function to xmalloc to record bytes allocated.  */

static char *
gmalloc (size)
     unsigned int size;
{
  bytes_used += size;
  return xmalloc (size);
}

/* Cover function to xrealloc.
   We don't record the additional size since we don't know it.
   It won't affect memory usage stats much anyway.  */

static char *
grealloc (ptr, size)
     char *ptr;
     unsigned int size;
{
  return xrealloc (ptr, size);
}

/* Cover function to obstack_alloc.
   We don't need to record the bytes allocated here since
   obstack_chunk_alloc is set to gmalloc.  */

static char *
gcse_alloc (size)
     unsigned long size;
{
  return (char *) obstack_alloc (&gcse_obstack, size);
}

/* Allocate memory for the cuid mapping array,
   and reg/memory set tracking tables.

   This is called at the start of each pass.  */

static void
alloc_gcse_mem (f)
     rtx f;
{
  int i, n;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.
     CUIDs are like UIDs except they increase monotonically, have no gaps,
     and only apply to real insns.  */

  max_uid = get_max_uid ();
  n = (max_uid + 1) * sizeof (int);
  uid_cuid = (int *) gmalloc (n);
  bzero ((char *) uid_cuid, n);
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	INSN_CUID (insn) = i++;
      else
	INSN_CUID (insn) = i;
    }

  /* Create a table mapping cuids to insns.  */

  max_cuid = i;
  n = (max_cuid + 1) * sizeof (rtx);
  cuid_insn = (rtx *) gmalloc (n);
  bzero ((char *) cuid_insn, n);
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
      CUID_INSN (i++) = insn;

  /* Allocate vars to track sets of regs.  */
  reg_set_bitmap = (sbitmap) sbitmap_alloc (max_gcse_regno);

  /* Allocate vars to track sets of regs, memory per block.  */
  reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks,
						       max_gcse_regno);
  mem_set_in_block = (char *) gmalloc (n_basic_blocks);
}

/* Free memory allocated by alloc_gcse_mem.  */

static void
free_gcse_mem ()
{
  free (uid_cuid);
  free (cuid_insn);

  free (reg_set_bitmap);

  free (reg_set_in_block);
  free (mem_set_in_block);
}

/* Many of the global optimization algorithms work by solving dataflow
   equations for various expressions.  Initially, some local value is
   computed for each expression in each block.  Then, the values across the
   various blocks are combined (by following flow graph edges) to arrive at
   global values.  Conceptually, each set of equations is independent.  We
   may therefore solve all the equations in parallel, solve them one at a
   time, or pick any intermediate approach.

   When you're going to need N two-dimensional bitmaps, each X (say, the
   number of blocks) by Y (say, the number of expressions), call this
   function.  It's not important what X and Y represent; only that Y
   correspond to the things that can be done in parallel.  This function will
   return an appropriate chunking factor C; you should solve C sets of
   equations in parallel.  By going through this function, we can easily
   trade space against time; by solving fewer equations in parallel we use
   less memory.  */

static int
get_bitmap_width (n, x, y)
     int n;
     int x;
     int y;
{
  /* It's not really worth figuring out *exactly* how much memory will
     be used by a particular choice.  The important thing is to get
     something approximately right.  */
  size_t max_bitmap_memory = 10 * 1024 * 1024;

  /* The number of bytes we'd use for a single column of minimum
     width.  */
  size_t column_size = n * x * sizeof (SBITMAP_ELT_TYPE);

  /* Often, it's reasonable just to solve all the equations in
     parallel.  */
  if (column_size * SBITMAP_SET_SIZE (y) <= max_bitmap_memory)
    return y;

  /* Otherwise, pick the largest width we can, without going over the
     limit.  */
  return SBITMAP_ELT_BITS * ((max_bitmap_memory + column_size - 1)
			     / column_size);
}
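
/* Usage sketch (illustrative, not from the original sources): a caller
   keeping four bitmap sets per block (say in, out, gen and kill) over
   `n_exprs' expressions would solve the equations in chunks,

       int width = get_bitmap_width (4, n_basic_blocks, n_exprs);
       int base;

       for (base = 0; base < n_exprs; base += width)
	 solve_chunk (base, base + width);

   where `solve_chunk' stands for solving that slice of equations in
   parallel (hypothetical, for illustration only).  */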

/* Compute the local properties of each recorded expression.

   Local properties are those that are defined by the block, irrespective of
   other blocks.

   An expression is transparent in a block if its operands are not modified
   in the block.

   An expression is computed (locally available) in a block if it is computed
   at least once and the expression would contain the same value if the
   computation was moved to the end of the block.

   An expression is locally anticipatable in a block if it is computed at
   least once and the expression would contain the same value if the
   computation was moved to the beginning of the block.

   We call this routine for cprop, pre and code hoisting.  They all compute
   basically the same information and thus can easily share this code.

   TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
   properties.  If NULL, then it is not necessary to compute or record that
   particular property.

   SETP controls which hash table to look at.  If zero, this routine looks at
   the expr hash table; if nonzero this routine looks at the set hash table.
   Additionally, TRANSP is computed as ~TRANSP, since this is really cprop's
   ABSALTERED.  */

static void
compute_local_properties (transp, comp, antloc, setp)
     sbitmap *transp;
     sbitmap *comp;
     sbitmap *antloc;
     int setp;
{
  int i, hash_table_size;
  struct expr **hash_table;

  /* Initialize any bitmaps that were passed in.  */
  if (transp)
    {
      if (setp)
	sbitmap_vector_zero (transp, n_basic_blocks);
      else
	sbitmap_vector_ones (transp, n_basic_blocks);
    }

  if (comp)
    sbitmap_vector_zero (comp, n_basic_blocks);
  if (antloc)
    sbitmap_vector_zero (antloc, n_basic_blocks);

  /* We use the same code for cprop, pre and hoisting.  For cprop
     we care about the set hash table, for pre and hoisting we
     care about the expr hash table.  */
  hash_table_size = setp ? set_hash_table_size : expr_hash_table_size;
  hash_table = setp ? set_hash_table : expr_hash_table;

  for (i = 0; i < hash_table_size; i++)
    {
      struct expr *expr;

      for (expr = hash_table[i]; expr != NULL; expr = expr->next_same_hash)
	{
	  int indx = expr->bitmap_index;
	  struct occr *occr;

	  /* The expression is transparent in this block if it is not killed.
	     We start by assuming all are transparent [none are killed], and
	     then reset the bits for those that are.  */
	  if (transp)
	    compute_transp (expr->expr, indx, transp, setp);

	  /* The occurrences recorded in antic_occr are exactly those that
	     we want to set to non-zero in ANTLOC.  */
	  if (antloc)
	    for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
	      {
		SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);

		/* While we're scanning the table, this is a good place to
		   initialize this.  */
		occr->deleted_p = 0;
	      }

	  /* The occurrences recorded in avail_occr are exactly those that
	     we want to set to non-zero in COMP.  */
	  if (comp)
	    for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
	      {
		SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);

		/* While we're scanning the table, this is a good place to
		   initialize this.  */
		occr->copied_p = 0;
	      }

	  /* While we're scanning the table, this is a good place to
	     initialize this.  */
	  expr->reaching_reg = 0;
	}
    }
}
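
/* Typical invocations (a sketch based on the callers described above):
   PRE and code hoisting compute local properties from the expression
   hash table,

       compute_local_properties (transp, comp, antloc, 0);

   while const/copy propagation passes a nonzero SETP to use the set hash
   table, receiving ~TRANSP in its TRANSP argument.  */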

/* Register set information.

   `reg_set_table' records where each register is set or otherwise
   modified.  */

static struct obstack reg_set_obstack;

static void
alloc_reg_set_mem (n_regs)
     int n_regs;
{
  int n;

  reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
  n = reg_set_table_size * sizeof (struct reg_set *);
  reg_set_table = (struct reg_set **) gmalloc (n);
  bzero ((char *) reg_set_table, n);

  gcc_obstack_init (&reg_set_obstack);
}

static void
free_reg_set_mem ()
{
  free (reg_set_table);
  obstack_free (&reg_set_obstack, NULL_PTR);
}

/* Record REGNO in the reg_set table.  */

static void
record_one_set (regno, insn)
     int regno;
     rtx insn;
{
  /* Allocate a new reg_set element and link it onto the list.  */
  struct reg_set *new_reg_info, *reg_info_ptr1, *reg_info_ptr2;

  /* If the table isn't big enough, enlarge it.  */
  if (regno >= reg_set_table_size)
    {
      int new_size = regno + REG_SET_TABLE_SLOP;

      reg_set_table
	= (struct reg_set **) grealloc ((char *) reg_set_table,
					new_size * sizeof (struct reg_set *));
      bzero ((char *) (reg_set_table + reg_set_table_size),
	     (new_size - reg_set_table_size) * sizeof (struct reg_set *));
      reg_set_table_size = new_size;
    }

  new_reg_info = (struct reg_set *) obstack_alloc (&reg_set_obstack,
						   sizeof (struct reg_set));
  bytes_used += sizeof (struct reg_set);
  new_reg_info->insn = insn;
  new_reg_info->next = NULL;
  if (reg_set_table[regno] == NULL)
    reg_set_table[regno] = new_reg_info;
  else
    {
      reg_info_ptr1 = reg_info_ptr2 = reg_set_table[regno];
      /* ??? One could keep a "last" pointer to speed this up.  */
      while (reg_info_ptr1 != NULL)
	{
	  reg_info_ptr2 = reg_info_ptr1;
	  reg_info_ptr1 = reg_info_ptr1->next;
	}

      reg_info_ptr2->next = new_reg_info;
    }
}

/* Called from compute_sets via note_stores to handle one SET or CLOBBER in
   an insn.  The DATA is really the instruction in which the SET is
   occurring.  */

static void
record_set_info (dest, setter, data)
     rtx dest, setter ATTRIBUTE_UNUSED;
     void *data;
{
  rtx record_set_insn = (rtx) data;

  if (GET_CODE (dest) == REG && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
    record_one_set (REGNO (dest), record_set_insn);
}

/* Scan the function and record each set of each pseudo-register.

   This is called once, at the start of the gcse pass.  See the comments for
   `reg_set_table' for further documentation.  */

static void
compute_sets (f)
     rtx f;
{
  rtx insn;

  for (insn = f; insn != 0; insn = NEXT_INSN (insn))
    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
      note_stores (PATTERN (insn), record_set_info, insn);
}

/* Hash table support.  */

/* For each register, the cuid of the first/last insn in the block to set it,
   or -1 if not set.  */
#define NEVER_SET -1
static int *reg_first_set;
static int *reg_last_set;

/* While computing "first/last set" info, this is the CUID of first/last insn
   to set memory or -1 if not set.  `mem_last_set' is also used when
   performing GCSE to record whether memory has been set since the beginning
   of the block.

   Note that handling of memory is very simple, we don't make any attempt
   to optimize things (later).  */
static int mem_first_set;
static int mem_last_set;

/* Perform a quick check whether X, the source of a set, is something
   we want to consider for GCSE.  */

static int
want_to_gcse_p (x)
     rtx x;
{
  switch (GET_CODE (x))
    {
    case REG:
    case SUBREG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CALL:
      return 0;

    default:
      break;
    }

  return 1;
}

/* Return non-zero if the operands of expression X are unchanged from the
   start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
   or from INSN to the end of INSN's basic block (if AVAIL_P != 0).  */

static int
oprs_unchanged_p (x, insn, avail_p)
     rtx x, insn;
     int avail_p;
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      if (avail_p)
	return (reg_last_set[REGNO (x)] == NEVER_SET
		|| reg_last_set[REGNO (x)] < INSN_CUID (insn));
      else
	return (reg_first_set[REGNO (x)] == NEVER_SET
		|| reg_first_set[REGNO (x)] >= INSN_CUID (insn));

    case MEM:
      if (avail_p && mem_last_set != NEVER_SET
	  && mem_last_set >= INSN_CUID (insn))
	return 0;
      else if (! avail_p && mem_first_set != NEVER_SET
	       && mem_first_set < INSN_CUID (insn))
	return 0;
      else
	return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
      return 0;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call needed at this
	     level, change it into iteration.  This function is called enough
	     to be worth it.  */
	  if (i == 0)
	    return oprs_unchanged_p (XEXP (x, i), insn, avail_p);

	  else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
	    return 0;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
	    return 0;
    }

  return 1;
}

/* Return non-zero if the operands of expression X are unchanged from
   the start of INSN's basic block up to but not including INSN.  */

static int
oprs_anticipatable_p (x, insn)
     rtx x, insn;
{
  return oprs_unchanged_p (x, insn, 0);
}

/* Return non-zero if the operands of expression X are unchanged from
   INSN to the end of INSN's basic block.  */

static int
oprs_available_p (x, insn)
     rtx x, insn;
{
  return oprs_unchanged_p (x, insn, 1);
}
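
/* Illustrative example (not from the original sources): in a block

       insn A:  r70 = r71 + r72
       insn B:  r71 = ...

   the PLUS in insn A is anticipatable (neither r71 nor r72 is set earlier
   in the block) but not available (r71 is modified by a later insn in the
   same block), so oprs_anticipatable_p returns non-zero for it while
   oprs_available_p returns zero.  */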

/* Hash expression X.

   MODE is only used if X is a CONST_INT.  DO_NOT_RECORD_P is a boolean
   indicating if a volatile operand is found or if the expression contains
   something we don't want to insert in the table.

   ??? One might want to merge this with canon_hash.  Later.  */

static unsigned int
hash_expr (x, mode, do_not_record_p, hash_table_size)
     rtx x;
     enum machine_mode mode;
     int *do_not_record_p;
     int hash_table_size;
{
  unsigned int hash;

  *do_not_record_p = 0;

  hash = hash_expr_1 (x, mode, do_not_record_p);
  return hash % hash_table_size;
}

/* Subroutine of hash_expr to do the actual work.  */

static unsigned int
hash_expr_1 (x, mode, do_not_record_p)
     rtx x;
     enum machine_mode mode;
     int *do_not_record_p;
{
  unsigned int hash = 0;
  int i, j;
  enum rtx_code code;
  const char *fmt;

  /* Used to turn recursion into iteration.  We can't rely on GCC's
     tail-recursion elimination since we need to keep accumulating values
     in HASH.  */

  if (x == 0)
    return hash;

 repeat:
  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      hash += ((unsigned int) REG << 7) + REGNO (x);
      return hash;

    case CONST_INT:
      hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
	       + (unsigned int) INTVAL (x));
      return hash;

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
	 the integers representing the constant.  */
      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
      if (GET_MODE (x) != VOIDmode)
	for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
	  hash += (unsigned int) XWINT (x, i);
      else
	hash += ((unsigned int) CONST_DOUBLE_LOW (x)
		 + (unsigned int) CONST_DOUBLE_HIGH (x));
      return hash;

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
	 differences and differences between each stage's debugging dumps.  */
      hash += (((unsigned int) LABEL_REF << 7)
	       + CODE_LABEL_NUMBER (XEXP (x, 0)));
      return hash;

    case SYMBOL_REF:
      {
	/* Don't hash on the symbol's address to avoid bootstrap differences.
	   Different hash values may cause expressions to be recorded in
	   different orders and thus different registers to be used in the
	   final assembler.  This also avoids differences in the dump files
	   between various stages.  */
	unsigned int h = 0;
	const unsigned char *p = (const unsigned char *) XSTR (x, 0);

	while (*p)
	  h += (h << 7) + *p++; /* ??? revisit */

	hash += ((unsigned int) SYMBOL_REF << 7) + h;
	return hash;
      }

    case MEM:
      if (MEM_VOLATILE_P (x))
	{
	  *do_not_record_p = 1;
	  return 0;
	}

      hash += (unsigned int) MEM;
      hash += MEM_ALIAS_SET (x);
      x = XEXP (x, 0);
      goto repeat;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PC:
    case CC0:
    case CALL:
    case UNSPEC_VOLATILE:
      *do_not_record_p = 1;
      return 0;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
	{
	  *do_not_record_p = 1;
	  return 0;
	}

    default:
      break;
    }

  hash += (unsigned) code + (unsigned) GET_MODE (x);
  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = XEXP (x, i);
	      goto repeat;
	    }

	  hash += hash_expr_1 (XEXP (x, i), 0, do_not_record_p);
	  if (*do_not_record_p)
	    return 0;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  {
	    hash += hash_expr_1 (XVECEXP (x, i, j), 0, do_not_record_p);
	    if (*do_not_record_p)
	      return 0;
	  }
      else if (fmt[i] == 's')
	{
	  register const unsigned char *p =
	    (const unsigned char *) XSTR (x, i);

	  if (p)
	    while (*p)
	      hash += *p++;
	}
      else if (fmt[i] == 'i')
	hash += (unsigned int) XINT (x, i);
      else
	abort ();
    }

  return hash;
}

/* Hash a set of register REGNO.

   Sets are hashed on the register that is set.  This simplifies the PRE copy
   propagation code.

   ??? May need to make things more elaborate.  Later, as necessary.  */

static unsigned int
hash_set (regno, hash_table_size)
     int regno;
     int hash_table_size;
{
  unsigned int hash;

  hash = regno;
  return hash % hash_table_size;
}

/* Return non-zero if exp1 is equivalent to exp2.
   ??? Borrowed from cse.c.  Might want to remerge with cse.c.  Later.  */

static int
expr_equiv_p (x, y)
     rtx x, y;
{
  register int i, j;
  register enum rtx_code code;
  register const char *fmt;

  if (x == y)
    return 1;

  if (x == 0 || y == 0)
    return x == y;

  code = GET_CODE (x);
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
      return x == y;

    case CONST_INT:
      return INTVAL (x) == INTVAL (y);

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case REG:
      return REGNO (x) == REGNO (y);

    case MEM:
      /* Can't merge two expressions in different alias sets, since we can
	 decide that the expression is transparent in a block when it isn't,
	 due to it being set with the different alias set.  */
      if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
	return 0;
      break;

      /* For commutative operations, check both orders.  */
    case PLUS:
    case MULT:
    case AND:
    case IOR:
    case XOR:
    case NE:
    case EQ:
      return ((expr_equiv_p (XEXP (x, 0), XEXP (y, 0))
	       && expr_equiv_p (XEXP (x, 1), XEXP (y, 1)))
	      || (expr_equiv_p (XEXP (x, 0), XEXP (y, 1))
		  && expr_equiv_p (XEXP (x, 1), XEXP (y, 0))));

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'e':
	  if (! expr_equiv_p (XEXP (x, i), XEXP (y, i)))
	    return 0;
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (! expr_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
	      return 0;
	  break;

	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case '0':
	  break;

	default:
	  abort ();
	}
    }

  return 1;
}
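
/* E.g. (illustrative): under the commutative check above,
   (plus:SI (reg 70) (reg 71)) and (plus:SI (reg 71) (reg 70)) compare
   equal, while (mult:SI ...) and (mult:HI ...) never do because their
   modes differ.  */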

/* Insert expression X in INSN in the hash table.
   If it is already present, record it as the last occurrence in INSN's
   basic block.

   MODE is the mode of the value X is being stored into.
   It is only used if X is a CONST_INT.

   ANTIC_P is non-zero if X is an anticipatable expression.
   AVAIL_P is non-zero if X is an available expression.  */

static void
insert_expr_in_table (x, mode, insn, antic_p, avail_p)
     rtx x;
     enum machine_mode mode;
     rtx insn;
     int antic_p, avail_p;
{
  int found, do_not_record_p;
  unsigned int hash;
  struct expr *cur_expr, *last_expr = NULL;
  struct occr *antic_occr, *avail_occr;
  struct occr *last_occr = NULL;

  hash = hash_expr (x, mode, &do_not_record_p, expr_hash_table_size);

  /* Do not insert expression in table if it contains volatile operands,
     or if hash_expr determines the expression is something we don't want
     to or can't handle.  */
  if (do_not_record_p)
    return;

  cur_expr = expr_hash_table[hash];
  found = 0;

  while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
    {
      /* If the expression isn't found, save a pointer to the end of
	 the list.  */
      last_expr = cur_expr;
      cur_expr = cur_expr->next_same_hash;
    }

  if (! found)
    {
      cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
      bytes_used += sizeof (struct expr);
      if (expr_hash_table[hash] == NULL)
	/* This is the first pattern that hashed to this index.  */
	expr_hash_table[hash] = cur_expr;
      else
	/* Add EXPR to end of this hash chain.  */
	last_expr->next_same_hash = cur_expr;

      /* Set the fields of the expr element.  */
      cur_expr->expr = x;
      cur_expr->bitmap_index = n_exprs++;
      cur_expr->next_same_hash = NULL;
      cur_expr->antic_occr = NULL;
      cur_expr->avail_occr = NULL;
    }

  /* Now record the occurrence(s).  */
  if (antic_p)
    {
      antic_occr = cur_expr->antic_occr;

      /* Search for another occurrence in the same basic block.  */
      while (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
	{
	  /* If an occurrence isn't found, save a pointer to the end of
	     the list.  */
	  last_occr = antic_occr;
	  antic_occr = antic_occr->next;
	}

      if (antic_occr)
	/* Found another instance of the expression in the same basic block.
	   Prefer the currently recorded one.  We want the first one in the
	   block and the block is scanned from start to end.  */
	; /* nothing to do */
      else
	{
	  /* First occurrence of this expression in this basic block.  */
	  antic_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
	  bytes_used += sizeof (struct occr);
	  /* First occurrence of this expression in any block?  */
	  if (cur_expr->antic_occr == NULL)
	    cur_expr->antic_occr = antic_occr;
	  else
	    last_occr->next = antic_occr;

	  antic_occr->insn = insn;
	  antic_occr->next = NULL;
	}
    }

  if (avail_p)
    {
      avail_occr = cur_expr->avail_occr;

      /* Search for another occurrence in the same basic block.  */
      while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
	{
	  /* If an occurrence isn't found, save a pointer to the end of
	     the list.  */
	  last_occr = avail_occr;
	  avail_occr = avail_occr->next;
	}

      if (avail_occr)
	/* Found another instance of the expression in the same basic block.
	   Prefer this occurrence to the currently recorded one.  We want
	   the last one in the block and the block is scanned from start
	   to end.  */
	avail_occr->insn = insn;
      else
	{
	  /* First occurrence of this expression in this basic block.  */
	  avail_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
	  bytes_used += sizeof (struct occr);

	  /* First occurrence of this expression in any block?  */
	  if (cur_expr->avail_occr == NULL)
	    cur_expr->avail_occr = avail_occr;
	  else
	    last_occr->next = avail_occr;

	  avail_occr->insn = insn;
	  avail_occr->next = NULL;
	}
    }
}

/* Insert pattern X in INSN in the hash table.
   X is a SET of a reg to either another reg or a constant.
   If it is already present, record it as the last occurrence in INSN's
   basic block.  */

static void
insert_set_in_table (x, insn)
     rtx x;
     rtx insn;
{
  int found;
  unsigned int hash;
  struct expr *cur_expr, *last_expr = NULL;
  struct occr *cur_occr, *last_occr = NULL;

  if (GET_CODE (x) != SET
      || GET_CODE (SET_DEST (x)) != REG)
    abort ();

  hash = hash_set (REGNO (SET_DEST (x)), set_hash_table_size);

  cur_expr = set_hash_table[hash];
  found = 0;

  while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
    {
      /* If the expression isn't found, save a pointer to the end of
	 the list.  */
      last_expr = cur_expr;
      cur_expr = cur_expr->next_same_hash;
    }

  if (! found)
    {
      cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
      bytes_used += sizeof (struct expr);
      if (set_hash_table[hash] == NULL)
	/* This is the first pattern that hashed to this index.  */
	set_hash_table[hash] = cur_expr;
      else
	/* Add EXPR to end of this hash chain.  */
	last_expr->next_same_hash = cur_expr;

      /* Set the fields of the expr element.
	 We must copy X because it can be modified when copy propagation is
	 performed on its operands.  */
      /* ??? Should this go in a different obstack?  */
      cur_expr->expr = copy_rtx (x);
      cur_expr->bitmap_index = n_sets++;
      cur_expr->next_same_hash = NULL;
      cur_expr->antic_occr = NULL;
      cur_expr->avail_occr = NULL;
    }

  /* Now record the occurrence.  */
  cur_occr = cur_expr->avail_occr;

  /* Search for another occurrence in the same basic block.  */
  while (cur_occr && BLOCK_NUM (cur_occr->insn) != BLOCK_NUM (insn))
    {
      /* If an occurrence isn't found, save a pointer to the end of
	 the list.  */
      last_occr = cur_occr;
      cur_occr = cur_occr->next;
    }

  if (cur_occr)
    /* Found another instance of the expression in the same basic block.
       Prefer this occurrence to the currently recorded one.  We want the
       last one in the block and the block is scanned from start to end.  */
    cur_occr->insn = insn;
  else
    {
      /* First occurrence of this expression in this basic block.  */
      cur_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
      bytes_used += sizeof (struct occr);

      /* First occurrence of this expression in any block?  */
      if (cur_expr->avail_occr == NULL)
	cur_expr->avail_occr = cur_occr;
      else
	last_occr->next = cur_occr;

      cur_occr->insn = insn;
      cur_occr->next = NULL;
    }
}

/* Scan pattern PAT of INSN and add an entry to the hash table.  If SET_P is
   non-zero, this is for the assignment hash table, otherwise it is for the
   expression hash table.  */

static void
hash_scan_set (pat, insn, set_p)
     rtx pat, insn;
     int set_p;
{
  rtx src = SET_SRC (pat);
  rtx dest = SET_DEST (pat);

  if (GET_CODE (src) == CALL)
    hash_scan_call (src, insn);

  if (GET_CODE (dest) == REG)
    {
      int regno = REGNO (dest);
      rtx tmp;

      if (! set_p
	  /* Only record sets of pseudo-regs in the hash table.  */
	  && regno >= FIRST_PSEUDO_REGISTER
	  /* Don't GCSE something if we can't do a reg/reg copy.  */
	  && can_copy_p [GET_MODE (dest)]
	  /* Is SET_SRC something we want to gcse?  */
	  && want_to_gcse_p (src))
	{
	  /* An expression is not anticipatable if its operands are
	     modified before this insn.  */
	  int antic_p = oprs_anticipatable_p (src, insn);
	  /* An expression is not available if its operands are
	     subsequently modified, including this insn.  */
	  int avail_p = oprs_available_p (src, insn);

	  insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p);
	}

      /* Record sets for constant/copy propagation.  */
      else if (set_p
	       && regno >= FIRST_PSEUDO_REGISTER
	       && ((GET_CODE (src) == REG
		    && REGNO (src) >= FIRST_PSEUDO_REGISTER
		    && can_copy_p [GET_MODE (dest)])
		   || GET_CODE (src) == CONST_INT
		   || GET_CODE (src) == SYMBOL_REF
		   || GET_CODE (src) == CONST_DOUBLE)
	       /* A copy is not available if its src or dest is subsequently
		  modified.  Here we want to search from INSN+1 on, but
		  oprs_available_p searches from INSN on.  */
	       && (insn == BLOCK_END (BLOCK_NUM (insn))
		   || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
		       && oprs_available_p (pat, tmp))))
	insert_set_in_table (pat, insn);
    }
}

static void
hash_scan_clobber (x, insn)
     rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
{
  /* Currently nothing to do.  */
}

static void
hash_scan_call (x, insn)
     rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
{
  /* Currently nothing to do.  */
}

/* Process INSN and add hash table entries as appropriate.

   Only available expressions that set a single pseudo-reg are recorded.

   Single sets in a PARALLEL could be handled, but it's an extra complication
   that isn't dealt with right now.  The trick is handling the CLOBBERs that
   are also in the PARALLEL.  Later.

   If SET_P is non-zero, this is for the assignment hash table,
   otherwise it is for the expression hash table.
   If IN_LIBCALL_BLOCK nonzero, we are in a libcall block, and should
   not record any expressions.  */

static void
hash_scan_insn (insn, set_p, in_libcall_block)
     rtx insn;
     int set_p;
     int in_libcall_block;
{
  rtx pat = PATTERN (insn);
  int i;

  /* Pick out the sets of INSN and for other forms of instructions record
     what's been modified.  */

  if (GET_CODE (pat) == SET && ! in_libcall_block)
    {
      /* Ignore obvious no-ops.  */
      if (SET_SRC (pat) != SET_DEST (pat))
	hash_scan_set (pat, insn, set_p);
    }
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
	rtx x = XVECEXP (pat, 0, i);

	if (GET_CODE (x) == SET)
	  {
	    if (GET_CODE (SET_SRC (x)) == CALL)
	      hash_scan_call (SET_SRC (x), insn);
	  }
	else if (GET_CODE (x) == CLOBBER)
	  hash_scan_clobber (x, insn);
	else if (GET_CODE (x) == CALL)
	  hash_scan_call (x, insn);
      }

  else if (GET_CODE (pat) == CLOBBER)
    hash_scan_clobber (pat, insn);
  else if (GET_CODE (pat) == CALL)
    hash_scan_call (pat, insn);
}

static void
dump_hash_table (file, name, table, table_size, total_size)
     FILE *file;
     const char *name;
     struct expr **table;
     int table_size, total_size;
{
  int i;
  /* Flattened out table, so it's printed in proper order.  */
  struct expr **flat_table;
  unsigned int *hash_val;
  struct expr *expr;

  flat_table
    = (struct expr **) xcalloc (total_size, sizeof (struct expr *));
  hash_val = (unsigned int *) xmalloc (total_size * sizeof (unsigned int));

  for (i = 0; i < table_size; i++)
    for (expr = table[i]; expr != NULL; expr = expr->next_same_hash)
      {
	flat_table[expr->bitmap_index] = expr;
	hash_val[expr->bitmap_index] = i;
      }

  fprintf (file, "%s hash table (%d buckets, %d entries)\n",
	   name, table_size, total_size);

  for (i = 0; i < total_size; i++)
    if (flat_table[i] != 0)
      {
	expr = flat_table[i];
	fprintf (file, "Index %d (hash value %d)\n  ",
		 expr->bitmap_index, hash_val[i]);
	print_rtl (file, expr->expr);
	fprintf (file, "\n");
      }

  fprintf (file, "\n");

  free (flat_table);
  free (hash_val);
}

/* Record register first/last/block set information for REGNO in INSN.

   reg_first_set records the first place in the block where the register
   is set and is used to compute "anticipatability".

   reg_last_set records the last place in the block where the register
   is set and is used to compute "availability".

   reg_set_in_block records whether the register is set in the block
   and is used to compute "transparency".  */
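
/* Example (illustrative): for a block

       insn 1:  r70 = ...
       insn 2:  r80 = r70 + r71

   reg_first_set[70] and reg_last_set[70] both hold the cuid of insn 1,
   so the PLUS in insn 2 is not anticipatable (an operand is set earlier
   in the block) but is available if nothing later in the block modifies
   r70 or r71.  */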
2032 record_last_reg_set_info (insn
, regno
)
2036 if (reg_first_set
[regno
] == NEVER_SET
)
2037 reg_first_set
[regno
] = INSN_CUID (insn
);
2039 reg_last_set
[regno
] = INSN_CUID (insn
);
2040 SET_BIT (reg_set_in_block
[BLOCK_NUM (insn
)], regno
);
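
/* Illustrative sketch (not part of the original source): the three pieces
   of data recorded above are what the later local dataflow questions are
   answered from.  The helpers below are hypothetical examples of how the
   first/last CUIDs would be consulted; the real pass performs these tests
   inside its own walks over each expression's operands.  */
#if 0
/* A use of REGNO is "anticipatable" at the block head if no set of it
   precedes the use, i.e. the use's CUID is at or before the first set.
   It is "available" at the block end if no set follows it.  */
static int
example_reg_anticipatable_p (int regno, int use_cuid)
{
  return reg_first_set[regno] == NEVER_SET
	 || reg_first_set[regno] >= use_cuid;
}

static int
example_reg_available_p (int regno, int use_cuid)
{
  return reg_last_set[regno] == NEVER_SET
	 || reg_last_set[regno] < use_cuid;
}
#endif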
/* Record memory first/last/block set information for INSN.  */

static void
record_last_mem_set_info (insn)
     rtx insn;
{
  if (mem_first_set == NEVER_SET)
    mem_first_set = INSN_CUID (insn);

  mem_last_set = INSN_CUID (insn);
  mem_set_in_block[BLOCK_NUM (insn)] = 1;
}
/* Called from compute_hash_table via note_stores to handle one
   SET or CLOBBER in an insn.  DATA is really the instruction in which
   the SET is taking place.  */

static void
record_last_set_info (dest, setter, data)
     rtx dest, setter ATTRIBUTE_UNUSED;
     void *data;
{
  rtx last_set_insn = (rtx) data;

  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (GET_CODE (dest) == REG)
    record_last_reg_set_info (last_set_insn, REGNO (dest));
  else if (GET_CODE (dest) == MEM
	   /* Ignore pushes, they clobber nothing.  */
	   && ! push_operand (dest, GET_MODE (dest)))
    record_last_mem_set_info (last_set_insn);
}
/* Top level function to create an expression or assignment hash table.

   Expression entries are placed in the hash table if
   - they are of the form (set (pseudo-reg) src),
   - src is something we want to perform GCSE on,
   - none of the operands are subsequently modified in the block.

   Assignment entries are placed in the hash table if
   - they are of the form (set (pseudo-reg) src),
   - src is something we want to perform const/copy propagation on,
   - none of the operands or target are subsequently modified in the block.

   Currently src must be a pseudo-reg or a const_int.

   SET_P is non-zero for computing the assignment hash table.  */
static void
compute_hash_table (set_p)
     int set_p;
{
  int bb;

  /* While we compute the hash table we also compute a bit array of which
     registers are set in which blocks.
     We also compute which blocks set memory, in the absence of aliasing
     support [which is TODO].
     ??? This isn't needed during const/copy propagation, but it's cheap to
     compute.  Later.  */
  sbitmap_vector_zero (reg_set_in_block, n_basic_blocks);
  bzero ((char *) mem_set_in_block, n_basic_blocks);

  /* Some working arrays used to track first and last set in each block.  */
  /* ??? One could use alloca here, but at some size a threshold is crossed
     beyond which one should use malloc.  Are we at that threshold here?  */
  reg_first_set = (int *) gmalloc (max_gcse_regno * sizeof (int));
  reg_last_set = (int *) gmalloc (max_gcse_regno * sizeof (int));
  for (bb = 0; bb < n_basic_blocks; bb++)
    {
      rtx insn;
      unsigned int regno;
      int in_libcall_block;
      int i;

      /* First pass over the instructions records information used to
	 determine when registers and memory are first and last set.
	 ??? The mem_set_in_block and hard-reg reg_set_in_block computation
	 could be moved to compute_sets since they currently don't change.  */

      for (i = 0; i < max_gcse_regno; i++)
	reg_first_set[i] = reg_last_set[i] = NEVER_SET;

      mem_first_set = NEVER_SET;
      mem_last_set = NEVER_SET;

      for (insn = BLOCK_HEAD (bb);
	   insn && insn != NEXT_INSN (BLOCK_END (bb));
	   insn = NEXT_INSN (insn))
	{
#ifdef NON_SAVING_SETJMP
	  if (NON_SAVING_SETJMP && GET_CODE (insn) == NOTE
	      && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
	    {
	      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
		record_last_reg_set_info (insn, regno);
	      continue;
	    }
#endif

	  if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
	    continue;

	  if (GET_CODE (insn) == CALL_INSN)
	    {
	      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
		if ((call_used_regs[regno]
		     && regno != STACK_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
		     && regno != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
		     && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
#endif
#if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
		     && ! (regno == PIC_OFFSET_TABLE_REGNUM && flag_pic)
#endif
		     && regno != FRAME_POINTER_REGNUM)
		    || global_regs[regno])
		  record_last_reg_set_info (insn, regno);

	      if (! CONST_CALL_P (insn))
		record_last_mem_set_info (insn);
	    }

	  note_stores (PATTERN (insn), record_last_set_info, insn);
	}
      /* The next pass builds the hash table.  */

      for (insn = BLOCK_HEAD (bb), in_libcall_block = 0;
	   insn && insn != NEXT_INSN (BLOCK_END (bb));
	   insn = NEXT_INSN (insn))
	if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	  {
	    if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
	      in_libcall_block = 1;
	    else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
	      in_libcall_block = 0;

	    hash_scan_insn (insn, set_p, in_libcall_block);
	  }
    }

  free (reg_first_set);
  free (reg_last_set);

  /* Catch bugs early.  */
  reg_first_set = reg_last_set = 0;
}
/* Allocate space for the set hash table.
   N_INSNS is the number of instructions in the function.
   It is used to determine the number of buckets to use.  */

static void
alloc_set_hash_table (n_insns)
     int n_insns;
{
  int n;

  set_hash_table_size = n_insns / 4;
  if (set_hash_table_size < 11)
    set_hash_table_size = 11;

  /* Attempt to maintain efficient use of hash table.
     Making it an odd number is simplest for now.
     ??? Later take some measurements.  */
  set_hash_table_size |= 1;
  n = set_hash_table_size * sizeof (struct expr *);
  set_hash_table = (struct expr **) gmalloc (n);
}
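
/* Illustrative sketch (not part of the original source): the bucket-count
   heuristic above, pulled out as a standalone helper so the arithmetic is
   easy to see.  One bucket per four insns, a floor of 11, and |1 to force
   an odd count so hash values spread better across buckets.  */
#if 0
static int
example_hash_table_size (int n_insns)
{
  int size = n_insns / 4;	/* roughly four insns per bucket */

  if (size < 11)
    size = 11;			/* never degenerate to a tiny table */
  return size | 1;		/* an odd size scatters hash values better */
}
/* e.g. example_hash_table_size (100) == 25,
	example_hash_table_size (8) == 11.  */
#endif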
/* Free things allocated by alloc_set_hash_table.  */

static void
free_set_hash_table ()
{
  free (set_hash_table);
}
/* Compute the hash table for doing copy/const propagation.  */

static void
compute_set_hash_table ()
{
  /* Initialize count of number of entries in hash table.  */
  n_sets = 0;
  bzero ((char *) set_hash_table,
	 set_hash_table_size * sizeof (struct expr *));

  compute_hash_table (1);
}
/* Allocate space for the expression hash table.
   N_INSNS is the number of instructions in the function.
   It is used to determine the number of buckets to use.  */

static void
alloc_expr_hash_table (n_insns)
     int n_insns;
{
  int n;

  expr_hash_table_size = n_insns / 2;
  /* Make sure the amount is usable.  */
  if (expr_hash_table_size < 11)
    expr_hash_table_size = 11;

  /* Attempt to maintain efficient use of hash table.
     Making it an odd number is simplest for now.
     ??? Later take some measurements.  */
  expr_hash_table_size |= 1;
  n = expr_hash_table_size * sizeof (struct expr *);
  expr_hash_table = (struct expr **) gmalloc (n);
}
/* Free things allocated by alloc_expr_hash_table.  */

static void
free_expr_hash_table ()
{
  free (expr_hash_table);
}

/* Compute the hash table for doing GCSE.  */

static void
compute_expr_hash_table ()
{
  /* Initialize count of number of entries in hash table.  */
  n_exprs = 0;
  bzero ((char *) expr_hash_table,
	 expr_hash_table_size * sizeof (struct expr *));

  compute_hash_table (0);
}
/* Expression tracking support.  */

/* Lookup pattern PAT in the expression table.
   The result is a pointer to the table entry, or NULL if not found.  */

static struct expr *
lookup_expr (pat)
     rtx pat;
{
  int do_not_record_p;
  unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p,
				 expr_hash_table_size);
  struct expr *expr;

  if (do_not_record_p)
    return NULL;

  expr = expr_hash_table[hash];

  while (expr && ! expr_equiv_p (expr->expr, pat))
    expr = expr->next_same_hash;

  return expr;
}
/* Lookup REGNO in the set table.  If PAT is non-NULL look for the entry that
   matches it, otherwise return the first entry for REGNO.  The result is a
   pointer to the table entry, or NULL if not found.  */

static struct expr *
lookup_set (regno, pat)
     unsigned int regno;
     rtx pat;
{
  unsigned int hash = hash_set (regno, set_hash_table_size);
  struct expr *expr;

  expr = set_hash_table[hash];

  if (pat)
    {
      while (expr && ! expr_equiv_p (expr->expr, pat))
	expr = expr->next_same_hash;
    }
  else
    {
      while (expr && REGNO (SET_DEST (expr->expr)) != regno)
	expr = expr->next_same_hash;
    }

  return expr;
}
/* Return the next entry for REGNO in list EXPR.  */

static struct expr *
next_set (regno, expr)
     unsigned int regno;
     struct expr *expr;
{
  do
    expr = expr->next_same_hash;
  while (expr && REGNO (SET_DEST (expr->expr)) != regno);

  return expr;
}
/* Reset tables used to keep track of what's still available [since the
   start of the block].  */

static void
reset_opr_set_tables ()
{
  /* Maintain a bitmap of which regs have been set since beginning of
     the block.  */
  sbitmap_zero (reg_set_bitmap);

  /* Also keep a record of the last instruction to modify memory.
     For now this is very trivial, we only record whether any memory
     location has been modified.  */
  mem_last_set = 0;
}
/* Return non-zero if the operands of X are not set before INSN in
   INSN's basic block.  */

static int
oprs_not_set_p (x, insn)
     rtx x, insn;
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    case MEM:
      if (mem_last_set != 0)
	return 0;
      else
	return oprs_not_set_p (XEXP (x, 0), insn);

    case REG:
      return ! TEST_BIT (reg_set_bitmap, REGNO (x));

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    return oprs_not_set_p (XEXP (x, i), insn);

	  if (! oprs_not_set_p (XEXP (x, i), insn))
	    return 0;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
	    return 0;
    }

  return 1;
}
/* Mark things set by a CALL.  */

static void
mark_call (insn)
     rtx insn;
{
  mem_last_set = INSN_CUID (insn);
}
/* Mark things set by a SET.  */

static void
mark_set (pat, insn)
     rtx pat, insn;
{
  rtx dest = SET_DEST (pat);

  while (GET_CODE (dest) == SUBREG
	 || GET_CODE (dest) == ZERO_EXTRACT
	 || GET_CODE (dest) == SIGN_EXTRACT
	 || GET_CODE (dest) == STRICT_LOW_PART)
    dest = XEXP (dest, 0);

  if (GET_CODE (dest) == REG)
    SET_BIT (reg_set_bitmap, REGNO (dest));
  else if (GET_CODE (dest) == MEM)
    mem_last_set = INSN_CUID (insn);

  if (GET_CODE (SET_SRC (pat)) == CALL)
    mark_call (insn);
}
/* Record things set by a CLOBBER.  */

static void
mark_clobber (pat, insn)
     rtx pat, insn;
{
  rtx clob = XEXP (pat, 0);

  while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
    clob = XEXP (clob, 0);

  if (GET_CODE (clob) == REG)
    SET_BIT (reg_set_bitmap, REGNO (clob));
  else
    mem_last_set = INSN_CUID (insn);
}
/* Record things set by INSN.
   This data is used by oprs_not_set_p.  */

static void
mark_oprs_set (insn)
     rtx insn;
{
  rtx pat = PATTERN (insn);
  int i;

  if (GET_CODE (pat) == SET)
    mark_set (pat, insn);
  else if (GET_CODE (pat) == PARALLEL)
    for (i = 0; i < XVECLEN (pat, 0); i++)
      {
	rtx x = XVECEXP (pat, 0, i);

	if (GET_CODE (x) == SET)
	  mark_set (x, insn);
	else if (GET_CODE (x) == CLOBBER)
	  mark_clobber (x, insn);
	else if (GET_CODE (x) == CALL)
	  mark_call (insn);
      }

  else if (GET_CODE (pat) == CLOBBER)
    mark_clobber (pat, insn);
  else if (GET_CODE (pat) == CALL)
    mark_call (insn);
}
/* Classic GCSE reaching definition support.  */

/* Allocate reaching def variables.  */

static void
alloc_rd_mem (n_blocks, n_insns)
     int n_blocks, n_insns;
{
  rd_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
  sbitmap_vector_zero (rd_kill, n_basic_blocks);

  rd_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
  sbitmap_vector_zero (rd_gen, n_basic_blocks);

  reaching_defs = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
  sbitmap_vector_zero (reaching_defs, n_basic_blocks);

  rd_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
  sbitmap_vector_zero (rd_out, n_basic_blocks);
}
/* Free reaching def variables.  */

static void
free_rd_mem ()
{
  free (rd_kill);
  free (rd_gen);
  free (reaching_defs);
  free (rd_out);
}
/* Add INSN to the kills of BB.  REGNO, set in BB, is killed by INSN.  */

static void
handle_rd_kill_set (insn, regno, bb)
     rtx insn;
     int regno, bb;
{
  struct reg_set *this_reg;

  for (this_reg = reg_set_table[regno]; this_reg; this_reg = this_reg->next)
    if (BLOCK_NUM (this_reg->insn) != BLOCK_NUM (insn))
      SET_BIT (rd_kill[bb], INSN_CUID (this_reg->insn));
}
/* Compute the set of kills for reaching definitions.  */

static void
compute_kill_rd ()
{
  int bb, cuid;
  unsigned int regno;
  int i;

  /* For each block
       For each set bit in `gen' of the block (i.e. each insn which
	   generates a definition in the block)
	 Call the reg set by the insn corresponding to that bit regx
	 Look at the linked list starting at reg_set_table[regx]
	 For each setting of regx in the linked list, which is not in
	     this block
	   Set the bit in `kill' corresponding to that insn.  */
  for (bb = 0; bb < n_basic_blocks; bb++)
    for (cuid = 0; cuid < max_cuid; cuid++)
      if (TEST_BIT (rd_gen[bb], cuid))
	{
	  rtx insn = CUID_INSN (cuid);
	  rtx pat = PATTERN (insn);

	  if (GET_CODE (insn) == CALL_INSN)
	    {
	      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
		if ((call_used_regs[regno]
		     && regno != STACK_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
		     && regno != HARD_FRAME_POINTER_REGNUM
#endif
#if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
		     && ! (regno == ARG_POINTER_REGNUM
			   && fixed_regs[regno])
#endif
#if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
		     && ! (regno == PIC_OFFSET_TABLE_REGNUM && flag_pic)
#endif
		     && regno != FRAME_POINTER_REGNUM)
		    || global_regs[regno])
		  handle_rd_kill_set (insn, regno, bb);
	    }

	  if (GET_CODE (pat) == PARALLEL)
	    {
	      for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
		{
		  enum rtx_code code = GET_CODE (XVECEXP (pat, 0, i));

		  if ((code == SET || code == CLOBBER)
		      && GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) == REG)
		    handle_rd_kill_set (insn,
					REGNO (XEXP (XVECEXP (pat, 0, i), 0)),
					bb);
		}
	    }
	  else if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == REG)
	    /* Each setting of this register outside of this block
	       must be marked in the set of kills in this block.  */
	    handle_rd_kill_set (insn, REGNO (SET_DEST (pat)), bb);
	}
}
/* Compute the reaching definitions as in
   Compilers Principles, Techniques, and Tools.  Aho, Sethi, Ullman,
   Chapter 10.  It is the same algorithm as used for computing available
   expressions but applied to the gens and kills of reaching definitions.  */

static void
compute_rd ()
{
  int bb, changed, passes;

  for (bb = 0; bb < n_basic_blocks; bb++)
    sbitmap_copy (rd_out[bb] /*dst*/, rd_gen[bb] /*src*/);

  passes = 0;
  changed = 1;
  while (changed)
    {
      changed = 0;
      for (bb = 0; bb < n_basic_blocks; bb++)
	{
	  sbitmap_union_of_preds (reaching_defs[bb], rd_out, bb);
	  changed |= sbitmap_union_of_diff (rd_out[bb], rd_gen[bb],
					    reaching_defs[bb], rd_kill[bb]);
	}
      passes++;
    }

  if (gcse_file)
    fprintf (gcse_file, "reaching def computation: %d passes\n", passes);
}
/* Classic GCSE available expression support.  */

/* Allocate memory for available expression computation.  */

static void
alloc_avail_expr_mem (n_blocks, n_exprs)
     int n_blocks, n_exprs;
{
  ae_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
  sbitmap_vector_zero (ae_kill, n_basic_blocks);

  ae_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
  sbitmap_vector_zero (ae_gen, n_basic_blocks);

  ae_in = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
  sbitmap_vector_zero (ae_in, n_basic_blocks);

  ae_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
  sbitmap_vector_zero (ae_out, n_basic_blocks);

  u_bitmap = (sbitmap) sbitmap_alloc (n_exprs);
  sbitmap_ones (u_bitmap);
}
static void
free_avail_expr_mem ()
{
  free (ae_kill);
  free (ae_gen);
  free (ae_in);
  free (ae_out);
  free (u_bitmap);
}

/* Compute the set of available expressions generated in each basic block.  */

static void
compute_ae_gen ()
{
  int i;
  struct expr *expr;
  struct occr *occr;

  /* For each recorded occurrence of each expression, set ae_gen[bb][expr].
     This is all we have to do because an expression is not recorded if it
     is not available, and the only expressions we want to work with are the
     ones that are recorded.  */
  for (i = 0; i < expr_hash_table_size; i++)
    for (expr = expr_hash_table[i]; expr != 0; expr = expr->next_same_hash)
      for (occr = expr->avail_occr; occr != 0; occr = occr->next)
	SET_BIT (ae_gen[BLOCK_NUM (occr->insn)], expr->bitmap_index);
}
/* Return non-zero if expression X is killed in BB.  */

static int
expr_killed_p (x, bb)
     rtx x;
     int bb;
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      return TEST_BIT (reg_set_in_block[bb], REGNO (x));

    case MEM:
      if (mem_set_in_block[bb])
	return 1;
      else
	return expr_killed_p (XEXP (x, 0), bb);

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 0;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    return expr_killed_p (XEXP (x, i), bb);
	  else if (expr_killed_p (XEXP (x, i), bb))
	    return 1;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (expr_killed_p (XVECEXP (x, i, j), bb))
	    return 1;
    }

  return 0;
}
/* Compute the set of available expressions killed in each basic block.  */

static void
compute_ae_kill (ae_gen, ae_kill)
     sbitmap *ae_gen, *ae_kill;
{
  int bb, i;
  struct expr *expr;

  for (bb = 0; bb < n_basic_blocks; bb++)
    for (i = 0; i < expr_hash_table_size; i++)
      for (expr = expr_hash_table[i]; expr; expr = expr->next_same_hash)
	{
	  /* Skip EXPR if generated in this block.  */
	  if (TEST_BIT (ae_gen[bb], expr->bitmap_index))
	    continue;

	  if (expr_killed_p (expr->expr, bb))
	    SET_BIT (ae_kill[bb], expr->bitmap_index);
	}
}
/* Actually perform the Classic GCSE optimizations.  */

/* Return non-zero if occurrence OCCR of expression EXPR reaches block BB.

   CHECK_SELF_LOOP is non-zero if we should consider a block reaching itself
   as a positive reach.  We want to do this when there are two computations
   of the expression in the block.

   VISITED is a pointer to a working buffer for tracking which BB's have
   been visited.  It is NULL for the top-level call.

   We treat reaching expressions that go through blocks containing the same
   reaching expression as "not reaching".  E.g. if EXPR is generated in blocks
   2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
   2 as not reaching.  The intent is to improve the probability of finding
   only one reaching expression and to reduce register lifetimes by picking
   the closest such expression.  */

static int
expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited)
     struct occr *occr;
     struct expr *expr;
     int bb;
     int check_self_loop;
     char *visited;
{
  edge pred;

  for (pred = BASIC_BLOCK (bb)->pred; pred != NULL; pred = pred->pred_next)
    {
      int pred_bb = pred->src->index;

      if (visited[pred_bb])
	/* This predecessor has already been visited.  Nothing to do.  */
	;
      else if (pred_bb == bb)
	{
	  /* BB loops on itself.  */
	  if (check_self_loop
	      && TEST_BIT (ae_gen[pred_bb], expr->bitmap_index)
	      && BLOCK_NUM (occr->insn) == pred_bb)
	    return 1;

	  visited[pred_bb] = 1;
	}

      /* Ignore this predecessor if it kills the expression.  */
      else if (TEST_BIT (ae_kill[pred_bb], expr->bitmap_index))
	visited[pred_bb] = 1;

      /* Does this predecessor generate this expression?  */
      else if (TEST_BIT (ae_gen[pred_bb], expr->bitmap_index))
	{
	  /* Is this the occurrence we're looking for?
	     Note that there's only one generating occurrence per block
	     so we just need to check the block number.  */
	  if (BLOCK_NUM (occr->insn) == pred_bb)
	    return 1;

	  visited[pred_bb] = 1;
	}

      /* Neither gen nor kill.  */
      else
	{
	  visited[pred_bb] = 1;
	  if (expr_reaches_here_p_work (occr, expr, pred_bb, check_self_loop,
					visited))
	    return 1;
	}
    }

  /* All paths have been checked.  */
  return 0;
}
/* This wrapper for expr_reaches_here_p_work() is to ensure that any
   memory allocated for that function is returned.  */

static int
expr_reaches_here_p (occr, expr, bb, check_self_loop)
     struct occr *occr;
     struct expr *expr;
     int bb;
     int check_self_loop;
{
  int rval;
  char *visited = (char *) xcalloc (n_basic_blocks, 1);

  rval = expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited);

  free (visited);
  return rval;
}
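
/* Illustrative sketch (not part of the original source): the shape of the
   walk above, stripped of the gen/kill bitmaps.  It is a depth-first
   search backward over predecessor edges with a caller-owned visited[]
   buffer, so each query is O(blocks + edges) and the scratch memory is
   freed exactly once by the wrapper.  All names are invented; gen[] and
   kill[] stand in for the ae_gen/ae_kill tests.  */
#if 0
/* Toy CFG: preds[b] lists predecessor block numbers, terminated by -1.  */
static int
example_reaches_p (int bb, int preds[][8], const char *gen,
		   const char *kill, char *visited)
{
  int i;

  for (i = 0; preds[bb][i] >= 0; i++)
    {
      int pred_bb = preds[bb][i];

      if (visited[pred_bb])
	continue;
      visited[pred_bb] = 1;

      if (gen[pred_bb])
	return 1;			/* generated here: positive reach */
      if (! kill[pred_bb]
	  && example_reaches_p (pred_bb, preds, gen, kill, visited))
	return 1;			/* transparent: keep walking */
    }

  return 0;				/* every path blocked or exhausted */
}
#endif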
/* Return the instruction that computes EXPR that reaches INSN's basic block.
   If there is more than one such instruction, return NULL.

   Called only by handle_avail_expr.  */

static rtx
computing_insn (expr, insn)
     struct expr *expr;
     rtx insn;
{
  int bb = BLOCK_NUM (insn);

  if (expr->avail_occr->next == NULL)
    {
      if (BLOCK_NUM (expr->avail_occr->insn) == bb)
	/* The available expression is actually itself
	   (i.e. a loop in the flow graph) so do nothing.  */
	return NULL;

      /* (FIXME) Case that we found a pattern that was created by
	 a substitution that took place.  */
      return expr->avail_occr->insn;
    }
  else
    {
      /* Pattern is computed more than once.
	 Search backwards from this insn to see how many of these
	 computations actually reach this insn.  */
      struct occr *occr;
      rtx insn_computes_expr = NULL;
      int can_reach = 0;

      for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
	{
	  if (BLOCK_NUM (occr->insn) == bb)
	    {
	      /* The expression is generated in this block.
		 The only time we care about this is when the expression
		 is generated later in the block [and thus there's a loop].
		 We let the normal cse pass handle the other cases.  */
	      if (INSN_CUID (insn) < INSN_CUID (occr->insn)
		  && expr_reaches_here_p (occr, expr, bb, 1))
		{
		  can_reach++;
		  if (can_reach > 1)
		    return NULL;

		  insn_computes_expr = occr->insn;
		}
	    }
	  else if (expr_reaches_here_p (occr, expr, bb, 0))
	    {
	      can_reach++;
	      if (can_reach > 1)
		return NULL;

	      insn_computes_expr = occr->insn;
	    }
	}

      if (insn_computes_expr == NULL)
	abort ();

      return insn_computes_expr;
    }
}
/* Return non-zero if the definition in DEF_INSN can reach INSN.
   Only called by can_disregard_other_sets.  */

static int
def_reaches_here_p (insn, def_insn)
     rtx insn, def_insn;
{
  rtx reg;

  if (TEST_BIT (reaching_defs[BLOCK_NUM (insn)], INSN_CUID (def_insn)))
    return 1;

  if (BLOCK_NUM (insn) == BLOCK_NUM (def_insn))
    {
      if (INSN_CUID (def_insn) < INSN_CUID (insn))
	{
	  if (GET_CODE (PATTERN (def_insn)) == PARALLEL)
	    return 1;
	  else if (GET_CODE (PATTERN (def_insn)) == CLOBBER)
	    reg = XEXP (PATTERN (def_insn), 0);
	  else if (GET_CODE (PATTERN (def_insn)) == SET)
	    reg = SET_DEST (PATTERN (def_insn));
	  else
	    abort ();

	  return ! reg_set_between_p (reg, NEXT_INSN (def_insn), insn);
	}
      else
	return 0;
    }

  return 0;
}
/* Return non-zero if *ADDR_THIS_REG can only have one value at INSN.  The
   value returned is the number of definitions that reach INSN.  Returning a
   value of zero means that [maybe] more than one definition reaches INSN and
   the caller can't perform whatever optimization it is trying.  I.e. it is
   always safe to return zero.  */

static int
can_disregard_other_sets (addr_this_reg, insn, for_combine)
     struct reg_set **addr_this_reg;
     rtx insn;
     int for_combine;
{
  int number_of_reaching_defs = 0;
  struct reg_set *this_reg;

  for (this_reg = *addr_this_reg; this_reg != 0; this_reg = this_reg->next)
    if (def_reaches_here_p (insn, this_reg->insn))
      {
	number_of_reaching_defs++;
	/* Ignore parallels for now.  */
	if (GET_CODE (PATTERN (this_reg->insn)) == PARALLEL)
	  return 0;

	if (!for_combine
	    && (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER
		|| ! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
				  SET_SRC (PATTERN (insn)))))
	  /* A setting of the reg to a different value reaches INSN.  */
	  return 0;

	if (number_of_reaching_defs > 1)
	  {
	    /* If in this setting the value the register is being set to is
	       equal to the previous value the register was set to and this
	       setting reaches the insn we are trying to do the substitution
	       on then we are ok.  */
	    if (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER)
	      return 0;
	    else if (! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
				    SET_SRC (PATTERN (insn))))
	      return 0;
	  }

	*addr_this_reg = this_reg;
      }

  return number_of_reaching_defs;
}
/* Expression computed by insn is available and the substitution is legal,
   so try to perform the substitution.

   The result is non-zero if any changes were made.  */

static int
handle_avail_expr (insn, expr)
     rtx insn;
     struct expr *expr;
{
  rtx pat, insn_computes_expr;
  rtx to;
  struct reg_set *this_reg;
  int found_setting, use_src;
  int changed = 0;

  /* We only handle the case where one computation of the expression
     reaches this instruction.  */
  insn_computes_expr = computing_insn (expr, insn);
  if (insn_computes_expr == NULL)
    return 0;

  found_setting = 0;
  use_src = 0;

  /* At this point we know only one computation of EXPR outside of this
     block reaches this insn.  Now try to find a register that the
     expression is computed into.  */
  if (GET_CODE (SET_SRC (PATTERN (insn_computes_expr))) == REG)
    {
      /* This is the case when the available expression that reaches
	 here has already been handled as an available expression.  */
      unsigned int regnum_for_replacing
	= REGNO (SET_SRC (PATTERN (insn_computes_expr)));

      /* If the register was created by GCSE we can't use `reg_set_table',
	 however we know it's set only once.  */
      if (regnum_for_replacing >= max_gcse_regno
	  /* If the register the expression is computed into is set only once,
	     or only one set reaches this insn, we can use it.  */
	  || (((this_reg = reg_set_table[regnum_for_replacing]),
	       this_reg->next == NULL)
	      || can_disregard_other_sets (&this_reg, insn, 0)))
	{
	  use_src = 1;
	  found_setting = 1;
	}
    }

  if (!found_setting)
    {
      unsigned int regnum_for_replacing
	= REGNO (SET_DEST (PATTERN (insn_computes_expr)));

      /* This shouldn't happen.  */
      if (regnum_for_replacing >= max_gcse_regno)
	abort ();

      this_reg = reg_set_table[regnum_for_replacing];

      /* If the register the expression is computed into is set only once,
	 or only one set reaches this insn, use it.  */
      if (this_reg->next == NULL
	  || can_disregard_other_sets (&this_reg, insn, 0))
	found_setting = 1;
    }
  if (found_setting)
    {
      pat = PATTERN (insn);
      if (use_src)
	to = SET_SRC (PATTERN (insn_computes_expr));
      else
	to = SET_DEST (PATTERN (insn_computes_expr));
      changed = validate_change (insn, &SET_SRC (pat), to, 0);

      /* We should be able to ignore the return code from validate_change but
	 to play it safe we check.  */
      if (changed)
	{
	  gcse_subst_count++;
	  if (gcse_file != NULL)
	    {
	      fprintf (gcse_file, "GCSE: Replacing the source in insn %d with",
		       INSN_UID (insn));
	      fprintf (gcse_file, " reg %d %s insn %d\n",
		       REGNO (to), use_src ? "from" : "set in",
		       INSN_UID (insn_computes_expr));
	    }
	}
    }
3131 else if (1 /*expensive_op(this_pattrn->op) && do_expensive_gcse)*/)
3133 /* Insert an insn after insnx that copies the reg set in insnx
3134 into a new pseudo register call this new register REGN.
3135 From insnb until end of basic block or until REGB is set
3136 replace all uses of REGB with REGN. */
3139 to
= gen_reg_rtx (GET_MODE (SET_DEST (PATTERN (insn_computes_expr
))));
3141 /* Generate the new insn. */
3142 /* ??? If the change fails, we return 0, even though we created
3143 an insn. I think this is ok. */
3145 = emit_insn_after (gen_rtx_SET (VOIDmode
, to
,
3147 (insn_computes_expr
))),
3148 insn_computes_expr
);
3150 /* Keep block number table up to date. */
3151 set_block_num (new_insn
, BLOCK_NUM (insn_computes_expr
));
3153 /* Keep register set table up to date. */
3154 record_one_set (REGNO (to
), new_insn
);
3156 gcse_create_count
++;
3157 if (gcse_file
!= NULL
)
3159 fprintf (gcse_file
, "GCSE: Creating insn %d to copy value of reg %d",
3160 INSN_UID (NEXT_INSN (insn_computes_expr
)),
3161 REGNO (SET_SRC (PATTERN (NEXT_INSN (insn_computes_expr
)))));
3162 fprintf (gcse_file
, ", computed in insn %d,\n",
3163 INSN_UID (insn_computes_expr
));
3164 fprintf (gcse_file
, " into newly allocated reg %d\n",
3168 pat
= PATTERN (insn
);
3170 /* Do register replacement for INSN. */
3171 changed
= validate_change (insn
, &SET_SRC (pat
),
3173 (NEXT_INSN (insn_computes_expr
))),
      /* We should be able to ignore the return code from validate_change but
	 to play it safe we check.  */
      if (changed)
	{
	  gcse_subst_count++;
	  if (gcse_file != NULL)
	    {
	      fprintf (gcse_file,
		       "GCSE: Replacing the source in insn %d with reg %d ",
		       INSN_UID (insn),
		       REGNO (SET_DEST (PATTERN (NEXT_INSN
						 (insn_computes_expr)))));
	      fprintf (gcse_file, "set in insn %d\n",
		       INSN_UID (insn_computes_expr));
	    }
	}
    }

  return changed;
}
/* Perform classic GCSE.  This is called by one_classic_gcse_pass after all
   the dataflow analysis has been done.

   The result is non-zero if a change was made.  */

static int
classic_gcse ()
{
  int bb, changed;
  rtx insn;

  /* Note we start at block 1.  */

  changed = 0;
  for (bb = 1; bb < n_basic_blocks; bb++)
    {
      /* Reset tables used to keep track of what's still valid [since the
	 start of the block].  */
      reset_opr_set_tables ();

      for (insn = BLOCK_HEAD (bb);
	   insn != NULL && insn != NEXT_INSN (BLOCK_END (bb));
	   insn = NEXT_INSN (insn))
	{
	  /* Is insn of form (set (pseudo-reg) ...)?  */
	  if (GET_CODE (insn) == INSN
	      && GET_CODE (PATTERN (insn)) == SET
	      && GET_CODE (SET_DEST (PATTERN (insn))) == REG
	      && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_PSEUDO_REGISTER)
	    {
	      rtx pat = PATTERN (insn);
	      rtx src = SET_SRC (pat);
	      struct expr *expr;

	      if (want_to_gcse_p (src)
		  /* Is the expression recorded?  */
		  && ((expr = lookup_expr (src)) != NULL)
		  /* Is the expression available [at the start of the
		     block]?  */
		  && TEST_BIT (ae_in[bb], expr->bitmap_index)
		  /* Are the operands unchanged since the start of the
		     block?  */
		  && oprs_not_set_p (src, insn))
		changed |= handle_avail_expr (insn, expr);
	    }

	  /* Keep track of everything modified by this insn.  */
	  /* ??? Need to be careful w.r.t. mods done to INSN.  */
	  if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	    mark_oprs_set (insn);
	}
    }

  return changed;
}
/* Top level routine to perform one classic GCSE pass.

   Return non-zero if a change was made.  */

static int
one_classic_gcse_pass (pass)
     int pass;
{
  int changed = 0;

  gcse_subst_count = 0;
  gcse_create_count = 0;

  alloc_expr_hash_table (max_cuid);
  alloc_rd_mem (n_basic_blocks, max_cuid);
  compute_expr_hash_table ();
  if (gcse_file)
    dump_hash_table (gcse_file, "Expression", expr_hash_table,
		     expr_hash_table_size, n_exprs);

  if (n_exprs > 0)
    {
      compute_kill_rd ();
      compute_rd ();
      alloc_avail_expr_mem (n_basic_blocks, n_exprs);
      compute_ae_gen ();
      compute_ae_kill (ae_gen, ae_kill);
      compute_available (ae_gen, ae_kill, ae_out, ae_in);
      changed = classic_gcse ();
      free_avail_expr_mem ();
    }

  free_rd_mem ();
  free_expr_hash_table ();

  if (gcse_file)
    {
      fprintf (gcse_file, "\n");
      fprintf (gcse_file, "GCSE of %s, pass %d: %d bytes needed, %d substs,",
	       current_function_name, pass, bytes_used, gcse_subst_count);
      fprintf (gcse_file, "%d insns created\n", gcse_create_count);
    }

  return changed;
}
/* Compute copy/constant propagation working variables.  */

/* Local properties of assignments.  */
static sbitmap *cprop_pavloc;
static sbitmap *cprop_absaltered;

/* Global properties of assignments (computed from the local properties).  */
static sbitmap *cprop_avin;
static sbitmap *cprop_avout;
/* Allocate vars used for copy/const propagation.  N_BLOCKS is the number of
   basic blocks.  N_SETS is the number of sets.  */

static void
alloc_cprop_mem (n_blocks, n_sets)
     int n_blocks, n_sets;
{
  cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
  cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);

  cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
  cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
}
/* Free vars used by copy/const propagation.  */

static void
free_cprop_mem ()
{
  free (cprop_pavloc);
  free (cprop_absaltered);
  free (cprop_avin);
  free (cprop_avout);
}
/* For each block, compute whether X is transparent.  X is either an
   expression or an assignment [though we don't care which, for this context
   an assignment is treated as an expression].  For each block where an
   element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX
   bit in BMAP.  */

static void
compute_transp (x, indx, bmap, set_p)
     rtx x;
     int indx;
     sbitmap *bmap;
     int set_p;
{
  int bb, i, j;
  enum rtx_code code;
  struct reg_set *r;
  const char *fmt;

  /* repeat is used to turn tail-recursion into iteration since GCC
     can't do it when there's no return value.  */
repeat:

  if (x == 0)
    return;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      if (set_p)
	{
	  if (REGNO (x) < FIRST_PSEUDO_REGISTER)
	    {
	      for (bb = 0; bb < n_basic_blocks; bb++)
		if (TEST_BIT (reg_set_in_block[bb], REGNO (x)))
		  SET_BIT (bmap[bb], indx);
	    }
	  else
	    {
	      for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
		SET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
	    }
	}
      else
	{
	  if (REGNO (x) < FIRST_PSEUDO_REGISTER)
	    {
	      for (bb = 0; bb < n_basic_blocks; bb++)
		if (TEST_BIT (reg_set_in_block[bb], REGNO (x)))
		  RESET_BIT (bmap[bb], indx);
	    }
	  else
	    {
	      for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
		RESET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
	    }
	}

      return;

    case MEM:
      if (set_p)
	{
	  for (bb = 0; bb < n_basic_blocks; bb++)
	    if (mem_set_in_block[bb])
	      SET_BIT (bmap[bb], indx);
	}
      else
	{
	  for (bb = 0; bb < n_basic_blocks; bb++)
	    if (mem_set_in_block[bb])
	      RESET_BIT (bmap[bb], indx);
	}

      x = XEXP (x, 0);
      goto repeat;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = XEXP (x, i);
	      goto repeat;
	    }

	  compute_transp (XEXP (x, i), indx, bmap, set_p);
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
    }
}
/* Top level routine to do the dataflow analysis needed by copy/const
   propagation.  */

static void
compute_cprop_data ()
{
  compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, 1);
  compute_available (cprop_pavloc, cprop_absaltered,
		     cprop_avout, cprop_avin);
}
/* Copy/constant propagation.  */

/* Maximum number of register uses in an insn that we handle.  */
#define MAX_USES 8

/* Table of uses found in an insn.
   Allocated statically to avoid alloc/free complexity and overhead.  */
static struct reg_use reg_use_table[MAX_USES];

/* Index into `reg_use_table' while building it.  */
static int reg_use_count;

/* Set up a list of register numbers used in INSN.  The found uses are stored
   in `reg_use_table'.  `reg_use_count' is initialized to zero before entry,
   and contains the number of uses in the table upon exit.

   ??? If a register appears multiple times we will record it multiple times.
   This doesn't hurt anything but it will slow things down.  */
static void
find_used_regs (x)
     rtx x;
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  /* repeat is used to turn tail-recursion into iteration since GCC
     can't do it when there's no return value.  */
repeat:

  if (x == 0)
    return;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      if (reg_use_count == MAX_USES)
	return;

      reg_use_table[reg_use_count].reg_rtx = x;
      reg_use_count++;
      return;

    case PC:
    case CC0:
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case LABEL_REF:
    case CLOBBER:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case ASM_INPUT: /*FIXME*/
      return;

    case SET:
      if (GET_CODE (SET_DEST (x)) == MEM)
	find_used_regs (SET_DEST (x));
      x = SET_SRC (x);
      goto repeat;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = XEXP (x, 0);
	      goto repeat;
	    }

	  find_used_regs (XEXP (x, i));
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  find_used_regs (XVECEXP (x, i, j));
    }
}
/* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
   Returns non-zero if successful.  */

static int
try_replace_reg (from, to, insn)
     rtx from, to, insn;
{
  rtx note;
  rtx src;
  rtx set;
  int success;

  note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
  if (!note)
    note = find_reg_note (insn, REG_EQUIV, NULL_RTX);

  /* If this fails we could try to simplify the result of the
     replacement and attempt to recognize the simplified insn.

     But we need a general simplify_rtx that doesn't have pass
     specific state variables.  I'm not aware of one at the moment.  */

  success = validate_replace_src (from, to, insn);
  set = single_set (insn);

  /* We've failed to do replacement.  Try to add a REG_EQUAL note to not
     lose information.  */
  if (!success && !note)
    {
      if (!set)
	return 0;

      note = REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL,
						   copy_rtx (SET_SRC (set)),
						   REG_NOTES (insn));
    }

  /* Always do the replacement in REG_EQUAL and REG_EQUIV notes.  Also
     try to simplify them.  */
  if (note)
    {
      rtx simplified;

      src = XEXP (note, 0);
      replace_rtx (src, from, to);

      /* Try to simplify resulting note.  */
      simplified = simplify_rtx (src);
      if (simplified)
	{
	  src = simplified;
	  XEXP (note, 0) = src;
	}

      /* REG_EQUAL may get simplified into register.
	 We don't allow that.  Remove that note.  This code ought
	 not to happen, because previous code ought to synthesize
	 a reg-reg move, but be on the safe side.  */
      else if (REG_P (src))
	remove_note (insn, note);
    }

  return success;
}
/* Find a set of REGNOs that are available on entry to INSN's block.  Returns
   NULL if no such set is found.  */

static struct expr *
find_avail_set (regno, insn)
     int regno;
     rtx insn;
{
  /* SET1 contains the last set found that can be returned to the caller for
     use in a substitution.  */
  struct expr *set1 = 0;

  /* Loops are not possible here.  To get a loop we would need two sets
     available at the start of the block containing INSN.  I.e. we would
     need two sets like this available at the start of the block:

       (set (reg X) (reg Y))
       (set (reg Y) (reg X))

     This can not happen since the set of (reg Y) would have killed the
     set of (reg X) making it unavailable at the start of this block.  */
  while (1)
    {
      rtx src;
      struct expr *set = lookup_set (regno, NULL_RTX);

      /* Find a set that is available at the start of the block
	 which contains INSN.  */
      while (set)
	{
	  if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
	    break;
	  set = next_set (regno, set);
	}

      /* If no available set was found we've reached the end of the
	 (possibly empty) copy chain.  */
      if (set == 0)
	break;

      if (GET_CODE (set->expr) != SET)
	abort ();

      src = SET_SRC (set->expr);

      /* We know the set is available.
	 Now check that SRC is ANTLOC (i.e. none of the source operands
	 have changed since the start of the block).

	 If the source operand changed, we may still use it for the next
	 iteration of this loop, but we may not use it for substitutions.  */

      if (CONSTANT_P (src) || oprs_not_set_p (src, insn))
	set1 = set;

      /* If the source of the set is anything except a register, then
	 we have reached the end of the copy chain.  */
      if (GET_CODE (src) != REG)
	break;

      /* Follow the copy chain, i.e. start another iteration of the loop
	 and see if we have an available copy into SRC.  */
      regno = REGNO (src);
    }

  /* SET1 holds the last set that was available and anticipatable at
     INSN.  */
  return set1;
}
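
/* Illustrative sketch (not part of the original source): following the
   copy chain.  With these sets available on entry to INSN's block
   (register numbers invented for the example):

     (set (reg 70) (reg 69))
     (set (reg 69) (reg 68))
     (set (reg 68) (const_int 5))

   a use of (reg 70) walks 70 -> 69 -> 68 -> (const_int 5), and the last
   anticipatable set wins, so the use can be replaced by the constant
   rather than by an intermediate copy.  */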
/* Subroutine of cprop_insn that tries to propagate constants into
   JUMP_INSNS.  INSN must be a conditional jump; COPY is a copy of it
   that we can use for substitutions.
   REG_USED is the use we will try to replace, SRC is the constant we
   will try to substitute for it.
   Returns nonzero if a change was made.  */

static int
cprop_jump (insn, copy, reg_used, src)
     rtx insn, copy;
     struct reg_use *reg_used;
     rtx src;
{
  rtx set = PATTERN (copy);
  rtx temp;

  /* Replace the register with the appropriate constant.  */
  replace_rtx (SET_SRC (set), reg_used->reg_rtx, src);

  temp = simplify_ternary_operation (GET_CODE (SET_SRC (set)),
				     GET_MODE (SET_SRC (set)),
				     GET_MODE (XEXP (SET_SRC (set), 0)),
				     XEXP (SET_SRC (set), 0),
				     XEXP (SET_SRC (set), 1),
				     XEXP (SET_SRC (set), 2));

  /* If no simplification can be made, then try the next
     register.  */
  if (temp == 0)
    return 0;

  SET_SRC (set) = temp;

  /* That may have changed the structure of TEMP, so
     force it to be rerecognized if it has not turned
     into a nop or unconditional jump.  */

  INSN_CODE (copy) = -1;
  if ((SET_DEST (set) == pc_rtx
       && (SET_SRC (set) == pc_rtx
	   || GET_CODE (SET_SRC (set)) == LABEL_REF))
      || recog (PATTERN (copy), copy, NULL) >= 0)
    {
      /* This has either become an unconditional jump
	 or a nop-jump.  We'd like to delete nop jumps
	 here, but doing so confuses gcse.  So we just
	 make the replacement and let later passes
	 sort things out.  */
      PATTERN (insn) = set;
      INSN_CODE (insn) = -1;

      /* One less use of the label this insn used to jump to
	 if we turned this into a NOP jump.  */
      if (SET_SRC (set) == pc_rtx && JUMP_LABEL (insn) != 0)
	--LABEL_NUSES (JUMP_LABEL (insn));

      /* If this has turned into an unconditional jump,
	 then put a barrier after it so that the unreachable
	 code will be deleted.  */
      if (GET_CODE (SET_SRC (set)) == LABEL_REF)
	emit_barrier_after (insn);

      run_jump_opt_after_gcse = 1;

      const_prop_count++;
      if (gcse_file != NULL)
	{
	  fprintf (gcse_file,
		   "CONST-PROP: Replacing reg %d in insn %d with constant ",
		   REGNO (reg_used->reg_rtx), INSN_UID (insn));
	  print_rtl (gcse_file, src);
	  fprintf (gcse_file, "\n");
	}

      return 1;
    }

  return 0;
}
3770 for machines that have CC0. INSN is a single set that stores into CC0;
3771 the insn following it is a conditional jump. REG_USED is the use we will
3772 try to replace, SRC is the constant we will try to substitute for it.
3773 Returns nonzero if a change was made. */
3776 cprop_cc0_jump (insn
, reg_used
, src
)
3778 struct reg_use
*reg_used
;
3781 rtx jump
= NEXT_INSN (insn
);
3782 rtx copy
= copy_rtx (jump
);
3783 rtx set
= PATTERN (copy
);
3785 /* We need to copy the source of the cc0 setter, as cprop_jump is going to
3786 substitute into it. */
3787 replace_rtx (SET_SRC (set
), cc0_rtx
, copy_rtx (SET_SRC (PATTERN (insn
))));
3788 if (! cprop_jump (jump
, copy
, reg_used
, src
))
3791 /* If we succeeded, delete the cc0 setter. */
3792 PUT_CODE (insn
, NOTE
);
3793 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED
;
3794 NOTE_SOURCE_FILE (insn
) = 0;
/* Perform constant and copy propagation on INSN.
   The result is non-zero if a change was made.  */

static int
cprop_insn (insn, alter_jumps)
     rtx insn;
     int alter_jumps;
{
  struct reg_use *reg_used;
  int changed = 0;
  rtx note;

  /* Only propagate into SETs.  Note that a conditional jump is a
     SET with pc_rtx as the destination.  */
  if ((GET_CODE (insn) != INSN
       && GET_CODE (insn) != JUMP_INSN)
      || GET_CODE (PATTERN (insn)) != SET)
    return 0;

  reg_use_count = 0;
  find_used_regs (PATTERN (insn));

  note = find_reg_note (insn, REG_EQUIV, NULL_RTX);
  if (!note)
    note = find_reg_note (insn, REG_EQUAL, NULL_RTX);

  /* We may win even when propagating constants into notes.  */
  if (note)
    find_used_regs (XEXP (note, 0));

  for (reg_used = &reg_use_table[0]; reg_use_count > 0;
       reg_used++, reg_use_count--)
    {
      unsigned int regno = REGNO (reg_used->reg_rtx);
      rtx pat, src;
      struct expr *set;

      /* Ignore registers created by GCSE.
	 We do this because ...  */
      if (regno >= max_gcse_regno)
	continue;

      /* If the register has already been set in this block, there's
	 nothing we can do.  */
      if (! oprs_not_set_p (reg_used->reg_rtx, insn))
	continue;

      /* Find an assignment that sets reg_used and is available
	 at the start of the block.  */
      set = find_avail_set (regno, insn);
      if (! set)
	continue;

      pat = set->expr;
      /* ??? We might be able to handle PARALLELs.  Later.  */
      if (GET_CODE (pat) != SET)
	abort ();

      src = SET_SRC (pat);

      /* Constant propagation.  */
      if (GET_CODE (src) == CONST_INT || GET_CODE (src) == CONST_DOUBLE
	  || GET_CODE (src) == SYMBOL_REF)
	{
	  /* Handle normal insns first.  */
	  if (GET_CODE (insn) == INSN
	      && try_replace_reg (reg_used->reg_rtx, src, insn))
	    {
	      changed = 1;
	      const_prop_count++;
	      if (gcse_file != NULL)
		{
		  fprintf (gcse_file, "CONST-PROP: Replacing reg %d in ",
			   regno);
		  fprintf (gcse_file, "insn %d with constant ",
			   INSN_UID (insn));
		  print_rtl (gcse_file, src);
		  fprintf (gcse_file, "\n");
		}

	      /* The original insn setting reg_used may or may not now be
		 deletable.  We leave the deletion to flow.  */
	    }

	  /* Try to propagate a CONST_INT into a conditional jump.
	     We're pretty specific about what we will handle in this
	     code, we can extend this as necessary over time.

	     Right now the insn in question must look like
	     (set (pc) (if_then_else ...))  */
	  else if (alter_jumps
		   && GET_CODE (insn) == JUMP_INSN
		   && condjump_p (insn)
		   && ! simplejump_p (insn))
	    changed |= cprop_jump (insn, copy_rtx (insn), reg_used, src);

	  /* Similar code for machines that use a pair of CC0 setter and
	     conditional jump insn.  */
	  else if (alter_jumps
		   && GET_CODE (PATTERN (insn)) == SET
		   && SET_DEST (PATTERN (insn)) == cc0_rtx
		   && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
		   && condjump_p (NEXT_INSN (insn))
		   && ! simplejump_p (NEXT_INSN (insn)))
	    changed |= cprop_cc0_jump (insn, reg_used, src);
	}

      /* Copy propagation.  */
      else if (GET_CODE (src) == REG
	       && REGNO (src) >= FIRST_PSEUDO_REGISTER
	       && REGNO (src) != regno)
	{
	  if (try_replace_reg (reg_used->reg_rtx, src, insn))
	    {
	      changed = 1;
	      copy_prop_count++;
	      if (gcse_file != NULL)
		{
		  fprintf (gcse_file, "COPY-PROP: Replacing reg %d in insn %d",
			   regno, INSN_UID (insn));
		  fprintf (gcse_file, " with reg %d\n", REGNO (src));
		}

	      /* The original insn setting reg_used may or may not now be
		 deletable.  We leave the deletion to flow.  */
	      /* FIXME: If it turns out that the insn isn't deletable,
		 then we may have unnecessarily extended register lifetimes
		 and made things worse.  */
	    }
	}
    }

  return changed;
}
/* Forward propagate copies.  This includes copies and constants.  Return
   non-zero if a change was made.  */

static int
cprop (alter_jumps)
     int alter_jumps;
{
  int bb, changed;
  rtx insn;

  /* Note we start at block 1.  */

  changed = 0;
  for (bb = 1; bb < n_basic_blocks; bb++)
    {
      /* Reset tables used to keep track of what's still valid [since the
	 start of the block].  */
      reset_opr_set_tables ();

      for (insn = BLOCK_HEAD (bb);
	   insn != NULL && insn != NEXT_INSN (BLOCK_END (bb));
	   insn = NEXT_INSN (insn))
	if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	  {
	    changed |= cprop_insn (insn, alter_jumps);

	    /* Keep track of everything modified by this insn.  */
	    /* ??? Need to be careful w.r.t. mods done to INSN.  Don't
	       call mark_oprs_set if we turned the insn into a NOTE.  */
	    if (GET_CODE (insn) != NOTE)
	      mark_oprs_set (insn);
	  }
    }

  if (gcse_file != NULL)
    fprintf (gcse_file, "\n");

  return changed;
}
/* Perform one copy/constant propagation pass.
   PASS is the pass count.  */

static int
one_cprop_pass (pass, alter_jumps)
     int pass;
     int alter_jumps;
{
  int changed = 0;

  const_prop_count = 0;
  copy_prop_count = 0;

  alloc_set_hash_table (max_cuid);
  compute_set_hash_table ();
  if (gcse_file)
    dump_hash_table (gcse_file, "SET", set_hash_table, set_hash_table_size,
		     n_sets);
  if (n_sets > 0)
    {
      alloc_cprop_mem (n_basic_blocks, n_sets);
      compute_cprop_data ();
      changed = cprop (alter_jumps);
      free_cprop_mem ();
    }

  free_set_hash_table ();

  if (gcse_file)
    {
      fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, ",
	       current_function_name, pass, bytes_used);
      fprintf (gcse_file, "%d const props, %d copy props\n\n",
	       const_prop_count, copy_prop_count);
    }

  return changed;
}
/* Compute PRE+LCM working variables.  */

/* Local properties of expressions.  */
/* Nonzero for expressions that are transparent in the block.  */
static sbitmap *transp;

/* Nonzero for expressions that are transparent at the end of the block.
   This is only zero for expressions killed by abnormal critical edges
   created by calls.  */
static sbitmap *transpout;

/* Nonzero for expressions that are computed (available) in the block.  */
static sbitmap *comp;

/* Nonzero for expressions that are locally anticipatable in the block.  */
static sbitmap *antloc;

/* Nonzero for expressions where this block is an optimal computation
   point.  */
static sbitmap *pre_optimal;

/* Nonzero for expressions which are redundant in a particular block.  */
static sbitmap *pre_redundant;

/* Nonzero for expressions which should be inserted on a specific edge.  */
static sbitmap *pre_insert_map;

/* Nonzero for expressions which should be deleted in a specific block.  */
static sbitmap *pre_delete_map;

/* Contains the edge_list returned by pre_edge_lcm.  */
static struct edge_list *edge_list;

static sbitmap *temp_bitmap;

/* Redundant insns.  */
static sbitmap pre_redundant_insns;
/* Allocate vars used for PRE analysis.  */

static void
alloc_pre_mem (n_blocks, n_exprs)
     int n_blocks, n_exprs;
{
  transp = sbitmap_vector_alloc (n_blocks, n_exprs);
  comp = sbitmap_vector_alloc (n_blocks, n_exprs);
  antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
  temp_bitmap = sbitmap_vector_alloc (n_blocks, n_exprs);

  pre_optimal = NULL;
  pre_redundant = NULL;
  pre_insert_map = NULL;
  pre_delete_map = NULL;
  ae_in = NULL;
  ae_out = NULL;
  transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
  ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);

  /* pre_insert and pre_delete are allocated later.  */
}
/* Free vars used for PRE analysis.  */

static void
free_pre_mem ()
{
  free (transp);
  free (comp);
  free (antloc);
  free (temp_bitmap);

  if (pre_optimal)
    free (pre_optimal);
  if (pre_redundant)
    free (pre_redundant);
  if (pre_insert_map)
    free (pre_insert_map);
  if (pre_delete_map)
    free (pre_delete_map);
  if (transpout)
    free (transpout);
  if (ae_in)
    free (ae_in);
  if (ae_out)
    free (ae_out);
  if (ae_kill)
    free (ae_kill);

  transp = comp = antloc = NULL;
  pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
  transpout = ae_in = ae_out = ae_kill = NULL;
}
/* Top level routine to do the dataflow analysis needed by PRE.  */

static void
compute_pre_data ()
{
  compute_local_properties (transp, comp, antloc, 0);
  compute_transpout ();
  sbitmap_vector_zero (ae_kill, n_basic_blocks);
  compute_ae_kill (comp, ae_kill);
  edge_list = pre_edge_lcm (gcse_file, n_exprs, transp, comp, antloc,
			    ae_kill, &pre_insert_map, &pre_delete_map);
}
/* Return non-zero if an occurrence of expression EXPR in OCCR_BB would reach
   block BB.

   VISITED is a pointer to a working buffer for tracking which BB's have
   been visited.  It is NULL for the top-level call.

   We treat reaching expressions that go through blocks containing the same
   reaching expression as "not reaching".  E.g. if EXPR is generated in blocks
   2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
   2 as not reaching.  The intent is to improve the probability of finding
   only one reaching expression and to reduce register lifetimes by picking
   the closest such expression.  */

static int
pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited)
     int occr_bb;
     struct expr *expr;
     int bb;
     char *visited;
{
  edge pred;

  for (pred = BASIC_BLOCK (bb)->pred; pred != NULL; pred = pred->pred_next)
    {
      int pred_bb = pred->src->index;

      if (pred->src == ENTRY_BLOCK_PTR
	  /* Has this predecessor already been visited?  */
	  || visited[pred_bb])
	;/* Nothing to do.  */

      /* Does this predecessor generate this expression?  */
      else if (TEST_BIT (comp[pred_bb], expr->bitmap_index))
	{
	  /* Is this the occurrence we're looking for?
	     Note that there's only one generating occurrence per block
	     so we just need to check the block number.  */
	  if (occr_bb == pred_bb)
	    return 1;

	  visited[pred_bb] = 1;
	}

      /* Ignore this predecessor if it kills the expression.  */
      else if (! TEST_BIT (transp[pred_bb], expr->bitmap_index))
	visited[pred_bb] = 1;

      /* Neither gen nor kill.  */
      else
	{
	  visited[pred_bb] = 1;
	  if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
	    return 1;
	}
    }

  /* All paths have been checked.  */
  return 0;
}
/* The wrapper for pre_expr_reaches_here_p_work that ensures that any
   memory allocated for that function is returned.  */

static int
pre_expr_reaches_here_p (occr_bb, expr, bb)
     int occr_bb;
     struct expr *expr;
     int bb;
{
  int rval;
  char *visited = (char *) xcalloc (n_basic_blocks, 1);

  rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);

  free (visited);
  return rval;
}
/* Given an expr, generate RTL which we can insert at the end of a BB,
   or on an edge.  Set the block number of any insns generated to
   the value of BB.  */

static rtx
process_insert_insn (expr)
     struct expr *expr;
{
  rtx reg = expr->reaching_reg;
  rtx pat, copied_expr;
  rtx first_new_insn;

  start_sequence ();
  copied_expr = copy_rtx (expr->expr);
  emit_move_insn (reg, copied_expr);
  first_new_insn = get_insns ();
  pat = gen_sequence ();
  end_sequence ();

  return pat;
}
/* Add EXPR to the end of basic block BB.

   This is used by both PRE and code hoisting.

   For PRE, we want to verify that the expr is either transparent
   or locally anticipatable in the target block.  This check makes
   no sense for code hoisting.  */

static void
insert_insn_end_bb (expr, bb, pre)
     struct expr *expr;
     int bb;
     int pre;
{
  rtx insn = BLOCK_END (bb);
  rtx new_insn;
  rtx reg = expr->reaching_reg;
  int regno = REGNO (reg);
  rtx pat;
  int i;

  pat = process_insert_insn (expr);

  /* If the last insn is a jump, insert EXPR in front [taking care to
     handle cc0, etc. properly].  */

  if (GET_CODE (insn) == JUMP_INSN)
    {
      rtx note;

      /* If this is a jump table, then we can't insert stuff here.  Since
	 we know the previous real insn must be the tablejump, we insert
	 the new instruction just before the tablejump.  */
      if (GET_CODE (PATTERN (insn)) == ADDR_VEC
	  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
	insn = prev_real_insn (insn);

      /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
	 if cc0 isn't set.  */
      note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
      if (note)
	insn = XEXP (note, 0);
      else
	{
	  rtx maybe_cc0_setter = prev_nonnote_insn (insn);
	  if (maybe_cc0_setter
	      && GET_RTX_CLASS (GET_CODE (maybe_cc0_setter)) == 'i'
	      && sets_cc0_p (PATTERN (maybe_cc0_setter)))
	    insn = maybe_cc0_setter;
	}

      /* FIXME: What if something in cc0/jump uses value set in new insn?  */
      new_insn = emit_block_insn_before (pat, insn, BASIC_BLOCK (bb));
    }
  /* Likewise if the last insn is a call, as will happen in the presence
     of exception handling.  */
  else if (GET_CODE (insn) == CALL_INSN)
    {
      HARD_REG_SET parm_regs;
      int nparm_regs;
      rtx p;

      /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
	 we search backward and place the instructions before the first
	 parameter is loaded.  Do this for everyone for consistency and a
	 presumption that we'll get better code elsewhere as well.

	 It should always be the case that we can put these instructions
	 anywhere in the basic block while performing PRE optimizations.  */

      if (pre
	  && !TEST_BIT (antloc[bb], expr->bitmap_index)
	  && !TEST_BIT (transp[bb], expr->bitmap_index))
	abort ();

      /* Since different machines initialize their parameter registers
	 in different orders, assume nothing.  Collect the set of all
	 parameter registers.  */
      CLEAR_HARD_REG_SET (parm_regs);
      nparm_regs = 0;
      for (p = CALL_INSN_FUNCTION_USAGE (insn); p; p = XEXP (p, 1))
	if (GET_CODE (XEXP (p, 0)) == USE
	    && GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
	  {
	    if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
	      abort ();

	    SET_HARD_REG_BIT (parm_regs, REGNO (XEXP (XEXP (p, 0), 0)));
	    nparm_regs++;
	  }

      /* Search backward for the first set of a register in this set.  */
      while (nparm_regs && BLOCK_HEAD (bb) != insn)
	{
	  insn = PREV_INSN (insn);
	  p = single_set (insn);
	  if (p && GET_CODE (SET_DEST (p)) == REG
	      && REGNO (SET_DEST (p)) < FIRST_PSEUDO_REGISTER
	      && TEST_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p))))
	    {
	      CLEAR_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p)));
	      nparm_regs--;
	    }
	}

      /* If we found all the parameter loads, then we want to insert
	 before the first parameter load.

	 If we did not find all the parameter loads, then we might have
	 stopped on the head of the block, which could be a CODE_LABEL.
	 If we inserted before the CODE_LABEL, then we would be putting
	 the insn in the wrong basic block.  In that case, put the insn
	 after the CODE_LABEL.  Also, respect NOTE_INSN_BASIC_BLOCK.  */
      if (GET_CODE (insn) == CODE_LABEL)
	insn = NEXT_INSN (insn);
      else if (GET_CODE (insn) == NOTE
	       && NOTE_LINE_NUMBER (insn) == NOTE_INSN_BASIC_BLOCK)
	insn = NEXT_INSN (insn);

      new_insn = emit_block_insn_before (pat, insn, BASIC_BLOCK (bb));
    }
  else
    {
      new_insn = emit_insn_after (pat, insn);
      BLOCK_END (bb) = new_insn;
    }

  /* Keep block number table up to date.
     Note, PAT could be a multiple insn sequence, we have to make
     sure that each insn in the sequence is handled.  */
  if (GET_CODE (pat) == SEQUENCE)
    {
      for (i = 0; i < XVECLEN (pat, 0); i++)
	{
	  rtx insn = XVECEXP (pat, 0, i);

	  set_block_num (insn, bb);
	  if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	    add_label_notes (PATTERN (insn), new_insn);

	  note_stores (PATTERN (insn), record_set_info, insn);
	}
    }
  else
    {
      add_label_notes (SET_SRC (pat), new_insn);
      set_block_num (new_insn, bb);

      /* Keep register set table up to date.  */
      record_one_set (regno, new_insn);
    }

  gcse_create_count++;

  if (gcse_file)
    {
      fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, ",
	       bb, INSN_UID (new_insn));
      fprintf (gcse_file, "copying expression %d to reg %d\n",
	       expr->bitmap_index, regno);
    }
}
/* Insert partially redundant expressions on edges in the CFG to make
   the expressions fully redundant.  */

static int
pre_edge_insert (edge_list, index_map)
     struct edge_list *edge_list;
     struct expr **index_map;
{
  int e, i, j, num_edges, set_size, did_insert = 0;
  sbitmap *inserted;

  /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
     if it reaches any of the deleted expressions.  */

  set_size = pre_insert_map[0]->size;
  num_edges = NUM_EDGES (edge_list);
  inserted = sbitmap_vector_alloc (num_edges, n_exprs);
  sbitmap_vector_zero (inserted, num_edges);

  for (e = 0; e < num_edges; e++)
    {
      int indx;
      basic_block pred = INDEX_EDGE_PRED_BB (edge_list, e);
      int bb = pred->index;

      for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
	{
	  SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];

	  for (j = indx; insert && j < n_exprs; j++, insert >>= 1)
	    if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
	      {
		struct expr *expr = index_map[j];
		struct occr *occr;

		/* Now look at each deleted occurrence of this expression.  */
		for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
		  {
		    if (! occr->deleted_p)
		      continue;

		    /* Insert this expression on this edge if it would
		       reach the deleted occurrence in BB.  */
		    if (!TEST_BIT (inserted[e], j))
		      {
			rtx insn;
			edge eg = INDEX_EDGE (edge_list, e);

			/* We can't insert anything on an abnormal and
			   critical edge, so we insert the insn at the end of
			   the previous block.  There are several alternatives
			   detailed in Morgan's book P277 (sec 10.5) for
			   handling this situation.  This one is easiest for
			   now.  */
			if ((eg->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
			  insert_insn_end_bb (index_map[j], bb, 0);
			else
			  {
			    insn = process_insert_insn (index_map[j]);
			    insert_insn_on_edge (insn, eg);
			  }

			if (gcse_file)
			  {
			    fprintf (gcse_file, "PRE/HOIST: edge (%d,%d), ",
				     bb,
				     INDEX_EDGE_SUCC_BB (edge_list, e)->index);
			    fprintf (gcse_file, "copy expression %d\n",
				     expr->bitmap_index);
			  }

			SET_BIT (inserted[e], j);
			did_insert = 1;
			gcse_create_count++;
		      }
		  }
	      }
	}
    }

  free (inserted);
  return did_insert;
}
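/* A small illustration of what edge insertion accomplishes (a sketch,
   not output from the pass; the block numbers and expression are
   hypothetical):

       B1: x = a + b         B2: (no computation of a+b)
	     \                 /
	      \               /
	       B3: y = a + b      <- partially redundant

   PRE_INSERT_MAP marks the (B2,B3) edge; inserting `t = a + b' on that
   edge makes the occurrence in B3 fully redundant, so pre_delete can
   rewrite it as the copy `y = t', where t is the expression's
   `reaching_reg'.  */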
/* Copy the result of INSN to REG.  INDX is the expression number.  */

static void
pre_insert_copy_insn (expr, insn)
     struct expr *expr;
     rtx insn;
{
  rtx reg = expr->reaching_reg;
  int regno = REGNO (reg);
  int indx = expr->bitmap_index;
  rtx set = single_set (insn);
  rtx new_insn;
  int bb = BLOCK_NUM (insn);

  if (!set)
    abort ();

  new_insn = emit_insn_after (gen_rtx_SET (VOIDmode, reg, SET_DEST (set)),
			      insn);

  /* Keep block number table up to date.  */
  set_block_num (new_insn, bb);

  /* Keep register set table up to date.  */
  record_one_set (regno, new_insn);
  if (insn == BLOCK_END (bb))
    BLOCK_END (bb) = new_insn;

  gcse_create_count++;

  if (gcse_file)
    fprintf (gcse_file,
	     "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
	     BLOCK_NUM (insn), INSN_UID (new_insn), indx,
	     INSN_UID (insn), regno);
}
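/* The copy emitted above is simply
   (set (reg RCH) (SET_DEST (single_set (insn)))).  For instance, given
   an available occurrence (set (reg 65) (plus:SI (reg 60) (reg 61)))
   and a `reaching_reg' of (reg 70), we emit (set (reg 70) (reg 65))
   immediately after it (illustrative register numbers only).  */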
/* Copy available expressions that reach the redundant expression
   to `reaching_reg'.  */

static void
pre_insert_copies ()
{
  int i;
  struct expr *expr;
  struct occr *occr;
  struct occr *avail;

  /* For each available expression in the table, copy the result to
     `reaching_reg' if the expression reaches a deleted one.

     ??? The current algorithm is rather brute force.
     Need to do some profiling.  */

  for (i = 0; i < expr_hash_table_size; i++)
    for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
      {
	/* If the basic block isn't reachable, PPOUT will be TRUE.  However,
	   we don't want to insert a copy here because the expression may not
	   really be redundant.  So only insert an insn if the expression was
	   deleted.  This test also avoids further processing if the
	   expression wasn't deleted anywhere.  */
	if (expr->reaching_reg == NULL)
	  continue;

	for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
	  {
	    if (! occr->deleted_p)
	      continue;

	    for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
	      {
		rtx insn = avail->insn;

		/* No need to handle this one if handled already.  */
		if (avail->copied_p)
		  continue;

		/* Don't handle this one if it's a redundant one.  */
		if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
		  continue;

		/* Or if the expression doesn't reach the deleted one.  */
		if (! pre_expr_reaches_here_p (BLOCK_NUM (avail->insn), expr,
					       BLOCK_NUM (occr->insn)))
		  continue;

		/* Copy the result of avail to reaching_reg.  */
		pre_insert_copy_insn (expr, insn);
		avail->copied_p = 1;
	      }
	  }
      }
}
/* Delete redundant computations.
   Deletion is done by changing the insn to copy the `reaching_reg' of
   the expression into the result of the SET.  It is left to later passes
   (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.

   Returns non-zero if a change is made.  */
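/* Illustration (hypothetical insn and register numbers): a redundant
   occurrence

     (insn 42 ... (set (reg 65) (plus:SI (reg 60) (reg 61))))

   whose expression has `reaching_reg' (reg 70) is rewritten in place,
   via validate_change, into the copy

     (insn 42 ... (set (reg 65) (reg 70)))

   and insn 42 is marked in PRE_REDUNDANT_INSNS.  */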
static int
pre_delete ()
{
  int i, bb, changed;
  struct expr *expr;
  struct occr *occr;

  /* Compute the expressions which are redundant and need to be replaced by
     copies from the reaching reg to the target reg.  */
  for (bb = 0; bb < n_basic_blocks; bb++)
    sbitmap_copy (temp_bitmap[bb], pre_delete_map[bb]);

  changed = 0;
  for (i = 0; i < expr_hash_table_size; i++)
    for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
      {
	int indx = expr->bitmap_index;

	/* We only need to search antic_occr since we require
	   ANTLOC != 0.  */
	for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
	  {
	    rtx insn = occr->insn;
	    rtx set;
	    int bb = BLOCK_NUM (insn);

	    if (TEST_BIT (temp_bitmap[bb], indx))
	      {
		set = single_set (insn);
		if (! set)
		  abort ();

		/* Create a pseudo-reg to store the result of reaching
		   expressions into.  Get the mode for the new pseudo from
		   the mode of the original destination pseudo.  */
		if (expr->reaching_reg == NULL)
		  expr->reaching_reg
		    = gen_reg_rtx (GET_MODE (SET_DEST (set)));

		/* In theory this should never fail since we're creating
		   a reg->reg copy.

		   However, on the x86 some of the movXX patterns actually
		   contain clobbers of scratch regs.  This may cause the
		   insn created by validate_change to not match any pattern
		   and thus cause validate_change to fail.  */
		if (validate_change (insn, &SET_SRC (set),
				     expr->reaching_reg, 0))
		  {
		    occr->deleted_p = 1;
		    SET_BIT (pre_redundant_insns, INSN_CUID (insn));
		    changed = 1;
		    gcse_subst_count++;
		  }

		if (gcse_file)
		  {
		    fprintf (gcse_file,
			     "PRE: redundant insn %d (expression %d) in ",
			     INSN_UID (insn), indx);
		    fprintf (gcse_file, "bb %d, reaching reg is %d\n",
			     bb, REGNO (expr->reaching_reg));
		  }
	      }
	  }
      }

  return changed;
}
/* Perform GCSE optimizations using PRE.
   This is called by one_pre_gcse_pass after all the dataflow analysis
   has been done.

   This is based on the original Morel-Renvoise paper, Fred Chow's thesis, and
   lazy code motion from Knoop, Ruthing and Steffen as described in Advanced
   Compiler Design and Implementation.

   ??? A new pseudo reg is created to hold the reaching expression.  The nice
   thing about the classical approach is that it would try to use an existing
   reg.  If the register can't be adequately optimized [i.e. we introduce
   reload problems], one could add a pass here to propagate the new register
   through the block.

   ??? We don't handle single sets in PARALLELs because we're [currently] not
   able to copy the rest of the parallel when we insert copies to create full
   redundancies from partial redundancies.  However, there's no reason why we
   can't handle PARALLELs in the cases where there are no partial
   redundancies.  */
static int
pre_gcse ()
{
  int i, did_insert;
  int changed;
  struct expr **index_map;
  struct expr *expr;

  /* Compute a mapping from expression number (`bitmap_index') to
     hash table entry.  */

  index_map = (struct expr **) xcalloc (n_exprs, sizeof (struct expr *));
  for (i = 0; i < expr_hash_table_size; i++)
    for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
      index_map[expr->bitmap_index] = expr;

  /* Reset bitmap used to track which insns are redundant.  */
  pre_redundant_insns = sbitmap_alloc (max_cuid);
  sbitmap_zero (pre_redundant_insns);

  /* Delete the redundant insns first so that
     - we know what register to use for the new insns and for the other
       ones with reaching expressions
     - we know which insns are redundant when we go to create copies  */
  changed = pre_delete ();

  did_insert = pre_edge_insert (edge_list, index_map);

  /* In other places with reaching expressions, copy the expression to the
     specially allocated pseudo-reg that reaches the redundant expr.  */
  pre_insert_copies ();
  if (did_insert)
    {
      commit_edge_insertions ();
      changed = 1;
    }

  free (index_map);
  free (pre_redundant_insns);

  return changed;
}
/* Top level routine to perform one PRE GCSE pass.

   Return non-zero if a change was made.  */

static int
one_pre_gcse_pass (pass)
     int pass;
{
  int changed = 0;

  gcse_subst_count = 0;
  gcse_create_count = 0;

  alloc_expr_hash_table (max_cuid);
  add_noreturn_fake_exit_edges ();
  compute_expr_hash_table ();
  if (gcse_file)
    dump_hash_table (gcse_file, "Expression", expr_hash_table,
		     expr_hash_table_size, n_exprs);

  if (n_exprs > 0)
    {
      alloc_pre_mem (n_basic_blocks, n_exprs);
      compute_pre_data ();
      changed |= pre_gcse ();
      free_edge_list (edge_list);
      free_pre_mem ();
    }

  remove_fake_edges ();
  free_expr_hash_table ();

  if (gcse_file)
    {
      fprintf (gcse_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
	       current_function_name, pass, bytes_used);
      fprintf (gcse_file, "%d substs, %d insns created\n",
	       gcse_subst_count, gcse_create_count);
    }

  return changed;
}
/* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
   We have to add REG_LABEL notes, because the following loop optimization
   pass requires them.  */

/* ??? This is very similar to the loop.c add_label_notes function.  We
   could probably share code here.  */

/* ??? If there was a jump optimization pass after gcse and before loop,
   then we would not need to do this here, because jump would add the
   necessary REG_LABEL notes.  */

static void
add_label_notes (x, insn)
     rtx x;
     rtx insn;
{
  enum rtx_code code = GET_CODE (x);
  int i, j;
  const char *fmt;

  if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
    {
      /* This code used to ignore labels that referred to dispatch tables to
	 avoid flow generating (slightly) worse code.

	 We no longer ignore such label references (see LABEL_REF handling in
	 mark_jump_label for additional information).  */
      REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_LABEL, XEXP (x, 0),
					    REG_NOTES (insn));
      return;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	add_label_notes (XEXP (x, i), insn);
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  add_label_notes (XVECEXP (x, i, j), insn);
    }
}
/* Compute transparent outgoing information for each block.

   An expression is transparent to an edge unless it is killed by
   the edge itself.  This can only happen with abnormal control flow,
   when the edge is traversed through a call.  This happens with
   non-local labels and exceptions.

   This would not be necessary if we split the edge.  While this is
   normally impossible for abnormal critical edges, with some effort
   it should be possible with exception handling, since we still have
   control over which handler should be invoked.  But due to increased
   EH table sizes, this may not be worthwhile.  */
static void
compute_transpout ()
{
  int bb;
  int i;
  struct expr *expr;

  sbitmap_vector_ones (transpout, n_basic_blocks);

  for (bb = 0; bb < n_basic_blocks; ++bb)
    {
      /* Note that flow inserted a nop at the end of basic blocks that
	 end in call instructions for reasons other than abnormal
	 control flow.  */
      if (GET_CODE (BLOCK_END (bb)) != CALL_INSN)
	continue;

      for (i = 0; i < expr_hash_table_size; i++)
	for (expr = expr_hash_table[i]; expr; expr = expr->next_same_hash)
	  if (GET_CODE (expr->expr) == MEM)
	    {
	      if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
		  && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
		continue;

	      /* ??? Optimally, we would use interprocedural alias
		 analysis to determine if this mem is actually killed
		 by this call.  */
	      RESET_BIT (transpout[bb], expr->bitmap_index);
	    }
    }
}
/* Removal of useless null pointer checks.  */

/* Called via note_stores.  X is set by SETTER.  If X is a register we must
   invalidate nonnull_local and set nonnull_killed.  DATA is really a
   `null_pointer_info *'.

   We ignore hard registers.  */
static void
invalidate_nonnull_info (x, setter, data)
     rtx x;
     rtx setter ATTRIBUTE_UNUSED;
     void *data;
{
  int regno;
  struct null_pointer_info *npi = (struct null_pointer_info *) data;

  while (GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);

  /* Ignore anything that is not a register or is a hard register.  */
  if (GET_CODE (x) != REG
      || REGNO (x) < npi->min_reg
      || REGNO (x) >= npi->max_reg)
    return;

  regno = REGNO (x) - npi->min_reg;

  RESET_BIT (npi->nonnull_local[npi->current_block], regno);
  SET_BIT (npi->nonnull_killed[npi->current_block], regno);
}
/* Do null-pointer check elimination for the registers indicated in
   NPI.  NONNULL_AVIN and NONNULL_AVOUT are pre-allocated sbitmaps;
   they are not our responsibility to free.  */

static void
delete_null_pointer_checks_1 (block_reg, nonnull_avin, nonnull_avout, npi)
     unsigned int *block_reg;
     sbitmap *nonnull_avin;
     sbitmap *nonnull_avout;
     struct null_pointer_info *npi;
{
  int bb;
  int current_block;
  sbitmap *nonnull_local = npi->nonnull_local;
  sbitmap *nonnull_killed = npi->nonnull_killed;

  /* Compute local properties, nonnull and killed.  A register will have
     the nonnull property if at the end of the current block its value is
     known to be nonnull.  The killed property indicates that somewhere in
     the block any information we had about the register is killed.

     Note that a register can have both properties in a single block.  That
     indicates that it's killed, then later in the block a new value is
     computed.  */
  sbitmap_vector_zero (nonnull_local, n_basic_blocks);
  sbitmap_vector_zero (nonnull_killed, n_basic_blocks);

  for (current_block = 0; current_block < n_basic_blocks; current_block++)
    {
      rtx insn, stop_insn;

      /* Set the current block for invalidate_nonnull_info.  */
      npi->current_block = current_block;

      /* Scan each insn in the basic block looking for memory references and
	 null pointer checks.  */
      stop_insn = NEXT_INSN (BLOCK_END (current_block));
      for (insn = BLOCK_HEAD (current_block);
	   insn != stop_insn;
	   insn = NEXT_INSN (insn))
	{
	  rtx set;
	  rtx reg;

	  /* Ignore anything that is not a normal insn.  */
	  if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
	    continue;

	  /* Basically ignore anything that is not a simple SET.  We do have
	     to make sure to invalidate nonnull_local and set nonnull_killed
	     for such insns though.  */
	  set = single_set (insn);
	  if (!set)
	    {
	      note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
	      continue;
	    }

	  /* See if we've got a usable memory load.  We handle it first
	     in case it uses its address register as a dest (which kills
	     the nonnull property).  */
	  if (GET_CODE (SET_SRC (set)) == MEM
	      && GET_CODE ((reg = XEXP (SET_SRC (set), 0))) == REG
	      && REGNO (reg) >= npi->min_reg
	      && REGNO (reg) < npi->max_reg)
	    SET_BIT (nonnull_local[current_block],
		     REGNO (reg) - npi->min_reg);

	  /* Now invalidate stuff clobbered by this insn.  */
	  note_stores (PATTERN (insn), invalidate_nonnull_info, npi);

	  /* And handle stores; we do these last since any sets in INSN can
	     not kill the nonnull property if it is derived from a MEM
	     appearing in a SET_DEST.  */
	  if (GET_CODE (SET_DEST (set)) == MEM
	      && GET_CODE ((reg = XEXP (SET_DEST (set), 0))) == REG
	      && REGNO (reg) >= npi->min_reg
	      && REGNO (reg) < npi->max_reg)
	    SET_BIT (nonnull_local[current_block],
		     REGNO (reg) - npi->min_reg);
	}
    }
  /* Now compute global properties based on the local properties.  This
     is a classic global availability algorithm.  */
  compute_available (nonnull_local, nonnull_killed,
		     nonnull_avout, nonnull_avin);
  /* Now look at each bb and see if it ends with a compare of a value
     against zero.  */
  for (bb = 0; bb < n_basic_blocks; bb++)
    {
      rtx last_insn = BLOCK_END (bb);
      rtx condition, earliest;
      int compare_and_branch;

      /* Since MIN_REG is always at least FIRST_PSEUDO_REGISTER, and
	 since BLOCK_REG[BB] is zero if this block did not end with a
	 comparison against zero, this condition works.  */
      if (block_reg[bb] < npi->min_reg
	  || block_reg[bb] >= npi->max_reg)
	continue;

      /* LAST_INSN is a conditional jump.  Get its condition.  */
      condition = get_condition (last_insn, &earliest);

      /* If we can't determine the condition, then skip.  */
      if (! condition)
	continue;

      /* Is the register known to have a nonzero value?  */
      if (!TEST_BIT (nonnull_avout[bb], block_reg[bb] - npi->min_reg))
	continue;

      /* Try to compute whether the compare/branch at the loop end is one or
	 two instructions.  */
      if (earliest == last_insn)
	compare_and_branch = 1;
      else if (earliest == prev_nonnote_insn (last_insn))
	compare_and_branch = 2;
      else
	continue;

      /* We know the register in this comparison is nonnull at exit from
	 this block.  We can optimize this comparison.  */
      if (GET_CODE (condition) == NE)
	{
	  rtx new_jump;

	  new_jump = emit_jump_insn_before (gen_jump (JUMP_LABEL (last_insn)),
					    last_insn);
	  JUMP_LABEL (new_jump) = JUMP_LABEL (last_insn);
	  LABEL_NUSES (JUMP_LABEL (new_jump))++;
	  emit_barrier_after (new_jump);
	}

      delete_insn (last_insn);
      if (compare_and_branch == 2)
	delete_insn (earliest);

      /* Don't check this block again.  (Note that BLOCK_END is
	 invalid here; we deleted the last instruction in the
	 block.)  */
      block_reg[bb] = 0;
    }
}
/* Find EQ/NE comparisons against zero which can be (indirectly) evaluated
   at compile time.

   This is conceptually similar to global constant/copy propagation and
   classic global CSE (it even uses the same dataflow equations as cprop).

   If a register is used as a memory address with the form (mem (reg)), then
   we know that REG can not be zero at that point in the program.  Any
   instruction which sets REG "kills" this property.

   So, if every path leading to a conditional branch has an available memory
   reference of that form, then we know the register can not have the value
   zero at the conditional branch.

   So we merely need to compute the local properties and propagate that data
   around the cfg, then optimize where possible.

   We run this pass two times.  Once before CSE, then again after CSE.  This
   has proven to be the most profitable approach.  It is rare for new
   optimization opportunities of this nature to appear after the first CSE
   pass.

   ??? This could probably be integrated with global cprop with a little
   work.  */
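/* For example (a C-level sketch; the pass actually works on the RTL
   form described above):

     x = *p;            (mem (reg P)) proves P is nonnull here
     ...                no assignment to `p' on any path
     if (p == 0)        condition is known false
       abort ();        unreachable; the check is deleted

   If some path from the load to the test assigns `p', the property is
   killed and the check survives.  */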
void
delete_null_pointer_checks (f)
     rtx f ATTRIBUTE_UNUSED;
{
  sbitmap *nonnull_avin, *nonnull_avout;
  unsigned int *block_reg;
  int bb;
  int reg;
  int regs_per_pass;
  int max_reg;
  struct null_pointer_info npi;

  /* If we have only a single block, then there's nothing to do.  */
  if (n_basic_blocks <= 1)
    return;

  /* Trying to perform global optimizations on flow graphs which have
     a high connectivity will take a long time and is unlikely to be
     particularly useful.

     In normal circumstances a cfg should have about twice as many edges
     as blocks.  But we do not want to punish small functions which have
     a couple of switch statements.  So we require a relatively large number
     of basic blocks and the ratio of edges to blocks to be high.  */
  if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
    return;

  /* We need four bitmaps, each with a bit for each register in each
     basic block.  */
  max_reg = max_reg_num ();
  regs_per_pass = get_bitmap_width (4, n_basic_blocks, max_reg);

  /* Allocate bitmaps to hold local and global properties.  */
  npi.nonnull_local = sbitmap_vector_alloc (n_basic_blocks, regs_per_pass);
  npi.nonnull_killed = sbitmap_vector_alloc (n_basic_blocks, regs_per_pass);
  nonnull_avin = sbitmap_vector_alloc (n_basic_blocks, regs_per_pass);
  nonnull_avout = sbitmap_vector_alloc (n_basic_blocks, regs_per_pass);

  /* Go through the basic blocks, seeing whether or not each block
     ends with a conditional branch whose condition is a comparison
     against zero.  Record the register compared in BLOCK_REG.  */
  block_reg = (unsigned int *) xcalloc (n_basic_blocks,
					sizeof (unsigned int));
  for (bb = 0; bb < n_basic_blocks; bb++)
    {
      rtx last_insn = BLOCK_END (bb);
      rtx condition, earliest, reg;

      /* We only want conditional branches.  */
      if (GET_CODE (last_insn) != JUMP_INSN
	  || !condjump_p (last_insn)
	  || simplejump_p (last_insn))
	continue;

      /* LAST_INSN is a conditional jump.  Get its condition.  */
      condition = get_condition (last_insn, &earliest);

      /* If we were unable to get the condition, or it is not an equality
	 comparison against zero, then there's nothing we can do.  */
      if (!condition
	  || (GET_CODE (condition) != NE && GET_CODE (condition) != EQ)
	  || GET_CODE (XEXP (condition, 1)) != CONST_INT
	  || (XEXP (condition, 1)
	      != CONST0_RTX (GET_MODE (XEXP (condition, 0)))))
	continue;

      /* We must be checking a register against zero.  */
      reg = XEXP (condition, 0);
      if (GET_CODE (reg) != REG)
	continue;

      block_reg[bb] = REGNO (reg);
    }

  /* Go through the algorithm for each block of registers.  */
  for (reg = FIRST_PSEUDO_REGISTER; reg < max_reg; reg += regs_per_pass)
    {
      npi.min_reg = reg;
      npi.max_reg = MIN (reg + regs_per_pass, max_reg);
      delete_null_pointer_checks_1 (block_reg, nonnull_avin,
				    nonnull_avout, &npi);
    }

  /* Free the table of registers compared at the end of every block.  */
  free (block_reg);

  /* Free bitmaps.  */
  free (npi.nonnull_local);
  free (npi.nonnull_killed);
  free (nonnull_avin);
  free (nonnull_avout);
}
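/* An illustration of the register-blocking scheme above (hypothetical
   numbers): with max_reg == 10000 and get_bitmap_width returning a
   regs_per_pass of 1024, the loop above runs the analysis roughly ten
   times, each pass tracking nonnull-ness for one window of 1024
   registers.  This bounds the memory used by the four bitmap vectors
   no matter how many pseudo registers the function uses.  */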
/* Code Hoisting variables and subroutines.  */

/* Very busy expressions.  */
static sbitmap *hoist_vbein;
static sbitmap *hoist_vbeout;

/* Hoistable expressions.  */
static sbitmap *hoist_exprs;

/* Dominator bitmaps.  */
static sbitmap *dominators;

/* ??? We could compute post dominators and run this algorithm in
   reverse to perform tail merging; doing so would probably be
   more effective than the tail merging code in jump.c.

   It's unclear if tail merging could be run in parallel with
   code hoisting.  It would be nice.  */
/* Allocate vars used for code hoisting analysis.  */

static void
alloc_code_hoist_mem (n_blocks, n_exprs)
     int n_blocks, n_exprs;
{
  antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
  transp = sbitmap_vector_alloc (n_blocks, n_exprs);
  comp = sbitmap_vector_alloc (n_blocks, n_exprs);

  hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
  hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
  hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
  transpout = sbitmap_vector_alloc (n_blocks, n_exprs);

  dominators = sbitmap_vector_alloc (n_blocks, n_blocks);
}
/* Free vars used for code hoisting analysis.  */

static void
free_code_hoist_mem ()
{
  free (antloc);
  free (transp);
  free (comp);

  free (hoist_vbein);
  free (hoist_vbeout);
  free (hoist_exprs);
  free (transpout);

  free (dominators);
}
/* Compute the very busy expressions at entry/exit from each block.

   An expression is very busy if all paths from a given point
   compute the expression.  */
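/* In dataflow terms (a sketch using the bitmaps allocated above):

     VBEIN(b)  = ANTLOC(b) | (VBEOUT(b) & TRANSP(b))
     VBEOUT(b) = intersection over all successors s of VBEIN(s)
		 (empty for the last block)

   iterated to a fixed point, which is what the function below
   computes.  */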
static void
compute_code_hoist_vbeinout ()
{
  int bb, changed, passes;

  sbitmap_vector_zero (hoist_vbeout, n_basic_blocks);
  sbitmap_vector_zero (hoist_vbein, n_basic_blocks);

  passes = 0;
  changed = 1;
  while (changed)
    {
      changed = 0;

      /* We scan the blocks in the reverse order to speed up
	 the convergence.  */
      for (bb = n_basic_blocks - 1; bb >= 0; bb--)
	{
	  changed |= sbitmap_a_or_b_and_c (hoist_vbein[bb], antloc[bb],
					   hoist_vbeout[bb], transp[bb]);
	  if (bb != n_basic_blocks - 1)
	    sbitmap_intersection_of_succs (hoist_vbeout[bb], hoist_vbein, bb);
	}

      passes++;
    }

  if (gcse_file)
    fprintf (gcse_file, "hoisting vbeinout computation: %d passes\n", passes);
}
/* Top level routine to do the dataflow analysis needed by code hoisting.  */

static void
compute_code_hoist_data ()
{
  compute_local_properties (transp, comp, antloc, 0);
  compute_transpout ();
  compute_code_hoist_vbeinout ();
  compute_flow_dominators (dominators, NULL);
  if (gcse_file)
    fprintf (gcse_file, "\n");
}
/* Determine if the expression identified by EXPR_INDEX would
   reach BB unimpaired if it was placed at the end of EXPR_BB.

   It's unclear exactly what Muchnick meant by "unimpaired".  It seems
   to me that the expression must either be computed or transparent in
   *every* block in the path(s) from EXPR_BB to BB.  Any other definition
   would allow the expression to be hoisted out of loops, even if
   the expression wasn't a loop invariant.

   Contrast this to reachability for PRE where an expression is
   considered reachable if *any* path reaches instead of *all*
   paths.  */
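/* Concretely: if two paths lead from EXPR_BB to BB and the expression
   is transparent along one but killed along the other, PRE's *any*-path
   test would succeed while this *all*-paths test fails.  That stricter
   test is what prevents hoisting a non-invariant expression out of a
   loop.  */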
static int
hoist_expr_reaches_here_p (expr_bb, expr_index, bb, visited)
     int expr_bb;
     int expr_index;
     int bb;
     char *visited;
{
  edge pred;
  int visited_allocated_locally = 0;

  if (visited == NULL)
    {
      visited_allocated_locally = 1;
      visited = xcalloc (n_basic_blocks, 1);
    }

  visited[expr_bb] = 1;
  for (pred = BASIC_BLOCK (bb)->pred; pred != NULL; pred = pred->pred_next)
    {
      int pred_bb = pred->src->index;

      if (pred->src == ENTRY_BLOCK_PTR)
	break;
      else if (visited[pred_bb])
	continue;

      /* Does this predecessor generate this expression?  */
      else if (TEST_BIT (comp[pred_bb], expr_index))
	continue;
      else if (! TEST_BIT (transp[pred_bb], expr_index))
	break;

      /* Not killed.  */
      else
	{
	  visited[pred_bb] = 1;
	  if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
					   pred_bb, visited))
	    break;
	}
    }

  if (visited_allocated_locally)
    free (visited);

  return (pred == NULL);
}
/* Actually perform code hoisting.  */

static void
hoist_code ()
{
  int bb, dominated, i;
  struct expr **index_map;
  struct expr *expr;

  sbitmap_vector_zero (hoist_exprs, n_basic_blocks);

  /* Compute a mapping from expression number (`bitmap_index') to
     hash table entry.  */

  index_map = (struct expr **) xcalloc (n_exprs, sizeof (struct expr *));
  for (i = 0; i < expr_hash_table_size; i++)
    for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
      index_map[expr->bitmap_index] = expr;
  /* Walk over each basic block looking for potentially hoistable
     expressions; nothing gets hoisted from the entry block.  */
  for (bb = 0; bb < n_basic_blocks; bb++)
    {
      int found = 0;
      int insn_inserted_p;

      /* Examine each expression that is very busy at the exit of this
	 block.  These are the potentially hoistable expressions.  */
      for (i = 0; i < hoist_vbeout[bb]->n_bits; i++)
	{
	  int hoistable = 0;

	  if (TEST_BIT (hoist_vbeout[bb], i) && TEST_BIT (transpout[bb], i))
	    {
	      /* We've found a potentially hoistable expression; now
		 we look at every block BB dominates to see if it
		 computes the expression.  */
	      for (dominated = 0; dominated < n_basic_blocks; dominated++)
		{
		  /* Ignore self dominance.  */
		  if (bb == dominated
		      || ! TEST_BIT (dominators[dominated], bb))
		    continue;

		  /* We've found a dominated block; now see if it computes
		     the busy expression and whether or not moving that
		     expression to the "beginning" of that block is safe.  */
		  if (! TEST_BIT (antloc[dominated], i))
		    continue;

		  /* Note if the expression would reach the dominated block
		     unimpaired if it was placed at the end of BB.

		     Keep track of how many times this expression is hoistable
		     from a dominated block into BB.  */
		  if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
		    hoistable++;
		}

	      /* If we found more than one hoistable occurrence of this
		 expression, then note it in the bitmap of expressions to
		 hoist.  It makes no sense to hoist things which are computed
		 in only one BB, and doing so tends to pessimize register
		 allocation.  One could increase this value to try harder
		 to avoid any possible code expansion due to register
		 allocation issues; however, experiments have shown that
		 the vast majority of hoistable expressions are only movable
		 from two successors, so raising this threshold is likely
		 to nullify any benefit we get from code hoisting.  */
	      if (hoistable > 1)
		{
		  SET_BIT (hoist_exprs[bb], i);
		  found = 1;
		}
	    }
	}

      /* If we found nothing to hoist, then quit now.  */
      if (! found)
	continue;
      /* Loop over all the hoistable expressions.  */
      for (i = 0; i < hoist_exprs[bb]->n_bits; i++)
	{
	  /* We want to insert the expression into BB only once, so
	     note when we've inserted it.  */
	  insn_inserted_p = 0;

	  /* These tests should be the same as the tests above.  */
	  if (TEST_BIT (hoist_vbeout[bb], i))
	    {
	      /* We've found a potentially hoistable expression; now
		 we look at every block BB dominates to see if it
		 computes the expression.  */
	      for (dominated = 0; dominated < n_basic_blocks; dominated++)
		{
		  /* Ignore self dominance.  */
		  if (bb == dominated
		      || ! TEST_BIT (dominators[dominated], bb))
		    continue;

		  /* We've found a dominated block; now see if it computes
		     the busy expression and whether or not moving that
		     expression to the "beginning" of that block is safe.  */
		  if (! TEST_BIT (antloc[dominated], i))
		    continue;

		  /* The expression is computed in the dominated block and
		     it would be safe to compute it at the start of the
		     dominated block.  Now we have to determine if the
		     expression would reach the dominated block if it was
		     placed at the end of BB.  */
		  if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
		    {
		      struct expr *expr = index_map[i];
		      struct occr *occr = expr->antic_occr;
		      rtx insn;
		      rtx set;

		      /* Find the right occurrence of this expression.  */
		      while (occr && BLOCK_NUM (occr->insn) != dominated)
			occr = occr->next;

		      /* Should never happen.  */
		      if (!occr)
			abort ();

		      insn = occr->insn;

		      set = single_set (insn);
		      if (! set)
			abort ();

		      /* Create a pseudo-reg to store the result of reaching
			 expressions into.  Get the mode for the new pseudo
			 from the mode of the original destination pseudo.  */
		      if (expr->reaching_reg == NULL)
			expr->reaching_reg
			  = gen_reg_rtx (GET_MODE (SET_DEST (set)));

		      /* In theory this should never fail since we're creating
			 a reg->reg copy.

			 However, on the x86 some of the movXX patterns
			 actually contain clobbers of scratch regs.  This may
			 cause the insn created by validate_change to not
			 match any pattern and thus cause validate_change to
			 fail.  */
		      if (validate_change (insn, &SET_SRC (set),
					   expr->reaching_reg, 0))
			{
			  occr->deleted_p = 1;
			  if (!insn_inserted_p)
			    {
			      insert_insn_end_bb (index_map[i], bb, 0);
			      insn_inserted_p = 1;
			    }
			}
		    }
		}
	    }
	}
    }

  free (index_map);
}
/* Top level routine to perform one code hoisting (aka unification) pass.

   Return non-zero if a change was made.  */

static int
one_code_hoisting_pass ()
{
  int changed = 0;

  alloc_expr_hash_table (max_cuid);
  compute_expr_hash_table ();
  if (gcse_file)
    dump_hash_table (gcse_file, "Code Hoisting Expressions", expr_hash_table,
		     expr_hash_table_size, n_exprs);

  if (n_exprs > 0)
    {
      alloc_code_hoist_mem (n_basic_blocks, n_exprs);
      compute_code_hoist_data ();
      hoist_code ();
      free_code_hoist_mem ();
    }

  free_expr_hash_table ();

  return changed;
}