/* Global common subexpression elimination/Partial redundancy elimination
   and global constant/copy propagation for GNU compiler.
   Copyright (C) 1997, 1998, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* TODO
   - reordering of memory allocation and freeing to be more space efficient
   - do rough calc of how many regs are needed in each block, and a rough
     calc of how many regs are available in each class and use that to
     throttle back the code in cases where RTX_COST is minimal.
   - dead store elimination
   - a store to the same address as a load does not kill the load if the
     source of the store is also the destination of the load.  Handling this
     allows more load motion, particularly out of loops.
   - ability to realloc sbitmap vectors would allow one initial computation
     of reg_set_in_block with only subsequent additions, rather than
     recomputing it for each pass
*/
/* References searched while implementing this.

   Compilers Principles, Techniques and Tools
   Aho, Sethi, Ullman
   Addison-Wesley, 1988

   Global Optimization by Suppression of Partial Redundancies
   E. Morel, C. Renvoise
   Communications of the ACM, Vol. 22, Num. 2, Feb. 1979

   A Portable Machine-Independent Global Optimizer - Design and Measurements
   Frederick Chow
   Stanford Ph.D. thesis, Dec. 1983

   A Fast Algorithm for Code Movement Optimization
   D.M. Dhamdhere
   SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988

   A Solution to a Problem with Morel and Renvoise's
   Global Optimization by Suppression of Partial Redundancies
   K-H Drechsler, M.P. Stadel
   ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988

   Practical Adaptation of the Global Optimization
   Algorithm of Morel and Renvoise
   D.M. Dhamdhere
   ACM TOPLAS, Vol. 13, Num. 2, Apr. 1991

   Efficiently Computing Static Single Assignment Form and the Control
   Dependence Graph
   R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
   ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991

   Lazy Code Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   What's In a Region?  Or Computing Control Dependence Regions in Near-Linear
   Time for Reducible Flow Control
   Thomas Ball
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   An Efficient Representation for Sparse Sets
   Preston Briggs, Linda Torczon
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
   K-H Drechsler, M.P. Stadel
   ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993

   Partial Dead Code Elimination
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Effective Partial Redundancy Elimination
   P. Briggs, K.D. Cooper
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   The Program Structure Tree: Computing Control Regions in Linear Time
   R. Johnson, D. Pearson, K. Pingali
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Optimal Code Motion: Theory and Practice
   J. Knoop, O. Ruthing, B. Steffen
   ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994

   The Power of Assignment Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Global Code Motion / Global Value Numbering
   C. Click
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Value Driven Redundancy Elimination
   L.T. Simpson
   Rice University Ph.D. thesis, Apr. 1996

   Value Numbering
   L.T. Simpson
   Massively Scalar Compiler Project, Rice University, Sep. 1996

   High Performance Compilers for Parallel Computing
   Michael Wolfe
   Addison-Wesley, 1996

   Advanced Compiler Design and Implementation
   Steven Muchnick
   Morgan Kaufmann, 1997

   Building an Optimizing Compiler
   Robert Morgan
   Digital Press, 1998

   People wishing to speed up the code here should read:
     Elimination Algorithms for Data Flow Analysis
     B.G. Ryder, M.C. Paull
     ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986

     How to Analyze Large Programs Efficiently and Informatively
     D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
     ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   People wishing to do something different can find various possibilities
   in the above papers and elsewhere.
*/
#include "config.h"
#include "system.h"
#include "toplev.h"

#include "rtl.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "expr.h"

#include "obstack.h"
#define obstack_chunk_alloc gmalloc
#define obstack_chunk_free free
/* Maximum number of passes to perform.  */
#define MAX_PASSES 1

/* Propagate flow information through back edges and thus enable PRE's
   moving loop invariant calculations out of loops.

   Originally this tended to create worse overall code, but several
   improvements during the development of PRE seem to have made following
   back edges generally a win.

   Note much of the loop invariant code motion done here would normally
   be done by loop.c, which has more heuristics for when to move invariants
   out of loops.  At some point we might need to move some of those
   heuristics into gcse.c.  */
#define FOLLOW_BACK_EDGES 1
/* We support GCSE via Partial Redundancy Elimination.  PRE optimizations
   are a superset of those done by GCSE.

   We perform the following steps:

   1) Compute basic block information.

   2) Compute table of places where registers are set.

   3) Perform copy/constant propagation.

   4) Perform global cse.

   5) Perform another pass of copy/constant propagation.

   Two passes of copy/constant propagation are done because the first one
   enables more GCSE and the second one helps to clean up the copies that
   GCSE creates.  This is needed more for PRE than for Classic because Classic
   GCSE will try to use an existing register containing the common
   subexpression rather than create a new one.  This is harder to do for PRE
   because of the code motion (which Classic GCSE doesn't do).

   Expressions we are interested in GCSE-ing are of the form
   (set (pseudo-reg) (expression)).
   Function want_to_gcse_p says what these are.

   PRE handles moving invariant expressions out of loops (by treating them as
   partially redundant).

   Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
   assignment) based GVN (global value numbering).  L. T. Simpson's paper
   (Rice University) on value numbering is a useful reference for this.

   **********************

   We used to support multiple passes but there are diminishing returns in
   doing so.  The first pass usually makes 90% of the changes that are doable.
   A second pass can make a few more changes made possible by the first pass.
   Experiments show any further passes don't make enough changes to justify
   the expense.

   A study of spec92 using an unlimited number of passes:
   [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
   [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
   [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1

   It was found doing copy propagation between each pass enables further
   substitutions.

   PRE is quite expensive in complicated functions because the DFA can take
   a while to converge.  Hence we only perform one pass.  Macro MAX_PASSES
   can be modified if one wants to experiment.

   **********************

   The steps for PRE are:

   1) Build the hash table of expressions we wish to GCSE (expr_hash_table).

   2) Perform the data flow analysis for PRE.

   3) Delete the redundant instructions.

   4) Insert the required copies [if any] that make the partially
      redundant instructions fully redundant.

   5) For other reaching expressions, insert an instruction to copy the value
      to a newly created pseudo that will reach the redundant instruction.

   The deletion is done first so that when we do insertions we
   know which pseudo reg to use.

   Various papers have argued that PRE DFA is expensive (O(n^2)) and others
   argue it is not.  The number of iterations for the algorithm to converge
   is typically 2-4 so I don't view it as that expensive (relatively speaking).

   PRE GCSE depends heavily on the second CSE pass to clean up the copies
   we create.  To make an expression reach the place where it's redundant,
   the result of the expression is copied to a new register, and the redundant
   expression is deleted by replacing it with this new register.  Classic GCSE
   doesn't have this problem as much as it computes the reaching defs of
   each register in each block and thus can try to use an existing register.

   **********************

   A fair bit of simplicity is created by creating small functions for simple
   tasks, even when the function is only called in one place.  This may
   measurably slow things down [or may not] by creating more function call
   overhead than is necessary.  The source is laid out so that it's trivial
   to make the affected functions inline so that one can measure what speed
   up, if any, can be achieved, and maybe later when things settle things can
   be rearranged.

   Help stamp out big monolithic functions!  */
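
/* A minimal sketch (not part of the original sources) of how one pass of
   the steps above maps onto the routines declared later in this file.
   gcse_main below is the authoritative driver; this is only a reading aid.

       alloc_reg_set_mem (max_gcse_regno);        step 2: where regs are set
       compute_sets (f);
       alloc_gcse_mem (f);                        per-pass tables
       changed = one_cprop_pass (pass + 1, 0);    step 3: cprop, jumps untouched
       changed |= one_pre_gcse_pass (pass + 1);   step 4: PRE (or classic gcse
                                                  when optimizing for size)
       free_gcse_mem ();
       one_cprop_pass (pass + 1, 1);              step 5: final cprop, may
                                                  alter jumps

   All calls and argument conventions here are taken from gcse_main below;
   nothing in this sketch is new API.  */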
/* GCSE global vars.  */

/* -dG dump file.  */
static FILE *gcse_file;

/* Note whether or not we should run jump optimization after gcse.  We
   want to do this for two cases.

    * If we changed any jumps via cprop.

    * If we added any labels via edge splitting.  */

static int run_jump_opt_after_gcse;

/* Bitmaps are normally not included in debugging dumps.
   However it's useful to be able to print them from GDB.
   We could create special functions for this, but it's simpler to
   just allow passing stderr to the dump_foo fns.  Since stderr can
   be a macro, we store a copy here.  */
static FILE *debug_stderr;

/* An obstack for our working variables.  */
static struct obstack gcse_obstack;

/* Non-zero for each mode that supports (set (reg) (reg)).
   This is trivially true for integer and floating point values.
   It may or may not be true for condition codes.  */
static char can_copy_p[(int) NUM_MACHINE_MODES];

/* Non-zero if can_copy_p has been initialized.  */
static int can_copy_init_p;

struct reg_use {
  rtx reg_rtx;
};
/* Hash table of expressions.  */

struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;
  /* Index in the available expression bitmaps.  */
  int bitmap_index;
  /* Next entry with the same hash.  */
  struct expr *next_same_hash;
  /* List of anticipatable occurrences in basic blocks in the function.
     An "anticipatable occurrence" is one that is the first occurrence in the
     basic block, the operands are not modified in the basic block prior
     to the occurrence and the output is not used between the start of
     the block and the occurrence.  */
  struct occr *antic_occr;
  /* List of available occurrences in basic blocks in the function.
     An "available occurrence" is one that is the last occurrence in the
     basic block and the operands are not modified by following statements in
     the basic block [including this insn].  */
  struct occr *avail_occr;
  /* Non-null if the computation is PRE redundant.
     The value is the newly created pseudo-reg to record a copy of the
     expression in all the places that reach the redundant copy.  */
  rtx reaching_reg;
};
/* Occurrence of an expression.
   There is one per basic block.  If a pattern appears more than once the
   last appearance is used [or first for anticipatable expressions].  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Non-zero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
  /* Non-zero if this [available] occurrence has been copied to
     reaching_reg.  */
  /* ??? This is mutually exclusive with deleted_p, so they could share
     the same byte.  */
  char copied_p;
};
/* Expression and copy propagation hash tables.
   Each hash table is an array of buckets.
   ??? It is known that if it were an array of entries, structure elements
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
   not clear whether in the final analysis a sufficient amount of memory would
   be saved as the size of the available expression bitmaps would be larger
   [one could build a mapping table without holes afterwards though].
   Someday I'll perform the computation and figure it out.  */
/* Total size of the expression hash table, in elements.  */
static int expr_hash_table_size;
/* The table itself.
   This is an array of `expr_hash_table_size' elements.  */
static struct expr **expr_hash_table;

/* Total size of the copy propagation hash table, in elements.  */
static int set_hash_table_size;
/* The table itself.
   This is an array of `set_hash_table_size' elements.  */
static struct expr **set_hash_table;

/* Mapping of uids to cuids.
   Only real insns get cuids.  */
static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])

/* Number of cuids.  */
static int max_cuid;

/* Mapping of cuids to insns.  */
static rtx *cuid_insn;

/* Get insn from cuid.  */
#define CUID_INSN(CUID) (cuid_insn[CUID])

/* Maximum register number in function prior to doing gcse + 1.
   Registers created during this pass have regno >= max_gcse_regno.
   This is named with "gcse" to not collide with global of same name.  */
static int max_gcse_regno;

/* Maximum number of cse-able expressions found.  */
static int n_exprs;
/* Maximum number of assignments for copy propagation found.  */
static int n_sets;
/* Table of registers that are modified.

   For each register, each element is a list of places where the pseudo-reg
   is set.

   For simplicity, GCSE is done on sets of pseudo-regs only.  PRE GCSE only
   requires knowledge of which blocks kill which regs [and thus could use
   a bitmap instead of the lists `reg_set_table' uses].

   `reg_set_table' could be turned into an array of bitmaps
   (num-bbs x num-regs)
   [however perhaps it may be useful to keep the data as is].
   One advantage of recording things this way is that `reg_set_table' is
   fairly sparse with respect to pseudo regs but for hard regs could be
   fairly dense [relatively speaking].
   And recording sets of pseudo-regs in lists speeds
   up functions like compute_transp since in the case of pseudo-regs we only
   need to iterate over the number of times a pseudo-reg is set, not over the
   number of basic blocks [clearly there is a bit of a slowdown in the cases
   where a pseudo is set more than once in a block, however it is believed
   that the net effect is to speed things up].  This isn't done for hard-regs
   because recording call-clobbered hard-regs in `reg_set_table' at each
   function call can consume a fair bit of memory, and iterating over
   hard-regs stored this way in compute_transp will be more expensive.  */
typedef struct reg_set {
  /* The next setting of this register.  */
  struct reg_set *next;
  /* The insn where it was set.  */
  rtx insn;
} reg_set;

static reg_set **reg_set_table;

/* Size of `reg_set_table'.
   The table starts out at max_gcse_regno + slop, and is enlarged as
   necessary.  */
static int reg_set_table_size;

/* Amount to grow `reg_set_table' by when it's full.  */
#define REG_SET_TABLE_SLOP 100
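
/* Illustrative sketch, not part of the original sources, of how a client
   such as compute_transp walks `reg_set_table': iterate over the places a
   pseudo is set rather than over all basic blocks, which is the sparseness
   advantage described above.  The function name is made up for the example;
   BLOCK_NUM is the macro used throughout this file.  */

static int
reg_set_in_bb_p (regno, bb)
     int regno;
     int bb;
{
  reg_set *r;

  /* Return non-zero if pseudo REGNO is set anywhere in basic block BB.  */
  for (r = reg_set_table[regno]; r != NULL; r = r->next)
    if (BLOCK_NUM (r->insn) == bb)
      return 1;
  return 0;
}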
/* Bitmap containing one bit for each register in the program.
   Used when performing GCSE to track which registers have been set since
   the start of the basic block.  */
static sbitmap reg_set_bitmap;

/* For each block, a bitmap of registers set in the block.
   This is used by expr_killed_p and compute_transp.
   It is computed during hash table computation and not by compute_sets
   as it includes registers added since the last pass (or between cprop and
   gcse) and it's currently not easy to realloc sbitmap vectors.  */
static sbitmap *reg_set_in_block;

/* For each block, non-zero if memory is set in that block.
   This is computed during hash table computation and is used by
   expr_killed_p and compute_transp.
   ??? Handling of memory is very simple, we don't make any attempt
   to optimize things (later).
   ??? This can be computed by compute_sets since the information
   doesn't change.  */
static char *mem_set_in_block;
/* Various variables for statistics gathering.  */

/* Memory used in a pass.
   This isn't intended to be absolutely precise.  Its intent is only
   to keep an eye on memory usage.  */
static int bytes_used;
/* GCSE substitutions made.  */
static int gcse_subst_count;
/* Number of copy instructions created.  */
static int gcse_create_count;
/* Number of constants propagated.  */
static int const_prop_count;
/* Number of copies propagated.  */
static int copy_prop_count;
/* These variables are used by classic GCSE.
   Normally they'd be defined a bit later, but `rd_gen' needs to
   be declared sooner.  */

/* A bitmap of all ones for implementing the algorithm for available
   expressions and reaching definitions.  */
/* ??? Available expression bitmaps have a different size than reaching
   definition bitmaps.  This should be the larger of the two, however, it
   is not currently used for reaching definitions.  */
static sbitmap u_bitmap;

/* Each block has a bitmap of each type.
   The length of each block's bitmap is:

       max_cuid  - for reaching definitions
       n_exprs   - for available expressions

   Thus we view the bitmaps as 2 dimensional arrays, i.e.

       rd_kill[block_num][cuid_num]
       ae_kill[block_num][expr_num]  */

/* For reaching defs.  */
static sbitmap *rd_kill, *rd_gen, *reaching_defs, *rd_out;

/* For available exprs.  */
static sbitmap *ae_kill, *ae_gen, *ae_in, *ae_out;
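
/* Illustrative sketch, not part of the original sources: the
   two-dimensional view of these bitmap vectors in use.  BB is a block
   number; the bit index is a cuid for the rd_* vectors and an expression's
   bitmap_index for the ae_* vectors.  SET_BIT and TEST_BIT are the standard
   sbitmap accessors used elsewhere in this file.

       SET_BIT (rd_gen[bb], INSN_CUID (insn));      insn generates a def in BB

       if (TEST_BIT (ae_kill[bb], expr->bitmap_index))
         ...                                        expr is killed in BB  */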
static void compute_can_copy PROTO ((void));

static char *gmalloc PROTO ((unsigned int));
static char *grealloc PROTO ((char *, unsigned int));
static char *gcse_alloc PROTO ((unsigned long));
static void alloc_gcse_mem PROTO ((rtx));
static void free_gcse_mem PROTO ((void));
static void alloc_reg_set_mem PROTO ((int));
static void free_reg_set_mem PROTO ((void));
static void record_one_set PROTO ((int, rtx));
static void record_set_info PROTO ((rtx, rtx, void *));
static void compute_sets PROTO ((rtx));

static void hash_scan_insn PROTO ((rtx, int, int));
static void hash_scan_set PROTO ((rtx, rtx, int));
static void hash_scan_clobber PROTO ((rtx, rtx));
static void hash_scan_call PROTO ((rtx, rtx));
static int want_to_gcse_p PROTO ((rtx));
static int oprs_unchanged_p PROTO ((rtx, rtx, int));
static int oprs_anticipatable_p PROTO ((rtx, rtx));
static int oprs_available_p PROTO ((rtx, rtx));
static void insert_expr_in_table PROTO ((rtx, enum machine_mode,
					 rtx, int, int));
static void insert_set_in_table PROTO ((rtx, rtx));
static unsigned int hash_expr PROTO ((rtx, enum machine_mode,
				      int *, int));
static unsigned int hash_expr_1 PROTO ((rtx, enum machine_mode, int *));
static unsigned int hash_set PROTO ((int, int));
static int expr_equiv_p PROTO ((rtx, rtx));
static void record_last_reg_set_info PROTO ((rtx, int));
static void record_last_mem_set_info PROTO ((rtx));
static void record_last_set_info PROTO ((rtx, rtx, void *));
static void compute_hash_table PROTO ((int));
static void alloc_set_hash_table PROTO ((int));
static void free_set_hash_table PROTO ((void));
static void compute_set_hash_table PROTO ((void));
static void alloc_expr_hash_table PROTO ((int));
static void free_expr_hash_table PROTO ((void));
static void compute_expr_hash_table PROTO ((void));
static void dump_hash_table PROTO ((FILE *, const char *, struct expr **,
				    int, int));
static struct expr *lookup_expr PROTO ((rtx));
static struct expr *lookup_set PROTO ((int, rtx));
static struct expr *next_set PROTO ((int, struct expr *));
static void reset_opr_set_tables PROTO ((void));
static int oprs_not_set_p PROTO ((rtx, rtx));
static void mark_call PROTO ((rtx));
static void mark_set PROTO ((rtx, rtx));
static void mark_clobber PROTO ((rtx, rtx));
static void mark_oprs_set PROTO ((rtx));

static void alloc_cprop_mem PROTO ((int, int));
static void free_cprop_mem PROTO ((void));
static void compute_transp PROTO ((rtx, int, sbitmap *, int));
static void compute_transpout PROTO ((void));
static void compute_local_properties PROTO ((sbitmap *, sbitmap *,
					     sbitmap *, int));
static void compute_cprop_avinout PROTO ((void));
static void compute_cprop_data PROTO ((void));
static void find_used_regs PROTO ((rtx));
static int try_replace_reg PROTO ((rtx, rtx, rtx));
static struct expr *find_avail_set PROTO ((int, rtx));
static int cprop_jump PROTO ((rtx, rtx, struct reg_use *, rtx));
#ifdef HAVE_cc0
static int cprop_cc0_jump PROTO ((rtx, struct reg_use *, rtx));
#endif
static int cprop_insn PROTO ((rtx, int));
static int cprop PROTO ((int));
static int one_cprop_pass PROTO ((int, int));

static void alloc_pre_mem PROTO ((int, int));
static void free_pre_mem PROTO ((void));
static void compute_pre_data PROTO ((void));
static int pre_expr_reaches_here_p PROTO ((int, struct expr *,
					   int, int));
static void insert_insn_end_bb PROTO ((struct expr *, int, int));
static void pre_insert_copy_insn PROTO ((struct expr *, rtx));
static void pre_insert_copies PROTO ((void));
static int pre_delete PROTO ((void));
static int pre_gcse PROTO ((void));
static int one_pre_gcse_pass PROTO ((int));

static void add_label_notes PROTO ((rtx, rtx));

static void alloc_code_hoist_mem PROTO ((int, int));
static void free_code_hoist_mem PROTO ((void));
static void compute_code_hoist_vbeinout PROTO ((void));
static void compute_code_hoist_data PROTO ((void));
static int hoist_expr_reaches_here_p PROTO ((int, int, int, char *));
static void hoist_code PROTO ((void));
static int one_code_hoisting_pass PROTO ((void));

static void alloc_rd_mem PROTO ((int, int));
static void free_rd_mem PROTO ((void));
static void handle_rd_kill_set PROTO ((rtx, int, int));
static void compute_kill_rd PROTO ((void));
static void compute_rd PROTO ((void));
static void alloc_avail_expr_mem PROTO ((int, int));
static void free_avail_expr_mem PROTO ((void));
static void compute_ae_gen PROTO ((void));
static int expr_killed_p PROTO ((rtx, int));
static void compute_ae_kill PROTO ((sbitmap *, sbitmap *));
static int expr_reaches_here_p PROTO ((struct occr *, struct expr *,
				       int, int));
static rtx computing_insn PROTO ((struct expr *, rtx));
static int def_reaches_here_p PROTO ((rtx, rtx));
static int can_disregard_other_sets PROTO ((struct reg_set **, rtx, int));
static int handle_avail_expr PROTO ((rtx, struct expr *));
static int classic_gcse PROTO ((void));
static int one_classic_gcse_pass PROTO ((int));
static void invalidate_nonnull_info PROTO ((rtx, rtx, void *));
static rtx process_insert_insn PROTO ((struct expr *));
static int pre_edge_insert PROTO ((struct edge_list *, struct expr **));
static int expr_reaches_here_p_work PROTO ((struct occr *, struct expr *,
					    int, int, char *));
static int pre_expr_reaches_here_p_work PROTO ((int, struct expr *,
						int, int, char *));
/* Entry point for global common subexpression elimination.
   F is the first instruction in the function.  */

int
gcse_main (f, file)
     rtx f;
     FILE *file;
{
  int changed, pass;
  /* Bytes used at start of pass.  */
  int initial_bytes_used;
  /* Maximum number of bytes used by a pass.  */
  int max_pass_bytes;
  /* Point to release obstack data from for each pass.  */
  char *gcse_obstack_bottom;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* Assume that we do not need to run jump optimizations after gcse.  */
  run_jump_opt_after_gcse = 0;

  /* For calling dump_foo fns from gdb.  */
  debug_stderr = stderr;
  gcse_file = file;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();
  find_basic_blocks (f, max_gcse_regno, file, 1);

  if (file)
    dump_flow_info (file);

  /* Return if there's nothing to do.  */
  if (n_basic_blocks <= 1)
    {
      /* Free storage allocated by find_basic_blocks.  */
      free_basic_block_vars (0);
      return 0;
    }

  /* Trying to perform global optimizations on flow graphs which have
     a high connectivity will take a long time and is unlikely to be
     particularly useful.

     In normal circumstances a cfg should have about twice as many edges
     as blocks.  But we do not want to punish small functions which have
     a couple of switch statements.  So we require a relatively large number
     of basic blocks and the ratio of edges to blocks to be high.  */
  if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
    {
      /* Free storage allocated by find_basic_blocks.  */
      free_basic_block_vars (0);
      return 0;
    }

  /* See what modes support reg/reg copy operations.  */
  if (! can_copy_init_p)
    {
      compute_can_copy ();
      can_copy_init_p = 1;
    }

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* Record where pseudo-registers are set.
     This data is kept accurate during each pass.
     ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during
     hash table computation.

     It may be tempting to compute MEM set information here too, but MEM
     sets will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets (f);

  pass = 0;
  initial_bytes_used = bytes_used;
  max_pass_bytes = 0;
  gcse_obstack_bottom = gcse_alloc (1);
  changed = 1;
  while (changed && pass < MAX_PASSES)
    {
      changed = 0;
      if (file)
	fprintf (file, "GCSE pass %d\n\n", pass + 1);

      /* Initialize bytes_used to the space for the pred/succ lists,
	 and the reg_set_table data.  */
      bytes_used = initial_bytes_used;

      /* Each pass may create new registers, so recalculate each time.  */
      max_gcse_regno = max_reg_num ();

      alloc_gcse_mem (f);

      /* Don't allow constant propagation to modify jumps
	 during this pass.  */
      changed = one_cprop_pass (pass + 1, 0);

      if (optimize_size)
	changed |= one_classic_gcse_pass (pass + 1);
      else
	{
	  changed |= one_pre_gcse_pass (pass + 1);
	  free_reg_set_mem ();
	  alloc_reg_set_mem (max_reg_num ());
	  compute_sets (f);
	  run_jump_opt_after_gcse = 1;
	}

      if (max_pass_bytes < bytes_used)
	max_pass_bytes = bytes_used;

      /* Free up memory, then reallocate for code hoisting.  We can
	 not re-use the existing allocated memory because the tables
	 will not have info for the insns or registers created by
	 partial redundancy elimination.  */
      free_gcse_mem ();

      /* It does not make sense to run code hoisting unless we are optimizing
	 for code size -- it rarely makes programs faster, and can make
	 them bigger if we did partial redundancy elimination (when optimizing
	 for space, we use a classic gcse algorithm instead of partial
	 redundancy algorithms).  */
      if (optimize_size)
	{
	  max_gcse_regno = max_reg_num ();
	  alloc_gcse_mem (f);
	  changed |= one_code_hoisting_pass ();
	  free_gcse_mem ();

	  if (max_pass_bytes < bytes_used)
	    max_pass_bytes = bytes_used;
	}

      if (file)
	{
	  fprintf (file, "\n");
	  fflush (file);
	}
      obstack_free (&gcse_obstack, gcse_obstack_bottom);
      pass++;
    }

  /* Do one last pass of copy propagation, including cprop into
     conditional jumps.  */

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem (f);
  /* This time, go ahead and allow cprop to alter jumps.  */
  one_cprop_pass (pass + 1, 1);
  free_gcse_mem ();

  if (file)
    {
      fprintf (file, "GCSE of %s: %d basic blocks, ",
	       current_function_name, n_basic_blocks);
      fprintf (file, "%d pass%s, %d bytes\n\n",
	       pass, pass > 1 ? "es" : "", max_pass_bytes);
    }

  /* Free our obstack.  */
  obstack_free (&gcse_obstack, NULL_PTR);
  /* Free reg_set_table.  */
  free_reg_set_mem ();
  /* Free storage used to record predecessor/successor data.  */
  free_bb_mem ();
  /* Free storage allocated by find_basic_blocks.  */
  free_basic_block_vars (0);
  return run_jump_opt_after_gcse;
}
/* Misc. utilities.  */

/* Compute which modes support reg/reg copy operations.  */

static void
compute_can_copy ()
{
  int i;
#ifndef AVOID_CCMODE_COPIES
  rtx reg, insn;
#endif
  char *free_point = (char *) oballoc (1);

  bzero (can_copy_p, NUM_MACHINE_MODES);

  start_sequence ();
  for (i = 0; i < NUM_MACHINE_MODES; i++)
    {
      switch (GET_MODE_CLASS (i))
	{
	case MODE_CC:
#ifdef AVOID_CCMODE_COPIES
	  can_copy_p[i] = 0;
#else
	  reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
	  insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
	  if (recog (PATTERN (insn), insn, NULL_PTR) >= 0)
	    can_copy_p[i] = 1;
#endif
	  break;
	default:
	  can_copy_p[i] = 1;
	  break;
	}
    }
  end_sequence ();

  /* Free the objects we just allocated.  */
  obfree (free_point);
}
/* Cover function to xmalloc to record bytes allocated.  */

static char *
gmalloc (size)
     unsigned int size;
{
  bytes_used += size;
  return xmalloc (size);
}

/* Cover function to xrealloc.
   We don't record the additional size since we don't know it.
   It won't affect memory usage stats much anyway.  */

static char *
grealloc (ptr, size)
     char *ptr;
     unsigned int size;
{
  return xrealloc (ptr, size);
}

/* Cover function to obstack_alloc.
   We don't need to record the bytes allocated here since
   obstack_chunk_alloc is set to gmalloc.  */

static char *
gcse_alloc (size)
     unsigned long size;
{
  return (char *) obstack_alloc (&gcse_obstack, size);
}
/* Allocate memory for the cuid mapping array,
   and reg/memory set tracking tables.

   This is called at the start of each pass.  */

static void
alloc_gcse_mem (f)
     rtx f;
{
  int i, n;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.
     CUIDs are like UIDs except they increase monotonically, have no gaps,
     and only apply to real insns.  */

  max_uid = get_max_uid ();
  n = (max_uid + 1) * sizeof (int);
  uid_cuid = (int *) gmalloc (n);
  bzero ((char *) uid_cuid, n);
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	INSN_CUID (insn) = i++;
      else
	INSN_CUID (insn) = i;
    }

  /* Create a table mapping cuids to insns.  */

  max_cuid = i;
  n = (max_cuid + 1) * sizeof (rtx);
  cuid_insn = (rtx *) gmalloc (n);
  bzero ((char *) cuid_insn, n);
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	{
	  CUID_INSN (i) = insn;
	  i++;
	}
    }

  /* Allocate vars to track sets of regs.  */

  reg_set_bitmap = (sbitmap) sbitmap_alloc (max_gcse_regno);

  /* Allocate vars to track sets of regs, memory per block.  */

  reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks,
						       max_gcse_regno);
  mem_set_in_block = (char *) gmalloc (n_basic_blocks);
}

/* Free memory allocated by alloc_gcse_mem.  */

static void
free_gcse_mem ()
{
  free (uid_cuid);
  free (cuid_insn);

  free (reg_set_bitmap);

  free (reg_set_in_block);
  free (mem_set_in_block);
}
/* Compute the local properties of each recorded expression.

   Local properties are those that are defined by the block, irrespective
   of other blocks.

   An expression is transparent in a block if its operands are not modified
   in the block.

   An expression is computed (locally available) in a block if it is computed
   at least once and the expression would contain the same value if the
   computation was moved to the end of the block.

   An expression is locally anticipatable in a block if it is computed at
   least once and the expression would contain the same value if the
   computation was moved to the beginning of the block.

   We call this routine for cprop, pre and code hoisting.  They all
   compute basically the same information and thus can easily share
   this code.

   TRANSP, COMP, and ANTLOC are destination sbitmaps for recording
   local properties.  If NULL, then it is not necessary to compute
   or record that particular property.

   SETP controls which hash table to look at.  If zero, this routine
   looks at the expr hash table; if nonzero this routine looks at
   the set hash table.  Additionally, TRANSP is computed as ~TRANSP,
   since this is really cprop's ABSALTERED.  */
static void
compute_local_properties (transp, comp, antloc, setp)
     sbitmap *transp;
     sbitmap *comp;
     sbitmap *antloc;
     int setp;
{
  int i, hash_table_size;
  struct expr **hash_table;

  /* Initialize any bitmaps that were passed in.  */
  if (transp)
    {
      if (setp)
	sbitmap_vector_zero (transp, n_basic_blocks);
      else
	sbitmap_vector_ones (transp, n_basic_blocks);
    }
  if (comp)
    sbitmap_vector_zero (comp, n_basic_blocks);
  if (antloc)
    sbitmap_vector_zero (antloc, n_basic_blocks);

  /* We use the same code for cprop, pre and hoisting.  For cprop
     we care about the set hash table, for pre and hoisting we
     care about the expr hash table.  */
  hash_table_size = setp ? set_hash_table_size : expr_hash_table_size;
  hash_table = setp ? set_hash_table : expr_hash_table;

  for (i = 0; i < hash_table_size; i++)
    {
      struct expr *expr;

      for (expr = hash_table[i]; expr != NULL; expr = expr->next_same_hash)
	{
	  struct occr *occr;
	  int indx = expr->bitmap_index;

	  /* The expression is transparent in this block if it is not killed.
	     We start by assuming all are transparent [none are killed], and
	     then reset the bits for those that are.  */

	  if (transp)
	    compute_transp (expr->expr, indx, transp, setp);

	  /* The occurrences recorded in antic_occr are exactly those that
	     we want to set to non-zero in ANTLOC.  */

	  if (antloc)
	    {
	      for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
		{
		  int bb = BLOCK_NUM (occr->insn);
		  SET_BIT (antloc[bb], indx);

		  /* While we're scanning the table, this is a good place to
		     initialize this.  */
		  occr->deleted_p = 0;
		}
	    }

	  /* The occurrences recorded in avail_occr are exactly those that
	     we want to set to non-zero in COMP.  */
	  if (comp)
	    {
	      for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
		{
		  int bb = BLOCK_NUM (occr->insn);
		  SET_BIT (comp[bb], indx);

		  /* While we're scanning the table, this is a good place to
		     initialize this.  */
		  occr->copied_p = 0;
		}
	    }

	  /* While we're scanning the table, this is a good place to
	     initialize this.  */
	  expr->reaching_reg = 0;
	}
    }
}
/* Register set information.

   `reg_set_table' records where each register is set or otherwise
   modified.  */

static struct obstack reg_set_obstack;

static void
alloc_reg_set_mem (n_regs)
     int n_regs;
{
  int n;

  reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
  n = reg_set_table_size * sizeof (struct reg_set *);
  reg_set_table = (struct reg_set **) gmalloc (n);
  bzero ((char *) reg_set_table, n);

  gcc_obstack_init (&reg_set_obstack);
}

static void
free_reg_set_mem ()
{
  free (reg_set_table);
  obstack_free (&reg_set_obstack, NULL_PTR);
}
/* Record REGNO in the reg_set table.  */

static void
record_one_set (regno, insn)
     int regno;
     rtx insn;
{
  /* Allocate a new reg_set element and link it onto the list.  */
  struct reg_set *new_reg_info, *reg_info_ptr1, *reg_info_ptr2;

  /* If the table isn't big enough, enlarge it.  */
  if (regno >= reg_set_table_size)
    {
      int new_size = regno + REG_SET_TABLE_SLOP;
      reg_set_table = (struct reg_set **)
	grealloc ((char *) reg_set_table,
		  new_size * sizeof (struct reg_set *));
      bzero ((char *) (reg_set_table + reg_set_table_size),
	     (new_size - reg_set_table_size) * sizeof (struct reg_set *));
      reg_set_table_size = new_size;
    }

  new_reg_info = (struct reg_set *) obstack_alloc (&reg_set_obstack,
						   sizeof (struct reg_set));
  bytes_used += sizeof (struct reg_set);
  new_reg_info->insn = insn;
  new_reg_info->next = NULL;
  if (reg_set_table[regno] == NULL)
    reg_set_table[regno] = new_reg_info;
  else
    {
      reg_info_ptr1 = reg_info_ptr2 = reg_set_table[regno];
      /* ??? One could keep a "last" pointer to speed this up.  */
      while (reg_info_ptr1 != NULL)
	{
	  reg_info_ptr2 = reg_info_ptr1;
	  reg_info_ptr1 = reg_info_ptr1->next;
	}
      reg_info_ptr2->next = new_reg_info;
    }
}
/* Called from compute_sets via note_stores to handle one
   SET or CLOBBER in an insn.  The DATA is really the instruction
   in which the SET is occurring.  */

static void
record_set_info (dest, setter, data)
     rtx dest, setter ATTRIBUTE_UNUSED;
     void *data;
{
  rtx record_set_insn = (rtx) data;

  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (GET_CODE (dest) == REG)
    {
      if (REGNO (dest) >= FIRST_PSEUDO_REGISTER)
	record_one_set (REGNO (dest), record_set_insn);
    }
}

/* Scan the function and record each set of each pseudo-register.

   This is called once, at the start of the gcse pass.
   See the comments for `reg_set_table' for further docs.  */

static void
compute_sets (f)
     rtx f;
{
  rtx insn = f;

  while (insn)
    {
      if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
	note_stores (PATTERN (insn), record_set_info, insn);
      insn = NEXT_INSN (insn);
    }
}
/* Hash table support.  */

#define NEVER_SET -1

/* For each register, the cuid of the first/last insn in the block to set it,
   or -1 if not set.  */
static int *reg_first_set;
static int *reg_last_set;

/* While computing "first/last set" info, this is the CUID of first/last insn
   to set memory or -1 if not set.  `mem_last_set' is also used when
   performing GCSE to record whether memory has been set since the beginning
   of the block.
   Note that handling of memory is very simple, we don't make any attempt
   to optimize things (later).  */
static int mem_first_set;
static int mem_last_set;

/* Perform a quick check whether X, the source of a set, is something
   we want to consider for GCSE.  */

static int
want_to_gcse_p (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case REG:
    case SUBREG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CALL:
      return 0;

    default:
      break;
    }

  return 1;
}
/* Return non-zero if the operands of expression X are unchanged from the
   start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
   or from INSN to the end of INSN's basic block (if AVAIL_P != 0).  */

static int
oprs_unchanged_p (x, insn, avail_p)
     rtx x, insn;
     int avail_p;
{
  int i;
  enum rtx_code code;
  const char *fmt;

  /* repeat is used to turn tail-recursion into iteration.  */
 repeat:

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      if (avail_p)
	return (reg_last_set[REGNO (x)] == NEVER_SET
		|| reg_last_set[REGNO (x)] < INSN_CUID (insn));
      else
	return (reg_first_set[REGNO (x)] == NEVER_SET
		|| reg_first_set[REGNO (x)] >= INSN_CUID (insn));

    case MEM:
      if (avail_p)
	{
	  if (mem_last_set != NEVER_SET
	      && mem_last_set >= INSN_CUID (insn))
	    return 0;
	}
      else
	{
	  if (mem_first_set != NEVER_SET
	      && mem_first_set < INSN_CUID (insn))
	    return 0;
	}
      x = XEXP (x, 0);
      goto repeat;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
      return 0;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    default:
      break;
    }

  i = GET_RTX_LENGTH (code) - 1;
  fmt = GET_RTX_FORMAT (code);
  for (; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  rtx tem = XEXP (x, i);

	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = tem;
	      goto repeat;
	    }
	  if (! oprs_unchanged_p (tem, insn, avail_p))
	    return 0;
	}
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    {
	      if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
		return 0;
	    }
	}
    }

  return 1;
}
/* Return non-zero if the operands of expression X are unchanged from
   the start of INSN's basic block up to but not including INSN.  */

static int
oprs_anticipatable_p (x, insn)
     rtx x, insn;
{
  return oprs_unchanged_p (x, insn, 0);
}

/* Return non-zero if the operands of expression X are unchanged from
   INSN to the end of INSN's basic block.  */

static int
oprs_available_p (x, insn)
     rtx x, insn;
{
  return oprs_unchanged_p (x, insn, 1);
}
/* Hash expression X.
   MODE is only used if X is a CONST_INT.
   A boolean indicating if a volatile operand is found or if the expression
   contains something we don't want to insert in the table is stored in
   DO_NOT_RECORD_P.

   ??? One might want to merge this with canon_hash.  Later.  */

static unsigned int
hash_expr (x, mode, do_not_record_p, hash_table_size)
     rtx x;
     enum machine_mode mode;
     int *do_not_record_p;
     int hash_table_size;
{
  unsigned int hash;

  *do_not_record_p = 0;

  hash = hash_expr_1 (x, mode, do_not_record_p);
  return hash % hash_table_size;
}
/* Subroutine of hash_expr to do the actual work.  */

static unsigned int
hash_expr_1 (x, mode, do_not_record_p)
     rtx x;
     enum machine_mode mode;
     int *do_not_record_p;
{
  int i, j;
  unsigned hash = 0;
  enum rtx_code code;
  const char *fmt;

  /* repeat is used to turn tail-recursion into iteration.  */
 repeat:

  if (x == 0)
    return hash;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      {
	register int regno = REGNO (x);
	hash += ((unsigned) REG << 7) + regno;
	return hash;
      }

    case CONST_INT:
      {
	unsigned HOST_WIDE_INT tem = INTVAL (x);
	hash += ((unsigned) CONST_INT << 7) + (unsigned) mode + tem;
	return hash;
      }

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
	 the integers representing the constant.  */
      hash += (unsigned) code + (unsigned) GET_MODE (x);
      if (GET_MODE (x) != VOIDmode)
	for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
	  {
	    unsigned tem = XWINT (x, i);
	    hash += tem;
	  }
      else
	hash += ((unsigned) CONST_DOUBLE_LOW (x)
		 + (unsigned) CONST_DOUBLE_HIGH (x));
      return hash;

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
	 differences and differences between each stage's debugging dumps.  */
      hash += ((unsigned) LABEL_REF << 7) + CODE_LABEL_NUMBER (XEXP (x, 0));
      return hash;

    case SYMBOL_REF:
      {
	/* Don't hash on the symbol's address to avoid bootstrap differences.
	   Different hash values may cause expressions to be recorded in
	   different orders and thus different registers to be used in the
	   final assembler.  This also avoids differences in the dump files
	   between various stages.  */
	unsigned int h = 0;
	unsigned char *p = (unsigned char *) XSTR (x, 0);
	while (*p)
	  h += (h << 7) + *p++; /* ??? revisit */
	hash += ((unsigned) SYMBOL_REF << 7) + h;
	return hash;
      }

    case MEM:
      if (MEM_VOLATILE_P (x))
	{
	  *do_not_record_p = 1;
	  return 0;
	}
      hash += (unsigned) MEM;
      hash += MEM_ALIAS_SET (x);
      x = XEXP (x, 0);
      goto repeat;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PC:
    case CC0:
    case CALL:
    case UNSPEC_VOLATILE:
      *do_not_record_p = 1;
      return 0;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
	{
	  *do_not_record_p = 1;
	  return 0;
	}

    default:
      break;
    }

  i = GET_RTX_LENGTH (code) - 1;
  hash += (unsigned) code + (unsigned) GET_MODE (x);
  fmt = GET_RTX_FORMAT (code);
  for (; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  rtx tem = XEXP (x, i);

	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = tem;
	      goto repeat;
	    }
	  hash += hash_expr_1 (tem, 0, do_not_record_p);
	  if (*do_not_record_p)
	    return 0;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  {
	    hash += hash_expr_1 (XVECEXP (x, i, j), 0, do_not_record_p);
	    if (*do_not_record_p)
	      return 0;
	  }
      else if (fmt[i] == 's')
	{
	  register unsigned char *p = (unsigned char *) XSTR (x, i);
	  if (p)
	    while (*p)
	      hash += *p++;
	}
      else if (fmt[i] == 'i')
	{
	  register unsigned tem = XINT (x, i);
	  hash += tem;
	}
      else
	abort ();
    }

  return hash;
}
/* Hash a set of register REGNO.

   Sets are hashed on the register that is set.
   This simplifies the PRE copy propagation code.

   ??? May need to make things more elaborate.  Later, as necessary.  */

static unsigned int
hash_set (regno, hash_table_size)
     int regno;
     int hash_table_size;
{
  unsigned int hash;

  hash = regno;
  return hash % hash_table_size;
}
/* Return non-zero if exp1 is equivalent to exp2.
   ??? Borrowed from cse.c.  Might want to remerge with cse.c.  Later.  */

static int
expr_equiv_p (x, y)
     rtx x, y;
{
  register int i, j;
  register enum rtx_code code;
  register const char *fmt;

  if (x == y)
    return 1;
  if (x == 0 || y == 0)
    return x == y;

  code = GET_CODE (x);
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
      return x == y;

    case CONST_INT:
      return INTVAL (x) == INTVAL (y);

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case REG:
      return REGNO (x) == REGNO (y);

    case MEM:
      /* Can't merge two expressions in different alias sets, since we can
	 decide that the expression is transparent in a block when it isn't,
	 due to it being set with a different alias set.  */
      if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
	return 0;
      break;

      /* For commutative operations, check both orders.  */
    case PLUS:
    case MULT:
    case AND:
    case IOR:
    case XOR:
    case NE:
    case EQ:
      return ((expr_equiv_p (XEXP (x, 0), XEXP (y, 0))
	       && expr_equiv_p (XEXP (x, 1), XEXP (y, 1)))
	      || (expr_equiv_p (XEXP (x, 0), XEXP (y, 1))
		  && expr_equiv_p (XEXP (x, 1), XEXP (y, 0))));

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'e':
	  if (! expr_equiv_p (XEXP (x, i), XEXP (y, i)))
	    return 0;
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (! expr_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
	      return 0;
	  break;

	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case '0':
	  break;

	default:
	  abort ();
	}
    }

  return 1;
}
/* Insert expression X in INSN in the hash table.
   If it is already present, record it as the last occurrence in INSN's
   basic block.

   MODE is the mode of the value X is being stored into.
   It is only used if X is a CONST_INT.

   ANTIC_P is non-zero if X is an anticipatable expression.
   AVAIL_P is non-zero if X is an available expression.  */

static void
insert_expr_in_table (x, mode, insn, antic_p, avail_p)
     rtx x;
     enum machine_mode mode;
     rtx insn;
     int antic_p, avail_p;
{
  int found, do_not_record_p;
  unsigned int hash;
  struct expr *cur_expr, *last_expr = NULL;
  struct occr *antic_occr, *avail_occr;
  struct occr *last_occr = NULL;

  hash = hash_expr (x, mode, &do_not_record_p, expr_hash_table_size);

  /* Do not insert expression in table if it contains volatile operands,
     or if hash_expr determines the expression is something we don't want
     to or can't handle.  */
  if (do_not_record_p)
    return;

  cur_expr = expr_hash_table[hash];
  found = 0;

  while (cur_expr && ! (found = expr_equiv_p (cur_expr->expr, x)))
    {
      /* If the expression isn't found, save a pointer to the end of
	 the list.  */
      last_expr = cur_expr;
      cur_expr = cur_expr->next_same_hash;
    }

  if (! found)
    {
      cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
      bytes_used += sizeof (struct expr);
      if (expr_hash_table[hash] == NULL)
	{
	  /* This is the first pattern that hashed to this index.  */
	  expr_hash_table[hash] = cur_expr;
	}
      else
	{
	  /* Add EXPR to end of this hash chain.  */
	  last_expr->next_same_hash = cur_expr;
	}
      /* Set the fields of the expr element.  */
      cur_expr->expr = x;
      cur_expr->bitmap_index = n_exprs++;
      cur_expr->next_same_hash = NULL;
      cur_expr->antic_occr = NULL;
      cur_expr->avail_occr = NULL;
    }

  /* Now record the occurrence(s).  */

  if (antic_p)
    {
      antic_occr = cur_expr->antic_occr;

      /* Search for another occurrence in the same basic block.  */
      while (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
	{
	  /* If an occurrence isn't found, save a pointer to the end of
	     the list.  */
	  last_occr = antic_occr;
	  antic_occr = antic_occr->next;
	}

      if (antic_occr)
	{
	  /* Found another instance of the expression in the same basic block.
	     Prefer the currently recorded one.  We want the first one in the
	     block and the block is scanned from start to end.  */
	  ; /* nothing to do */
	}
      else
	{
	  /* First occurrence of this expression in this basic block.  */
	  antic_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
	  bytes_used += sizeof (struct occr);
	  /* First occurrence of this expression in any block?  */
	  if (cur_expr->antic_occr == NULL)
	    cur_expr->antic_occr = antic_occr;
	  else
	    last_occr->next = antic_occr;
	  antic_occr->insn = insn;
	  antic_occr->next = NULL;
	}
    }

  if (avail_p)
    {
      avail_occr = cur_expr->avail_occr;

      /* Search for another occurrence in the same basic block.  */
      while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
	{
	  /* If an occurrence isn't found, save a pointer to the end of
	     the list.  */
	  last_occr = avail_occr;
	  avail_occr = avail_occr->next;
	}

      if (avail_occr)
	{
	  /* Found another instance of the expression in the same basic block.
	     Prefer this occurrence to the currently recorded one.  We want
	     the last one in the block and the block is scanned from start
	     to end.  */
	  avail_occr->insn = insn;
	}
      else
	{
	  /* First occurrence of this expression in this basic block.  */
	  avail_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
	  bytes_used += sizeof (struct occr);
	  /* First occurrence of this expression in any block?  */
	  if (cur_expr->avail_occr == NULL)
	    cur_expr->avail_occr = avail_occr;
	  else
	    last_occr->next = avail_occr;
	  avail_occr->insn = insn;
	  avail_occr->next = NULL;
	}
    }
}
/* Insert pattern X in INSN in the hash table.
   X is a SET of a reg to either another reg or a constant.
   If it is already present, record it as the last occurrence in INSN's
   basic block.  */

static void
insert_set_in_table (x, insn)
     rtx x;
     rtx insn;
{
  int found;
  unsigned int hash;
  struct expr *cur_expr, *last_expr = NULL;
  struct occr *cur_occr, *last_occr = NULL;

  if (GET_CODE (x) != SET
      || GET_CODE (SET_DEST (x)) != REG)
    abort ();

  hash = hash_set (REGNO (SET_DEST (x)), set_hash_table_size);

  cur_expr = set_hash_table[hash];
  found = 0;

  while (cur_expr && ! (found = expr_equiv_p (cur_expr->expr, x)))
    {
      /* If the expression isn't found, save a pointer to the end of
	 the list.  */
      last_expr = cur_expr;
      cur_expr = cur_expr->next_same_hash;
    }

  if (! found)
    {
      cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
      bytes_used += sizeof (struct expr);
      if (set_hash_table[hash] == NULL)
	{
	  /* This is the first pattern that hashed to this index.  */
	  set_hash_table[hash] = cur_expr;
	}
      else
	{
	  /* Add EXPR to end of this hash chain.  */
	  last_expr->next_same_hash = cur_expr;
	}
      /* Set the fields of the expr element.
	 We must copy X because it can be modified when copy propagation is
	 performed on its operands.  */
      /* ??? Should this go in a different obstack?  */
      cur_expr->expr = copy_rtx (x);
      cur_expr->bitmap_index = n_sets++;
      cur_expr->next_same_hash = NULL;
      cur_expr->antic_occr = NULL;
      cur_expr->avail_occr = NULL;
    }

  /* Now record the occurrence.  */

  cur_occr = cur_expr->avail_occr;

  /* Search for another occurrence in the same basic block.  */
  while (cur_occr && BLOCK_NUM (cur_occr->insn) != BLOCK_NUM (insn))
    {
      /* If an occurrence isn't found, save a pointer to the end of
	 the list.  */
      last_occr = cur_occr;
      cur_occr = cur_occr->next;
    }

  if (cur_occr)
    {
      /* Found another instance of the expression in the same basic block.
	 Prefer this occurrence to the currently recorded one.  We want
	 the last one in the block and the block is scanned from start
	 to end.  */
      cur_occr->insn = insn;
    }
  else
    {
      /* First occurrence of this expression in this basic block.  */
      cur_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
      bytes_used += sizeof (struct occr);
      /* First occurrence of this expression in any block?  */
      if (cur_expr->avail_occr == NULL)
	cur_expr->avail_occr = cur_occr;
      else
	last_occr->next = cur_occr;
      cur_occr->insn = insn;
      cur_occr->next = NULL;
    }
}
1877 /* Scan pattern PAT of INSN and add an entry to the hash table.
1878 If SET_P is non-zero, this is for the assignment hash table,
1879 otherwise it is for the expression hash table. */
1881 static void
1882 hash_scan_set (pat, insn, set_p)
1883 rtx pat, insn;
1884 int set_p;
1886 rtx src = SET_SRC (pat);
1887 rtx dest = SET_DEST (pat);
1889 if (GET_CODE (src) == CALL)
1890 hash_scan_call (src, insn);
1892 if (GET_CODE (dest) == REG)
1894 int regno = REGNO (dest);
1895 rtx tmp;
1897 /* Only record sets of pseudo-regs in the hash table. */
1898 if (! set_p
1899 && regno >= FIRST_PSEUDO_REGISTER
1900 /* Don't GCSE something if we can't do a reg/reg copy. */
1901 && can_copy_p [GET_MODE (dest)]
1902 /* Is SET_SRC something we want to gcse? */
1903 && want_to_gcse_p (src))
1905 /* An expression is not anticipatable if its operands are
1906 modified before this insn. */
1907 int antic_p = oprs_anticipatable_p (src, insn);
1908 /* An expression is not available if its operands are
1909 subsequently modified, including this insn. */
1910 int avail_p = oprs_available_p (src, insn);
1911 insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p);
1913 /* Record sets for constant/copy propagation. */
1914 else if (set_p
1915 && regno >= FIRST_PSEUDO_REGISTER
1916 && ((GET_CODE (src) == REG
1917 && REGNO (src) >= FIRST_PSEUDO_REGISTER
1918 && can_copy_p [GET_MODE (dest)])
1919 || GET_CODE (src) == CONST_INT
1920 || GET_CODE (src) == SYMBOL_REF
1921 || GET_CODE (src) == CONST_DOUBLE)
1922 /* A copy is not available if its src or dest is subsequently
1923 modified. Here we want to search from INSN+1 on, but
1924 oprs_available_p searches from INSN on. */
1925 && (insn == BLOCK_END (BLOCK_NUM (insn))
1926 || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
1927 && oprs_available_p (pat, tmp))))
1928 insert_set_in_table (pat, insn);
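/* Illustrative sketch (not part of the pass): the tests hash_scan_set
   applies before recording a SET for const/copy propagation, gathered
   into one hypothetical predicate.  The availability check that starts
   at INSN+1 is omitted here.  */
#if 0
static int
cprop_candidate_p (pat)
     rtx pat;
{
  rtx src = SET_SRC (pat);
  rtx dest = SET_DEST (pat);

  return (GET_CODE (dest) == REG
          && REGNO (dest) >= FIRST_PSEUDO_REGISTER
          && ((GET_CODE (src) == REG
               && REGNO (src) >= FIRST_PSEUDO_REGISTER
               && can_copy_p [GET_MODE (dest)])
              || GET_CODE (src) == CONST_INT
              || GET_CODE (src) == SYMBOL_REF
              || GET_CODE (src) == CONST_DOUBLE));
}
#endif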
1932 static void
1933 hash_scan_clobber (x, insn)
1934 rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
1936 /* Currently nothing to do. */
1939 static void
1940 hash_scan_call (x, insn)
1941 rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
1943 /* Currently nothing to do. */
1946 /* Process INSN and add hash table entries as appropriate.
1948 Only available expressions that set a single pseudo-reg are recorded.
1950 Single sets in a PARALLEL could be handled, but it's an extra complication
1951 that isn't dealt with right now. The trick is handling the CLOBBERs that
1952 are also in the PARALLEL. Later.
1954 If SET_P is non-zero, this is for the assignment hash table,
1955 otherwise it is for the expression hash table.
1956 If IN_LIBCALL_BLOCK is nonzero, we are in a libcall block and should
1957 not record any expressions. */
1959 static void
1960 hash_scan_insn (insn, set_p, in_libcall_block)
1961 rtx insn;
1962 int set_p;
1963 int in_libcall_block;
1965 rtx pat = PATTERN (insn);
1967 /* Pick out the sets of INSN and for other forms of instructions record
1968 what's been modified. */
1970 if (GET_CODE (pat) == SET && ! in_libcall_block)
1972 /* Ignore obvious no-ops. */
1973 if (SET_SRC (pat) != SET_DEST (pat))
1974 hash_scan_set (pat, insn, set_p);
1976 else if (GET_CODE (pat) == PARALLEL)
1978 int i;
1980 for (i = 0; i < XVECLEN (pat, 0); i++)
1982 rtx x = XVECEXP (pat, 0, i);
1984 if (GET_CODE (x) == SET)
1986 if (GET_CODE (SET_SRC (x)) == CALL)
1987 hash_scan_call (SET_SRC (x), insn);
1989 else if (GET_CODE (x) == CLOBBER)
1990 hash_scan_clobber (x, insn);
1991 else if (GET_CODE (x) == CALL)
1992 hash_scan_call (x, insn);
1995 else if (GET_CODE (pat) == CLOBBER)
1996 hash_scan_clobber (pat, insn);
1997 else if (GET_CODE (pat) == CALL)
1998 hash_scan_call (pat, insn);
2001 static void
2002 dump_hash_table (file, name, table, table_size, total_size)
2003 FILE *file;
2004 const char *name;
2005 struct expr **table;
2006 int table_size, total_size;
2008 int i;
2009 /* Flattened out table, so it's printed in proper order. */
2010 struct expr **flat_table = (struct expr **) alloca (total_size * sizeof (struct expr *));
2011 unsigned int *hash_val = (unsigned int *) alloca (total_size * sizeof (unsigned int));
2013 bzero ((char *) flat_table, total_size * sizeof (struct expr *));
2014 for (i = 0; i < table_size; i++)
2016 struct expr *expr;
2018 for (expr = table[i]; expr != NULL; expr = expr->next_same_hash)
2020 flat_table[expr->bitmap_index] = expr;
2021 hash_val[expr->bitmap_index] = i;
2025 fprintf (file, "%s hash table (%d buckets, %d entries)\n",
2026 name, table_size, total_size);
2028 for (i = 0; i < total_size; i++)
2030 struct expr *expr = flat_table[i];
2032 fprintf (file, "Index %d (hash value %d)\n ",
2033 expr->bitmap_index, hash_val[i]);
2034 print_rtl (file, expr->expr);
2035 fprintf (file, "\n");
2038 fprintf (file, "\n");
2041 /* Record register first/last/block set information for REGNO in INSN.
2042 reg_first_set records the first place in the block where the register
2043 is set and is used to compute "anticipatability".
2044 reg_last_set records the last place in the block where the register
2045 is set and is used to compute "availability".
2046 reg_set_in_block records whether the register is set in the block
2047 and is used to compute "transparency". */
2049 static void
2050 record_last_reg_set_info (insn, regno)
2051 rtx insn;
2052 int regno;
2054 if (reg_first_set[regno] == NEVER_SET)
2055 reg_first_set[regno] = INSN_CUID (insn);
2056 reg_last_set[regno] = INSN_CUID (insn);
2057 SET_BIT (reg_set_in_block[BLOCK_NUM (insn)], regno);
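/* Illustrative sketch (not part of the pass): how the records above are
   consumed, under the assumption that CUIDs increase through the block.
   An operand REGNO of the insn with cuid CUID is anticipatable if it is
   not set earlier in the block, and available if it is not set at or
   after CUID.  */
#if 0
static int
operand_anticipatable_p (regno, cuid)
     int regno, cuid;
{
  return reg_first_set[regno] == NEVER_SET || reg_first_set[regno] >= cuid;
}

static int
operand_available_p (regno, cuid)
     int regno, cuid;
{
  return reg_last_set[regno] == NEVER_SET || reg_last_set[regno] < cuid;
}
#endif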
2060 /* Record memory first/last/block set information for INSN. */
2062 static void
2063 record_last_mem_set_info (insn)
2064 rtx insn;
2066 if (mem_first_set == NEVER_SET)
2067 mem_first_set = INSN_CUID (insn);
2068 mem_last_set = INSN_CUID (insn);
2069 mem_set_in_block[BLOCK_NUM (insn)] = 1;
2072 /* Called from compute_hash_table via note_stores to handle one
2073 SET or CLOBBER in an insn. DATA is really the instruction in which
2074 the SET is taking place. */
2076 static void
2077 record_last_set_info (dest, setter, data)
2078 rtx dest, setter ATTRIBUTE_UNUSED;
2079 void *data;
2081 rtx last_set_insn = (rtx) data;
2083 if (GET_CODE (dest) == SUBREG)
2084 dest = SUBREG_REG (dest);
2086 if (GET_CODE (dest) == REG)
2087 record_last_reg_set_info (last_set_insn, REGNO (dest));
2088 else if (GET_CODE (dest) == MEM
2089 /* Ignore pushes, they clobber nothing. */
2090 && ! push_operand (dest, GET_MODE (dest)))
2091 record_last_mem_set_info (last_set_insn);
2094 /* Top level function to create an expression or assignment hash table.
2096 Expression entries are placed in the hash table if
2097 - they are of the form (set (pseudo-reg) src),
2098 - src is something we want to perform GCSE on,
2099 - none of the operands are subsequently modified in the block
2101 Assignment entries are placed in the hash table if
2102 - they are of the form (set (pseudo-reg) src),
2103 - src is something we want to perform const/copy propagation on,
2104 - none of the operands or target are subsequently modified in the block
2105 Currently src must be a pseudo-reg or a const_int.
2108 SET_P is non-zero for computing the assignment hash table. */
2110 static void
2111 compute_hash_table (set_p)
2112 int set_p;
2114 int bb;
2116 /* While we compute the hash table we also compute a bit array of which
2117 registers are set in which blocks.
2118 We also compute which blocks set memory, in the absence of aliasing
2119 support [which is TODO].
2120 ??? This isn't needed during const/copy propagation, but it's cheap to
2121 compute. Later. */
2122 sbitmap_vector_zero (reg_set_in_block, n_basic_blocks);
2123 bzero ((char *) mem_set_in_block, n_basic_blocks);
2125 /* Some working arrays used to track first and last set in each block. */
2126 /* ??? One could use alloca here, but at some size a threshold is crossed
2127 beyond which one should use malloc. Are we at that threshold here? */
2128 reg_first_set = (int *) gmalloc (max_gcse_regno * sizeof (int));
2129 reg_last_set = (int *) gmalloc (max_gcse_regno * sizeof (int));
2131 for (bb = 0; bb < n_basic_blocks; bb++)
2133 rtx insn;
2134 int regno;
2135 int in_libcall_block;
2136 int i;
2138 /* First pass over the instructions records information used to
2139 determine when registers and memory are first and last set.
2140 ??? The mem_set_in_block and hard-reg reg_set_in_block computation
2141 could be moved to compute_sets since they currently don't change. */
2143 for (i = 0; i < max_gcse_regno; i++)
2144 reg_first_set[i] = reg_last_set[i] = NEVER_SET;
2145 mem_first_set = NEVER_SET;
2146 mem_last_set = NEVER_SET;
2148 for (insn = BLOCK_HEAD (bb);
2149 insn && insn != NEXT_INSN (BLOCK_END (bb));
2150 insn = NEXT_INSN (insn))
2152 #ifdef NON_SAVING_SETJMP
2153 if (NON_SAVING_SETJMP && GET_CODE (insn) == NOTE
2154 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
2156 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2157 record_last_reg_set_info (insn, regno);
2158 continue;
2160 #endif
2162 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
2163 continue;
2165 if (GET_CODE (insn) == CALL_INSN)
2167 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2168 if ((call_used_regs[regno]
2169 && regno != STACK_POINTER_REGNUM
2170 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2171 && regno != HARD_FRAME_POINTER_REGNUM
2172 #endif
2173 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
2174 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
2175 #endif
2176 #if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
2177 && ! (regno == PIC_OFFSET_TABLE_REGNUM && flag_pic)
2178 #endif
2180 && regno != FRAME_POINTER_REGNUM)
2181 || global_regs[regno])
2182 record_last_reg_set_info (insn, regno);
2183 if (! CONST_CALL_P (insn))
2184 record_last_mem_set_info (insn);
2187 note_stores (PATTERN (insn), record_last_set_info, insn);
2190 /* The next pass builds the hash table. */
2192 for (insn = BLOCK_HEAD (bb), in_libcall_block = 0;
2193 insn && insn != NEXT_INSN (BLOCK_END (bb));
2194 insn = NEXT_INSN (insn))
2196 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
2198 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
2199 in_libcall_block = 1;
2200 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
2201 in_libcall_block = 0;
2202 hash_scan_insn (insn, set_p, in_libcall_block);
2207 free (reg_first_set);
2208 free (reg_last_set);
2209 /* Catch bugs early. */
2210 reg_first_set = reg_last_set = 0;
2213 /* Allocate space for the set hash table.
2214 N_INSNS is the number of instructions in the function.
2215 It is used to determine the number of buckets to use. */
2217 static void
2218 alloc_set_hash_table (n_insns)
2219 int n_insns;
2221 int n;
2223 set_hash_table_size = n_insns / 4;
2224 if (set_hash_table_size < 11)
2225 set_hash_table_size = 11;
2226 /* Attempt to maintain efficient use of hash table.
2227 Making it an odd number is simplest for now.
2228 ??? Later take some measurements. */
2229 set_hash_table_size |= 1;
2230 n = set_hash_table_size * sizeof (struct expr *);
2231 set_hash_table = (struct expr **) gmalloc (n);
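/* Illustrative sketch (not part of the pass): the bucket-count heuristic
   used here and in alloc_expr_hash_table, as a standalone helper.  Some
   fraction of the insn count, clamped below at 11 and forced odd.  */
#if 0
static int
hash_table_size_for (n_insns, divisor)
     int n_insns, divisor;
{
  int size = n_insns / divisor;

  if (size < 11)
    size = 11;
  return size | 1;
}
#endif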
2234 /* Free things allocated by alloc_set_hash_table. */
2236 static void
2237 free_set_hash_table ()
2239 free (set_hash_table);
2242 /* Compute the hash table for doing copy/const propagation. */
2244 static void
2245 compute_set_hash_table ()
2247 /* Initialize count of number of entries in hash table. */
2248 n_sets = 0;
2249 bzero ((char *) set_hash_table, set_hash_table_size * sizeof (struct expr *));
2251 compute_hash_table (1);
2254 /* Allocate space for the expression hash table.
2255 N_INSNS is the number of instructions in the function.
2256 It is used to determine the number of buckets to use. */
2258 static void
2259 alloc_expr_hash_table (n_insns)
2260 int n_insns;
2262 int n;
2264 expr_hash_table_size = n_insns / 2;
2265 /* Make sure the amount is usable. */
2266 if (expr_hash_table_size < 11)
2267 expr_hash_table_size = 11;
2268 /* Attempt to maintain efficient use of hash table.
2269 Making it an odd number is simplest for now.
2270 ??? Later take some measurements. */
2271 expr_hash_table_size |= 1;
2272 n = expr_hash_table_size * sizeof (struct expr *);
2273 expr_hash_table = (struct expr **) gmalloc (n);
2276 /* Free things allocated by alloc_expr_hash_table. */
2278 static void
2279 free_expr_hash_table ()
2281 free (expr_hash_table);
2284 /* Compute the hash table for doing GCSE. */
2286 static void
2287 compute_expr_hash_table ()
2289 /* Initialize count of number of entries in hash table. */
2290 n_exprs = 0;
2291 bzero ((char *) expr_hash_table, expr_hash_table_size * sizeof (struct expr *));
2293 compute_hash_table (0);
2296 /* Expression tracking support. */
2298 /* Lookup pattern PAT in the expression table.
2299 The result is a pointer to the table entry, or NULL if not found. */
2301 static struct expr *
2302 lookup_expr (pat)
2303 rtx pat;
2305 int do_not_record_p;
2306 unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p,
2307 expr_hash_table_size);
2308 struct expr *expr;
2310 if (do_not_record_p)
2311 return NULL;
2313 expr = expr_hash_table[hash];
2315 while (expr && ! expr_equiv_p (expr->expr, pat))
2316 expr = expr->next_same_hash;
2318 return expr;
2321 /* Lookup REGNO in the set table.
2322 If PAT is non-NULL look for the entry that matches it, otherwise return
2323 the first entry for REGNO.
2324 The result is a pointer to the table entry, or NULL if not found. */
2326 static struct expr *
2327 lookup_set (regno, pat)
2328 int regno;
2329 rtx pat;
2331 unsigned int hash = hash_set (regno, set_hash_table_size);
2332 struct expr *expr;
2334 expr = set_hash_table[hash];
2336 if (pat)
2338 while (expr && ! expr_equiv_p (expr->expr, pat))
2339 expr = expr->next_same_hash;
2341 else
2343 while (expr && REGNO (SET_DEST (expr->expr)) != regno)
2344 expr = expr->next_same_hash;
2347 return expr;
2350 /* Return the next entry for REGNO in list EXPR. */
2352 static struct expr *
2353 next_set (regno, expr)
2354 int regno;
2355 struct expr *expr;
2357 do
2358 expr = expr->next_same_hash;
2359 while (expr && REGNO (SET_DEST (expr->expr)) != regno);
2360 return expr;
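/* Illustrative sketch (not part of the pass): visiting every recorded
   assignment to register REGNO with the two routines above.  */
#if 0
static void
walk_sets_of_reg (regno)
     int regno;
{
  struct expr *set;

  for (set = lookup_set (regno, NULL_RTX);
       set != NULL;
       set = next_set (regno, set))
    ; /* SET->expr is a (set (reg REGNO) ...) pattern.  */
}
#endif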
2363 /* Reset tables used to keep track of what's still available [since the
2364 start of the block]. */
2366 static void
2367 reset_opr_set_tables ()
2369 /* Maintain a bitmap of which regs have been set since beginning of
2370 the block. */
2371 sbitmap_zero (reg_set_bitmap);
2372 /* Also keep a record of the last instruction to modify memory.
2373 For now this is very trivial, we only record whether any memory
2374 location has been modified. */
2375 mem_last_set = 0;
2378 /* Return non-zero if the operands of X are not set before INSN in
2379 INSN's basic block. */
2381 static int
2382 oprs_not_set_p (x, insn)
2383 rtx x, insn;
2385 int i;
2386 enum rtx_code code;
2387 const char *fmt;
2389 /* repeat is used to turn tail-recursion into iteration. */
2390 repeat:
2392 if (x == 0)
2393 return 1;
2395 code = GET_CODE (x);
2396 switch (code)
2398 case PC:
2399 case CC0:
2400 case CONST:
2401 case CONST_INT:
2402 case CONST_DOUBLE:
2403 case SYMBOL_REF:
2404 case LABEL_REF:
2405 case ADDR_VEC:
2406 case ADDR_DIFF_VEC:
2407 return 1;
2409 case MEM:
2410 if (mem_last_set != 0)
2411 return 0;
2412 x = XEXP (x, 0);
2413 goto repeat;
2415 case REG:
2416 return ! TEST_BIT (reg_set_bitmap, REGNO (x));
2418 default:
2419 break;
2422 fmt = GET_RTX_FORMAT (code);
2423 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2425 if (fmt[i] == 'e')
2427 int not_set_p;
2428 /* If we are about to do the last recursive call
2429 needed at this level, change it into iteration.
2430 This function is called enough to be worth it. */
2431 if (i == 0)
2433 x = XEXP (x, 0);
2434 goto repeat;
2436 not_set_p = oprs_not_set_p (XEXP (x, i), insn);
2437 if (! not_set_p)
2438 return 0;
2440 else if (fmt[i] == 'E')
2442 int j;
2443 for (j = 0; j < XVECLEN (x, i); j++)
2445 int not_set_p = oprs_not_set_p (XVECEXP (x, i, j), insn);
2446 if (! not_set_p)
2447 return 0;
2452 return 1;
2455 /* Mark things set by a CALL. */
2457 static void
2458 mark_call (insn)
2459 rtx insn;
2461 mem_last_set = INSN_CUID (insn);
2464 /* Mark things set by a SET. */
2466 static void
2467 mark_set (pat, insn)
2468 rtx pat, insn;
2470 rtx dest = SET_DEST (pat);
2472 while (GET_CODE (dest) == SUBREG
2473 || GET_CODE (dest) == ZERO_EXTRACT
2474 || GET_CODE (dest) == SIGN_EXTRACT
2475 || GET_CODE (dest) == STRICT_LOW_PART)
2476 dest = XEXP (dest, 0);
2478 if (GET_CODE (dest) == REG)
2479 SET_BIT (reg_set_bitmap, REGNO (dest));
2480 else if (GET_CODE (dest) == MEM)
2481 mem_last_set = INSN_CUID (insn);
2483 if (GET_CODE (SET_SRC (pat)) == CALL)
2484 mark_call (insn);
2487 /* Record things set by a CLOBBER. */
2489 static void
2490 mark_clobber (pat, insn)
2491 rtx pat, insn;
2493 rtx clob = XEXP (pat, 0);
2495 while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
2496 clob = XEXP (clob, 0);
2498 if (GET_CODE (clob) == REG)
2499 SET_BIT (reg_set_bitmap, REGNO (clob));
2500 else
2501 mem_last_set = INSN_CUID (insn);
2504 /* Record things set by INSN.
2505 This data is used by oprs_not_set_p. */
2507 static void
2508 mark_oprs_set (insn)
2509 rtx insn;
2511 rtx pat = PATTERN (insn);
2513 if (GET_CODE (pat) == SET)
2514 mark_set (pat, insn);
2515 else if (GET_CODE (pat) == PARALLEL)
2517 int i;
2519 for (i = 0; i < XVECLEN (pat, 0); i++)
2521 rtx x = XVECEXP (pat, 0, i);
2523 if (GET_CODE (x) == SET)
2524 mark_set (x, insn);
2525 else if (GET_CODE (x) == CLOBBER)
2526 mark_clobber (x, insn);
2527 else if (GET_CODE (x) == CALL)
2528 mark_call (insn);
2531 else if (GET_CODE (pat) == CLOBBER)
2532 mark_clobber (pat, insn);
2533 else if (GET_CODE (pat) == CALL)
2534 mark_call (insn);
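/* Illustrative sketch (not part of the pass): the protocol the routines
   above support.  Per block, reset the tables first, then test an insn's
   operands before recording what the insn itself modifies.  */
#if 0
static void
scan_block (bb)
     int bb;
{
  rtx insn;

  reset_opr_set_tables ();
  for (insn = BLOCK_HEAD (bb);
       insn && insn != NEXT_INSN (BLOCK_END (bb));
       insn = NEXT_INSN (insn))
    if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
      {
        if (oprs_not_set_p (PATTERN (insn), insn))
          ; /* All operands still have their block-entry values here.  */
        mark_oprs_set (insn);
      }
}
#endif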
2538 /* Classic GCSE reaching definition support. */
2540 /* Allocate reaching def variables. */
2542 static void
2543 alloc_rd_mem (n_blocks, n_insns)
2544 int n_blocks, n_insns;
2546 rd_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2547 sbitmap_vector_zero (rd_kill, n_basic_blocks);
2549 rd_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2550 sbitmap_vector_zero (rd_gen, n_basic_blocks);
2552 reaching_defs = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2553 sbitmap_vector_zero (reaching_defs, n_basic_blocks);
2555 rd_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2556 sbitmap_vector_zero (rd_out, n_basic_blocks);
2559 /* Free reaching def variables. */
2561 static void
2562 free_rd_mem ()
2564 free (rd_kill);
2565 free (rd_gen);
2566 free (reaching_defs);
2567 free (rd_out);
2570 /* Add INSN to the kills of BB.
2571 REGNO, set in BB, is killed by INSN. */
2573 static void
2574 handle_rd_kill_set (insn, regno, bb)
2575 rtx insn;
2576 int regno, bb;
2578 struct reg_set *this_reg = reg_set_table[regno];
2580 while (this_reg)
2582 if (BLOCK_NUM (this_reg->insn) != BLOCK_NUM (insn))
2583 SET_BIT (rd_kill[bb], INSN_CUID (this_reg->insn));
2584 this_reg = this_reg->next;
2588 /* Compute the set of kills for reaching definitions. */
2590 static void
2591 compute_kill_rd ()
2593 int bb, cuid;
2595 /* For each block
2596 For each set bit in `gen' of the block (i.e. each insn which
2597 generates a definition in the block)
2598 Call the reg set by the insn corresponding to that bit regx
2599 Look at the linked list starting at reg_set_table[regx]
2600 For each setting of regx in the linked list, which is not in
2601 this block
2602 Set the bit in `kill' corresponding to that insn
2605 for (bb = 0; bb < n_basic_blocks; bb++)
2607 for (cuid = 0; cuid < max_cuid; cuid++)
2609 if (TEST_BIT (rd_gen[bb], cuid))
2611 rtx insn = CUID_INSN (cuid);
2612 rtx pat = PATTERN (insn);
2614 if (GET_CODE (insn) == CALL_INSN)
2616 int regno;
2618 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2620 if ((call_used_regs[regno]
2621 && regno != STACK_POINTER_REGNUM
2622 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2623 && regno != HARD_FRAME_POINTER_REGNUM
2624 #endif
2625 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
2626 && ! (regno == ARG_POINTER_REGNUM
2627 && fixed_regs[regno])
2628 #endif
2629 #if defined (PIC_OFFSET_TABLE_REGNUM) && !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
2630 && ! (regno == PIC_OFFSET_TABLE_REGNUM && flag_pic)
2631 #endif
2632 && regno != FRAME_POINTER_REGNUM)
2633 || global_regs[regno])
2634 handle_rd_kill_set (insn, regno, bb);
2638 if (GET_CODE (pat) == PARALLEL)
2640 int i;
2642 /* We work backwards because ... */
2643 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
2645 enum rtx_code code = GET_CODE (XVECEXP (pat, 0, i));
2646 if ((code == SET || code == CLOBBER)
2647 && GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) == REG)
2648 handle_rd_kill_set (insn,
2649 REGNO (XEXP (XVECEXP (pat, 0, i), 0)),
2650 bb);
2653 else if (GET_CODE (pat) == SET)
2655 if (GET_CODE (SET_DEST (pat)) == REG)
2657 /* Each setting of this register outside of this block
2658 must be marked in the set of kills in this block. */
2659 handle_rd_kill_set (insn, REGNO (SET_DEST (pat)), bb);
2662 /* FIXME: CLOBBER? */
2668 /* Compute the reaching definitions as in
2669 Compilers Principles, Techniques, and Tools. Aho, Sethi, Ullman,
2670 Chapter 10. It is the same algorithm as used for computing available
2671 expressions but applied to the gens and kills of reaching definitions. */
2673 static void
2674 compute_rd ()
2676 int bb, changed, passes;
2678 for (bb = 0; bb < n_basic_blocks; bb++)
2679 sbitmap_copy (rd_out[bb] /*dst*/, rd_gen[bb] /*src*/);
2681 passes = 0;
2682 changed = 1;
2683 while (changed)
2685 changed = 0;
2686 for (bb = 0; bb < n_basic_blocks; bb++)
2688 sbitmap_union_of_preds (reaching_defs[bb], rd_out, bb);
2689 changed |= sbitmap_union_of_diff (rd_out[bb], rd_gen[bb],
2690 reaching_defs[bb], rd_kill[bb]);
2692 passes++;
2695 if (gcse_file)
2696 fprintf (gcse_file, "reaching def computation: %d passes\n", passes);
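/* Illustrative note (not part of the pass): written as dataflow
   equations, the loop above iterates

	reaching_defs[bb] = union over preds P of rd_out[P]
	rd_out[bb]        = rd_gen[bb] | (reaching_defs[bb] & ~rd_kill[bb])

   over all blocks until it reaches a fixed point.  */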
2699 /* Classic GCSE available expression support. */
2701 /* Allocate memory for available expression computation. */
2703 static void
2704 alloc_avail_expr_mem (n_blocks, n_exprs)
2705 int n_blocks, n_exprs;
2707 ae_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
2708 sbitmap_vector_zero (ae_kill, n_basic_blocks);
2710 ae_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
2711 sbitmap_vector_zero (ae_gen, n_basic_blocks);
2713 ae_in = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
2714 sbitmap_vector_zero (ae_in, n_basic_blocks);
2716 ae_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
2717 sbitmap_vector_zero (ae_out, n_basic_blocks);
2719 u_bitmap = (sbitmap) sbitmap_alloc (n_exprs);
2720 sbitmap_ones (u_bitmap);
2723 static void
2724 free_avail_expr_mem ()
2726 free (ae_kill);
2727 free (ae_gen);
2728 free (ae_in);
2729 free (ae_out);
2730 free (u_bitmap);
2733 /* Compute the set of available expressions generated in each basic block. */
2735 static void
2736 compute_ae_gen ()
2738 int i;
2740 /* For each recorded occurrence of each expression, set ae_gen[bb][expr].
2741 This is all we have to do because an expression is not recorded if it
2742 is not available, and the only expressions we want to work with are the
2743 ones that are recorded. */
2745 for (i = 0; i < expr_hash_table_size; i++)
2747 struct expr *expr = expr_hash_table[i];
2748 while (expr != NULL)
2750 struct occr *occr = expr->avail_occr;
2751 while (occr != NULL)
2753 SET_BIT (ae_gen[BLOCK_NUM (occr->insn)], expr->bitmap_index);
2754 occr = occr->next;
2756 expr = expr->next_same_hash;
2761 /* Return non-zero if expression X is killed in BB. */
2763 static int
2764 expr_killed_p (x, bb)
2765 rtx x;
2766 int bb;
2768 int i;
2769 enum rtx_code code;
2770 const char *fmt;
2772 /* repeat is used to turn tail-recursion into iteration. */
2773 repeat:
2775 if (x == 0)
2776 return 1;
2778 code = GET_CODE (x);
2779 switch (code)
2781 case REG:
2782 return TEST_BIT (reg_set_in_block[bb], REGNO (x));
2784 case MEM:
2785 if (mem_set_in_block[bb])
2786 return 1;
2787 x = XEXP (x, 0);
2788 goto repeat;
2790 case PC:
2791 case CC0: /*FIXME*/
2792 case CONST:
2793 case CONST_INT:
2794 case CONST_DOUBLE:
2795 case SYMBOL_REF:
2796 case LABEL_REF:
2797 case ADDR_VEC:
2798 case ADDR_DIFF_VEC:
2799 return 0;
2801 default:
2802 break;
2805 i = GET_RTX_LENGTH (code) - 1;
2806 fmt = GET_RTX_FORMAT (code);
2807 for (; i >= 0; i--)
2809 if (fmt[i] == 'e')
2811 rtx tem = XEXP (x, i);
2813 /* If we are about to do the last recursive call
2814 needed at this level, change it into iteration.
2815 This function is called enough to be worth it. */
2816 if (i == 0)
2818 x = tem;
2819 goto repeat;
2821 if (expr_killed_p (tem, bb))
2822 return 1;
2824 else if (fmt[i] == 'E')
2826 int j;
2827 for (j = 0; j < XVECLEN (x, i); j++)
2829 if (expr_killed_p (XVECEXP (x, i, j), bb))
2830 return 1;
2835 return 0;
2838 /* Compute the set of available expressions killed in each basic block. */
2840 static void
2841 compute_ae_kill (ae_gen, ae_kill)
2842 sbitmap *ae_gen, *ae_kill;
2844 int bb, i;
2846 for (bb = 0; bb < n_basic_blocks; bb++)
2848 for (i = 0; i < expr_hash_table_size; i++)
2850 struct expr *expr = expr_hash_table[i];
2852 for ( ; expr != NULL; expr = expr->next_same_hash)
2854 /* Skip EXPR if generated in this block. */
2855 if (TEST_BIT (ae_gen[bb], expr->bitmap_index))
2856 continue;
2858 if (expr_killed_p (expr->expr, bb))
2859 SET_BIT (ae_kill[bb], expr->bitmap_index);
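/* Illustrative note (not part of the pass): AE_GEN and AE_KILL computed
   above feed compute_available, which solves the standard equations

	ae_in[bb]  = intersection over preds P of ae_out[P]
	ae_out[bb] = ae_gen[bb] | (ae_in[bb] & ~ae_kill[bb])

   with ae_in of the entry block empty; compare compute_cprop_avinout
   below, which implements the same scheme for the set hash table.  */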
2865 /* Actually perform the Classic GCSE optimizations. */
2867 /* Return non-zero if occurrence OCCR of expression EXPR reaches block BB.
2869 CHECK_SELF_LOOP is non-zero if we should consider a block reaching itself
2870 as a positive reach. We want to do this when there are two computations
2871 of the expression in the block.
2873 VISITED is a pointer to a working buffer for tracking which BB's have
2874 been visited. The top-level wrapper, expr_reaches_here_p, allocates and zeroes it.
2876 We treat reaching expressions that go through blocks containing the same
2877 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
2878 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
2879 2 as not reaching. The intent is to improve the probability of finding
2880 only one reaching expression and to reduce register lifetimes by picking
2881 the closest such expression. */
2883 static int
2884 expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited)
2885 struct occr *occr;
2886 struct expr *expr;
2887 int bb;
2888 int check_self_loop;
2889 char *visited;
2891 edge pred;
2893 for (pred = BASIC_BLOCK (bb)->pred; pred != NULL; pred = pred->pred_next)
2895 int pred_bb = pred->src->index;
2897 if (visited[pred_bb])
2899 /* This predecessor has already been visited.
2900 Nothing to do. */
2903 else if (pred_bb == bb)
2905 /* BB loops on itself. */
2906 if (check_self_loop
2907 && TEST_BIT (ae_gen[pred_bb], expr->bitmap_index)
2908 && BLOCK_NUM (occr->insn) == pred_bb)
2909 return 1;
2910 visited[pred_bb] = 1;
2912 /* Ignore this predecessor if it kills the expression. */
2913 else if (TEST_BIT (ae_kill[pred_bb], expr->bitmap_index))
2914 visited[pred_bb] = 1;
2915 /* Does this predecessor generate this expression? */
2916 else if (TEST_BIT (ae_gen[pred_bb], expr->bitmap_index))
2918 /* Is this the occurrence we're looking for?
2919 Note that there's only one generating occurrence per block
2920 so we just need to check the block number. */
2921 if (BLOCK_NUM (occr->insn) == pred_bb)
2922 return 1;
2923 visited[pred_bb] = 1;
2925 /* Neither gen nor kill. */
2926 else
2928 visited[pred_bb] = 1;
2929 if (expr_reaches_here_p_work (occr, expr, pred_bb, check_self_loop,
2930 visited))
2931 return 1;
2935 /* All paths have been checked. */
2936 return 0;
2939 /* This wrapper for expr_reaches_here_p_work() is to ensure that any
2940 memory allocated for that function is returned. */
2942 static int
2943 expr_reaches_here_p (occr, expr, bb, check_self_loop)
2944 struct occr *occr;
2945 struct expr *expr;
2946 int bb;
2947 int check_self_loop;
2949 int rval;
2950 char * visited = (char *) xcalloc (n_basic_blocks, 1);
2952 rval = expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited);
2954 free (visited);
2956 return rval;
2959 /* Return the instruction that computes EXPR that reaches INSN's basic block.
2960 If there is more than one such instruction, return NULL.
2962 Called only by handle_avail_expr. */
2964 static rtx
2965 computing_insn (expr, insn)
2966 struct expr *expr;
2967 rtx insn;
2969 int bb = BLOCK_NUM (insn);
2971 if (expr->avail_occr->next == NULL)
2973 if (BLOCK_NUM (expr->avail_occr->insn) == bb)
2975 /* The available expression is actually itself
2976 (i.e. a loop in the flow graph) so do nothing. */
2977 return NULL;
2979 /* (FIXME) Case that we found a pattern that was created by
2980 a substitution that took place. */
2981 return expr->avail_occr->insn;
2983 else
2985 /* Pattern is computed more than once.
2986 Search backwards from this insn to see how many of these
2987 computations actually reach this insn. */
2988 struct occr *occr;
2989 rtx insn_computes_expr = NULL;
2990 int can_reach = 0;
2992 for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
2994 if (BLOCK_NUM (occr->insn) == bb)
2996 /* The expression is generated in this block.
2997 The only time we care about this is when the expression
2998 is generated later in the block [and thus there's a loop].
2999 We let the normal cse pass handle the other cases. */
3000 if (INSN_CUID (insn) < INSN_CUID (occr->insn))
3002 if (expr_reaches_here_p (occr, expr, bb, 1))
3004 can_reach++;
3005 if (can_reach > 1)
3006 return NULL;
3007 insn_computes_expr = occr->insn;
3011 else /* Computation of the pattern outside this block. */
3013 if (expr_reaches_here_p (occr, expr, bb, 0))
3015 can_reach++;
3016 if (can_reach > 1)
3017 return NULL;
3018 insn_computes_expr = occr->insn;
3023 if (insn_computes_expr == NULL)
3024 abort ();
3025 return insn_computes_expr;
3029 /* Return non-zero if the definition in DEF_INSN can reach INSN.
3030 Only called by can_disregard_other_sets. */
3032 static int
3033 def_reaches_here_p (insn, def_insn)
3034 rtx insn, def_insn;
3036 rtx reg;
3038 if (TEST_BIT (reaching_defs[BLOCK_NUM (insn)], INSN_CUID (def_insn)))
3039 return 1;
3041 if (BLOCK_NUM (insn) == BLOCK_NUM (def_insn))
3043 if (INSN_CUID (def_insn) < INSN_CUID (insn))
3045 if (GET_CODE (PATTERN (def_insn)) == PARALLEL)
3046 return 1;
3047 if (GET_CODE (PATTERN (def_insn)) == CLOBBER)
3048 reg = XEXP (PATTERN (def_insn), 0);
3049 else if (GET_CODE (PATTERN (def_insn)) == SET)
3050 reg = SET_DEST (PATTERN (def_insn));
3051 else
3052 abort ();
3053 return ! reg_set_between_p (reg, NEXT_INSN (def_insn), insn);
3055 else
3056 return 0;
3059 return 0;
3062 /* Return non-zero if *ADDR_THIS_REG can only have one value at INSN.
3063 The value returned is the number of definitions that reach INSN.
3064 Returning a value of zero means that [maybe] more than one definition
3065 reaches INSN and the caller can't perform whatever optimization it is
3066 trying to do; i.e. it is always safe to return zero. */
3068 static int
3069 can_disregard_other_sets (addr_this_reg, insn, for_combine)
3070 struct reg_set **addr_this_reg;
3071 rtx insn;
3072 int for_combine;
3074 int number_of_reaching_defs = 0;
3075 struct reg_set *this_reg = *addr_this_reg;
3077 while (this_reg)
3079 if (def_reaches_here_p (insn, this_reg->insn))
3081 number_of_reaching_defs++;
3082 /* Ignore parallels for now. */
3083 if (GET_CODE (PATTERN (this_reg->insn)) == PARALLEL)
3084 return 0;
3085 if (!for_combine
3086 && (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER
3087 || ! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3088 SET_SRC (PATTERN (insn)))))
3090 /* A setting of the reg to a different value reaches INSN. */
3091 return 0;
3093 if (number_of_reaching_defs > 1)
3095 /* If in this setting the value the register is being
3096 set to is equal to the previous value the register
3097 was set to and this setting reaches the insn we are
3098 trying to do the substitution on then we are ok. */
3100 if (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER)
3101 return 0;
3102 if (! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3103 SET_SRC (PATTERN (insn))))
3104 return 0;
3106 *addr_this_reg = this_reg;
3109 /* prev_this_reg = this_reg; */
3110 this_reg = this_reg->next;
3113 return number_of_reaching_defs;
3116 /* Expression computed by insn is available and the substitution is legal,
3117 so try to perform the substitution.
3119 The result is non-zero if any changes were made. */
3121 static int
3122 handle_avail_expr (insn, expr)
3123 rtx insn;
3124 struct expr *expr;
3126 rtx pat, insn_computes_expr;
3127 rtx to;
3128 struct reg_set *this_reg;
3129 int found_setting, use_src;
3130 int changed = 0;
3132 /* We only handle the case where one computation of the expression
3133 reaches this instruction. */
3134 insn_computes_expr = computing_insn (expr, insn);
3135 if (insn_computes_expr == NULL)
3136 return 0;
3138 found_setting = 0;
3139 use_src = 0;
3141 /* At this point we know only one computation of EXPR outside of this
3142 block reaches this insn. Now try to find a register that the
3143 expression is computed into. */
3145 if (GET_CODE (SET_SRC (PATTERN (insn_computes_expr))) == REG)
3147 /* This is the case when the available expression that reaches
3148 here has already been handled as an available expression. */
3149 int regnum_for_replacing = REGNO (SET_SRC (PATTERN (insn_computes_expr)));
3150 /* If the register was created by GCSE we can't use `reg_set_table',
3151 however we know it's set only once. */
3152 if (regnum_for_replacing >= max_gcse_regno
3153 /* If the register the expression is computed into is set only once,
3154 or only one set reaches this insn, we can use it. */
3155 || (((this_reg = reg_set_table[regnum_for_replacing]),
3156 this_reg->next == NULL)
3157 || can_disregard_other_sets (&this_reg, insn, 0)))
3159 use_src = 1;
3160 found_setting = 1;
3164 if (!found_setting)
3166 int regnum_for_replacing = REGNO (SET_DEST (PATTERN (insn_computes_expr)));
3167 /* This shouldn't happen. */
3168 if (regnum_for_replacing >= max_gcse_regno)
3169 abort ();
3170 this_reg = reg_set_table[regnum_for_replacing];
3171 /* If the register the expression is computed into is set only once,
3172 or only one set reaches this insn, use it. */
3173 if (this_reg->next == NULL
3174 || can_disregard_other_sets (&this_reg, insn, 0))
3175 found_setting = 1;
3178 if (found_setting)
3180 pat = PATTERN (insn);
3181 if (use_src)
3182 to = SET_SRC (PATTERN (insn_computes_expr));
3183 else
3184 to = SET_DEST (PATTERN (insn_computes_expr));
3185 changed = validate_change (insn, &SET_SRC (pat), to, 0);
3187 /* We should be able to ignore the return code from validate_change but
3188 to play it safe we check. */
3189 if (changed)
3191 gcse_subst_count++;
3192 if (gcse_file != NULL)
3194 fprintf (gcse_file, "GCSE: Replacing the source in insn %d with reg %d %s insn %d\n",
3195 INSN_UID (insn), REGNO (to),
3196 use_src ? "from" : "set in",
3197 INSN_UID (insn_computes_expr));
3202 /* The register that the expr is computed into is set more than once. */
3203 else if (1 /*expensive_op(this_pattrn->op) && do_expensive_gcse)*/)
3205 /* Insert an insn after INSN_COMPUTES_EXPR that copies the reg it
3206 sets (call it REGB) into a new pseudo register (call it REGN).
3207 From there to the end of the basic block, or until REGB is next
3208 set, replace all uses of REGB with REGN. */
3209 rtx new_insn;
3211 to = gen_reg_rtx (GET_MODE (SET_DEST (PATTERN (insn_computes_expr))));
3213 /* Generate the new insn. */
3214 /* ??? If the change fails, we return 0, even though we created
3215 an insn. I think this is ok. */
3216 new_insn
3217 = emit_insn_after (gen_rtx_SET (VOIDmode, to,
3218 SET_DEST (PATTERN (insn_computes_expr))),
3219 insn_computes_expr);
3220 /* Keep block number table up to date. */
3221 set_block_num (new_insn, BLOCK_NUM (insn_computes_expr));
3222 /* Keep register set table up to date. */
3223 record_one_set (REGNO (to), new_insn);
3225 gcse_create_count++;
3226 if (gcse_file != NULL)
3228 fprintf (gcse_file, "GCSE: Creating insn %d to copy value of reg %d, computed in insn %d,\n",
3229 INSN_UID (NEXT_INSN (insn_computes_expr)),
3230 REGNO (SET_SRC (PATTERN (NEXT_INSN (insn_computes_expr)))),
3231 INSN_UID (insn_computes_expr));
3232 fprintf (gcse_file, " into newly allocated reg %d\n", REGNO (to));
3235 pat = PATTERN (insn);
3237 /* Do register replacement for INSN. */
3238 changed = validate_change (insn, &SET_SRC (pat),
3239 SET_DEST (PATTERN (NEXT_INSN (insn_computes_expr))), 0);
3242 /* We should be able to ignore the return code from validate_change but
3243 to play it safe we check. */
3244 if (changed)
3246 gcse_subst_count++;
3247 if (gcse_file != NULL)
3249 fprintf (gcse_file, "GCSE: Replacing the source in insn %d with reg %d set in insn %d\n",
3250 INSN_UID (insn),
3251 REGNO (SET_DEST (PATTERN (NEXT_INSN (insn_computes_expr)))),
3252 INSN_UID (insn_computes_expr));
3258 return changed;
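/* Illustrative example (not part of the pass), with hypothetical insns
   and register numbers.  Suppose insn A computes

	(set (reg 101) (plus (reg 90) (reg 91)))

   and a later insn B recomputes the same PLUS.  If reg 101 is set only
   once (or only A's set reaches B), B's source simply becomes (reg 101).
   Otherwise a fresh pseudo, say reg 150, is interposed:

	(set (reg 150) (reg 101))	; emitted right after insn A

   and B's source becomes (reg 150).  */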
3261 /* Perform classic GCSE.
3262 This is called by one_classic_gcse_pass after all the dataflow analysis
3263 has been done.
3265 The result is non-zero if a change was made. */
3267 static int
3268 classic_gcse ()
3270 int bb, changed;
3271 rtx insn;
3273 /* Note we start at block 1. */
3275 changed = 0;
3276 for (bb = 1; bb < n_basic_blocks; bb++)
3278 /* Reset tables used to keep track of what's still valid [since the
3279 start of the block]. */
3280 reset_opr_set_tables ();
3282 for (insn = BLOCK_HEAD (bb);
3283 insn != NULL && insn != NEXT_INSN (BLOCK_END (bb));
3284 insn = NEXT_INSN (insn))
3286 /* Is insn of form (set (pseudo-reg) ...)? */
3288 if (GET_CODE (insn) == INSN
3289 && GET_CODE (PATTERN (insn)) == SET
3290 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
3291 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_PSEUDO_REGISTER)
3293 rtx pat = PATTERN (insn);
3294 rtx src = SET_SRC (pat);
3295 struct expr *expr;
3297 if (want_to_gcse_p (src)
3298 /* Is the expression recorded? */
3299 && ((expr = lookup_expr (src)) != NULL)
3300 /* Is the expression available [at the start of the
3301 block]? */
3302 && TEST_BIT (ae_in[bb], expr->bitmap_index)
3303 /* Are the operands unchanged since the start of the
3304 block? */
3305 && oprs_not_set_p (src, insn))
3306 changed |= handle_avail_expr (insn, expr);
3309 /* Keep track of everything modified by this insn. */
3310 /* ??? Need to be careful w.r.t. mods done to INSN. */
3311 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
3312 mark_oprs_set (insn);
3316 return changed;
3319 /* Top level routine to perform one classic GCSE pass.
3321 Return non-zero if a change was made. */
3323 static int
3324 one_classic_gcse_pass (pass)
3325 int pass;
3327 int changed = 0;
3329 gcse_subst_count = 0;
3330 gcse_create_count = 0;
3332 alloc_expr_hash_table (max_cuid);
3333 alloc_rd_mem (n_basic_blocks, max_cuid);
3334 compute_expr_hash_table ();
3335 if (gcse_file)
3336 dump_hash_table (gcse_file, "Expression", expr_hash_table,
3337 expr_hash_table_size, n_exprs);
3338 if (n_exprs > 0)
3340 int passes;
3341 compute_kill_rd ();
3342 compute_rd ();
3343 alloc_avail_expr_mem (n_basic_blocks, n_exprs);
3344 compute_ae_gen ();
3345 compute_ae_kill (ae_gen, ae_kill);
3346 passes = compute_available (ae_gen, ae_kill, ae_out, ae_in);
3347 if (gcse_file)
3348 fprintf (gcse_file, "avail expr computation: %d passes\n", passes);
3349 changed = classic_gcse ();
3350 free_avail_expr_mem ();
3352 free_rd_mem ();
3353 free_expr_hash_table ();
3355 if (gcse_file)
3357 fprintf (gcse_file, "\n");
3358 fprintf (gcse_file, "GCSE of %s, pass %d: %d bytes needed, %d substs, %d insns created\n",
3359 current_function_name, pass,
3360 bytes_used, gcse_subst_count, gcse_create_count);
3363 return changed;
3366 /* Compute copy/constant propagation working variables. */
3368 /* Local properties of assignments. */
3370 static sbitmap *cprop_pavloc;
3371 static sbitmap *cprop_absaltered;
3373 /* Global properties of assignments (computed from the local properties). */
3375 static sbitmap *cprop_avin;
3376 static sbitmap *cprop_avout;
3378 /* Allocate vars used for copy/const propagation.
3379 N_BLOCKS is the number of basic blocks.
3380 N_SETS is the number of sets. */
3382 static void
3383 alloc_cprop_mem (n_blocks, n_sets)
3384 int n_blocks, n_sets;
3386 cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
3387 cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);
3389 cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
3390 cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
3393 /* Free vars used by copy/const propagation. */
3395 static void
3396 free_cprop_mem ()
3398 free (cprop_pavloc);
3399 free (cprop_absaltered);
3400 free (cprop_avin);
3401 free (cprop_avout);
3404 /* For each block, compute whether X is transparent.
3405 X is either an expression or an assignment [though we don't care which,
3406 for this context an assignment is treated as an expression].
3407 For each block where an element of X is modified, set (SET_P == 1) or reset
3408 (SET_P == 0) the INDX bit in BMAP. */
3410 static void
3411 compute_transp (x, indx, bmap, set_p)
3412 rtx x;
3413 int indx;
3414 sbitmap *bmap;
3415 int set_p;
3417 int bb, i;
3418 enum rtx_code code;
3419 const char *fmt;
3421 /* repeat is used to turn tail-recursion into iteration. */
3422 repeat:
3424 if (x == 0)
3425 return;
3427 code = GET_CODE (x);
3428 switch (code)
3430 case REG:
3432 reg_set *r;
3433 int regno = REGNO (x);
3435 if (set_p)
3437 if (regno < FIRST_PSEUDO_REGISTER)
3439 for (bb = 0; bb < n_basic_blocks; bb++)
3440 if (TEST_BIT (reg_set_in_block[bb], regno))
3441 SET_BIT (bmap[bb], indx);
3443 else
3445 for (r = reg_set_table[regno]; r != NULL; r = r->next)
3447 bb = BLOCK_NUM (r->insn);
3448 SET_BIT (bmap[bb], indx);
3452 else
3454 if (regno < FIRST_PSEUDO_REGISTER)
3456 for (bb = 0; bb < n_basic_blocks; bb++)
3457 if (TEST_BIT (reg_set_in_block[bb], regno))
3458 RESET_BIT (bmap[bb], indx);
3460 else
3462 for (r = reg_set_table[regno]; r != NULL; r = r->next)
3464 bb = BLOCK_NUM (r->insn);
3465 RESET_BIT (bmap[bb], indx);
3469 return;
3472 case MEM:
3473 if (set_p)
3475 for (bb = 0; bb < n_basic_blocks; bb++)
3476 if (mem_set_in_block[bb])
3477 SET_BIT (bmap[bb], indx);
3479 else
3481 for (bb = 0; bb < n_basic_blocks; bb++)
3482 if (mem_set_in_block[bb])
3483 RESET_BIT (bmap[bb], indx);
3485 x = XEXP (x, 0);
3486 goto repeat;
3488 case PC:
3489 case CC0: /*FIXME*/
3490 case CONST:
3491 case CONST_INT:
3492 case CONST_DOUBLE:
3493 case SYMBOL_REF:
3494 case LABEL_REF:
3495 case ADDR_VEC:
3496 case ADDR_DIFF_VEC:
3497 return;
3499 default:
3500 break;
3503 i = GET_RTX_LENGTH (code) - 1;
3504 fmt = GET_RTX_FORMAT (code);
3505 for (; i >= 0; i--)
3507 if (fmt[i] == 'e')
3509 rtx tem = XEXP (x, i);
3511 /* If we are about to do the last recursive call
3512 needed at this level, change it into iteration.
3513 This function is called enough to be worth it. */
3514 if (i == 0)
3516 x = tem;
3517 goto repeat;
3519 compute_transp (tem, indx, bmap, set_p);
3521 else if (fmt[i] == 'E')
3523 int j;
3524 for (j = 0; j < XVECLEN (x, i); j++)
3525 compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
3530 /* Compute the available expressions at the start and end of each
3531 basic block for cprop. This particular dataflow equation is
3532 used often enough that we might want to generalize it and make it
3533 a subroutine for other global optimizations that need available
3534 in/out information. */
3535 static void
3536 compute_cprop_avinout ()
3538 int bb, changed, passes;
3540 sbitmap_zero (cprop_avin[0]);
3541 sbitmap_vector_ones (cprop_avout, n_basic_blocks);
3543 passes = 0;
3544 changed = 1;
3545 while (changed)
3547 changed = 0;
3548 for (bb = 0; bb < n_basic_blocks; bb++)
3550 if (bb != 0)
3551 sbitmap_intersection_of_preds (cprop_avin[bb], cprop_avout, bb);
3552 changed |= sbitmap_union_of_diff (cprop_avout[bb],
3553 cprop_pavloc[bb],
3554 cprop_avin[bb],
3555 cprop_absaltered[bb]);
3557 passes++;
3560 if (gcse_file)
3561 fprintf (gcse_file, "cprop avail expr computation: %d passes\n", passes);
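/* Illustrative note (not part of the pass): the loop above solves

	cprop_avin[bb]  = intersection over preds P of cprop_avout[P]   (bb != 0)
	cprop_avout[bb] = cprop_pavloc[bb] | (cprop_avin[bb] & ~cprop_absaltered[bb])

   with cprop_avin[0] empty and every cprop_avout initialized to all
   ones.  */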
3564 /* Top level routine to do the dataflow analysis needed by copy/const
3565 propagation. */
3567 static void
3568 compute_cprop_data ()
3570 compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, 1);
3571 compute_cprop_avinout ();
3574 /* Copy/constant propagation. */
3576 /* Maximum number of register uses in an insn that we handle. */
3577 #define MAX_USES 8
3579 /* Table of uses found in an insn.
3580 Allocated statically to avoid alloc/free complexity and overhead. */
3581 static struct reg_use reg_use_table[MAX_USES];
3583 /* Index into `reg_use_table' while building it. */
3584 static int reg_use_count;
3586 /* Set up a list of register numbers used in INSN.
3587 The found uses are stored in `reg_use_table'.
3588 `reg_use_count' is initialized to zero before entry, and
3589 contains the number of uses in the table upon exit.
3591 ??? If a register appears multiple times we will record it multiple
3592 times. This doesn't hurt anything but it will slow things down. */
3594 static void
3595 find_used_regs (x)
3596 rtx x;
3598 int i;
3599 enum rtx_code code;
3600 const char *fmt;
3602 /* repeat is used to turn tail-recursion into iteration. */
3603 repeat:
3605 if (x == 0)
3606 return;
3608 code = GET_CODE (x);
3609 switch (code)
3611 case REG:
3612 if (reg_use_count == MAX_USES)
3613 return;
3614 reg_use_table[reg_use_count].reg_rtx = x;
3615 reg_use_count++;
3616 return;
3618 case MEM:
3619 x = XEXP (x, 0);
3620 goto repeat;
3622 case PC:
3623 case CC0:
3624 case CONST:
3625 case CONST_INT:
3626 case CONST_DOUBLE:
3627 case SYMBOL_REF:
3628 case LABEL_REF:
3629 case CLOBBER:
3630 case ADDR_VEC:
3631 case ADDR_DIFF_VEC:
3632 case ASM_INPUT: /*FIXME*/
3633 return;
3635 case SET:
3636 if (GET_CODE (SET_DEST (x)) == MEM)
3637 find_used_regs (SET_DEST (x));
3638 x = SET_SRC (x);
3639 goto repeat;
3641 default:
3642 break;
3645 /* Recursively scan the operands of this expression. */
3647 fmt = GET_RTX_FORMAT (code);
3648 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3650 if (fmt[i] == 'e')
3652 /* If we are about to do the last recursive call
3653 needed at this level, change it into iteration.
3654 This function is called enough to be worth it. */
3655 if (i == 0)
3657 x = XEXP (x, 0);
3658 goto repeat;
3660 find_used_regs (XEXP (x, i));
3662 else if (fmt[i] == 'E')
3664 int j;
3665 for (j = 0; j < XVECLEN (x, i); j++)
3666 find_used_regs (XVECEXP (x, i, j));
3671 /* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
3672 Returns non-zero if successful. */
3674 static int
3675 try_replace_reg (from, to, insn)
3676 rtx from, to, insn;
3678 /* If this fails we could try to simplify the result of the
3679 replacement and attempt to recognize the simplified insn.
3681 But we need a general simplify_rtx that doesn't have pass
3682 specific state variables. I'm not aware of one at the moment. */
3683 return validate_replace_src (from, to, insn);
3686 /* Find a set of REGNO that is available on entry to INSN's block.
3687 Returns NULL if not found. */
3689 static struct expr *
3690 find_avail_set (regno, insn)
3691 int regno;
3692 rtx insn;
3694 /* SET1 contains the last set found that can be returned to the caller for
3695 use in a substitution. */
3696 struct expr *set1 = 0;
3698 /* Loops are not possible here. To get a loop we would need two sets
3699 available at the start of the block containing INSN, i.e. we would
3700 need two sets like this available at the start of the block:
3702 (set (reg X) (reg Y))
3703 (set (reg Y) (reg X))
3705 This cannot happen since the set of (reg Y) would have killed the
3706 set of (reg X) making it unavailable at the start of this block. */
3707 while (1)
3709 rtx src;
3710 struct expr *set = lookup_set (regno, NULL_RTX);
3712 /* Find a set that is available at the start of the block
3713 which contains INSN. */
3714 while (set)
3716 if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
3717 break;
3718 set = next_set (regno, set);
3721 /* If no available set was found we've reached the end of the
3722 (possibly empty) copy chain. */
3723 if (set == 0)
3724 break;
3726 if (GET_CODE (set->expr) != SET)
3727 abort ();
3729 src = SET_SRC (set->expr);
3731 /* We know the set is available.
3732 Now check that SRC is ANTLOC (i.e. none of the source operands
3733 have changed since the start of the block).
3735 If the source operand changed, we may still use it for the next
3736 iteration of this loop, but we may not use it for substitutions. */
3737 if (CONSTANT_P (src) || oprs_not_set_p (src, insn))
3738 set1 = set;
3740 /* If the source of the set is anything except a register, then
3741 we have reached the end of the copy chain. */
3742 if (GET_CODE (src) != REG)
3743 break;
3745 /* Follow the copy chain, i.e. start another iteration of the loop
3746 and see if we have an available copy into SRC. */
3747 regno = REGNO (src);
3750 /* SET1 holds the last set that was available and anticipatable at
3751 INSN. */
3752 return set1;
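/* Illustrative example (not part of the pass), with hypothetical register
   numbers.  If these two sets are available at the start of INSN's block

	(set (reg 101) (const_int 5))
	(set (reg 102) (reg 101))

   then find_avail_set (102, insn) follows the chain from reg 102 to
   reg 101 and returns the constant set, so the caller can substitute
   (const_int 5) for uses of (reg 102).  */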
3755 /* Subroutine of cprop_insn that tries to propagate constants into
3756 JUMP_INSNS. INSN must be a conditional jump; COPY is a copy of it
3757 that we can use for substitutions.
3758 REG_USED is the use we will try to replace, SRC is the constant we
3759 will try to substitute for it.
3760 Returns nonzero if a change was made. */
3761 static int
3762 cprop_jump (insn, copy, reg_used, src)
3763 rtx insn, copy;
3764 struct reg_use *reg_used;
3765 rtx src;
3767 rtx set = PATTERN (copy);
3768 rtx temp;
3770 /* Replace the register with the appropriate constant. */
3771 replace_rtx (SET_SRC (set), reg_used->reg_rtx, src);
3773 temp = simplify_ternary_operation (GET_CODE (SET_SRC (set)),
3774 GET_MODE (SET_SRC (set)),
3775 GET_MODE (XEXP (SET_SRC (set), 0)),
3776 XEXP (SET_SRC (set), 0),
3777 XEXP (SET_SRC (set), 1),
3778 XEXP (SET_SRC (set), 2));
3780 /* If no simplification can be made, then try the next
3781 register. */
3782 if (temp == 0)
3783 return 0;
3785 SET_SRC (set) = temp;
3787 /* That may have changed the structure of TEMP, so
3788 force it to be rerecognized if it has not turned
3789 into a nop or unconditional jump. */
3791 INSN_CODE (copy) = -1;
3792 if ((SET_DEST (set) == pc_rtx
3793 && (SET_SRC (set) == pc_rtx
3794 || GET_CODE (SET_SRC (set)) == LABEL_REF))
3795 || recog (PATTERN (copy), copy, NULL) >= 0)
3797 /* This has either become an unconditional jump
3798 or a nop-jump. We'd like to delete nop jumps
3799 here, but doing so confuses gcse. So we just
3800 make the replacement and let later passes
3801 sort things out. */
3802 PATTERN (insn) = set;
3803 INSN_CODE (insn) = -1;
3805 /* One less use of the label this insn used to jump to
3806 if we turned this into a NOP jump. */
3807 if (SET_SRC (set) == pc_rtx && JUMP_LABEL (insn) != 0)
3808 --LABEL_NUSES (JUMP_LABEL (insn));
3810 /* If this has turned into an unconditional jump,
3811 then put a barrier after it so that the unreachable
3812 code will be deleted. */
3813 if (GET_CODE (SET_SRC (set)) == LABEL_REF)
3814 emit_barrier_after (insn);
3816 run_jump_opt_after_gcse = 1;
3818 const_prop_count++;
3819 if (gcse_file != NULL)
3821 int regno = REGNO (reg_used->reg_rtx);
3822 fprintf (gcse_file, "CONST-PROP: Replacing reg %d in insn %d with constant ",
3823 regno, INSN_UID (insn));
3824 print_rtl (gcse_file, src);
3825 fprintf (gcse_file, "\n");
3827 return 1;
3829 return 0;
3832 #ifdef HAVE_cc0
3833 /* Subroutine of cprop_insn that tries to propagate constants into
3834 JUMP_INSNS for machines that have CC0. INSN is a single set that
3835 stores into CC0; the insn following it is a conditional jump.
3836 REG_USED is the use we will try to replace, SRC is the constant we
3837 will try to substitute for it.
3838 Returns nonzero if a change was made. */
3839 static int
3840 cprop_cc0_jump (insn, reg_used, src)
3841 rtx insn;
3842 struct reg_use *reg_used;
3843 rtx src;
3845 rtx jump = NEXT_INSN (insn);
3846 rtx copy = copy_rtx (jump);
3847 rtx set = PATTERN (copy);
3849 /* We need to copy the source of the cc0 setter, as cprop_jump is going to
3850 substitute into it. */
3851 replace_rtx (SET_SRC (set), cc0_rtx, copy_rtx (SET_SRC (PATTERN (insn))));
3852 if (! cprop_jump (jump, copy, reg_used, src))
3853 return 0;
3855 /* If we succeeded, delete the cc0 setter. */
3856 PUT_CODE (insn, NOTE);
3857 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
3858 NOTE_SOURCE_FILE (insn) = 0;
3859 return 1;
3861 #endif
3863 /* Perform constant and copy propagation on INSN.
3864 The result is non-zero if a change was made. */
3866 static int
3867 cprop_insn (insn, alter_jumps)
3868 rtx insn;
3869 int alter_jumps;
3871 struct reg_use *reg_used;
3872 int changed = 0;
3874 /* Only propagate into SETs. Note that a conditional jump is a
3875 SET with pc_rtx as the destination. */
3876 if ((GET_CODE (insn) != INSN
3877 && GET_CODE (insn) != JUMP_INSN)
3878 || GET_CODE (PATTERN (insn)) != SET)
3879 return 0;
3881 reg_use_count = 0;
3882 find_used_regs (PATTERN (insn));
3884 reg_used = &reg_use_table[0];
3885 for ( ; reg_use_count > 0; reg_used++, reg_use_count--)
3887 rtx pat, src;
3888 struct expr *set;
3889 int regno = REGNO (reg_used->reg_rtx);
3891 /* Ignore registers created by GCSE.
3892 We do this because ... */
3893 if (regno >= max_gcse_regno)
3894 continue;
3896 /* If the register has already been set in this block, there's
3897 nothing we can do. */
3898 if (! oprs_not_set_p (reg_used->reg_rtx, insn))
3899 continue;
3901 /* Find an assignment that sets reg_used and is available
3902 at the start of the block. */
3903 set = find_avail_set (regno, insn);
3904 if (! set)
3905 continue;
3907 pat = set->expr;
3908 /* ??? We might be able to handle PARALLELs. Later. */
3909 if (GET_CODE (pat) != SET)
3910 abort ();
3911 src = SET_SRC (pat);
3913 /* Constant propagation. */
3914 if (GET_CODE (src) == CONST_INT || GET_CODE (src) == CONST_DOUBLE
3915 || GET_CODE (src) == SYMBOL_REF)
3917 /* Handle normal insns first. */
3918 if (GET_CODE (insn) == INSN
3919 && try_replace_reg (reg_used->reg_rtx, src, insn))
3921 changed = 1;
3922 const_prop_count++;
3923 if (gcse_file != NULL)
3925 fprintf (gcse_file, "CONST-PROP: Replacing reg %d in insn %d with constant ",
3926 regno, INSN_UID (insn));
3927 print_rtl (gcse_file, src);
3928 fprintf (gcse_file, "\n");
3931 /* The original insn setting reg_used may or may not now be
3932 deletable. We leave the deletion to flow. */
3935 /* Try to propagate a CONST_INT into a conditional jump.
3936 We're pretty specific about what we will handle in this
3937 code; we can extend it as necessary over time.
3939 Right now the insn in question must look like
3940 (set (pc) (if_then_else ...)) */
3941 else if (alter_jumps
3942 && GET_CODE (insn) == JUMP_INSN
3943 && condjump_p (insn)
3944 && ! simplejump_p (insn))
3945 changed |= cprop_jump (insn, copy_rtx (insn), reg_used, src);
3946 #ifdef HAVE_cc0
3947 /* Similar code for machines that use a pair of CC0 setter and
3948 conditional jump insn. */
3949 else if (alter_jumps
3950 && GET_CODE (PATTERN (insn)) == SET
3951 && SET_DEST (PATTERN (insn)) == cc0_rtx
3952 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
3953 && condjump_p (NEXT_INSN (insn))
3954 && ! simplejump_p (NEXT_INSN (insn)))
3955 changed |= cprop_cc0_jump (insn, reg_used, src);
3956 #endif
3958 else if (GET_CODE (src) == REG
3959 && REGNO (src) >= FIRST_PSEUDO_REGISTER
3960 && REGNO (src) != regno)
3962 if (try_replace_reg (reg_used->reg_rtx, src, insn))
3964 changed = 1;
3965 copy_prop_count++;
3966 if (gcse_file != NULL)
3968 fprintf (gcse_file, "COPY-PROP: Replacing reg %d in insn %d with reg %d\n",
3969 regno, INSN_UID (insn), REGNO (src));
3972 /* The original insn setting reg_used may or may not now be
3973 deletable. We leave the deletion to flow. */
3974 /* FIXME: If it turns out that the insn isn't deletable,
3975 then we may have unnecessarily extended register lifetimes
3976 and made things worse. */
3981 return changed;
3984 /* Forward propagate copies.
3985 This includes copies and constants.
3986 Return non-zero if a change was made. */
3988 static int
3989 cprop (alter_jumps)
3990 int alter_jumps;
3992 int bb, changed;
3993 rtx insn;
3995 /* Note we start at block 1. */
3997 changed = 0;
3998 for (bb = 1; bb < n_basic_blocks; bb++)
4000 /* Reset tables used to keep track of what's still valid [since the
4001 start of the block]. */
4002 reset_opr_set_tables ();
4004 for (insn = BLOCK_HEAD (bb);
4005 insn != NULL && insn != NEXT_INSN (BLOCK_END (bb));
4006 insn = NEXT_INSN (insn))
4008 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
4010 changed |= cprop_insn (insn, alter_jumps);
4012 /* Keep track of everything modified by this insn. */
4013 /* ??? Need to be careful w.r.t. mods done to INSN. Don't
4014 call mark_oprs_set if we turned the insn into a NOTE. */
4015 if (GET_CODE (insn) != NOTE)
4016 mark_oprs_set (insn);
4021 if (gcse_file != NULL)
4022 fprintf (gcse_file, "\n");
4024 return changed;
4027 /* Perform one copy/constant propagation pass.
4028 F is the first insn in the function.
4029 PASS is the pass count. */
4031 static int
4032 one_cprop_pass (pass, alter_jumps)
4033 int pass;
4034 int alter_jumps;
4036 int changed = 0;
4038 const_prop_count = 0;
4039 copy_prop_count = 0;
4041 alloc_set_hash_table (max_cuid);
4042 compute_set_hash_table ();
4043 if (gcse_file)
4044 dump_hash_table (gcse_file, "SET", set_hash_table, set_hash_table_size,
4045 n_sets);
4046 if (n_sets > 0)
4048 alloc_cprop_mem (n_basic_blocks, n_sets);
4049 compute_cprop_data ();
4050 changed = cprop (alter_jumps);
4051 free_cprop_mem ();
4053 free_set_hash_table ();
4055 if (gcse_file)
4057 fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, %d const props, %d copy props\n",
4058 current_function_name, pass,
4059 bytes_used, const_prop_count, copy_prop_count);
4060 fprintf (gcse_file, "\n");
4063 return changed;
4066 /* Compute PRE+LCM working variables. */
4068 /* Local properties of expressions. */
4069 /* Nonzero for expressions that are transparent in the block. */
4070 static sbitmap *transp;
4072 /* Nonzero for expressions that are transparent at the end of the block.
4073 This is only zero for expressions killed by an abnormal critical edge
4074 created by a call. */
4075 static sbitmap *transpout;
4077 /* Nonzero for expressions that are computed (available) in the block. */
4078 static sbitmap *comp;
4080 /* Nonzero for expressions that are locally anticipatable in the block. */
4081 static sbitmap *antloc;
4083 /* Nonzero for expressions where this block is an optimal computation
4084 point. */
4085 static sbitmap *pre_optimal;
4087 /* Nonzero for expressions which are redundant in a particular block. */
4088 static sbitmap *pre_redundant;
4090 /* Nonzero for expressions which should be inserted on a specific edge. */
4091 static sbitmap *pre_insert_map;
4093 /* Nonzero for expressions which should be deleted in a specific block. */
4094 static sbitmap *pre_delete_map;
4096 /* Contains the edge_list returned by pre_edge_lcm. */
4097 static struct edge_list *edge_list;
4099 static sbitmap *temp_bitmap;
4101 /* Redundant insns. */
4102 static sbitmap pre_redundant_insns;
4104 /* Allocate vars used for PRE analysis. */
4106 static void
4107 alloc_pre_mem (n_blocks, n_exprs)
4108 int n_blocks, n_exprs;
4110 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
4111 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
4112 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
4113 temp_bitmap = sbitmap_vector_alloc (n_blocks, n_exprs);
4115 pre_optimal = NULL;
4116 pre_redundant = NULL;
4117 pre_insert_map = NULL;
4118 pre_delete_map = NULL;
4119 ae_in = NULL;
4120 ae_out = NULL;
4121 u_bitmap = NULL;
4122 transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
4123 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
4124 /* pre_insert_map and pre_delete_map are allocated later. */
4127 /* Free vars used for PRE analysis. */
4129 static void
4130 free_pre_mem ()
4132 free (transp);
4133 free (comp);
4134 free (antloc);
4135 free (temp_bitmap);
4137 if (pre_optimal)
4138 free (pre_optimal);
4139 if (pre_redundant)
4140 free (pre_redundant);
4141 if (pre_insert_map)
4142 free (pre_insert_map);
4143 if (pre_delete_map)
4144 free (pre_delete_map);
4145 if (transpout)
4146 free (transpout);
4148 if (ae_in)
4149 free (ae_in);
4150 if (ae_out)
4151 free (ae_out);
4152 if (ae_kill)
4153 free (ae_kill);
4154 if (u_bitmap)
4155 free (u_bitmap);
4157 transp = comp = antloc = NULL;
4158 pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
4159 transpout = ae_in = ae_out = ae_kill = NULL;
4160 u_bitmap = NULL;
4164 /* Top level routine to do the dataflow analysis needed by PRE. */
4166 static void
4167 compute_pre_data ()
4169 compute_local_properties (transp, comp, antloc, 0);
4170 compute_transpout ();
4171 sbitmap_vector_zero (ae_kill, n_basic_blocks);
4172 compute_ae_kill (comp, ae_kill);
4173 edge_list = pre_edge_lcm (gcse_file, n_exprs, transp, comp, antloc,
4174 ae_kill, &pre_insert_map, &pre_delete_map);
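/* Roughly, pre_edge_lcm runs the Knoop/Ruthing/Steffen lazy code
   motion dataflow over the edge list: from ANTLOC, TRANSP, COMP and
   AE_KILL it derives PRE_INSERT_MAP (expressions to insert on each
   edge) and PRE_DELETE_MAP (expressions redundant in each block).  */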
4178 /* PRE utilities */
4180 /* Return non-zero if an occurrence of expression EXPR in OCCR_BB would reach
4181 block BB.
4183 VISITED is a pointer to a working buffer for tracking which BB's have
4184 been visited. It is NULL for the top-level call.
4186 CHECK_PRE_COMP controls whether or not we check for a computation of
4187 EXPR in OCCR_BB.
4189 We treat reaching expressions that go through blocks containing the same
4190 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
4191 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
4192 2 as not reaching. The intent is to improve the probability of finding
4193 only one reaching expression and to reduce register lifetimes by picking
4194 the closest such expression. */
4196 static int
4197 pre_expr_reaches_here_p_work (occr_bb, expr, bb, check_pre_comp, visited)
4198 int occr_bb;
4199 struct expr *expr;
4200 int bb;
4201 int check_pre_comp;
4202 char *visited;
4204 edge pred;
4206 for (pred = BASIC_BLOCK (bb)->pred; pred != NULL; pred = pred->pred_next)
4208 int pred_bb = pred->src->index;
4210 if (pred->src == ENTRY_BLOCK_PTR
4211 /* Has this predecessor already been visited? */
4212 || visited[pred_bb])
4214 /* Nothing to do. */
4216 /* Does this predecessor generate this expression? */
4217 else if ((!check_pre_comp && occr_bb == pred_bb)
4218 || TEST_BIT (comp[pred_bb], expr->bitmap_index))
4220 /* Is this the occurrence we're looking for?
4221 Note that there's only one generating occurrence per block
4222 so we just need to check the block number. */
4223 if (occr_bb == pred_bb)
4224 return 1;
4225 visited[pred_bb] = 1;
4227 /* Ignore this predecessor if it kills the expression. */
4228 else if (! TEST_BIT (transp[pred_bb], expr->bitmap_index))
4229 visited[pred_bb] = 1;
4230 /* Neither gen nor kill. */
4231 else
4233 visited[pred_bb] = 1;
4234 if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb,
4235 check_pre_comp, visited))
4236 return 1;
4240 /* All paths have been checked. */
4241 return 0;
4244 /* The wrapper for pre_expr_reaches_here_p_work that ensures that any
4245 memory allocated for that function is freed. */
4247 static int
4248 pre_expr_reaches_here_p (occr_bb, expr, bb, check_pre_comp)
4249 int occr_bb;
4250 struct expr *expr;
4251 int bb;
4252 int check_pre_comp;
4254 int rval;
4255 char * visited = (char *) xcalloc (n_basic_blocks, 1);
4257 rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, check_pre_comp,
4258 visited);
4260 free (visited);
4262 return (rval);
4266 /* Given an expr, generate RTL which we can insert at the end of a BB,
4267 or on an edge. Setting the block number of any generated insns is
4268 left to the caller. */
4270 static rtx
4271 process_insert_insn (expr)
4272 struct expr *expr;
4274 rtx reg = expr->reaching_reg;
4275 rtx pat, copied_expr;
4276 rtx first_new_insn;
4278 start_sequence ();
4279 copied_expr = copy_rtx (expr->expr);
4280 emit_move_insn (reg, copied_expr);
4281 first_new_insn = get_insns ();
4282 pat = gen_sequence ();
4283 end_sequence ();
4285 return pat;
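/* A minimal sketch, with hypothetical operands: if EXPR->expr is
   (plus (reg 51) (reg 52)) and EXPR->reaching_reg is (reg 90), the
   generated pattern is the single insn

     (set (reg 90) (plus (reg 51) (reg 52)))

   though emit_move_insn may produce a multi-insn SEQUENCE when the
   target cannot perform the move in one insn.  */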
4288 /* Add EXPR to the end of basic block BB.
4290 This is used by both PRE and code hoisting.
4292 For PRE, we want to verify that the expr is either transparent
4293 or locally anticipatable in the target block. This check makes
4294 no sense for code hoisting. */
4296 static void
4297 insert_insn_end_bb (expr, bb, pre)
4298 struct expr *expr;
4299 int bb;
4300 int pre;
4302 rtx insn = BLOCK_END (bb);
4303 rtx new_insn;
4304 rtx reg = expr->reaching_reg;
4305 int regno = REGNO (reg);
4306 rtx pat;
4308 pat = process_insert_insn (expr);
4310 /* If the last insn is a jump, insert EXPR in front [taking care to
4311 handle cc0, etc. properly]. */
4313 if (GET_CODE (insn) == JUMP_INSN)
4315 #ifdef HAVE_cc0
4316 rtx note;
4317 #endif
4319 /* If this is a jump table, then we can't insert stuff here. Since
4320 we know the previous real insn must be the tablejump, we insert
4321 the new instruction just before the tablejump. */
4322 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
4323 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
4324 insn = prev_real_insn (insn);
4326 #ifdef HAVE_cc0
4327 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
4328 if cc0 isn't set. */
4329 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
4330 if (note)
4331 insn = XEXP (note, 0);
4332 else
4334 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
4335 if (maybe_cc0_setter
4336 && GET_RTX_CLASS (GET_CODE (maybe_cc0_setter)) == 'i'
4337 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
4338 insn = maybe_cc0_setter;
4340 #endif
4341 /* FIXME: What if something in cc0/jump uses value set in new insn? */
4342 new_insn = emit_insn_before (pat, insn);
4343 if (BLOCK_HEAD (bb) == insn)
4344 BLOCK_HEAD (bb) = new_insn;
4346 /* Likewise if the last insn is a call, as will happen in the presence
4347 of exception handling. */
4348 else if (GET_CODE (insn) == CALL_INSN)
4350 HARD_REG_SET parm_regs;
4351 int nparm_regs;
4352 rtx p;
4354 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
4355 we search backward and place the instructions before the first
4356 parameter is loaded. Do this for everyone for consistency and the
4357 presumption that we'll get better code elsewhere as well. */
4359 /* It should always be the case that we can put these instructions
4360 anywhere in the basic block when performing PRE optimizations.
4361 Check this. */
4362 if (pre
4363 && !TEST_BIT (antloc[bb], expr->bitmap_index)
4364 && !TEST_BIT (transp[bb], expr->bitmap_index))
4365 abort ();
4367 /* Since different machines initialize their parameter registers
4368 in different orders, assume nothing. Collect the set of all
4369 parameter registers. */
4370 CLEAR_HARD_REG_SET (parm_regs);
4371 nparm_regs = 0;
4372 for (p = CALL_INSN_FUNCTION_USAGE (insn); p ; p = XEXP (p, 1))
4373 if (GET_CODE (XEXP (p, 0)) == USE
4374 && GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
4376 int regno = REGNO (XEXP (XEXP (p, 0), 0));
4377 if (regno >= FIRST_PSEUDO_REGISTER)
4378 abort ();
4379 SET_HARD_REG_BIT (parm_regs, regno);
4380 nparm_regs++;
4384 /* Search backward for the first insn that sets a register in this set. */
4384 while (nparm_regs && BLOCK_HEAD (bb) != insn)
4386 insn = PREV_INSN (insn);
4387 p = single_set (insn);
4388 if (p && GET_CODE (SET_DEST (p)) == REG
4389 && REGNO (SET_DEST (p)) < FIRST_PSEUDO_REGISTER
4390 && TEST_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p))))
4392 CLEAR_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p)));
4393 nparm_regs--;
4397 /* If we found all the parameter loads, then we want to insert
4398 before the first parameter load.
4400 If we did not find all the parameter loads, then we might have
4401 stopped on the head of the block, which could be a CODE_LABEL.
4402 If we inserted before the CODE_LABEL, then we would be putting
4403 the insn in the wrong basic block. In that case, put the insn
4404 after the CODE_LABEL.
4406 ?!? Do we need to account for NOTE_INSN_BASIC_BLOCK here? */
4407 if (GET_CODE (insn) != CODE_LABEL)
4409 new_insn = emit_insn_before (pat, insn);
4410 if (BLOCK_HEAD (bb) == insn)
4411 BLOCK_HEAD (bb) = new_insn;
4413 else
4415 new_insn = emit_insn_after (pat, insn);
4418 else
4420 new_insn = emit_insn_after (pat, insn);
4421 BLOCK_END (bb) = new_insn;
4424 /* Keep block number table up to date.
4425 Note, PAT could be a multiple insn sequence; we have to make
4426 sure that each insn in the sequence is handled. */
4427 if (GET_CODE (pat) == SEQUENCE)
4429 int i;
4431 for (i = 0; i < XVECLEN (pat, 0); i++)
4433 rtx insn = XVECEXP (pat, 0, i);
4434 set_block_num (insn, bb);
4435 if (GET_RTX_CLASS (GET_CODE (insn)) == 'i')
4436 add_label_notes (PATTERN (insn), new_insn);
4437 note_stores (PATTERN (insn), record_set_info, insn);
4440 else
4442 add_label_notes (SET_SRC (pat), new_insn);
4443 set_block_num (new_insn, bb);
4444 /* Keep register set table up to date. */
4445 record_one_set (regno, new_insn);
4448 gcse_create_count++;
4450 if (gcse_file)
4452 fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, copying expression %d to reg %d\n",
4453 bb, INSN_UID (new_insn), expr->bitmap_index, regno);
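/* For example (register numbers are hypothetical), if the block ends in

     (insn 30 (set (reg 5) (reg 60)))   ;; loads a parameter register
     (call_insn 31 ...)

   the CALL_INSN case above backs up past insn 30 so that the new insn
   is emitted before the first parameter load, not between it and the
   call.  */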
4457 /* Insert partially redundant expressions on edges in the CFG to make
4458 the expressions fully redundant. */
4460 static int
4461 pre_edge_insert (edge_list, index_map)
4462 struct edge_list *edge_list;
4463 struct expr **index_map;
4465 int e, i, num_edges, set_size, did_insert = 0;
4466 sbitmap *inserted;
4468 /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
4469 if it reaches any of the deleted expressions. */
4471 set_size = pre_insert_map[0]->size;
4472 num_edges = NUM_EDGES (edge_list);
4473 inserted = sbitmap_vector_alloc (num_edges, n_exprs);
4474 sbitmap_vector_zero (inserted, num_edges);
4476 for (e = 0; e < num_edges; e++)
4478 int indx;
4479 basic_block pred = INDEX_EDGE_PRED_BB (edge_list, e);
4480 int bb = pred->index;
4482 for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
4484 SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];
4485 int j;
4487 for (j = indx; insert && j < n_exprs; j++, insert >>= 1)
4489 if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
4491 struct expr *expr = index_map[j];
4492 struct occr *occr;
4494 /* Now look at each deleted occurrence of this expression. */
4495 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4497 if (! occr->deleted_p)
4498 continue;
4500 /* Insert this expression on this edge if it would
4501 reach the deleted occurrence in BB. */
4502 if (!TEST_BIT (inserted[e], j)
4503 && (bb == ENTRY_BLOCK
4504 || pre_expr_reaches_here_p (bb, expr,
4505 BLOCK_NUM (occr->insn), 0)))
4507 rtx insn;
4508 edge eg = INDEX_EDGE (edge_list, e);
4509 /* We can't insert anything on an abnormal
4510 and critical edge, so we insert the
4511 insn at the end of the previous block. There
4512 are several alternatives detailed in
4513 Morgan's book, p. 277 (sec. 10.5), for handling
4514 this situation. This one is easiest for now. */
4516 if ((eg->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
4518 insert_insn_end_bb (index_map[j], bb, 0);
4520 else
4522 insn = process_insert_insn (index_map[j]);
4523 insert_insn_on_edge (insn, eg);
4525 if (gcse_file)
4527 fprintf (gcse_file,
4528 "PRE/HOIST: edge (%d,%d), copy expression %d\n",
4530 INDEX_EDGE_SUCC_BB (edge_list, e)->index, expr->bitmap_index);
4532 SET_BIT (inserted[e], j);
4533 did_insert = 1;
4534 gcse_create_count++;
4542 /* Clean up. */
4543 free (inserted);
4545 return did_insert;
4548 /* Copy the result of INSN to EXPR's reaching_reg.
4549 INDX is the expression number. */
4551 static void
4552 pre_insert_copy_insn (expr, insn)
4553 struct expr *expr;
4554 rtx insn;
4556 rtx reg = expr->reaching_reg;
4557 int regno = REGNO (reg);
4558 int indx = expr->bitmap_index;
4559 rtx set = single_set (insn);
4560 rtx new_insn;
4561 int bb = BLOCK_NUM (insn);
4563 if (!set)
4564 abort ();
4565 new_insn = emit_insn_after (gen_rtx_SET (VOIDmode, reg, SET_DEST (set)),
4566 insn);
4567 /* Keep block number table up to date. */
4568 set_block_num (new_insn, bb);
4569 /* Keep register set table up to date. */
4570 record_one_set (regno, new_insn);
4571 if (insn == BLOCK_END (bb))
4572 BLOCK_END (bb) = new_insn;
4574 gcse_create_count++;
4576 if (gcse_file)
4577 fprintf (gcse_file,
4578 "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
4579 BLOCK_NUM (insn), INSN_UID (new_insn), indx,
4580 INSN_UID (insn), regno);
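/* Sketch, with hypothetical register numbers: for an available
   occurrence

     (insn 10 (set (reg 50) (plus (reg 51) (reg 52))))

   with EXPR->reaching_reg (reg 90), this emits

     (set (reg 90) (reg 50))

   immediately after insn 10, making the computed value live in the
   pseudo that the redundant occurrences now read from.  */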
4583 /* Copy available expressions that reach the redundant expression
4584 to `reaching_reg'. */
4586 static void
4587 pre_insert_copies ()
4589 int i;
4591 /* For each available expression in the table, copy the result to
4592 `reaching_reg' if the expression reaches a deleted one.
4594 ??? The current algorithm is rather brute force.
4595 Need to do some profiling. */
4597 for (i = 0; i < expr_hash_table_size; i++)
4599 struct expr *expr;
4601 for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
4603 struct occr *occr;
4605 /* If the basic block isn't reachable, PPOUT will be TRUE.
4606 However, we don't want to insert a copy here because the
4607 expression may not really be redundant. So only insert
4608 an insn if the expression was deleted.
4609 This test also avoids further processing if the expression
4610 wasn't deleted anywhere. */
4611 if (expr->reaching_reg == NULL)
4612 continue;
4614 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4616 struct occr *avail;
4618 if (! occr->deleted_p)
4619 continue;
4621 for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
4623 rtx insn = avail->insn;
4625 /* No need to handle this one if handled already. */
4626 if (avail->copied_p)
4627 continue;
4628 /* Don't handle this one if it's a redundant one. */
4629 if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
4630 continue;
4631 /* Or if the expression doesn't reach the deleted one. */
4632 if (! pre_expr_reaches_here_p (BLOCK_NUM (avail->insn), expr,
4633 BLOCK_NUM (occr->insn), 1))
4634 continue;
4636 /* Copy the result of avail to reaching_reg. */
4637 pre_insert_copy_insn (expr, insn);
4638 avail->copied_p = 1;
4645 /* Delete redundant computations.
4646 Deletion is done by changing the insn to copy the `reaching_reg' of
4647 the expression into the result of the SET. It is left to later passes
4648 (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.
4650 Returns non-zero if a change is made. */
4652 static int
4653 pre_delete ()
4655 int i, bb, changed;
4657 /* Compute the expressions which are redundant and need to be replaced by
4658 copies from the reaching reg to the target reg. */
4659 for (bb = 0; bb < n_basic_blocks; bb++)
4660 sbitmap_copy (temp_bitmap[bb], pre_delete_map[bb]);
4662 changed = 0;
4663 for (i = 0; i < expr_hash_table_size; i++)
4665 struct expr *expr;
4667 for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
4669 struct occr *occr;
4670 int indx = expr->bitmap_index;
4672 /* We only need to search antic_occr since we require
4673 ANTLOC != 0. */
4675 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4677 rtx insn = occr->insn;
4678 rtx set;
4679 int bb = BLOCK_NUM (insn);
4681 if (TEST_BIT (temp_bitmap[bb], indx))
4683 set = single_set (insn);
4684 if (! set)
4685 abort ();
4687 /* Create a pseudo-reg to store the result of reaching
4688 expressions into. Get the mode for the new pseudo
4689 from the mode of the original destination pseudo. */
4690 if (expr->reaching_reg == NULL)
4691 expr->reaching_reg
4692 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
4694 /* In theory this should never fail since we're creating
4695 a reg->reg copy.
4697 However, on the x86 some of the movXX patterns actually
4698 contain clobbers of scratch regs. This may cause the
4699 insn created by validate_change to not match any pattern
4700 and thus cause validate_change to fail. */
4701 if (validate_change (insn, &SET_SRC (set),
4702 expr->reaching_reg, 0))
4704 occr->deleted_p = 1;
4705 SET_BIT (pre_redundant_insns, INSN_CUID (insn));
4706 changed = 1;
4707 gcse_subst_count++;
4710 if (gcse_file)
4712 fprintf (gcse_file,
4713 "PRE: redundant insn %d (expression %d) in bb %d, reaching reg is %d\n",
4714 INSN_UID (insn), indx, bb, REGNO (expr->reaching_reg));
4721 return changed;
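/* Sketch, with hypothetical register numbers: a redundant occurrence

     (insn 20 (set (reg 60) (plus (reg 51) (reg 52))))

   is not physically deleted; validate_change turns it into the copy

     (insn 20 (set (reg 60) (reg 90)))

   where (reg 90) is EXPR->reaching_reg, leaving later passes to
   propagate or eliminate the copy.  */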
4724 /* Perform GCSE optimizations using PRE.
4725 This is called by one_pre_gcse_pass after all the dataflow analysis
4726 has been done.
4728 This is based on the original Morel-Renvoise paper, Fred Chow's thesis,
4729 and lazy code motion from Knoop, Ruthing and Steffen as described in
4730 Advanced Compiler Design and Implementation.
4732 ??? A new pseudo reg is created to hold the reaching expression.
4733 The nice thing about the classical approach is that it would try to
4734 use an existing reg. If the register can't be adequately optimized
4735 [i.e. we introduce reload problems], one could add a pass here to
4736 propagate the new register through the block.
4738 ??? We don't handle single sets in PARALLELs because we're [currently]
4739 not able to copy the rest of the parallel when we insert copies to create
4740 full redundancies from partial redundancies. However, there's no reason
4741 why we can't handle PARALLELs in the cases where there are no partial
4742 redundancies. */
4744 static int
4745 pre_gcse ()
4747 int i, did_insert;
4748 int changed;
4749 struct expr **index_map;
4751 /* Compute a mapping from expression number (`bitmap_index') to
4752 hash table entry. */
4754 index_map = (struct expr **) xcalloc (n_exprs, sizeof (struct expr *));
4755 for (i = 0; i < expr_hash_table_size; i++)
4757 struct expr *expr;
4759 for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
4760 index_map[expr->bitmap_index] = expr;
4763 /* Reset bitmap used to track which insns are redundant. */
4764 pre_redundant_insns = sbitmap_alloc (max_cuid);
4765 sbitmap_zero (pre_redundant_insns);
4767 /* Delete the redundant insns first so that
4768 - we know what register to use for the new insns and for the other
4769 ones with reaching expressions
4770 - we know which insns are redundant when we go to create copies */
4771 changed = pre_delete ();
4773 did_insert = pre_edge_insert (edge_list, index_map);
4774 /* In other places with reaching expressions, copy the expression to the
4775 specially allocated pseudo-reg that reaches the redundant expr. */
4776 pre_insert_copies ();
4777 if (did_insert)
4779 commit_edge_insertions ();
4780 changed = 1;
4783 free (index_map);
4784 free (pre_redundant_insns);
4786 return changed;
4789 /* Top level routine to perform one PRE GCSE pass.
4791 Return non-zero if a change was made. */
4793 static int
4794 one_pre_gcse_pass (pass)
4795 int pass;
4797 int changed = 0;
4799 gcse_subst_count = 0;
4800 gcse_create_count = 0;
4802 alloc_expr_hash_table (max_cuid);
4803 add_noreturn_fake_exit_edges ();
4804 compute_expr_hash_table ();
4805 if (gcse_file)
4806 dump_hash_table (gcse_file, "Expression", expr_hash_table,
4807 expr_hash_table_size, n_exprs);
4808 if (n_exprs > 0)
4810 alloc_pre_mem (n_basic_blocks, n_exprs);
4811 compute_pre_data ();
4812 changed |= pre_gcse ();
4813 free_edge_list (edge_list);
4814 free_pre_mem ();
4816 remove_fake_edges ();
4817 free_expr_hash_table ();
4819 if (gcse_file)
4821 fprintf (gcse_file, "\n");
4822 fprintf (gcse_file, "PRE GCSE of %s, pass %d: %d bytes needed, %d substs, %d insns created\n",
4823 current_function_name, pass,
4824 bytes_used, gcse_subst_count, gcse_create_count);
4827 return changed;
4830 /* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
4831 We have to add REG_LABEL notes, because the following loop optimization
4832 pass requires them. */
4834 /* ??? This is very similar to the loop.c add_label_notes function. We
4835 could probably share code here. */
4837 /* ??? If there was a jump optimization pass after gcse and before loop,
4838 then we would not need to do this here, because jump would add the
4839 necessary REG_LABEL notes. */
4841 static void
4842 add_label_notes (x, insn)
4843 rtx x;
4844 rtx insn;
4846 enum rtx_code code = GET_CODE (x);
4847 int i, j;
4848 const char *fmt;
4850 if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
4852 /* This code used to ignore labels that referred to dispatch tables to
4853 avoid flow generating (slightly) worse code.
4855 We no longer ignore such label references (see LABEL_REF handling in
4856 mark_jump_label for additional information). */
4857 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_LABEL, XEXP (x, 0),
4858 REG_NOTES (insn));
4859 return;
4862 fmt = GET_RTX_FORMAT (code);
4863 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4865 if (fmt[i] == 'e')
4866 add_label_notes (XEXP (x, i), insn);
4867 else if (fmt[i] == 'E')
4868 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4869 add_label_notes (XVECEXP (x, i, j), insn);
4873 /* Compute transparent outgoing information for each block.
4875 An expression is transparent to an edge unless it is killed by
4876 the edge itself. This can only happen with abnormal control flow,
4877 when the edge is traversed through a call. This happens with
4878 non-local labels and exceptions.
4880 This would not be necessary if we split the edge. While this is
4881 normally impossible for abnormal critical edges, with some effort
4882 it should be possible with exception handling, since we still have
4883 control over which handler should be invoked. But due to increased
4884 EH table sizes, this may not be worthwhile. */
4886 static void
4887 compute_transpout ()
4889 int bb;
4891 sbitmap_vector_ones (transpout, n_basic_blocks);
4893 for (bb = 0; bb < n_basic_blocks; ++bb)
4895 int i;
4897 /* Note that flow inserted a nop at the end of basic blocks that
4898 end in call instructions for reasons other than abnormal
4899 control flow. */
4900 if (GET_CODE (BLOCK_END (bb)) != CALL_INSN)
4901 continue;
4903 for (i = 0; i < expr_hash_table_size; i++)
4905 struct expr *expr;
4906 for (expr = expr_hash_table[i]; expr ; expr = expr->next_same_hash)
4907 if (GET_CODE (expr->expr) == MEM)
4909 rtx addr = XEXP (expr->expr, 0);
4911 if (GET_CODE (addr) == SYMBOL_REF
4912 && CONSTANT_POOL_ADDRESS_P (addr))
4913 continue;
4915 /* ??? Optimally, we would use interprocedural alias
4916 analysis to determine if this mem is actually killed
4917 by this call. */
4918 RESET_BIT (transpout[bb], expr->bitmap_index);
4924 /* Removal of useless null pointer checks */
4926 /* These need to be file static for communication between
4927 invalidate_nonnull_info and delete_null_pointer_checks. */
4928 static int current_block;
4929 static sbitmap *nonnull_local;
4930 static sbitmap *nonnull_killed;
4932 /* Called via note_stores. X is set by SETTER. If X is a register we must
4933 invalidate nonnull_local and set nonnull_killed.
4935 We ignore hard registers. */
4936 static void
4937 invalidate_nonnull_info (x, setter, data)
4938 rtx x;
4939 rtx setter ATTRIBUTE_UNUSED;
4940 void *data ATTRIBUTE_UNUSED;
4942 int offset, regno;
4944 offset = 0;
4945 while (GET_CODE (x) == SUBREG)
4946 x = SUBREG_REG (x);
4948 /* Ignore anything that is not a register or is a hard register. */
4949 if (GET_CODE (x) != REG
4950 || REGNO (x) < FIRST_PSEUDO_REGISTER)
4951 return;
4953 regno = REGNO (x);
4955 RESET_BIT (nonnull_local[current_block], regno);
4956 SET_BIT (nonnull_killed[current_block], regno);
4960 /* Find EQ/NE comparisons against zero which can be (indirectly) evaluated
4961 at compile time.
4963 This is conceptually similar to global constant/copy propagation and
4964 classic global CSE (it even uses the same dataflow equations as cprop).
4966 If a register is used as a memory address with the form (mem (reg)), then we
4967 know that REG can not be zero at that point in the program. Any instruction
4968 which sets REG "kills" this property.
4970 So, if every path leading to a conditional branch has an available memory
4971 reference of that form, then we know the register can not have the value
4972 zero at the conditional branch.
4974 So we merely need to compute the local properties and propagate that data
4975 around the cfg, then optimize where possible.
4977 We run this pass twice: once before CSE, then again after CSE. This
4978 has proven to be the most profitable approach. It is rare for new
4979 optimization opportunities of this nature to appear after the first CSE
4980 pass.
4982 This could probably be integrated with global cprop with a little work. */
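/* For example, given source like

     *p = 0;
     if (p == 0)
       abort ();

   the dereference generates (mem (reg P)), so P is known nonnull on
   every path reaching the test; the EQ branch can never be taken and
   the check is removed, provided nothing assigns to P in between.  */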
4984 void
4985 delete_null_pointer_checks (f)
4986 rtx f;
4988 int_list_ptr *s_preds, *s_succs;
4989 int *num_preds, *num_succs;
4990 int changed, bb;
4991 sbitmap *nonnull_avin, *nonnull_avout;
4993 /* First break the program into basic blocks. */
4994 find_basic_blocks (f, max_reg_num (), NULL, 1);
4996 /* If we have only a single block, then there's nothing to do. */
4997 if (n_basic_blocks <= 1)
4999 /* Free storage allocated by find_basic_blocks. */
5000 free_basic_block_vars (0);
5001 return;
5004 /* Trying to perform global optimizations on flow graphs which have
5005 a high connectivity will take a long time and is unlikely to be
5006 particularly useful.
5008 In normal circumstances a cfg should have about twice as many edges
5009 as blocks. But we do not want to punish small functions which have
5010 a couple switch statements. So we require a relatively large number
5011 of basic blocks and the ratio of edges to blocks to be high. */
5012 if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
5014 /* Free storage allocated by find_basic_blocks. */
5015 free_basic_block_vars (0);
5016 return;
5019 /* We need predecessor/successor lists as well as pred/succ counts for
5020 each basic block. */
5021 s_preds = (int_list_ptr *) gmalloc (n_basic_blocks * sizeof (int_list_ptr));
5022 s_succs = (int_list_ptr *) gmalloc (n_basic_blocks * sizeof (int_list_ptr));
5023 num_preds = (int *) gmalloc (n_basic_blocks * sizeof (int));
5024 num_succs = (int *) gmalloc (n_basic_blocks * sizeof (int));
5025 compute_preds_succs (s_preds, s_succs, num_preds, num_succs);
5027 /* Allocate bitmaps to hold local and global properties. */
5028 nonnull_local = sbitmap_vector_alloc (n_basic_blocks, max_reg_num ());
5029 nonnull_killed = sbitmap_vector_alloc (n_basic_blocks, max_reg_num ());
5030 nonnull_avin = sbitmap_vector_alloc (n_basic_blocks, max_reg_num ());
5031 nonnull_avout = sbitmap_vector_alloc (n_basic_blocks, max_reg_num ());
5033 /* Compute local properties, nonnull and killed. A register will have
5034 the nonnull property if at the end of the current block its value is
5035 known to be nonnull. The killed property indicates that somewhere in
5036 the block any information we had about the register is killed.
5038 Note that a register can have both properties in a single block. That
5039 indicates that it's killed, then later in the block a new value is
5040 computed. */
5041 sbitmap_vector_zero (nonnull_local, n_basic_blocks);
5042 sbitmap_vector_zero (nonnull_killed, n_basic_blocks);
5043 for (current_block = 0; current_block < n_basic_blocks; current_block++)
5045 rtx insn, stop_insn;
5047 /* Scan each insn in the basic block looking for memory references and
5048 register sets. */
5049 stop_insn = NEXT_INSN (BLOCK_END (current_block));
5050 for (insn = BLOCK_HEAD (current_block);
5051 insn != stop_insn;
5052 insn = NEXT_INSN (insn))
5054 rtx set;
5056 /* Ignore anything that is not a normal insn. */
5057 if (GET_RTX_CLASS (GET_CODE (insn)) != 'i')
5058 continue;
5060 /* Basically ignore anything that is not a simple SET. We do have
5061 to make sure to invalidate nonnull_local and set nonnull_killed
5062 for such insns though. */
5063 set = single_set (insn);
5064 if (!set)
5066 note_stores (PATTERN (insn), invalidate_nonnull_info, NULL);
5067 continue;
5070 /* See if we've got a usable memory load. We handle it first
5071 in case it uses its address register as a dest (which kills
5072 the nonnull property). */
5073 if (GET_CODE (SET_SRC (set)) == MEM
5074 && GET_CODE (XEXP (SET_SRC (set), 0)) == REG
5075 && REGNO (XEXP (SET_SRC (set), 0)) >= FIRST_PSEUDO_REGISTER)
5076 SET_BIT (nonnull_local[current_block],
5077 REGNO (XEXP (SET_SRC (set), 0)));
5079 /* Now invalidate stuff clobbered by this insn. */
5080 note_stores (PATTERN (insn), invalidate_nonnull_info, NULL);
5082 /* And handle stores; we do these last since any sets in INSN can
5083 not kill the nonnull property if it is derived from a MEM
5084 appearing in a SET_DEST. */
5085 if (GET_CODE (SET_DEST (set)) == MEM
5086 && GET_CODE (XEXP (SET_DEST (set), 0)) == REG
5087 && REGNO (XEXP (SET_DEST (set), 0)) >= FIRST_PSEUDO_REGISTER)
5088 SET_BIT (nonnull_local[current_block],
5089 REGNO (XEXP (SET_DEST (set), 0)));
5093 /* Now compute global properties based on the local properties. This
5094 is a classic global availability algorithm. */
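/* The fixed point computed below is the standard availability system:

     AVIN(b)  = intersection over predecessors p of AVOUT(p)
     AVOUT(b) = LOCAL(b) | (AVIN(b) & ~KILLED(b))

   with AVIN of block 0 forced to the empty set.  */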
5095 sbitmap_zero (nonnull_avin[0]);
5096 sbitmap_vector_ones (nonnull_avout, n_basic_blocks);
5097 changed = 1;
5098 while (changed)
5100 changed = 0;
5102 for (bb = 0; bb < n_basic_blocks; bb++)
5104 if (bb != 0)
5105 sbitmap_intersect_of_predecessors (nonnull_avin[bb],
5106 nonnull_avout, bb, s_preds);
5108 changed |= sbitmap_union_of_diff (nonnull_avout[bb],
5109 nonnull_local[bb],
5110 nonnull_avin[bb],
5111 nonnull_killed[bb]);
5115 /* Now look at each bb and see if it ends with a compare of a value
5116 against zero. */
5117 for (bb = 0; bb < n_basic_blocks; bb++)
5119 rtx last_insn = BLOCK_END (bb);
5120 rtx condition, earliest, reg;
5121 int compare_and_branch;
5123 /* We only want conditional branches. */
5124 if (GET_CODE (last_insn) != JUMP_INSN
5125 || !condjump_p (last_insn)
5126 || simplejump_p (last_insn))
5127 continue;
5129 /* LAST_INSN is a conditional jump. Get its condition. */
5130 condition = get_condition (last_insn, &earliest);
5132 /* If we were unable to get the condition, or it is not an equality
5133 comparison against zero, then there's nothing we can do. */
5134 if (!condition
5135 || (GET_CODE (condition) != NE && GET_CODE (condition) != EQ)
5136 || GET_CODE (XEXP (condition, 1)) != CONST_INT
5137 || XEXP (condition, 1) != CONST0_RTX (GET_MODE (XEXP (condition, 0))))
5138 continue;
5140 /* We must be checking a register against zero. */
5141 reg = XEXP (condition, 0);
5142 if (GET_CODE (reg) != REG)
5143 continue;
5145 /* Is the register known to have a nonzero value? */
5146 if (!TEST_BIT (nonnull_avout[bb], REGNO (reg)))
5147 continue;
5149 /* Try to compute whether the compare/branch at the end of the block
5150 is one or two instructions. */
5151 if (earliest == last_insn)
5152 compare_and_branch = 1;
5153 else if (earliest == prev_nonnote_insn (last_insn))
5154 compare_and_branch = 2;
5155 else
5156 continue;
5158 /* We know the register in this comparison is nonnull at exit from
5159 this block. We can optimize this comparison. */
5160 if (GET_CODE (condition) == NE)
5162 rtx new_jump;
5164 new_jump = emit_jump_insn_before (gen_jump (JUMP_LABEL (last_insn)),
5165 last_insn);
5166 JUMP_LABEL (new_jump) = JUMP_LABEL (last_insn);
5167 LABEL_NUSES (JUMP_LABEL (new_jump))++;
5168 emit_barrier_after (new_jump);
5170 delete_insn (last_insn);
5171 if (compare_and_branch == 2)
5172 delete_insn (earliest);
5175 /* Free storage allocated by find_basic_blocks. */
5176 free_basic_block_vars (0);
5178 /* Free our local predecessor/successor lists. */
5179 free (s_preds);
5180 free (s_succs);
5181 free (num_preds);
5182 free (num_succs);
5184 /* Free bitmaps. */
5185 free (nonnull_local);
5186 free (nonnull_killed);
5187 free (nonnull_avin);
5188 free (nonnull_avout);
5191 /* Code Hoisting variables and subroutines. */
5193 /* Very busy expressions. */
5194 static sbitmap *hoist_vbein;
5195 static sbitmap *hoist_vbeout;
5197 /* Hoistable expressions. */
5198 static sbitmap *hoist_exprs;
5200 /* Dominator bitmaps. */
5201 static sbitmap *dominators;
5202 static sbitmap *post_dominators;
5204 /* ??? We could compute post dominators and run this algorithm in
5205 reverse to perform tail merging; doing so would probably be
5206 more effective than the tail merging code in jump.c.
5208 It's unclear if tail merging could be run in parallel with
5209 code hoisting. It would be nice. */
5211 /* Allocate vars used for code hoisting analysis. */
5213 static void
5214 alloc_code_hoist_mem (n_blocks, n_exprs)
5215 int n_blocks, n_exprs;
5217 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
5218 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
5219 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
5221 hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
5222 hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
5223 hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
5224 transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
5226 dominators = sbitmap_vector_alloc (n_blocks, n_blocks);
5227 post_dominators = sbitmap_vector_alloc (n_blocks, n_blocks);
5230 /* Free vars used for code hoisting analysis. */
5232 static void
5233 free_code_hoist_mem ()
5235 free (antloc);
5236 free (transp);
5237 free (comp);
5239 free (hoist_vbein);
5240 free (hoist_vbeout);
5241 free (hoist_exprs);
5242 free (transpout);
5244 free (dominators);
5245 free (post_dominators);
5248 /* Compute the very busy expressions at entry/exit from each block.
5250 An expression is very busy if all paths from a given point
5251 compute the expression. */
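/* The equations iterated below form the classic backward system:

     VBEIN(b)  = ANTLOC(b) | (VBEOUT(b) & TRANSP(b))
     VBEOUT(b) = intersection over successors s of VBEIN(s)

   with VBEOUT of the last block left empty.  */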
5253 static void
5254 compute_code_hoist_vbeinout ()
5256 int bb, changed, passes;
5258 sbitmap_vector_zero (hoist_vbeout, n_basic_blocks);
5259 sbitmap_vector_zero (hoist_vbein, n_basic_blocks);
5261 passes = 0;
5262 changed = 1;
5263 while (changed)
5265 changed = 0;
5266 /* We scan the blocks in reverse order to speed up
5267 the convergence. */
5268 for (bb = n_basic_blocks - 1; bb >= 0; bb--)
5270 changed |= sbitmap_a_or_b_and_c (hoist_vbein[bb], antloc[bb],
5271 hoist_vbeout[bb], transp[bb]);
5272 if (bb != n_basic_blocks - 1)
5273 sbitmap_intersection_of_succs (hoist_vbeout[bb], hoist_vbein, bb);
5275 passes++;
5278 if (gcse_file)
5279 fprintf (gcse_file, "hoisting vbeinout computation: %d passes\n", passes);
5282 /* Top level routine to do the dataflow analysis needed by code hoisting. */
5284 static void
5285 compute_code_hoist_data ()
5287 compute_local_properties (transp, comp, antloc, 0);
5288 compute_transpout ();
5289 compute_code_hoist_vbeinout ();
5290 compute_flow_dominators (dominators, post_dominators);
5291 if (gcse_file)
5292 fprintf (gcse_file, "\n");
5295 /* Determine if the expression identified by EXPR_INDEX would
5296 reach BB unimpaired if it was placed at the end of EXPR_BB.
5298 It's unclear exactly what Muchnick meant by "unimpaired". It seems
5299 to me that the expression must either be computed or transparent in
5300 *every* block in the path(s) from EXPR_BB to BB. Any other definition
5301 would allow the expression to be hoisted out of loops, even if
5302 the expression wasn't a loop invariant.
5304 Contrast this to reachability for PRE where an expression is
5305 considered reachable if *any* path reaches instead of *all*
5306 paths. */
5308 static int
5309 hoist_expr_reaches_here_p (expr_bb, expr_index, bb, visited)
5310 int expr_bb;
5311 int expr_index;
5312 int bb;
5313 char *visited;
5315 edge pred;
5316 int visited_allocated_locally = 0;
5319 if (visited == NULL)
5321 visited_allocated_locally = 1;
5322 visited = (char *) xcalloc (n_basic_blocks, 1);
5325 visited[expr_bb] = 1;
5326 for (pred = BASIC_BLOCK (bb)->pred; pred != NULL; pred = pred->pred_next)
5328 int pred_bb = pred->src->index;
5330 if (pred->src == ENTRY_BLOCK_PTR)
5331 break;
5332 else if (visited[pred_bb])
5333 continue;
5334 /* Does this predecessor generate this expression? */
5335 else if (TEST_BIT (comp[pred_bb], expr_index))
5336 break;
5337 else if (! TEST_BIT (transp[pred_bb], expr_index))
5338 break;
5339 /* Not killed. */
5340 else
5342 visited[pred_bb] = 1;
5343 if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
5344 pred_bb, visited))
5345 break;
5348 if (visited_allocated_locally)
5349 free (visited);
5350 return (pred == NULL);
5353 /* Actually perform code hoisting. */
5354 static void
5355 hoist_code ()
5357 int bb, dominated, i;
5358 struct expr **index_map;
5360 sbitmap_vector_zero (hoist_exprs, n_basic_blocks);
5362 /* Compute a mapping from expression number (`bitmap_index') to
5363 hash table entry. */
5365 index_map = (struct expr **) xcalloc (n_exprs, sizeof (struct expr *));
5366 for (i = 0; i < expr_hash_table_size; i++)
5368 struct expr *expr;
5370 for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
5371 index_map[expr->bitmap_index] = expr;
5374 /* Walk over each basic block looking for potentially hoistable
5375 expressions; nothing gets hoisted from the entry block. */
5376 for (bb = 0; bb < n_basic_blocks; bb++)
5378 int found = 0;
5379 int insn_inserted_p;
5381 /* Examine each expression that is very busy at the exit of this
5382 block. These are the potentially hoistable expressions. */
5383 for (i = 0; i < hoist_vbeout[bb]->n_bits; i++)
5385 int hoistable = 0;
5386 if (TEST_BIT (hoist_vbeout[bb], i)
5387 && TEST_BIT (transpout[bb], i))
5389 /* We've found a potentially hoistable expression, now
5390 we look at every block BB dominates to see if it
5391 computes the expression. */
5392 for (dominated = 0; dominated < n_basic_blocks; dominated++)
5394 /* Ignore self dominance. */
5395 if (bb == dominated
5396 || ! TEST_BIT (dominators[dominated], bb))
5397 continue;
5399 /* We've found a dominated block, now see if it computes
5400 the busy expression and whether or not moving that
5401 expression to the "beginning" of that block is safe. */
5402 if (!TEST_BIT (antloc[dominated], i))
5403 continue;
5405 /* Note if the expression would reach the dominated block
5406 unimpaired if it was placed at the end of BB.
5408 Keep track of how many times this expression is hoistable
5409 from a dominated block into BB. */
5410 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
5411 hoistable++;
5414 /* If we found more than one hoistable occurrence of this
5415 expression, then note it in the bitmap of expressions to
5416 hoist. It makes no sense to hoist things which are computed
5417 in only one BB, and doing so tends to pessimize register
5418 allocation. One could increase this value to try harder
5419 to avoid any possible code expansion due to register
5420 allocation issues; however experiments have shown that
5421 the vast majority of hoistable expressions are only movable
5422 from two successors, so raising this threshold is likely
5423 to nullify any benefit we get from code hoisting. */
5424 if (hoistable > 1)
5426 SET_BIT (hoist_exprs[bb], i);
5427 found = 1;
5432 /* If we found nothing to hoist, then quit now. */
5433 if (! found)
5434 continue;
5436 /* Loop over all the hoistable expressions. */
5437 for (i = 0; i < hoist_exprs[bb]->n_bits; i++)
5439 /* We want to insert the expression into BB only once, so
5440 note when we've inserted it. */
5441 insn_inserted_p = 0;
5443 /* These tests should be the same as the tests above. */
5444 if (TEST_BIT (hoist_vbeout[bb], i))
5446 /* We've found a potentially hoistable expression, now
5447 we look at every block BB dominates to see if it
5448 computes the expression. */
5449 for (dominated = 0; dominated < n_basic_blocks; dominated++)
5451 /* Ignore self dominance. */
5452 if (bb == dominated
5453 || ! TEST_BIT (dominators[dominated], bb))
5454 continue;
5456 /* We've found a dominated block, now see if it computes
5457 the busy expression and whether or not moving that
5458 expression to the "beginning" of that block is safe. */
5459 if (!TEST_BIT (antloc[dominated], i))
5460 continue;
5462 /* The expression is computed in the dominated block and
5463 it would be safe to compute it at the start of the
5464 dominated block. Now we have to determine if the
5465 expression would reach the dominated block if it was
5466 placed at the end of BB. */
5467 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
5469 struct expr *expr = index_map[i];
5470 struct occr *occr = expr->antic_occr;
5471 rtx insn;
5472 rtx set;
5475 /* Find the right occurrence of this expression. */
5476 while (occr && BLOCK_NUM (occr->insn) != dominated)
5477 occr = occr->next;
5479 /* Should never happen. */
5480 if (!occr)
5481 abort ();
5483 insn = occr->insn;
5485 set = single_set (insn);
5486 if (! set)
5487 abort ();
5489 /* Create a pseudo-reg to store the result of reaching
5490 expressions into. Get the mode for the new pseudo
5491 from the mode of the original destination pseudo. */
5492 if (expr->reaching_reg == NULL)
5493 expr->reaching_reg
5494 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
5496 /* In theory this should never fail since we're creating
5497 a reg->reg copy.
5499 However, on the x86 some of the movXX patterns actually
5500 contain clobbers of scratch regs. This may cause the
5501 insn created by validate_change to not match any
5502 pattern and thus cause validate_change to fail. */
5503 if (validate_change (insn, &SET_SRC (set),
5504 expr->reaching_reg, 0))
5506 occr->deleted_p = 1;
5507 if (!insn_inserted_p)
5509 insert_insn_end_bb (index_map[i], bb, 0);
5510 insn_inserted_p = 1;
5518 free (index_map);
5521 /* Top level routine to perform one code hoisting (aka unification) pass.
5523 Return non-zero if a change was made. */
5525 static int
5526 one_code_hoisting_pass ()
5528 int changed = 0;
5530 alloc_expr_hash_table (max_cuid);
5531 compute_expr_hash_table ();
5532 if (gcse_file)
5533 dump_hash_table (gcse_file, "Code Hoisting Expressions", expr_hash_table,
5534 expr_hash_table_size, n_exprs);
5535 if (n_exprs > 0)
5537 alloc_code_hoist_mem (n_basic_blocks, n_exprs);
5538 compute_code_hoist_data ();
5539 hoist_code ();
5540 free_code_hoist_mem ();
5542 free_expr_hash_table ();
5544 return changed;