/* Global common subexpression elimination/Partial redundancy elimination
   and global constant/copy propagation for GNU compiler.
   Copyright (C) 1997, 1998, 1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* TODO
   - reordering of memory allocation and freeing to be more space efficient
   - do rough calc of how many regs are needed in each block, and a rough
     calc of how many regs are available in each class and use that to
     throttle back the code in cases where RTX_COST is minimal.
   - dead store elimination
   - a store to the same address as a load does not kill the load if the
     source of the store is also the destination of the load.  Handling this
     allows more load motion, particularly out of loops.
   - ability to realloc sbitmap vectors would allow one initial computation
     of reg_set_in_block with only subsequent additions, rather than
     recomputing it for each pass

*/
/* References searched while implementing this.

   Compilers Principles, Techniques and Tools
   Aho, Sethi, Ullman
   Addison-Wesley, 1988

   Global Optimization by Suppression of Partial Redundancies
   E. Morel, C. Renvoise
   Communications of the ACM, Vol. 22, Num. 2, Feb. 1979

   A Portable Machine-Independent Global Optimizer - Design and Measurements
   Frederick Chow
   Stanford Ph.D. thesis, Dec. 1983

   A Fast Algorithm for Code Movement Optimization
   D.M. Dhamdhere
   SIGPLAN Notices, Vol. 23, Num. 10, Oct. 1988

   A Solution to a Problem with Morel and Renvoise's
   Global Optimization by Suppression of Partial Redundancies
   K-H Drechsler, M.P. Stadel
   ACM TOPLAS, Vol. 10, Num. 4, Oct. 1988

   Practical Adaptation of the Global Optimization
   Algorithm of Morel and Renvoise
   D.M. Dhamdhere
   ACM TOPLAS, Vol. 13, Num. 2, Apr. 1991

   Efficiently Computing Static Single Assignment Form and the Control
   Dependence Graph
   R. Cytron, J. Ferrante, B.K. Rosen, M.N. Wegman, and F.K. Zadeck
   ACM TOPLAS, Vol. 13, Num. 4, Oct. 1991

   Lazy Code Motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   What's In a Region?  Or Computing Control Dependence Regions in Near-Linear
   Time for Reducible Flow Control
   Thomas Ball
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   An Efficient Representation for Sparse Sets
   Preston Briggs, Linda Torczon
   ACM Letters on Programming Languages and Systems,
   Vol. 2, Num. 1-4, Mar-Dec 1993

   A Variation of Knoop, Ruthing, and Steffen's Lazy Code Motion
   K-H Drechsler, M.P. Stadel
   ACM SIGPLAN Notices, Vol. 28, Num. 5, May 1993

   Partial Dead Code Elimination
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Effective Partial Redundancy Elimination
   P. Briggs, K.D. Cooper
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   The Program Structure Tree: Computing Control Regions in Linear Time
   R. Johnson, D. Pearson, K. Pingali
   ACM SIGPLAN Notices, Vol. 29, Num. 6, Jun. 1994

   Optimal Code Motion: Theory and Practice
   J. Knoop, O. Ruthing, B. Steffen
   ACM TOPLAS, Vol. 16, Num. 4, Jul. 1994

   The power of assignment motion
   J. Knoop, O. Ruthing, B. Steffen
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Global code motion / global value numbering
   C. Click
   ACM SIGPLAN Notices Vol. 30, Num. 6, Jun. 1995, '95 Conference on PLDI

   Value Driven Redundancy Elimination
   L.T. Simpson
   Rice University Ph.D. thesis, Apr. 1996

   Value Numbering
   L.T. Simpson
   Massively Scalar Compiler Project, Rice University, Sep. 1996

   High Performance Compilers for Parallel Computing
   Michael Wolfe
   Addison-Wesley, 1996

   Advanced Compiler Design and Implementation
   Steven Muchnick
   Morgan Kaufmann, 1997

   Building an Optimizing Compiler
   Robert Morgan
   Digital Press, 1998

   People wishing to speed up the code here should read:
     Elimination Algorithms for Data Flow Analysis
     B.G. Ryder, M.C. Paull
     ACM Computing Surveys, Vol. 18, Num. 3, Sep. 1986

     How to Analyze Large Programs Efficiently and Informatively
     D.M. Dhamdhere, B.K. Rosen, F.K. Zadeck
     ACM SIGPLAN Notices Vol. 27, Num. 7, Jul. 1992, '92 Conference on PLDI

   People wishing to do something different can find various possibilities
   in the above papers and elsewhere.
*/
#include "config.h"
#include "system.h"
#include "toplev.h"

#include "rtl.h"
#include "tm_p.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "real.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "output.h"
#include "function.h"
#include "expr.h"
#include "ggc.h"

#include "obstack.h"
#define obstack_chunk_alloc gmalloc
#define obstack_chunk_free free
/* Maximum number of passes to perform.  */
#define MAX_PASSES 1

/* Propagate flow information through back edges and thus enable PRE to
   move loop-invariant calculations out of loops.

   Originally this tended to create worse overall code, but several
   improvements during the development of PRE seem to have made following
   back edges generally a win.

   Note much of the loop invariant code motion done here would normally
   be done by loop.c, which has more heuristics for when to move invariants
   out of loops.  At some point we might need to move some of those
   heuristics into gcse.c.  */
#define FOLLOW_BACK_EDGES 1
/* We support GCSE via Partial Redundancy Elimination.  PRE optimizations
   are a superset of those done by GCSE.

   We perform the following steps:

   1) Compute basic block information.

   2) Compute table of places where registers are set.

   3) Perform copy/constant propagation.

   4) Perform global cse.

   5) Perform another pass of copy/constant propagation.

   Two passes of copy/constant propagation are done because the first one
   enables more GCSE and the second one helps to clean up the copies that
   GCSE creates.  This is needed more for PRE than for Classic because Classic
   GCSE will try to use an existing register containing the common
   subexpression rather than create a new one.  This is harder to do for PRE
   because of the code motion (which Classic GCSE doesn't do).

   Expressions we are interested in GCSE-ing are of the form
   (set (pseudo-reg) (expression)).
   Function want_to_gcse_p says what these are.

   PRE handles moving invariant expressions out of loops (by treating them as
   partially redundant).

   Eventually it would be nice to replace cse.c/gcse.c with SSA (static single
   assignment) based GVN (global value numbering).  L. T. Simpson's paper
   (Rice University) on value numbering is a useful reference for this.

   **********************

   We used to support multiple passes but there are diminishing returns in
   doing so.  The first pass usually makes 90% of the changes that are doable.
   A second pass can make a few more changes made possible by the first pass.
   Experiments show any further passes don't make enough changes to justify
   the expense.

   A study of spec92 using an unlimited number of passes:
   [1 pass] = 1208 substitutions, [2] = 577, [3] = 202, [4] = 192, [5] = 83,
   [6] = 34, [7] = 17, [8] = 9, [9] = 4, [10] = 4, [11] = 2,
   [12] = 2, [13] = 1, [15] = 1, [16] = 2, [41] = 1

   It was found doing copy propagation between each pass enables further
   substitutions.

   PRE is quite expensive in complicated functions because the DFA can take
   a while to converge.  Hence we only perform one pass.  The macro MAX_PASSES
   can be modified if one wants to experiment.

   **********************

   The steps for PRE are:

   1) Build the hash table of expressions we wish to GCSE (expr_hash_table).

   2) Perform the data flow analysis for PRE.

   3) Delete the redundant instructions.

   4) Insert the required copies [if any] that make the partially
      redundant instructions fully redundant.

   5) For other reaching expressions, insert an instruction to copy the value
      to a newly created pseudo that will reach the redundant instruction.

   The deletion is done first so that when we do insertions we
   know which pseudo reg to use.

   Various papers have argued that PRE DFA is expensive (O(n^2)) and others
   argue it is not.  The number of iterations for the algorithm to converge
   is typically 2-4 so I don't view it as that expensive (relatively speaking).

   PRE GCSE depends heavily on the second CSE pass to clean up the copies
   we create.  To make an expression reach the place where it's redundant,
   the result of the expression is copied to a new register, and the redundant
   expression is deleted by replacing it with this new register.  Classic GCSE
   doesn't have this problem as much as it computes the reaching defs of
   each register in each block and thus can try to use an existing register.

   **********************

   A fair bit of simplicity is gained by using small functions for simple
   tasks, even when the function is only called in one place.  This may
   measurably slow things down [or may not] by creating more function call
   overhead than is necessary.  The source is laid out so that it's trivial
   to make the affected functions inline so that one can measure what speed
   up, if any, can be achieved, and maybe later, when things settle, things
   can be rearranged.

   Help stamp out big monolithic functions!  */
/* GCSE global vars.  */

/* -dG dump file.  */
static FILE *gcse_file;

/* Note whether or not we should run jump optimization after gcse.  We
   want to do this for two cases.

    * If we changed any jumps via cprop.

    * If we added any labels via edge splitting.  */
static int run_jump_opt_after_gcse;

/* Bitmaps are normally not included in debugging dumps.
   However it's useful to be able to print them from GDB.
   We could create special functions for this, but it's simpler to
   just allow passing stderr to the dump_foo fns.  Since stderr can
   be a macro, we store a copy here.  */
static FILE *debug_stderr;

/* An obstack for our working variables.  */
static struct obstack gcse_obstack;

/* Non-zero for each mode that supports (set (reg) (reg)).
   This is trivially true for integer and floating point values.
   It may or may not be true for condition codes.  */
static char can_copy_p[(int) NUM_MACHINE_MODES];

/* Non-zero if can_copy_p has been initialized.  */
static int can_copy_init_p;

struct reg_use {rtx reg_rtx; };
/* Hash table of expressions.  */

struct expr
{
  /* The expression (SET_SRC for expressions, PATTERN for assignments).  */
  rtx expr;
  /* Index in the available expression bitmaps.  */
  int bitmap_index;
  /* Next entry with the same hash.  */
  struct expr *next_same_hash;
  /* List of anticipatable occurrences in basic blocks in the function.
     An "anticipatable occurrence" is one that is the first occurrence in the
     basic block, the operands are not modified in the basic block prior
     to the occurrence and the output is not used between the start of
     the block and the occurrence.  */
  struct occr *antic_occr;
  /* List of available occurrences in basic blocks in the function.
     An "available occurrence" is one that is the last occurrence in the
     basic block and the operands are not modified by following statements in
     the basic block [including this insn].  */
  struct occr *avail_occr;
  /* Non-null if the computation is PRE redundant.
     The value is the newly created pseudo-reg to record a copy of the
     expression in all the places that reach the redundant copy.  */
  rtx reaching_reg;
};
/* Occurrence of an expression.
   There is one per basic block.  If a pattern appears more than once the
   last appearance is used [or first for anticipatable expressions].  */

struct occr
{
  /* Next occurrence of this expression.  */
  struct occr *next;
  /* The insn that computes the expression.  */
  rtx insn;
  /* Non-zero if this [anticipatable] occurrence has been deleted.  */
  char deleted_p;
  /* Non-zero if this [available] occurrence has been copied to
     reaching_reg.  */
  /* ??? This is mutually exclusive with deleted_p, so they could share
     the same byte.  */
  char copied_p;
};
/* Expression and copy propagation hash tables.
   Each hash table is an array of buckets.
   ??? It is known that if it were an array of entries, structure elements
   `next_same_hash' and `bitmap_index' wouldn't be necessary.  However, it is
   not clear whether in the final analysis a sufficient amount of memory would
   be saved as the size of the available expression bitmaps would be larger
   [one could build a mapping table without holes afterwards though].
   Someday I'll perform the computation and figure it out.  */

/* Total size of the expression hash table, in elements.  */
static unsigned int expr_hash_table_size;

/* The table itself.
   This is an array of `expr_hash_table_size' elements.  */
static struct expr **expr_hash_table;

/* Total size of the copy propagation hash table, in elements.  */
static unsigned int set_hash_table_size;

/* The table itself.
   This is an array of `set_hash_table_size' elements.  */
static struct expr **set_hash_table;

/* Mapping of uids to cuids.
   Only real insns get cuids.  */
static int *uid_cuid;

/* Highest UID in UID_CUID.  */
static int max_uid;

/* Get the cuid of an insn.  */
#ifdef ENABLE_CHECKING
#define INSN_CUID(INSN) (INSN_UID (INSN) > max_uid ? (abort (), 0) : uid_cuid[INSN_UID (INSN)])
#else
#define INSN_CUID(INSN) (uid_cuid[INSN_UID (INSN)])
#endif

/* Number of cuids.  */
static int max_cuid;

/* Mapping of cuids to insns.  */
static rtx *cuid_insn;

/* Get insn from cuid.  */
#define CUID_INSN(CUID) (cuid_insn[CUID])
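
/* Illustrative sketch (not part of the pass; the helper name is
   hypothetical): because CUIDs increase monotonically over real insns
   with no gaps, "does insn A come before insn B" reduces to an integer
   comparison once both insns have been mapped through INSN_CUID.  */
#if 0
static int
insn_comes_before_p (a, b)
     rtx a, b;
{
  /* Smaller cuid means earlier position in the insn stream.  */
  return INSN_CUID (a) < INSN_CUID (b);
}
#endif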
/* Maximum register number in function prior to doing gcse + 1.
   Registers created during this pass have regno >= max_gcse_regno.
   This is named with "gcse" to not collide with global of same name.  */
static unsigned int max_gcse_regno;

/* Maximum number of cse-able expressions found.  */
static int n_exprs;

/* Maximum number of assignments for copy propagation found.  */
static int n_sets;
/* Table of registers that are modified.

   For each register, each element is a list of places where the pseudo-reg
   is set.

   For simplicity, GCSE is done on sets of pseudo-regs only.  PRE GCSE only
   requires knowledge of which blocks kill which regs [and thus could use
   a bitmap instead of the lists `reg_set_table' uses].

   `reg_set_table' could be turned into an array of bitmaps (num-bbs x
   num-regs) [however perhaps it may be useful to keep the data as is].  One
   advantage of recording things this way is that `reg_set_table' is fairly
   sparse with respect to pseudo regs but for hard regs could be fairly dense
   [relatively speaking].  And recording sets of pseudo-regs in lists speeds
   up functions like compute_transp since in the case of pseudo-regs we only
   need to iterate over the number of times a pseudo-reg is set, not over the
   number of basic blocks [clearly there is a bit of a slow down in the cases
   where a pseudo is set more than once in a block, however it is believed
   that the net effect is to speed things up].  This isn't done for hard-regs
   because recording call-clobbered hard-regs in `reg_set_table' at each
   function call can consume a fair bit of memory, and iterating over
   hard-regs stored this way in compute_transp will be more expensive.  */

typedef struct reg_set
{
  /* The next setting of this register.  */
  struct reg_set *next;
  /* The insn where it was set.  */
  rtx insn;
} reg_set;

static reg_set **reg_set_table;

/* Size of `reg_set_table'.
   The table starts out at max_gcse_regno + slop, and is enlarged as
   necessary.  */
static int reg_set_table_size;

/* Amount to grow `reg_set_table' by when it's full.  */
#define REG_SET_TABLE_SLOP 100
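
/* Illustrative sketch (hypothetical helper, for exposition only):
   each `reg_set_table[REGNO]' entry heads a linked list of the insns
   that set pseudo REGNO, so per-register queries walk that chain
   instead of scanning every basic block.  */
#if 0
static int
count_sets_of_reg (regno)
     int regno;
{
  int n = 0;
  reg_set *r;

  /* One list node per recorded set of this pseudo.  */
  for (r = reg_set_table[regno]; r != NULL; r = r->next)
    n++;

  return n;
}
#endif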
/* Bitmap containing one bit for each register in the program.
   Used when performing GCSE to track which registers have been set since
   the start of the basic block.  */
static sbitmap reg_set_bitmap;

/* For each block, a bitmap of registers set in the block.
   This is used by expr_killed_p and compute_transp.
   It is computed during hash table computation and not by compute_sets
   as it includes registers added since the last pass (or between cprop and
   gcse) and it's currently not easy to realloc sbitmap vectors.  */
static sbitmap *reg_set_in_block;

/* For each block, non-zero if memory is set in that block.
   This is computed during hash table computation and is used by
   expr_killed_p and compute_transp.
   ??? Handling of memory is very simple, we don't make any attempt
   to optimize things (later).
   ??? This can be computed by compute_sets since the information
   doesn't change.  */
static char *mem_set_in_block;
/* Various variables for statistics gathering.  */

/* Memory used in a pass.
   This isn't intended to be absolutely precise.  Its intent is only
   to keep an eye on memory usage.  */
static int bytes_used;

/* GCSE substitutions made.  */
static int gcse_subst_count;
/* Number of copy instructions created.  */
static int gcse_create_count;
/* Number of constants propagated.  */
static int const_prop_count;
/* Number of copies propagated.  */
static int copy_prop_count;
/* These variables are used by classic GCSE.
   Normally they'd be defined a bit later, but `rd_gen' needs to
   be declared sooner.  */

/* Each block has a bitmap of each type.
   The length of each block's bitmap is:

       max_cuid  - for reaching definitions
       n_exprs   - for available expressions

   Thus we view the bitmaps as 2 dimensional arrays, i.e.
   rd_kill[block_num][cuid_num]
   ae_kill[block_num][expr_num]  */

/* For reaching defs */
static sbitmap *rd_kill, *rd_gen, *reaching_defs, *rd_out;

/* for available exprs */
static sbitmap *ae_kill, *ae_gen, *ae_in, *ae_out;
/* Objects of this type are passed around by the null-pointer check
   removal routines.  */
struct null_pointer_info
{
  /* The basic block being processed.  */
  int current_block;
  /* The first register to be handled in this pass.  */
  unsigned int min_reg;
  /* One greater than the last register to be handled in this pass.  */
  unsigned int max_reg;
  sbitmap *nonnull_local;
  sbitmap *nonnull_killed;
};
static void compute_can_copy PARAMS ((void));
static char *gmalloc PARAMS ((unsigned int));
static char *grealloc PARAMS ((char *, unsigned int));
static char *gcse_alloc PARAMS ((unsigned long));
static void alloc_gcse_mem PARAMS ((rtx));
static void free_gcse_mem PARAMS ((void));
static void alloc_reg_set_mem PARAMS ((int));
static void free_reg_set_mem PARAMS ((void));
static int get_bitmap_width PARAMS ((int, int, int));
static void record_one_set PARAMS ((int, rtx));
static void record_set_info PARAMS ((rtx, rtx, void *));
static void compute_sets PARAMS ((rtx));
static void hash_scan_insn PARAMS ((rtx, int, int));
static void hash_scan_set PARAMS ((rtx, rtx, int));
static void hash_scan_clobber PARAMS ((rtx, rtx));
static void hash_scan_call PARAMS ((rtx, rtx));
static int want_to_gcse_p PARAMS ((rtx));
static int oprs_unchanged_p PARAMS ((rtx, rtx, int));
static int oprs_anticipatable_p PARAMS ((rtx, rtx));
static int oprs_available_p PARAMS ((rtx, rtx));
static void insert_expr_in_table PARAMS ((rtx, enum machine_mode, rtx,
					  int, int));
static void insert_set_in_table PARAMS ((rtx, rtx));
static unsigned int hash_expr PARAMS ((rtx, enum machine_mode, int *, int));
static unsigned int hash_expr_1 PARAMS ((rtx, enum machine_mode, int *));
static unsigned int hash_string_1 PARAMS ((const char *));
static unsigned int hash_set PARAMS ((int, int));
static int expr_equiv_p PARAMS ((rtx, rtx));
static void record_last_reg_set_info PARAMS ((rtx, int));
static void record_last_mem_set_info PARAMS ((rtx));
static void record_last_set_info PARAMS ((rtx, rtx, void *));
static void compute_hash_table PARAMS ((int));
static void alloc_set_hash_table PARAMS ((int));
static void free_set_hash_table PARAMS ((void));
static void compute_set_hash_table PARAMS ((void));
static void alloc_expr_hash_table PARAMS ((unsigned int));
static void free_expr_hash_table PARAMS ((void));
static void compute_expr_hash_table PARAMS ((void));
static void dump_hash_table PARAMS ((FILE *, const char *, struct expr **,
				     int, int));
static struct expr *lookup_expr PARAMS ((rtx));
static struct expr *lookup_set PARAMS ((unsigned int, rtx));
static struct expr *next_set PARAMS ((unsigned int, struct expr *));
static void reset_opr_set_tables PARAMS ((void));
static int oprs_not_set_p PARAMS ((rtx, rtx));
static void mark_call PARAMS ((rtx));
static void mark_set PARAMS ((rtx, rtx));
static void mark_clobber PARAMS ((rtx, rtx));
static void mark_oprs_set PARAMS ((rtx));
static void alloc_cprop_mem PARAMS ((int, int));
static void free_cprop_mem PARAMS ((void));
static void compute_transp PARAMS ((rtx, int, sbitmap *, int));
static void compute_transpout PARAMS ((void));
static void compute_local_properties PARAMS ((sbitmap *, sbitmap *, sbitmap *,
					      int));
static void compute_cprop_data PARAMS ((void));
static void find_used_regs PARAMS ((rtx));
static int try_replace_reg PARAMS ((rtx, rtx, rtx));
static struct expr *find_avail_set PARAMS ((int, rtx));
static int cprop_jump PARAMS ((rtx, rtx, rtx));
#ifdef HAVE_cc0
static int cprop_cc0_jump PARAMS ((rtx, struct reg_use *, rtx));
#endif
static int cprop_insn PARAMS ((rtx, int));
static int cprop PARAMS ((int));
static int one_cprop_pass PARAMS ((int, int));
static void alloc_pre_mem PARAMS ((int, int));
static void free_pre_mem PARAMS ((void));
static void compute_pre_data PARAMS ((void));
static int pre_expr_reaches_here_p PARAMS ((int, struct expr *, int));
static void insert_insn_end_bb PARAMS ((struct expr *, int, int));
static void pre_insert_copy_insn PARAMS ((struct expr *, rtx));
static void pre_insert_copies PARAMS ((void));
static int pre_delete PARAMS ((void));
static int pre_gcse PARAMS ((void));
static int one_pre_gcse_pass PARAMS ((int));
static void add_label_notes PARAMS ((rtx, rtx));
static void alloc_code_hoist_mem PARAMS ((int, int));
static void free_code_hoist_mem PARAMS ((void));
static void compute_code_hoist_vbeinout PARAMS ((void));
static void compute_code_hoist_data PARAMS ((void));
static int hoist_expr_reaches_here_p PARAMS ((int, int, int, char *));
static void hoist_code PARAMS ((void));
static int one_code_hoisting_pass PARAMS ((void));
static void alloc_rd_mem PARAMS ((int, int));
static void free_rd_mem PARAMS ((void));
static void handle_rd_kill_set PARAMS ((rtx, int, int));
static void compute_kill_rd PARAMS ((void));
static void compute_rd PARAMS ((void));
static void alloc_avail_expr_mem PARAMS ((int, int));
static void free_avail_expr_mem PARAMS ((void));
static void compute_ae_gen PARAMS ((void));
static int expr_killed_p PARAMS ((rtx, int));
static void compute_ae_kill PARAMS ((sbitmap *, sbitmap *));
static int expr_reaches_here_p PARAMS ((struct occr *, struct expr *,
					int, int));
static rtx computing_insn PARAMS ((struct expr *, rtx));
static int def_reaches_here_p PARAMS ((rtx, rtx));
static int can_disregard_other_sets PARAMS ((struct reg_set **, rtx, int));
static int handle_avail_expr PARAMS ((rtx, struct expr *));
static int classic_gcse PARAMS ((void));
static int one_classic_gcse_pass PARAMS ((int));
static void invalidate_nonnull_info PARAMS ((rtx, rtx, void *));
static void delete_null_pointer_checks_1 PARAMS ((unsigned int *, sbitmap *,
						  sbitmap *,
						  struct null_pointer_info *));
static rtx process_insert_insn PARAMS ((struct expr *));
static int pre_edge_insert PARAMS ((struct edge_list *, struct expr **));
static int expr_reaches_here_p_work PARAMS ((struct occr *, struct expr *,
					     int, int, char *));
static int pre_expr_reaches_here_p_work PARAMS ((int, struct expr *,
						 int, char *));
/* Entry point for global common subexpression elimination.
   F is the first instruction in the function.  */

int
gcse_main (f, file)
     rtx f;
     FILE *file;
{
  int changed, pass;
  /* Bytes used at start of pass.  */
  int initial_bytes_used;
  /* Maximum number of bytes used by a pass.  */
  int max_pass_bytes;
  /* Point to release obstack data from for each pass.  */
  char *gcse_obstack_bottom;

  /* We do not construct an accurate cfg in functions which call
     setjmp, so just punt to be safe.  */
  if (current_function_calls_setjmp)
    return 0;

  /* Assume that we do not need to run jump optimizations after gcse.  */
  run_jump_opt_after_gcse = 0;

  /* For calling dump_foo fns from gdb.  */
  debug_stderr = stderr;
  gcse_file = file;

  /* Identify the basic block information for this function, including
     successors and predecessors.  */
  max_gcse_regno = max_reg_num ();

  if (file)
    dump_flow_info (file);

  /* Return if there's nothing to do.  */
  if (n_basic_blocks <= 1)
    return 0;

  /* Trying to perform global optimizations on flow graphs which have
     a high connectivity will take a long time and is unlikely to be
     particularly useful.

     In normal circumstances a cfg should have about twice as many edges
     as blocks.  But we do not want to punish small functions which have
     a couple switch statements.  So we require a relatively large number
     of basic blocks and the ratio of edges to blocks to be high.  */
  if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
    {
      if (warn_disabled_optimization)
	warning ("GCSE disabled: %d > 1000 basic blocks and %d >= 20 edges/basic block",
		 n_basic_blocks, n_edges / n_basic_blocks);
      return 0;
    }

  /* See what modes support reg/reg copy operations.  */
  if (! can_copy_init_p)
    {
      compute_can_copy ();
      can_copy_init_p = 1;
    }

  gcc_obstack_init (&gcse_obstack);
  bytes_used = 0;

  /* Record where pseudo-registers are set.  This data is kept accurate
     during each pass.  ??? We could also record hard-reg information here
     [since it's unchanging], however it is currently done during hash table
     computation.

     It may be tempting to compute MEM set information here too, but MEM sets
     will be subject to code motion one day and thus we need to compute
     information about memory sets when we build the hash tables.  */

  alloc_reg_set_mem (max_gcse_regno);
  compute_sets (f);

  pass = 0;
  initial_bytes_used = bytes_used;
  max_pass_bytes = 0;
  gcse_obstack_bottom = gcse_alloc (1);
  changed = 1;
  while (changed && pass < MAX_PASSES)
    {
      changed = 0;
      if (file)
	fprintf (file, "GCSE pass %d\n\n", pass + 1);

      /* Initialize bytes_used to the space for the pred/succ lists,
	 and the reg_set_table data.  */
      bytes_used = initial_bytes_used;

      /* Each pass may create new registers, so recalculate each time.  */
      max_gcse_regno = max_reg_num ();

      alloc_gcse_mem (f);

      /* Don't allow constant propagation to modify jumps
	 during this pass.  */
      changed = one_cprop_pass (pass + 1, 0);

      if (optimize_size)
	changed |= one_classic_gcse_pass (pass + 1);
      else
	{
	  changed |= one_pre_gcse_pass (pass + 1);
	  free_reg_set_mem ();
	  alloc_reg_set_mem (max_reg_num ());
	  compute_sets (f);
	  run_jump_opt_after_gcse = 1;
	}

      if (max_pass_bytes < bytes_used)
	max_pass_bytes = bytes_used;

      /* Free up memory, then reallocate for code hoisting.  We can
	 not re-use the existing allocated memory because the tables
	 will not have info for the insns or registers created by
	 partial redundancy elimination.  */
      free_gcse_mem ();

      /* It does not make sense to run code hoisting unless we're optimizing
	 for code size -- it rarely makes programs faster, and can make
	 them bigger if we did partial redundancy elimination (when optimizing
	 for space, we use a classic gcse algorithm instead of partial
	 redundancy algorithms).  */
      if (optimize_size)
	{
	  max_gcse_regno = max_reg_num ();
	  alloc_gcse_mem (f);
	  changed |= one_code_hoisting_pass ();
	  free_gcse_mem ();

	  if (max_pass_bytes < bytes_used)
	    max_pass_bytes = bytes_used;
	}

      if (file)
	{
	  fprintf (file, "\n");
	  fflush (file);
	}

      obstack_free (&gcse_obstack, gcse_obstack_bottom);
      pass++;
    }

  /* Do one last pass of copy propagation, including cprop into
     conditional jumps.  */

  max_gcse_regno = max_reg_num ();
  alloc_gcse_mem (f);
  /* This time, go ahead and allow cprop to alter jumps.  */
  one_cprop_pass (pass + 1, 1);
  free_gcse_mem ();

  if (file)
    {
      fprintf (file, "GCSE of %s: %d basic blocks, ",
	       current_function_name, n_basic_blocks);
      fprintf (file, "%d pass%s, %d bytes\n\n",
	       pass, pass > 1 ? "es" : "", max_pass_bytes);
    }

  obstack_free (&gcse_obstack, NULL_PTR);
  free_reg_set_mem ();
  return run_jump_opt_after_gcse;
}
/* Misc. utilities.  */

/* Compute which modes support reg/reg copy operations.  */

static void
compute_can_copy ()
{
  int i;
#ifndef AVOID_CCMODE_COPIES
  rtx reg, insn;
#endif
  memset (can_copy_p, 0, NUM_MACHINE_MODES);

  start_sequence ();
  for (i = 0; i < NUM_MACHINE_MODES; i++)
    if (GET_MODE_CLASS (i) == MODE_CC)
      {
#ifdef AVOID_CCMODE_COPIES
	can_copy_p[i] = 0;
#else
	reg = gen_rtx_REG ((enum machine_mode) i, LAST_VIRTUAL_REGISTER + 1);
	insn = emit_insn (gen_rtx_SET (VOIDmode, reg, reg));
	if (recog (PATTERN (insn), insn, NULL_PTR) >= 0)
	  can_copy_p[i] = 1;
#endif
      }
    else
      can_copy_p[i] = 1;

  end_sequence ();
}
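
/* Illustrative sketch (hypothetical helper, for exposition): once
   compute_can_copy has run, a client would consult can_copy_p before
   emitting a reg-reg copy in a given mode.  This matters mainly for
   MODE_CC modes on targets that define AVOID_CCMODE_COPIES.  */
#if 0
static int
mode_supports_copy_p (mode)
     enum machine_mode mode;
{
  /* Non-zero means (set (reg) (reg)) is recognized in MODE.  */
  return can_copy_p[(int) mode] != 0;
}
#endif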
/* Cover function to xmalloc to record bytes allocated.  */

static char *
gmalloc (size)
     unsigned int size;
{
  bytes_used += size;
  return xmalloc (size);
}

/* Cover function to xrealloc.
   We don't record the additional size since we don't know it.
   It won't affect memory usage stats much anyway.  */

static char *
grealloc (ptr, size)
     char *ptr;
     unsigned int size;
{
  return xrealloc (ptr, size);
}

/* Cover function to obstack_alloc.
   We don't need to record the bytes allocated here since
   obstack_chunk_alloc is set to gmalloc.  */

static char *
gcse_alloc (size)
     unsigned long size;
{
  return (char *) obstack_alloc (&gcse_obstack, size);
}
/* Allocate memory for the cuid mapping array,
   and reg/memory set tracking tables.

   This is called at the start of each pass.  */

static void
alloc_gcse_mem (f)
     rtx f;
{
  int i, n;
  rtx insn;

  /* Find the largest UID and create a mapping from UIDs to CUIDs.
     CUIDs are like UIDs except they increase monotonically, have no gaps,
     and only apply to real insns.  */

  max_uid = get_max_uid ();
  n = (max_uid + 1) * sizeof (int);
  uid_cuid = (int *) gmalloc (n);
  memset ((char *) uid_cuid, 0, n);
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	uid_cuid[INSN_UID (insn)] = i++;
      else
	uid_cuid[INSN_UID (insn)] = i;
    }

  /* Create a table mapping cuids to insns.  */

  max_cuid = i;
  n = (max_cuid + 1) * sizeof (rtx);
  cuid_insn = (rtx *) gmalloc (n);
  memset ((char *) cuid_insn, 0, n);
  for (insn = f, i = 0; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      CUID_INSN (i++) = insn;

  /* Allocate vars to track sets of regs.  */
  reg_set_bitmap = (sbitmap) sbitmap_alloc (max_gcse_regno);

  /* Allocate vars to track sets of regs, memory per block.  */
  reg_set_in_block = (sbitmap *) sbitmap_vector_alloc (n_basic_blocks,
						       max_gcse_regno);
  mem_set_in_block = (char *) gmalloc (n_basic_blocks);
}

/* Free memory allocated by alloc_gcse_mem.  */

static void
free_gcse_mem ()
{
  free (uid_cuid);
  free (cuid_insn);

  free (reg_set_bitmap);

  free (reg_set_in_block);
  free (mem_set_in_block);
}
/* Many of the global optimization algorithms work by solving dataflow
   equations for various expressions.  Initially, some local value is
   computed for each expression in each block.  Then, the values across the
   various blocks are combined (by following flow graph edges) to arrive at
   global values.  Conceptually, each set of equations is independent.  We
   may therefore solve all the equations in parallel, solve them one at a
   time, or pick any intermediate approach.

   When you're going to need N two-dimensional bitmaps, each X (say, the
   number of blocks) by Y (say, the number of expressions), call this
   function.  It's not important what X and Y represent; only that Y
   corresponds to the things that can be done in parallel.  This function will
   return an appropriate chunking factor C; you should solve C sets of
   equations in parallel.  By going through this function, we can easily
   trade space against time; by solving fewer equations in parallel we use
   less space.  */

static int
get_bitmap_width (n, x, y)
     int n;
     int x;
     int y;
{
  /* It's not really worth figuring out *exactly* how much memory will
     be used by a particular choice.  The important thing is to get
     something approximately right.  */
  size_t max_bitmap_memory = 10 * 1024 * 1024;

  /* The number of bytes we'd use for a single column of minimum
     width.  */
  size_t column_size = n * x * sizeof (SBITMAP_ELT_TYPE);

  /* Often, it's reasonable just to solve all the equations in
     parallel.  */
  if (column_size * SBITMAP_SET_SIZE (y) <= max_bitmap_memory)
    return y;

  /* Otherwise, pick the largest width we can, without going over the
     limit.  */
  return SBITMAP_ELT_BITS * ((max_bitmap_memory + column_size - 1)
			     / column_size);
}
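
/* Illustrative sketch (hypothetical caller, for exposition only): a
   solver that needs N bitmaps of X blocks by Y expressions would ask
   get_bitmap_width for a chunking factor and then solve the equations
   in slices of at most that many columns at a time.  */
#if 0
static void
solve_in_chunks (n, x, y)
     int n, x, y;
{
  int width = get_bitmap_width (n, x, y);
  int start;

  for (start = 0; start < y; start += width)
    {
      /* Allocate n bitmap vectors of x blocks by at most WIDTH columns
	 (columns START .. START + WIDTH - 1), solve those equations,
	 then release the memory before the next slice.  */
    }
}
#endif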
/* Compute the local properties of each recorded expression.

   Local properties are those that are defined by the block, irrespective of
   other blocks.

   An expression is transparent in a block if its operands are not modified
   in the block.

   An expression is computed (locally available) in a block if it is computed
   at least once and the expression would contain the same value if the
   computation was moved to the end of the block.

   An expression is locally anticipatable in a block if it is computed at
   least once and the expression would contain the same value if the
   computation was moved to the beginning of the block.

   We call this routine for cprop, pre and code hoisting.  They all compute
   basically the same information and thus can easily share this code.

   TRANSP, COMP, and ANTLOC are destination sbitmaps for recording local
   properties.  If NULL, then it is not necessary to compute or record that
   particular property.

   SETP controls which hash table to look at.  If zero, this routine looks at
   the expr hash table; if nonzero this routine looks at the set hash table.
   Additionally, TRANSP is computed as ~TRANSP, since this is really cprop's
   ABSALTERED.  */

static void
compute_local_properties (transp, comp, antloc, setp)
     sbitmap *transp;
     sbitmap *comp;
     sbitmap *antloc;
     int setp;
{
  unsigned int i, hash_table_size;
  struct expr **hash_table;

  /* Initialize any bitmaps that were passed in.  */
  if (transp)
    {
      if (setp)
	sbitmap_vector_zero (transp, n_basic_blocks);
      else
	sbitmap_vector_ones (transp, n_basic_blocks);
    }

  if (comp)
    sbitmap_vector_zero (comp, n_basic_blocks);
  if (antloc)
    sbitmap_vector_zero (antloc, n_basic_blocks);

  /* We use the same code for cprop, pre and hoisting.  For cprop
     we care about the set hash table, for pre and hoisting we
     care about the expr hash table.  */
  hash_table_size = setp ? set_hash_table_size : expr_hash_table_size;
  hash_table = setp ? set_hash_table : expr_hash_table;

  for (i = 0; i < hash_table_size; i++)
    {
      struct expr *expr;

      for (expr = hash_table[i]; expr != NULL; expr = expr->next_same_hash)
	{
	  int indx = expr->bitmap_index;
	  struct occr *occr;

	  /* The expression is transparent in this block if it is not killed.
	     We start by assuming all are transparent [none are killed], and
	     then reset the bits for those that are.  */
	  if (transp)
	    compute_transp (expr->expr, indx, transp, setp);

	  /* The occurrences recorded in antic_occr are exactly those that
	     we want to set to non-zero in ANTLOC.  */
	  if (antloc)
	    for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
	      {
		SET_BIT (antloc[BLOCK_NUM (occr->insn)], indx);

		/* While we're scanning the table, this is a good place to
		   initialize this.  */
		occr->deleted_p = 0;
	      }

	  /* The occurrences recorded in avail_occr are exactly those that
	     we want to set to non-zero in COMP.  */
	  if (comp)
	    for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
	      {
		SET_BIT (comp[BLOCK_NUM (occr->insn)], indx);

		/* While we're scanning the table, this is a good place to
		   initialize this.  */
		occr->copied_p = 0;
	      }

	  /* While we're scanning the table, this is a good place to
	     initialize this.  */
	  expr->reaching_reg = 0;
	}
    }
}
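
/* Illustrative sketch (hypothetical caller, for exposition only):
   PRE-style clients allocate one sbitmap vector per property over all
   basic blocks and ask for whichever subset of TRANSP/COMP/ANTLOC they
   need; cprop would instead pass SETP == 1 to use the set hash table.  */
#if 0
static void
example_local_properties ()
{
  sbitmap *transp = sbitmap_vector_alloc (n_basic_blocks, n_exprs);
  sbitmap *comp = sbitmap_vector_alloc (n_basic_blocks, n_exprs);
  sbitmap *antloc = sbitmap_vector_alloc (n_basic_blocks, n_exprs);

  /* SETP == 0: scan the expr hash table, as PRE and hoisting do.  */
  compute_local_properties (transp, comp, antloc, 0);

  /* ... run the global dataflow problem over the three vectors ...  */
}
#endif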
/* Register set information.

   `reg_set_table' records where each register is set or otherwise
   modified.  */

static struct obstack reg_set_obstack;

static void
alloc_reg_set_mem (n_regs)
     int n_regs;
{
  unsigned int n;

  reg_set_table_size = n_regs + REG_SET_TABLE_SLOP;
  n = reg_set_table_size * sizeof (struct reg_set *);
  reg_set_table = (struct reg_set **) gmalloc (n);
  memset ((char *) reg_set_table, 0, n);

  gcc_obstack_init (&reg_set_obstack);
}

static void
free_reg_set_mem ()
{
  free (reg_set_table);
  obstack_free (&reg_set_obstack, NULL_PTR);
}
/* Record REGNO in the reg_set table.  */

static void
record_one_set (regno, insn)
     int regno;
     rtx insn;
{
  /* Allocate a new reg_set element and link it onto the list.  */
  struct reg_set *new_reg_info;

  /* If the table isn't big enough, enlarge it.  */
  if (regno >= reg_set_table_size)
    {
      int new_size = regno + REG_SET_TABLE_SLOP;

      reg_set_table
	= (struct reg_set **) grealloc ((char *) reg_set_table,
					new_size * sizeof (struct reg_set *));
      memset ((char *) (reg_set_table + reg_set_table_size), 0,
	      (new_size - reg_set_table_size) * sizeof (struct reg_set *));
      reg_set_table_size = new_size;
    }

  new_reg_info = (struct reg_set *) obstack_alloc (&reg_set_obstack,
						   sizeof (struct reg_set));
  bytes_used += sizeof (struct reg_set);
  new_reg_info->insn = insn;
  new_reg_info->next = reg_set_table[regno];
  reg_set_table[regno] = new_reg_info;
}
/* Called from compute_sets via note_stores to handle one SET or CLOBBER in
   an insn.  The DATA is really the instruction in which the SET is
   occurring.  */

static void
record_set_info (dest, setter, data)
     rtx dest, setter ATTRIBUTE_UNUSED;
     void *data;
{
  rtx record_set_insn = (rtx) data;

  if (GET_CODE (dest) == REG && REGNO (dest) >= FIRST_PSEUDO_REGISTER)
    record_one_set (REGNO (dest), record_set_insn);
}

/* Scan the function and record each set of each pseudo-register.

   This is called once, at the start of the gcse pass.  See the comments for
   `reg_set_table' for further documentation.  */

static void
compute_sets (f)
     rtx f;
{
  rtx insn;

  for (insn = f; insn != 0; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      note_stores (PATTERN (insn), record_set_info, insn);
}
/* Hash table support.  */

/* For each register, the cuid of the first/last insn in the block to set it,
   or -1 if not set.  */
#define NEVER_SET -1
static int *reg_first_set;
static int *reg_last_set;

/* While computing "first/last set" info, this is the CUID of first/last insn
   to set memory or -1 if not set.  `mem_last_set' is also used when
   performing GCSE to record whether memory has been set since the beginning
   of the block.

   Note that handling of memory is very simple, we don't make any attempt
   to optimize things (later).  */
static int mem_first_set;
static int mem_last_set;
/* See whether X, the source of a set, is something we want to consider for
   GCSE.  */

static int
want_to_gcse_p (x)
     rtx x;
{
  static rtx test_insn = 0;
  int num_clobbers = 0;
  int icode;

  switch (GET_CODE (x))
    {
    case REG:
    case SUBREG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CALL:
      return 0;

    default:
      break;
    }

  /* If this is a valid operand, we are OK.  If it's VOIDmode, we aren't.  */
  if (general_operand (x, GET_MODE (x)))
    return 1;
  else if (GET_MODE (x) == VOIDmode)
    return 0;

  /* Otherwise, check if we can make a valid insn from it.  First initialize
     our test insn if we haven't already.  */
  if (test_insn == 0)
    {
      test_insn
	= make_insn_raw (gen_rtx_SET (VOIDmode,
				      gen_rtx_REG (word_mode,
						   FIRST_PSEUDO_REGISTER * 2),
				      const0_rtx));
      NEXT_INSN (test_insn) = PREV_INSN (test_insn) = 0;
      ggc_add_rtx_root (&test_insn, 1);
    }

  /* Now make an insn like the one we would make when GCSE'ing and see if
     valid.  */
  PUT_MODE (SET_DEST (PATTERN (test_insn)), GET_MODE (x));
  SET_SRC (PATTERN (test_insn)) = x;
  return ((icode = recog (PATTERN (test_insn), test_insn, &num_clobbers)) >= 0
	  && (num_clobbers == 0 || ! added_clobbers_hard_reg_p (icode)));
}
/* Return non-zero if the operands of expression X are unchanged from the
   start of INSN's basic block up to but not including INSN (if AVAIL_P == 0),
   or from INSN to the end of INSN's basic block (if AVAIL_P != 0).  */

static int
oprs_unchanged_p (x, insn, avail_p)
     rtx x, insn;
     int avail_p;
{
  int i, j;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 1;

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      if (avail_p)
	return (reg_last_set[REGNO (x)] == NEVER_SET
		|| reg_last_set[REGNO (x)] < INSN_CUID (insn));
      else
	return (reg_first_set[REGNO (x)] == NEVER_SET
		|| reg_first_set[REGNO (x)] >= INSN_CUID (insn));

    case MEM:
      if (avail_p && mem_last_set != NEVER_SET
	  && mem_last_set >= INSN_CUID (insn))
	return 0;
      else if (! avail_p && mem_first_set != NEVER_SET
	       && mem_first_set < INSN_CUID (insn))
	return 0;
      else
	return oprs_unchanged_p (XEXP (x, 0), insn, avail_p);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      return 0;

    case PC:
    case CC0: /*FIXME*/
    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case SYMBOL_REF:
    case LABEL_REF:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 1;

    default:
      break;
    }

  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call needed at this
	     level, change it into iteration.  This function is called enough
	     to be worth it.  */
	  if (i == 0)
	    return oprs_unchanged_p (XEXP (x, i), insn, avail_p);

	  else if (! oprs_unchanged_p (XEXP (x, i), insn, avail_p))
	    return 0;
	}
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (! oprs_unchanged_p (XVECEXP (x, i, j), insn, avail_p))
	    return 0;
    }

  return 1;
}

/* Return non-zero if the operands of expression X are unchanged from
   the start of INSN's basic block up to but not including INSN.  */

static int
oprs_anticipatable_p (x, insn)
     rtx x, insn;
{
  return oprs_unchanged_p (x, insn, 0);
}

/* Return non-zero if the operands of expression X are unchanged from
   INSN to the end of INSN's basic block.  */

static int
oprs_available_p (x, insn)
     rtx x, insn;
{
  return oprs_unchanged_p (x, insn, 1);
}
/* Hash expression X.

   MODE is only used if X is a CONST_INT.  DO_NOT_RECORD_P is a boolean
   indicating if a volatile operand is found or if the expression contains
   something we don't want to insert in the table.

   ??? One might want to merge this with canon_hash.  Later.  */

static unsigned int
hash_expr (x, mode, do_not_record_p, hash_table_size)
     rtx x;
     enum machine_mode mode;
     int *do_not_record_p;
     int hash_table_size;
{
  unsigned int hash;

  *do_not_record_p = 0;

  hash = hash_expr_1 (x, mode, do_not_record_p);
  return hash % hash_table_size;
}
/* Hash a string.  Just add its bytes up.  */

static inline unsigned
hash_string_1 (ps)
     const char *ps;
{
  unsigned hash = 0;
  const unsigned char *p = (const unsigned char *) ps;

  if (p)
    while (*p)
      hash += *p++;

  return hash;
}
/* Subroutine of hash_expr to do the actual work.  */

static unsigned int
hash_expr_1 (x, mode, do_not_record_p)
     rtx x;
     enum machine_mode mode;
     int *do_not_record_p;
{
  int i, j;
  unsigned hash = 0;
  enum rtx_code code;
  const char *fmt;

  /* Used to turn recursion into iteration.  We can't rely on GCC's
     tail-recursion elimination since we need to keep accumulating values
     in HASH.  */

  if (x == 0)
    return hash;

 repeat:
  code = GET_CODE (x);
  switch (code)
    {
    case REG:
      hash += ((unsigned int) REG << 7) + REGNO (x);
      return hash;

    case CONST_INT:
      hash += (((unsigned int) CONST_INT << 7) + (unsigned int) mode
	       + (unsigned int) INTVAL (x));
      return hash;

    case CONST_DOUBLE:
      /* This is like the general case, except that it only counts
	 the integers representing the constant.  */
      hash += (unsigned int) code + (unsigned int) GET_MODE (x);
      if (GET_MODE (x) != VOIDmode)
	for (i = 2; i < GET_RTX_LENGTH (CONST_DOUBLE); i++)
	  hash += (unsigned int) XWINT (x, i);
      else
	hash += ((unsigned int) CONST_DOUBLE_LOW (x)
		 + (unsigned int) CONST_DOUBLE_HIGH (x));
      return hash;

      /* Assume there is only one rtx object for any given label.  */
    case LABEL_REF:
      /* We don't hash on the address of the CODE_LABEL to avoid bootstrap
	 differences and differences between each stage's debugging dumps.  */
      hash += (((unsigned int) LABEL_REF << 7)
	       + CODE_LABEL_NUMBER (XEXP (x, 0)));
      return hash;

    case SYMBOL_REF:
      {
	/* Don't hash on the symbol's address to avoid bootstrap differences.
	   Different hash values may cause expressions to be recorded in
	   different orders and thus different registers to be used in the
	   final assembler.  This also avoids differences in the dump files
	   between various stages.  */
	unsigned int h = 0;
	const unsigned char *p = (const unsigned char *) XSTR (x, 0);

	while (*p)
	  h += (h << 7) + *p++; /* ??? revisit */

	hash += ((unsigned int) SYMBOL_REF << 7) + h;
	return hash;
      }

    case MEM:
      if (MEM_VOLATILE_P (x))
	{
	  *do_not_record_p = 1;
	  return 0;
	}

      hash += (unsigned int) MEM;
      hash += MEM_ALIAS_SET (x);
      x = XEXP (x, 0);
      goto repeat;

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PC:
    case CC0:
    case CALL:
    case UNSPEC_VOLATILE:
      *do_not_record_p = 1;
      return 0;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
	{
	  *do_not_record_p = 1;
	  return 0;
	}
      else
	{
	  /* We don't want to take the filename and line into account.  */
	  hash += (unsigned) code + (unsigned) GET_MODE (x)
	    + hash_string_1 (ASM_OPERANDS_TEMPLATE (x))
	    + hash_string_1 (ASM_OPERANDS_OUTPUT_CONSTRAINT (x))
	    + (unsigned) ASM_OPERANDS_OUTPUT_IDX (x);

	  if (ASM_OPERANDS_INPUT_LENGTH (x))
	    {
	      for (i = 1; i < ASM_OPERANDS_INPUT_LENGTH (x); i++)
		{
		  hash += (hash_expr_1 (ASM_OPERANDS_INPUT (x, i),
					GET_MODE (ASM_OPERANDS_INPUT (x, i)),
					do_not_record_p)
			   + hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT
					    (x, i)));
		}

	      hash += hash_string_1 (ASM_OPERANDS_INPUT_CONSTRAINT (x, 0));
	      x = ASM_OPERANDS_INPUT (x, 0);
	      mode = GET_MODE (x);
	      goto repeat;
	    }

	  return hash;
	}

    default:
      break;
    }

  hash += (unsigned) code + (unsigned) GET_MODE (x);
  for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
    {
      if (fmt[i] == 'e')
	{
	  /* If we are about to do the last recursive call
	     needed at this level, change it into iteration.
	     This function is called enough to be worth it.  */
	  if (i == 0)
	    {
	      x = XEXP (x, i);
	      goto repeat;
	    }

	  hash += hash_expr_1 (XEXP (x, i), 0, do_not_record_p);
	  if (*do_not_record_p)
	    return 0;
	}

      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  {
	    hash += hash_expr_1 (XVECEXP (x, i, j), 0, do_not_record_p);
	    if (*do_not_record_p)
	      return 0;
	  }

      else if (fmt[i] == 's')
	hash += hash_string_1 (XSTR (x, i));
      else if (fmt[i] == 'i')
	hash += (unsigned int) XINT (x, i);
      else
	abort ();
    }

  return hash;
}
/* Hash a set of register REGNO.

   Sets are hashed on the register that is set.  This simplifies the PRE copy
   propagation code.

   ??? May need to make things more elaborate.  Later, as necessary.  */

static unsigned int
hash_set (regno, hash_table_size)
     int regno;
     int hash_table_size;
{
  unsigned int hash;

  hash = regno;
  return hash % hash_table_size;
}
/* Return non-zero if exp1 is equivalent to exp2.
   ??? Borrowed from cse.c.  Might want to remerge with cse.c.  Later.  */

static int
expr_equiv_p (x, y)
     rtx x, y;
{
  register int i, j;
  register enum rtx_code code;
  register const char *fmt;

  if (x == y)
    return 1;

  if (x == 0 || y == 0)
    return x == y;

  code = GET_CODE (x);
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */
  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
      return x == y;

    case CONST_INT:
      return INTVAL (x) == INTVAL (y);

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case REG:
      return REGNO (x) == REGNO (y);

    case MEM:
      /* Can't merge two expressions in different alias sets, since we can
	 decide that the expression is transparent in a block when it isn't,
	 due to it being set with a different alias set.  */
      if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
	return 0;
      break;

      /* For commutative operations, check both orders.  */
    case PLUS:
    case MULT:
    case AND:
    case IOR:
    case XOR:
    case NE:
    case EQ:
      return ((expr_equiv_p (XEXP (x, 0), XEXP (y, 0))
	       && expr_equiv_p (XEXP (x, 1), XEXP (y, 1)))
	      || (expr_equiv_p (XEXP (x, 0), XEXP (y, 1))
		  && expr_equiv_p (XEXP (x, 1), XEXP (y, 0))));

    case ASM_OPERANDS:
      /* We don't use the generic code below because we want to
	 disregard filename and line numbers.  */

      /* A volatile asm isn't equivalent to any other.  */
      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	return 0;

      if (GET_MODE (x) != GET_MODE (y)
	  || strcmp (ASM_OPERANDS_TEMPLATE (x), ASM_OPERANDS_TEMPLATE (y))
	  || strcmp (ASM_OPERANDS_OUTPUT_CONSTRAINT (x),
		     ASM_OPERANDS_OUTPUT_CONSTRAINT (y))
	  || ASM_OPERANDS_OUTPUT_IDX (x) != ASM_OPERANDS_OUTPUT_IDX (y)
	  || ASM_OPERANDS_INPUT_LENGTH (x) != ASM_OPERANDS_INPUT_LENGTH (y))
	return 0;

      if (ASM_OPERANDS_INPUT_LENGTH (x))
	{
	  for (i = ASM_OPERANDS_INPUT_LENGTH (x) - 1; i >= 0; i--)
	    if (! expr_equiv_p (ASM_OPERANDS_INPUT (x, i),
				ASM_OPERANDS_INPUT (y, i))
		|| strcmp (ASM_OPERANDS_INPUT_CONSTRAINT (x, i),
			   ASM_OPERANDS_INPUT_CONSTRAINT (y, i)))
	      return 0;
	}

      return 1;

    default:
      break;
    }

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'e':
	  if (! expr_equiv_p (XEXP (x, i), XEXP (y, i)))
	    return 0;
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (! expr_equiv_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
	      return 0;
	  break;

	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case '0':
	  break;

	default:
	  abort ();
	}
    }

  return 1;
}
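
/* Illustrative sketch tying hash_expr and expr_equiv_p together: a
   table lookup computes the bucket, then scans the chain with the
   structural equality test.  This mirrors what lookup_expr does later
   in the file; the helper name here is hypothetical.  */
#if 0
static struct expr *
find_expr_in_table (pat)
     rtx pat;
{
  int do_not_record_p;
  unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p,
				 expr_hash_table_size);
  struct expr *e;

  /* Expressions with volatile operands were never recorded.  */
  if (do_not_record_p)
    return NULL;

  for (e = expr_hash_table[hash]; e != NULL; e = e->next_same_hash)
    if (expr_equiv_p (e->expr, pat))
      return e;

  return NULL;
}
#endif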
1712 /* Insert expression X in INSN in the hash table.
1713 If it is already present, record it as the last occurrence in INSN's
1714 basic block.
1716 MODE is the mode of the value X is being stored into.
1717 It is only used if X is a CONST_INT.
1719 ANTIC_P is non-zero if X is an anticipatable expression.
1720 AVAIL_P is non-zero if X is an available expression. */
1722 static void
1723 insert_expr_in_table (x, mode, insn, antic_p, avail_p)
1724 rtx x;
1725 enum machine_mode mode;
1726 rtx insn;
1727 int antic_p, avail_p;
1729 int found, do_not_record_p;
1730 unsigned int hash;
1731 struct expr *cur_expr, *last_expr = NULL;
1732 struct occr *antic_occr, *avail_occr;
1733 struct occr *last_occr = NULL;
1735 hash = hash_expr (x, mode, &do_not_record_p, expr_hash_table_size);
1737 /* Do not insert expression in table if it contains volatile operands,
1738 or if hash_expr determines the expression is something we don't want
1739 to or can't handle. */
1740 if (do_not_record_p)
1741 return;
1743 cur_expr = expr_hash_table[hash];
1744 found = 0;
1746 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
1748 /* If the expression isn't found, save a pointer to the end of
1749 the list. */
1750 last_expr = cur_expr;
1751 cur_expr = cur_expr->next_same_hash;
1754 if (! found)
1756 cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
1757 bytes_used += sizeof (struct expr);
1758 if (expr_hash_table[hash] == NULL)
1759 /* This is the first pattern that hashed to this index. */
1760 expr_hash_table[hash] = cur_expr;
1761 else
1762 /* Add EXPR to end of this hash chain. */
1763 last_expr->next_same_hash = cur_expr;
1765 /* Set the fields of the expr element. */
1766 cur_expr->expr = x;
1767 cur_expr->bitmap_index = n_exprs++;
1768 cur_expr->next_same_hash = NULL;
1769 cur_expr->antic_occr = NULL;
1770 cur_expr->avail_occr = NULL;
1773 /* Now record the occurrence(s). */
1774 if (antic_p)
1776 antic_occr = cur_expr->antic_occr;
1778 /* Search for another occurrence in the same basic block. */
1779 while (antic_occr && BLOCK_NUM (antic_occr->insn) != BLOCK_NUM (insn))
1781 /* If an occurrence isn't found, save a pointer to the end of
1782 the list. */
1783 last_occr = antic_occr;
1784 antic_occr = antic_occr->next;
1787 if (antic_occr)
1788 /* Found another instance of the expression in the same basic block.
1789 Prefer the currently recorded one. We want the first one in the
1790 block and the block is scanned from start to end. */
1791 ; /* nothing to do */
1792 else
1794 /* First occurrence of this expression in this basic block. */
1795 antic_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
1796 bytes_used += sizeof (struct occr);
1797 /* First occurrence of this expression in any block? */
1798 if (cur_expr->antic_occr == NULL)
1799 cur_expr->antic_occr = antic_occr;
1800 else
1801 last_occr->next = antic_occr;
1803 antic_occr->insn = insn;
1804 antic_occr->next = NULL;
1808 if (avail_p)
1810 avail_occr = cur_expr->avail_occr;
1812 /* Search for another occurrence in the same basic block. */
1813 while (avail_occr && BLOCK_NUM (avail_occr->insn) != BLOCK_NUM (insn))
1815 /* If an occurrence isn't found, save a pointer to the end of
1816 the list. */
1817 last_occr = avail_occr;
1818 avail_occr = avail_occr->next;
1821 if (avail_occr)
1822 /* Found another instance of the expression in the same basic block.
1823 Prefer this occurrence to the currently recorded one. We want
1824 the last one in the block and the block is scanned from start
1825 to end. */
1826 avail_occr->insn = insn;
1827 else
1829 /* First occurrence of this expression in this basic block. */
1830 avail_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
1831 bytes_used += sizeof (struct occr);
1833 /* First occurrence of this expression in any block? */
1834 if (cur_expr->avail_occr == NULL)
1835 cur_expr->avail_occr = avail_occr;
1836 else
1837 last_occr->next = avail_occr;
1839 avail_occr->insn = insn;
1840 avail_occr->next = NULL;
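/* A minimal stand-alone model (not part of the pass) of the occurrence
   policy implemented above: each list keeps at most one occurrence per
   basic block; anticipatable lists keep the FIRST occurrence seen in a
   block, available lists keep the LAST, because blocks are scanned from
   start to end.  The toy type and function names are invented for this
   sketch.  */
#if 0
#include <stdlib.h>

struct toy_occr
{
  int block;
  int insn_uid;
  struct toy_occr *next;
};

/* Record INSN_UID occurring in BLOCK on *LIST.  If PREFER_LAST is
   nonzero the newest occurrence in a block replaces the recorded one
   (availability); otherwise the first one recorded is kept
   (anticipatability).  */
static void
toy_record_occr (struct toy_occr **list, int block, int insn_uid,
		 int prefer_last)
{
  struct toy_occr *o = *list, *last = NULL;

  while (o != NULL && o->block != block)
    {
      last = o;
      o = o->next;
    }

  if (o != NULL)
    {
      if (prefer_last)
	o->insn_uid = insn_uid;
      return;
    }

  o = (struct toy_occr *) malloc (sizeof (struct toy_occr));
  o->block = block;
  o->insn_uid = insn_uid;
  o->next = NULL;
  if (last != NULL)
    last->next = o;
  else
    *list = o;
}
#endif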
1845 /* Insert pattern X in INSN in the hash table.
1846 X is a SET of a reg to either another reg or a constant.
1847 If it is already present, record it as the last occurrence in INSN's
1848 basic block. */
1850 static void
1851 insert_set_in_table (x, insn)
1852 rtx x;
1853 rtx insn;
1855 int found;
1856 unsigned int hash;
1857 struct expr *cur_expr, *last_expr = NULL;
1858 struct occr *cur_occr, *last_occr = NULL;
1860 if (GET_CODE (x) != SET
1861 || GET_CODE (SET_DEST (x)) != REG)
1862 abort ();
1864 hash = hash_set (REGNO (SET_DEST (x)), set_hash_table_size);
1866 cur_expr = set_hash_table[hash];
1867 found = 0;
1869 while (cur_expr && 0 == (found = expr_equiv_p (cur_expr->expr, x)))
1871 /* If the expression isn't found, save a pointer to the end of
1872 the list. */
1873 last_expr = cur_expr;
1874 cur_expr = cur_expr->next_same_hash;
1877 if (! found)
1879 cur_expr = (struct expr *) gcse_alloc (sizeof (struct expr));
1880 bytes_used += sizeof (struct expr);
1881 if (set_hash_table[hash] == NULL)
1882 /* This is the first pattern that hashed to this index. */
1883 set_hash_table[hash] = cur_expr;
1884 else
1885 /* Add EXPR to end of this hash chain. */
1886 last_expr->next_same_hash = cur_expr;
1888 /* Set the fields of the expr element.
1889 We must copy X because it can be modified when copy propagation is
1890 performed on its operands. */
1891 cur_expr->expr = copy_rtx (x);
1892 cur_expr->bitmap_index = n_sets++;
1893 cur_expr->next_same_hash = NULL;
1894 cur_expr->antic_occr = NULL;
1895 cur_expr->avail_occr = NULL;
1898 /* Now record the occurrence. */
1899 cur_occr = cur_expr->avail_occr;
1901 /* Search for another occurrence in the same basic block. */
1902 while (cur_occr && BLOCK_NUM (cur_occr->insn) != BLOCK_NUM (insn))
1904 /* If an occurrence isn't found, save a pointer to the end of
1905 the list. */
1906 last_occr = cur_occr;
1907 cur_occr = cur_occr->next;
1910 if (cur_occr)
1911 /* Found another instance of the expression in the same basic block.
1912 Prefer this occurrence to the currently recorded one. We want the
1913 last one in the block and the block is scanned from start to end. */
1914 cur_occr->insn = insn;
1915 else
1917 /* First occurrence of this expression in this basic block. */
1918 cur_occr = (struct occr *) gcse_alloc (sizeof (struct occr));
1919 bytes_used += sizeof (struct occr);
1921 /* First occurrence of this expression in any block? */
1922 if (cur_expr->avail_occr == NULL)
1923 cur_expr->avail_occr = cur_occr;
1924 else
1925 last_occr->next = cur_occr;
1927 cur_occr->insn = insn;
1928 cur_occr->next = NULL;
1932 /* Scan pattern PAT of INSN and add an entry to the hash table. If SET_P is
1933 non-zero, this is for the assignment hash table, otherwise it is for the
1934 expression hash table. */
1936 static void
1937 hash_scan_set (pat, insn, set_p)
1938 rtx pat, insn;
1939 int set_p;
1941 rtx src = SET_SRC (pat);
1942 rtx dest = SET_DEST (pat);
1943 rtx note;
1945 if (GET_CODE (src) == CALL)
1946 hash_scan_call (src, insn);
1948 else if (GET_CODE (dest) == REG)
1950 unsigned int regno = REGNO (dest);
1951 rtx tmp;
1953 /* If this is a single set and we are doing constant propagation,
1954 see if a REG_NOTE shows this equivalent to a constant. */
1955 if (set_p && (note = find_reg_equal_equiv_note (insn)) != 0
1956 && CONSTANT_P (XEXP (note, 0)))
1957 src = XEXP (note, 0), pat = gen_rtx_SET (VOIDmode, dest, src);
1959 /* Only record sets of pseudo-regs in the hash table. */
1960 if (! set_p
1961 && regno >= FIRST_PSEUDO_REGISTER
1962 /* Don't GCSE something if we can't do a reg/reg copy. */
1963 && can_copy_p [GET_MODE (dest)]
1964 /* Is SET_SRC something we want to gcse? */
1965 && want_to_gcse_p (src)
1966 /* Don't CSE a nop. */
1967 && src != dest)
1969 /* An expression is not anticipatable if its operands are
1970 modified before this insn or if this is not the only SET in
1971 this insn. */
1972 int antic_p = oprs_anticipatable_p (src, insn) && single_set (insn);
1973 /* An expression is not available if its operands are
1974 subsequently modified, including this insn. */
1975 int avail_p = oprs_available_p (src, insn);
1977 insert_expr_in_table (src, GET_MODE (dest), insn, antic_p, avail_p);
1980 /* Record sets for constant/copy propagation. */
1981 else if (set_p
1982 && regno >= FIRST_PSEUDO_REGISTER
1983 && ((GET_CODE (src) == REG
1984 && REGNO (src) >= FIRST_PSEUDO_REGISTER
1985 && can_copy_p [GET_MODE (dest)]
1986 && REGNO (src) != regno)
1987 || GET_CODE (src) == CONST_INT
1988 || GET_CODE (src) == SYMBOL_REF
1989 || GET_CODE (src) == CONST_DOUBLE)
1990 /* A copy is not available if its src or dest is subsequently
1991 modified. Here we want to search from INSN+1 on, but
1992 oprs_available_p searches from INSN on. */
1993 && (insn == BLOCK_END (BLOCK_NUM (insn))
1994 || ((tmp = next_nonnote_insn (insn)) != NULL_RTX
1995 && oprs_available_p (pat, tmp))))
1996 insert_set_in_table (pat, insn);
2000 static void
2001 hash_scan_clobber (x, insn)
2002 rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
2004 /* Currently nothing to do. */
2007 static void
2008 hash_scan_call (x, insn)
2009 rtx x ATTRIBUTE_UNUSED, insn ATTRIBUTE_UNUSED;
2011 /* Currently nothing to do. */
2014 /* Process INSN and add hash table entries as appropriate.
2016 Only available expressions that set a single pseudo-reg are recorded.
2018 Single sets in a PARALLEL could be handled, but it's an extra complication
2019 that isn't dealt with right now. The trick is handling the CLOBBERs that
2020 are also in the PARALLEL. Later.
2022 If SET_P is non-zero, this is for the assignment hash table,
2023 otherwise it is for the expression hash table.
2024 If IN_LIBCALL_BLOCK nonzero, we are in a libcall block, and should
2025 not record any expressions. */
2027 static void
2028 hash_scan_insn (insn, set_p, in_libcall_block)
2029 rtx insn;
2030 int set_p;
2031 int in_libcall_block;
2033 rtx pat = PATTERN (insn);
2034 int i;
2036 if (in_libcall_block)
2037 return;
2039 /* Pick out the sets of INSN and for other forms of instructions record
2040 what's been modified. */
2042 if (GET_CODE (pat) == SET)
2043 hash_scan_set (pat, insn, set_p);
2044 else if (GET_CODE (pat) == PARALLEL)
2045 for (i = 0; i < XVECLEN (pat, 0); i++)
2047 rtx x = XVECEXP (pat, 0, i);
2049 if (GET_CODE (x) == SET)
2050 hash_scan_set (x, insn, set_p);
2051 else if (GET_CODE (x) == CLOBBER)
2052 hash_scan_clobber (x, insn);
2053 else if (GET_CODE (x) == CALL)
2054 hash_scan_call (x, insn);
2057 else if (GET_CODE (pat) == CLOBBER)
2058 hash_scan_clobber (pat, insn);
2059 else if (GET_CODE (pat) == CALL)
2060 hash_scan_call (pat, insn);
2063 static void
2064 dump_hash_table (file, name, table, table_size, total_size)
2065 FILE *file;
2066 const char *name;
2067 struct expr **table;
2068 int table_size, total_size;
2070 int i;
2071 /* Flattened out table, so it's printed in proper order. */
2072 struct expr **flat_table;
2073 unsigned int *hash_val;
2074 struct expr *expr;
2076 flat_table
2077 = (struct expr **) xcalloc (total_size, sizeof (struct expr *));
2078 hash_val = (unsigned int *) xmalloc (total_size * sizeof (unsigned int));
2080 for (i = 0; i < table_size; i++)
2081 for (expr = table[i]; expr != NULL; expr = expr->next_same_hash)
2083 flat_table[expr->bitmap_index] = expr;
2084 hash_val[expr->bitmap_index] = i;
2087 fprintf (file, "%s hash table (%d buckets, %d entries)\n",
2088 name, table_size, total_size);
2090 for (i = 0; i < total_size; i++)
2091 if (flat_table[i] != 0)
2093 expr = flat_table[i];
2094 fprintf (file, "Index %d (hash value %d)\n ",
2095 expr->bitmap_index, hash_val[i]);
2096 print_rtl (file, expr->expr);
2097 fprintf (file, "\n");
2100 fprintf (file, "\n");
2102 free (flat_table);
2103 free (hash_val);
2106 /* Record register first/last/block set information for REGNO in INSN.
2108 reg_first_set records the first place in the block where the register
2109 is set and is used to compute "anticipatability".
2111 reg_last_set records the last place in the block where the register
2112 is set and is used to compute "availability".
2114 reg_set_in_block records whether the register is set in the block
2115 and is used to compute "transparency". */
2117 static void
2118 record_last_reg_set_info (insn, regno)
2119 rtx insn;
2120 int regno;
2122 if (reg_first_set[regno] == NEVER_SET)
2123 reg_first_set[regno] = INSN_CUID (insn);
2125 reg_last_set[regno] = INSN_CUID (insn);
2126 SET_BIT (reg_set_in_block[BLOCK_NUM (insn)], regno);
2129 /* Record memory first/last/block set information for INSN. */
2131 static void
2132 record_last_mem_set_info (insn)
2133 rtx insn;
2135 if (mem_first_set == NEVER_SET)
2136 mem_first_set = INSN_CUID (insn);
2138 mem_last_set = INSN_CUID (insn);
2139 mem_set_in_block[BLOCK_NUM (insn)] = 1;
2142 /* Called from compute_hash_table via note_stores to handle one
2143 SET or CLOBBER in an insn. DATA is really the instruction in which
2144 the SET is taking place. */
2146 static void
2147 record_last_set_info (dest, setter, data)
2148 rtx dest, setter ATTRIBUTE_UNUSED;
2149 void *data;
2151 rtx last_set_insn = (rtx) data;
2153 if (GET_CODE (dest) == SUBREG)
2154 dest = SUBREG_REG (dest);
2156 if (GET_CODE (dest) == REG)
2157 record_last_reg_set_info (last_set_insn, REGNO (dest));
2158 else if (GET_CODE (dest) == MEM
2159 /* Ignore pushes, they clobber nothing. */
2160 && ! push_operand (dest, GET_MODE (dest)))
2161 record_last_mem_set_info (last_set_insn);
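/* Sketch (with invented names) of how the first/last CUIDs recorded
   above are consumed: roughly, oprs_anticipatable_p wants each operand
   register unmodified BEFORE the insn in its block, and oprs_available_p
   wants it unmodified FROM the insn to the block end.  TOY_NEVER_SET
   stands in for this file's NEVER_SET marker.  */
#if 0
#define TOY_NEVER_SET (-1)

/* An operand first set at FIRST_SET is unchanged before INSN_CUID,
   as anticipatability requires.  */
static int
toy_unchanged_before_p (int insn_cuid, int first_set)
{
  return first_set == TOY_NEVER_SET || first_set >= insn_cuid;
}

/* An operand last set at LAST_SET is unchanged from INSN_CUID to the
   end of the block, as availability requires.  */
static int
toy_unchanged_after_p (int insn_cuid, int last_set)
{
  return last_set == TOY_NEVER_SET || last_set < insn_cuid;
}
#endif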
2164 /* Top level function to create an expression or assignment hash table.
2166 Expression entries are placed in the hash table if
2167 - they are of the form (set (pseudo-reg) src),
2168 - src is something we want to perform GCSE on,
2169 - none of the operands are subsequently modified in the block
2171 Assignment entries are placed in the hash table if
2172 - they are of the form (set (pseudo-reg) src),
2173 - src is something we want to perform const/copy propagation on,
2174 - none of the operands or target are subsequently modified in the block
2176 Currently src must be a pseudo-reg or a const_int.
2178 F is the first insn.
2179 SET_P is non-zero for computing the assignment hash table. */
2181 static void
2182 compute_hash_table (set_p)
2183 int set_p;
2185 int bb;
2187 /* While we compute the hash table we also compute a bit array of which
2188 registers are set in which blocks.
2189 We also compute which blocks set memory, in the absence of aliasing
2190 support [which is TODO].
2191 ??? This isn't needed during const/copy propagation, but it's cheap to
2192 compute. Later. */
2193 sbitmap_vector_zero (reg_set_in_block, n_basic_blocks);
2194 memset ((char *) mem_set_in_block, 0, n_basic_blocks);
2196 /* Some working arrays used to track first and last set in each block. */
2197 /* ??? One could use alloca here, but at some size a threshold is crossed
2198 beyond which one should use malloc. Are we at that threshold here? */
2199 reg_first_set = (int *) gmalloc (max_gcse_regno * sizeof (int));
2200 reg_last_set = (int *) gmalloc (max_gcse_regno * sizeof (int));
2202 for (bb = 0; bb < n_basic_blocks; bb++)
2204 rtx insn;
2205 unsigned int regno;
2206 int in_libcall_block;
2207 unsigned int i;
2209 /* First pass over the instructions records information used to
2210 determine when registers and memory are first and last set.
2211 ??? The mem_set_in_block and hard-reg reg_set_in_block computation
2212 could be moved to compute_sets since they currently don't change. */
2214 for (i = 0; i < max_gcse_regno; i++)
2215 reg_first_set[i] = reg_last_set[i] = NEVER_SET;
2217 mem_first_set = NEVER_SET;
2218 mem_last_set = NEVER_SET;
2220 for (insn = BLOCK_HEAD (bb);
2221 insn && insn != NEXT_INSN (BLOCK_END (bb));
2222 insn = NEXT_INSN (insn))
2224 #ifdef NON_SAVING_SETJMP
2225 if (NON_SAVING_SETJMP && GET_CODE (insn) == NOTE
2226 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_SETJMP)
2228 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2229 record_last_reg_set_info (insn, regno);
2230 continue;
2232 #endif
2234 if (! INSN_P (insn))
2235 continue;
2237 if (GET_CODE (insn) == CALL_INSN)
2239 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2240 if ((call_used_regs[regno]
2241 && regno != STACK_POINTER_REGNUM
2242 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2243 && regno != HARD_FRAME_POINTER_REGNUM
2244 #endif
2245 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
2246 && ! (regno == ARG_POINTER_REGNUM && fixed_regs[regno])
2247 #endif
2248 #if !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
2249 && ! (regno == PIC_OFFSET_TABLE_REGNUM && flag_pic)
2250 #endif
2252 && regno != FRAME_POINTER_REGNUM)
2253 || global_regs[regno])
2254 record_last_reg_set_info (insn, regno);
2256 if (! CONST_CALL_P (insn))
2257 record_last_mem_set_info (insn);
2260 note_stores (PATTERN (insn), record_last_set_info, insn);
2263 /* The next pass builds the hash table. */
2265 for (insn = BLOCK_HEAD (bb), in_libcall_block = 0;
2266 insn && insn != NEXT_INSN (BLOCK_END (bb));
2267 insn = NEXT_INSN (insn))
2268 if (INSN_P (insn))
2270 if (find_reg_note (insn, REG_LIBCALL, NULL_RTX))
2271 in_libcall_block = 1;
2272 else if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
2273 in_libcall_block = 0;
2274 hash_scan_insn (insn, set_p, in_libcall_block);
2278 free (reg_first_set);
2279 free (reg_last_set);
2281 /* Catch bugs early. */
2282 reg_first_set = reg_last_set = 0;
2285 /* Allocate space for the set hash table.
2286 N_INSNS is the number of instructions in the function.
2287 It is used to determine the number of buckets to use. */
2289 static void
2290 alloc_set_hash_table (n_insns)
2291 int n_insns;
2293 int n;
2295 set_hash_table_size = n_insns / 4;
2296 if (set_hash_table_size < 11)
2297 set_hash_table_size = 11;
2299 /* Attempt to maintain efficient use of hash table.
2300 Making it an odd number is simplest for now.
2301 ??? Later take some measurements. */
2302 set_hash_table_size |= 1;
2303 n = set_hash_table_size * sizeof (struct expr *);
2304 set_hash_table = (struct expr **) gmalloc (n);
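/* The sizing rule above in isolation (a sketch; names are invented).
   Dividing the insn count keeps chains short without a huge table,
   the clamp handles tiny functions, and forcing the count odd spreads
   hash values a little better across buckets.  */
#if 0
static int
toy_table_size (int n_insns, int divisor)
{
  int size = n_insns / divisor;	/* 4 for the set table, 2 for exprs */

  if (size < 11)
    size = 11;

  return size | 1;	/* e.g. 100/4 -> 25; 8/4 -> 2 -> clamped to 11 */
}
#endif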
2307 /* Free things allocated by alloc_set_hash_table. */
2309 static void
2310 free_set_hash_table ()
2312 free (set_hash_table);
2315 /* Compute the hash table for doing copy/const propagation. */
2317 static void
2318 compute_set_hash_table ()
2320 /* Initialize count of number of entries in hash table. */
2321 n_sets = 0;
2322 memset ((char *) set_hash_table, 0,
2323 set_hash_table_size * sizeof (struct expr *));
2325 compute_hash_table (1);
2328 /* Allocate space for the expression hash table.
2329 N_INSNS is the number of instructions in the function.
2330 It is used to determine the number of buckets to use. */
2332 static void
2333 alloc_expr_hash_table (n_insns)
2334 unsigned int n_insns;
2336 int n;
2338 expr_hash_table_size = n_insns / 2;
2339 /* Make sure the amount is usable. */
2340 if (expr_hash_table_size < 11)
2341 expr_hash_table_size = 11;
2343 /* Attempt to maintain efficient use of hash table.
2344 Making it an odd number is simplest for now.
2345 ??? Later take some measurements. */
2346 expr_hash_table_size |= 1;
2347 n = expr_hash_table_size * sizeof (struct expr *);
2348 expr_hash_table = (struct expr **) gmalloc (n);
2351 /* Free things allocated by alloc_expr_hash_table. */
2353 static void
2354 free_expr_hash_table ()
2356 free (expr_hash_table);
2359 /* Compute the hash table for doing GCSE. */
2361 static void
2362 compute_expr_hash_table ()
2364 /* Initialize count of number of entries in hash table. */
2365 n_exprs = 0;
2366 memset ((char *) expr_hash_table, 0,
2367 expr_hash_table_size * sizeof (struct expr *));
2369 compute_hash_table (0);
2372 /* Expression tracking support. */
2374 /* Lookup pattern PAT in the expression table.
2375 The result is a pointer to the table entry, or NULL if not found. */
2377 static struct expr *
2378 lookup_expr (pat)
2379 rtx pat;
2381 int do_not_record_p;
2382 unsigned int hash = hash_expr (pat, GET_MODE (pat), &do_not_record_p,
2383 expr_hash_table_size);
2384 struct expr *expr;
2386 if (do_not_record_p)
2387 return NULL;
2389 expr = expr_hash_table[hash];
2391 while (expr && ! expr_equiv_p (expr->expr, pat))
2392 expr = expr->next_same_hash;
2394 return expr;
2397 /* Lookup REGNO in the set table. If PAT is non-NULL look for the entry that
2398 matches it, otherwise return the first entry for REGNO. The result is a
2399 pointer to the table entry, or NULL if not found. */
2401 static struct expr *
2402 lookup_set (regno, pat)
2403 unsigned int regno;
2404 rtx pat;
2406 unsigned int hash = hash_set (regno, set_hash_table_size);
2407 struct expr *expr;
2409 expr = set_hash_table[hash];
2411 if (pat)
2413 while (expr && ! expr_equiv_p (expr->expr, pat))
2414 expr = expr->next_same_hash;
2416 else
2418 while (expr && REGNO (SET_DEST (expr->expr)) != regno)
2419 expr = expr->next_same_hash;
2422 return expr;
2425 /* Return the next entry for REGNO in list EXPR. */
2427 static struct expr *
2428 next_set (regno, expr)
2429 unsigned int regno;
2430 struct expr *expr;
2433 expr = expr->next_same_hash;
2434 while (expr && REGNO (SET_DEST (expr->expr)) != regno);
2436 return expr;
2439 /* Reset tables used to keep track of what's still available [since the
2440 start of the block]. */
2442 static void
2443 reset_opr_set_tables ()
2445 /* Maintain a bitmap of which regs have been set since beginning of
2446 the block. */
2447 sbitmap_zero (reg_set_bitmap);
2449 /* Also keep a record of the last instruction to modify memory.
2450 For now this is very trivial, we only record whether any memory
2451 location has been modified. */
2452 mem_last_set = 0;
2455 /* Return non-zero if the operands of X are not set before INSN in
2456 INSN's basic block. */
2458 static int
2459 oprs_not_set_p (x, insn)
2460 rtx x, insn;
2462 int i, j;
2463 enum rtx_code code;
2464 const char *fmt;
2466 if (x == 0)
2467 return 1;
2469 code = GET_CODE (x);
2470 switch (code)
2472 case PC:
2473 case CC0:
2474 case CONST:
2475 case CONST_INT:
2476 case CONST_DOUBLE:
2477 case SYMBOL_REF:
2478 case LABEL_REF:
2479 case ADDR_VEC:
2480 case ADDR_DIFF_VEC:
2481 return 1;
2483 case MEM:
2484 if (mem_last_set != 0)
2485 return 0;
2486 else
2487 return oprs_not_set_p (XEXP (x, 0), insn);
2489 case REG:
2490 return ! TEST_BIT (reg_set_bitmap, REGNO (x));
2492 default:
2493 break;
2496 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2498 if (fmt[i] == 'e')
2500 /* If we are about to do the last recursive call
2501 needed at this level, change it into iteration.
2502 This function is called enough to be worth it. */
2503 if (i == 0)
2504 return oprs_not_set_p (XEXP (x, i), insn);
2506 if (! oprs_not_set_p (XEXP (x, i), insn))
2507 return 0;
2509 else if (fmt[i] == 'E')
2510 for (j = 0; j < XVECLEN (x, i); j++)
2511 if (! oprs_not_set_p (XVECEXP (x, i, j), insn))
2512 return 0;
2515 return 1;
2518 /* Mark things set by a CALL. */
2520 static void
2521 mark_call (insn)
2522 rtx insn;
2524 mem_last_set = INSN_CUID (insn);
2527 /* Mark things set by a SET. */
2529 static void
2530 mark_set (pat, insn)
2531 rtx pat, insn;
2533 rtx dest = SET_DEST (pat);
2535 while (GET_CODE (dest) == SUBREG
2536 || GET_CODE (dest) == ZERO_EXTRACT
2537 || GET_CODE (dest) == SIGN_EXTRACT
2538 || GET_CODE (dest) == STRICT_LOW_PART)
2539 dest = XEXP (dest, 0);
2541 if (GET_CODE (dest) == REG)
2542 SET_BIT (reg_set_bitmap, REGNO (dest));
2543 else if (GET_CODE (dest) == MEM)
2544 mem_last_set = INSN_CUID (insn);
2546 if (GET_CODE (SET_SRC (pat)) == CALL)
2547 mark_call (insn);
2550 /* Record things set by a CLOBBER. */
2552 static void
2553 mark_clobber (pat, insn)
2554 rtx pat, insn;
2556 rtx clob = XEXP (pat, 0);
2558 while (GET_CODE (clob) == SUBREG || GET_CODE (clob) == STRICT_LOW_PART)
2559 clob = XEXP (clob, 0);
2561 if (GET_CODE (clob) == REG)
2562 SET_BIT (reg_set_bitmap, REGNO (clob));
2563 else
2564 mem_last_set = INSN_CUID (insn);
2567 /* Record things set by INSN.
2568 This data is used by oprs_not_set_p. */
2570 static void
2571 mark_oprs_set (insn)
2572 rtx insn;
2574 rtx pat = PATTERN (insn);
2575 int i;
2577 if (GET_CODE (pat) == SET)
2578 mark_set (pat, insn);
2579 else if (GET_CODE (pat) == PARALLEL)
2580 for (i = 0; i < XVECLEN (pat, 0); i++)
2582 rtx x = XVECEXP (pat, 0, i);
2584 if (GET_CODE (x) == SET)
2585 mark_set (x, insn);
2586 else if (GET_CODE (x) == CLOBBER)
2587 mark_clobber (x, insn);
2588 else if (GET_CODE (x) == CALL)
2589 mark_call (insn);
2592 else if (GET_CODE (pat) == CLOBBER)
2593 mark_clobber (pat, insn);
2594 else if (GET_CODE (pat) == CALL)
2595 mark_call (insn);
2599 /* Classic GCSE reaching definition support. */
2601 /* Allocate reaching def variables. */
2603 static void
2604 alloc_rd_mem (n_blocks, n_insns)
2605 int n_blocks, n_insns;
2607 rd_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2608 sbitmap_vector_zero (rd_kill, n_basic_blocks);
2610 rd_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2611 sbitmap_vector_zero (rd_gen, n_basic_blocks);
2613 reaching_defs = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2614 sbitmap_vector_zero (reaching_defs, n_basic_blocks);
2616 rd_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_insns);
2617 sbitmap_vector_zero (rd_out, n_basic_blocks);
2620 /* Free reaching def variables. */
2622 static void
2623 free_rd_mem ()
2625 free (rd_kill);
2626 free (rd_gen);
2627 free (reaching_defs);
2628 free (rd_out);
2631 /* Add INSN to the kills of BB. REGNO, set in BB, is killed by INSN. */
2633 static void
2634 handle_rd_kill_set (insn, regno, bb)
2635 rtx insn;
2636 int regno, bb;
2638 struct reg_set *this_reg;
2640 for (this_reg = reg_set_table[regno]; this_reg; this_reg = this_reg ->next)
2641 if (BLOCK_NUM (this_reg->insn) != BLOCK_NUM (insn))
2642 SET_BIT (rd_kill[bb], INSN_CUID (this_reg->insn));
2645 /* Compute the set of kills for reaching definitions. */
2647 static void
2648 compute_kill_rd ()
2650 int bb, cuid;
2651 unsigned int regno;
2652 int i;
2654 /* For each block
2655 For each set bit in `gen' of the block (i.e each insn which
2656 generates a definition in the block)
2657 Call the reg set by the insn corresponding to that bit regx
2658 Look at the linked list starting at reg_set_table[regx]
2659 For each setting of regx in the linked list, which is not in
2660 this block
2661 Set the bit in `kill' corresponding to that insn. */
2662 for (bb = 0; bb < n_basic_blocks; bb++)
2663 for (cuid = 0; cuid < max_cuid; cuid++)
2664 if (TEST_BIT (rd_gen[bb], cuid))
2666 rtx insn = CUID_INSN (cuid);
2667 rtx pat = PATTERN (insn);
2669 if (GET_CODE (insn) == CALL_INSN)
2671 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2673 if ((call_used_regs[regno]
2674 && regno != STACK_POINTER_REGNUM
2675 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
2676 && regno != HARD_FRAME_POINTER_REGNUM
2677 #endif
2678 #if ARG_POINTER_REGNUM != FRAME_POINTER_REGNUM
2679 && ! (regno == ARG_POINTER_REGNUM
2680 && fixed_regs[regno])
2681 #endif
2682 #if !defined (PIC_OFFSET_TABLE_REG_CALL_CLOBBERED)
2683 && ! (regno == PIC_OFFSET_TABLE_REGNUM && flag_pic)
2684 #endif
2685 && regno != FRAME_POINTER_REGNUM)
2686 || global_regs[regno])
2687 handle_rd_kill_set (insn, regno, bb);
2691 if (GET_CODE (pat) == PARALLEL)
2693 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
2695 enum rtx_code code = GET_CODE (XVECEXP (pat, 0, i));
2697 if ((code == SET || code == CLOBBER)
2698 && GET_CODE (XEXP (XVECEXP (pat, 0, i), 0)) == REG)
2699 handle_rd_kill_set (insn,
2700 REGNO (XEXP (XVECEXP (pat, 0, i), 0)),
2701 bb);
2704 else if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == REG)
2705 /* Each setting of this register outside of this block
2706 must be marked in the set of kills in this block. */
2707 handle_rd_kill_set (insn, REGNO (SET_DEST (pat)), bb);
2711 /* Compute the reaching definitions as in
2712 Compilers Principles, Techniques, and Tools. Aho, Sethi, Ullman,
2713 Chapter 10. It is the same algorithm as used for computing available
2714 expressions but applied to the gens and kills of reaching definitions. */
2716 static void
2717 compute_rd ()
2719 int bb, changed, passes;
2721 for (bb = 0; bb < n_basic_blocks; bb++)
2722 sbitmap_copy (rd_out[bb] /*dst*/, rd_gen[bb] /*src*/);
2724 passes = 0;
2725 changed = 1;
2726 while (changed)
2728 changed = 0;
2729 for (bb = 0; bb < n_basic_blocks; bb++)
2731 sbitmap_union_of_preds (reaching_defs[bb], rd_out, bb);
2732 changed |= sbitmap_union_of_diff (rd_out[bb], rd_gen[bb],
2733 reaching_defs[bb], rd_kill[bb]);
2735 passes++;
2738 if (gcse_file)
2739 fprintf (gcse_file, "reaching def computation: %d passes\n", passes);
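/* A compact stand-alone model (invented names, not part of the pass) of
   the fixpoint just computed, for at most 32 definitions packed into an
   unsigned long per block.  PRED[b] lists up to three predecessor block
   indices, terminated by -1.  The equations are the classic
   reaching-definition ones:
       in[b]  = union over predecessors p of out[p]
       out[b] = gen[b] | (in[b] & ~kill[b])
   iterated until no out[] changes.  */
#if 0
static void
toy_compute_rd (int n_blocks, int pred[][4],
		unsigned long *gen, unsigned long *kill,
		unsigned long *in, unsigned long *out)
{
  int b, i, changed;

  for (b = 0; b < n_blocks; b++)
    out[b] = gen[b];

  do
    {
      changed = 0;
      for (b = 0; b < n_blocks; b++)
	{
	  unsigned long new_in = 0, new_out;

	  for (i = 0; pred[b][i] >= 0; i++)
	    new_in |= out[pred[b][i]];

	  new_out = gen[b] | (new_in & ~kill[b]);
	  in[b] = new_in;
	  if (new_out != out[b])
	    {
	      out[b] = new_out;
	      changed = 1;
	    }
	}
    }
  while (changed);
}
#endif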
2742 /* Classic GCSE available expression support. */
2744 /* Allocate memory for available expression computation. */
2746 static void
2747 alloc_avail_expr_mem (n_blocks, n_exprs)
2748 int n_blocks, n_exprs;
2750 ae_kill = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
2751 sbitmap_vector_zero (ae_kill, n_basic_blocks);
2753 ae_gen = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
2754 sbitmap_vector_zero (ae_gen, n_basic_blocks);
2756 ae_in = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
2757 sbitmap_vector_zero (ae_in, n_basic_blocks);
2759 ae_out = (sbitmap *) sbitmap_vector_alloc (n_blocks, n_exprs);
2760 sbitmap_vector_zero (ae_out, n_basic_blocks);
2763 static void
2764 free_avail_expr_mem ()
2766 free (ae_kill);
2767 free (ae_gen);
2768 free (ae_in);
2769 free (ae_out);
2772 /* Compute the set of available expressions generated in each basic block. */
2774 static void
2775 compute_ae_gen ()
2777 unsigned int i;
2778 struct expr *expr;
2779 struct occr *occr;
2781 /* For each recorded occurrence of each expression, set ae_gen[bb][expr].
2782 This is all we have to do because an expression is not recorded if it
2783 is not available, and the only expressions we want to work with are the
2784 ones that are recorded. */
2785 for (i = 0; i < expr_hash_table_size; i++)
2786 for (expr = expr_hash_table[i]; expr != 0; expr = expr->next_same_hash)
2787 for (occr = expr->avail_occr; occr != 0; occr = occr->next)
2788 SET_BIT (ae_gen[BLOCK_NUM (occr->insn)], expr->bitmap_index);
2791 /* Return non-zero if expression X is killed in BB. */
2793 static int
2794 expr_killed_p (x, bb)
2795 rtx x;
2796 int bb;
2798 int i, j;
2799 enum rtx_code code;
2800 const char *fmt;
2802 if (x == 0)
2803 return 1;
2805 code = GET_CODE (x);
2806 switch (code)
2808 case REG:
2809 return TEST_BIT (reg_set_in_block[bb], REGNO (x));
2811 case MEM:
2812 if (mem_set_in_block[bb])
2813 return 1;
2814 else
2815 return expr_killed_p (XEXP (x, 0), bb);
2817 case PC:
2818 case CC0: /*FIXME*/
2819 case CONST:
2820 case CONST_INT:
2821 case CONST_DOUBLE:
2822 case SYMBOL_REF:
2823 case LABEL_REF:
2824 case ADDR_VEC:
2825 case ADDR_DIFF_VEC:
2826 return 0;
2828 default:
2829 break;
2832 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
2834 if (fmt[i] == 'e')
2836 /* If we are about to do the last recursive call
2837 needed at this level, change it into iteration.
2838 This function is called enough to be worth it. */
2839 if (i == 0)
2840 return expr_killed_p (XEXP (x, i), bb);
2841 else if (expr_killed_p (XEXP (x, i), bb))
2842 return 1;
2844 else if (fmt[i] == 'E')
2845 for (j = 0; j < XVECLEN (x, i); j++)
2846 if (expr_killed_p (XVECEXP (x, i, j), bb))
2847 return 1;
2850 return 0;
2853 /* Compute the set of available expressions killed in each basic block. */
2855 static void
2856 compute_ae_kill (ae_gen, ae_kill)
2857 sbitmap *ae_gen, *ae_kill;
2859 int bb;
2860 unsigned int i;
2861 struct expr *expr;
2863 for (bb = 0; bb < n_basic_blocks; bb++)
2864 for (i = 0; i < expr_hash_table_size; i++)
2865 for (expr = expr_hash_table[i]; expr; expr = expr->next_same_hash)
2867 /* Skip EXPR if generated in this block. */
2868 if (TEST_BIT (ae_gen[bb], expr->bitmap_index))
2869 continue;
2871 if (expr_killed_p (expr->expr, bb))
2872 SET_BIT (ae_kill[bb], expr->bitmap_index);
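/* Note on the next step: compute_available (defined elsewhere in this
   file) combines the gen/kill sets just built, but with intersection
   over predecessors rather than union, since an expression is available
   only if it reaches along EVERY incoming path.  A sketch of that
   confluence rule for packed bitsets (invented names):  */
#if 0
static unsigned long
toy_avin (int n_preds, const unsigned long *pred_avout)
{
  unsigned long in = ~0UL;	/* start from the universal set */
  int i;

  for (i = 0; i < n_preds; i++)
    in &= pred_avout[i];

  /* The entry block has no predecessors: nothing is available there.  */
  return n_preds > 0 ? in : 0;
}
#endif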
2876 /* Actually perform the Classic GCSE optimizations. */
2878 /* Return non-zero if occurrence OCCR of expression EXPR reaches block BB.
2880 CHECK_SELF_LOOP is non-zero if we should consider a block reaching itself
2881 as a positive reach. We want to do this when there are two computations
2882 of the expression in the block.
2884 VISITED is a pointer to a working buffer for tracking which BB's have
2885 been visited. It is NULL for the top-level call.
2887 We treat reaching expressions that go through blocks containing the same
2888 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
2889 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
2890 2 as not reaching. The intent is to improve the probability of finding
2891 only one reaching expression and to reduce register lifetimes by picking
2892 the closest such expression. */
2894 static int
2895 expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited)
2896 struct occr *occr;
2897 struct expr *expr;
2898 int bb;
2899 int check_self_loop;
2900 char *visited;
2902 edge pred;
2904 for (pred = BASIC_BLOCK(bb)->pred; pred != NULL; pred = pred->pred_next)
2906 int pred_bb = pred->src->index;
2908 if (visited[pred_bb])
2909 ; /* This predecessor has already been visited. Nothing to do. */
2911 else if (pred_bb == bb)
2913 /* BB loops on itself. */
2914 if (check_self_loop
2915 && TEST_BIT (ae_gen[pred_bb], expr->bitmap_index)
2916 && BLOCK_NUM (occr->insn) == pred_bb)
2917 return 1;
2919 visited[pred_bb] = 1;
2922 /* Ignore this predecessor if it kills the expression. */
2923 else if (TEST_BIT (ae_kill[pred_bb], expr->bitmap_index))
2924 visited[pred_bb] = 1;
2926 /* Does this predecessor generate this expression? */
2927 else if (TEST_BIT (ae_gen[pred_bb], expr->bitmap_index))
2929 /* Is this the occurrence we're looking for?
2930 Note that there's only one generating occurrence per block
2931 so we just need to check the block number. */
2932 if (BLOCK_NUM (occr->insn) == pred_bb)
2933 return 1;
2935 visited[pred_bb] = 1;
2938 /* Neither gen nor kill. */
2939 else
2941 visited[pred_bb] = 1;
2942 if (expr_reaches_here_p_work (occr, expr, pred_bb, check_self_loop,
2943 visited))
2945 return 1;
2949 /* All paths have been checked. */
2950 return 0;
2953 /* This wrapper for expr_reaches_here_p_work() is to ensure that any
2954 memory allocated for that function is freed. */
2956 static int
2957 expr_reaches_here_p (occr, expr, bb, check_self_loop)
2958 struct occr *occr;
2959 struct expr *expr;
2960 int bb;
2961 int check_self_loop;
2963 int rval;
2964 char *visited = (char *) xcalloc (n_basic_blocks, 1);
2966 rval = expr_reaches_here_p_work (occr, expr, bb, check_self_loop, visited);
2968 free (visited);
2969 return rval;
2972 /* Return the instruction that computes EXPR that reaches INSN's basic block.
2973 If there is more than one such instruction, return NULL.
2975 Called only by handle_avail_expr. */
2977 static rtx
2978 computing_insn (expr, insn)
2979 struct expr *expr;
2980 rtx insn;
2982 int bb = BLOCK_NUM (insn);
2984 if (expr->avail_occr->next == NULL)
2986 if (BLOCK_NUM (expr->avail_occr->insn) == bb)
2987 /* The available expression is actually itself
2988 (i.e. a loop in the flow graph) so do nothing. */
2989 return NULL;
2991 /* (FIXME) Case where the pattern we found was created by
2992 a substitution that took place earlier. */
2993 return expr->avail_occr->insn;
2995 else
2997 /* Pattern is computed more than once.
2998 Search backwards from this insn to see how many of these
2999 computations actually reach this insn. */
3000 struct occr *occr;
3001 rtx insn_computes_expr = NULL;
3002 int can_reach = 0;
3004 for (occr = expr->avail_occr; occr != NULL; occr = occr->next)
3006 if (BLOCK_NUM (occr->insn) == bb)
3008 /* The expression is generated in this block.
3009 The only time we care about this is when the expression
3010 is generated later in the block [and thus there's a loop].
3011 We let the normal cse pass handle the other cases. */
3012 if (INSN_CUID (insn) < INSN_CUID (occr->insn)
3013 && expr_reaches_here_p (occr, expr, bb, 1))
3015 can_reach++;
3016 if (can_reach > 1)
3017 return NULL;
3019 insn_computes_expr = occr->insn;
3022 else if (expr_reaches_here_p (occr, expr, bb, 0))
3024 can_reach++;
3025 if (can_reach > 1)
3026 return NULL;
3028 insn_computes_expr = occr->insn;
3032 if (insn_computes_expr == NULL)
3033 abort ();
3035 return insn_computes_expr;
3039 /* Return non-zero if the definition in DEF_INSN can reach INSN.
3040 Only called by can_disregard_other_sets. */
3042 static int
3043 def_reaches_here_p (insn, def_insn)
3044 rtx insn, def_insn;
3046 rtx reg;
3048 if (TEST_BIT (reaching_defs[BLOCK_NUM (insn)], INSN_CUID (def_insn)))
3049 return 1;
3051 if (BLOCK_NUM (insn) == BLOCK_NUM (def_insn))
3053 if (INSN_CUID (def_insn) < INSN_CUID (insn))
3055 if (GET_CODE (PATTERN (def_insn)) == PARALLEL)
3056 return 1;
3057 else if (GET_CODE (PATTERN (def_insn)) == CLOBBER)
3058 reg = XEXP (PATTERN (def_insn), 0);
3059 else if (GET_CODE (PATTERN (def_insn)) == SET)
3060 reg = SET_DEST (PATTERN (def_insn));
3061 else
3062 abort ();
3064 return ! reg_set_between_p (reg, NEXT_INSN (def_insn), insn);
3066 else
3067 return 0;
3070 return 0;
3073 /* Return non-zero if *ADDR_THIS_REG can only have one value at INSN. The
3074 value returned is the number of definitions that reach INSN. Returning a
3075 value of zero means that [maybe] more than one definition reaches INSN and
3076 the caller can't perform whatever optimization it is trying; i.e. it is
3077 always safe to return zero. */
3079 static int
3080 can_disregard_other_sets (addr_this_reg, insn, for_combine)
3081 struct reg_set **addr_this_reg;
3082 rtx insn;
3083 int for_combine;
3085 int number_of_reaching_defs = 0;
3086 struct reg_set *this_reg;
3088 for (this_reg = *addr_this_reg; this_reg != 0; this_reg = this_reg->next)
3089 if (def_reaches_here_p (insn, this_reg->insn))
3091 number_of_reaching_defs++;
3092 /* Ignore parallels for now. */
3093 if (GET_CODE (PATTERN (this_reg->insn)) == PARALLEL)
3094 return 0;
3096 if (!for_combine
3097 && (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER
3098 || ! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3099 SET_SRC (PATTERN (insn)))))
3100 /* A setting of the reg to a different value reaches INSN. */
3101 return 0;
3103 if (number_of_reaching_defs > 1)
3105 /* If in this setting the value the register is being set to is
3106 equal to the previous value the register was set to and this
3107 setting reaches the insn we are trying to do the substitution
3108 on then we are ok. */
3109 if (GET_CODE (PATTERN (this_reg->insn)) == CLOBBER)
3110 return 0;
3111 else if (! rtx_equal_p (SET_SRC (PATTERN (this_reg->insn)),
3112 SET_SRC (PATTERN (insn))))
3113 return 0;
3116 *addr_this_reg = this_reg;
3119 return number_of_reaching_defs;
3122 /* Expression computed by insn is available and the substitution is legal,
3123 so try to perform the substitution.
3125 The result is non-zero if any changes were made. */
3127 static int
3128 handle_avail_expr (insn, expr)
3129 rtx insn;
3130 struct expr *expr;
3132 rtx pat, insn_computes_expr;
3133 rtx to;
3134 struct reg_set *this_reg;
3135 int found_setting, use_src;
3136 int changed = 0;
3138 /* We only handle the case where one computation of the expression
3139 reaches this instruction. */
3140 insn_computes_expr = computing_insn (expr, insn);
3141 if (insn_computes_expr == NULL)
3142 return 0;
3144 found_setting = 0;
3145 use_src = 0;
3147 /* At this point we know only one computation of EXPR outside of this
3148 block reaches this insn. Now try to find a register that the
3149 expression is computed into. */
3150 if (GET_CODE (SET_SRC (PATTERN (insn_computes_expr))) == REG)
3152 /* This is the case when the available expression that reaches
3153 here has already been handled as an available expression. */
3154 unsigned int regnum_for_replacing
3155 = REGNO (SET_SRC (PATTERN (insn_computes_expr)));
3157 /* If the register was created by GCSE we can't use `reg_set_table',
3158 however we know it's set only once. */
3159 if (regnum_for_replacing >= max_gcse_regno
3160 /* If the register the expression is computed into is set only once,
3161 or only one set reaches this insn, we can use it. */
3162 || (((this_reg = reg_set_table[regnum_for_replacing]),
3163 this_reg->next == NULL)
3164 || can_disregard_other_sets (&this_reg, insn, 0)))
3166 use_src = 1;
3167 found_setting = 1;
3171 if (!found_setting)
3173 unsigned int regnum_for_replacing
3174 = REGNO (SET_DEST (PATTERN (insn_computes_expr)));
3176 /* This shouldn't happen. */
3177 if (regnum_for_replacing >= max_gcse_regno)
3178 abort ();
3180 this_reg = reg_set_table[regnum_for_replacing];
3182 /* If the register the expression is computed into is set only once,
3183 or only one set reaches this insn, use it. */
3184 if (this_reg->next == NULL
3185 || can_disregard_other_sets (&this_reg, insn, 0))
3186 found_setting = 1;
3189 if (found_setting)
3191 pat = PATTERN (insn);
3192 if (use_src)
3193 to = SET_SRC (PATTERN (insn_computes_expr));
3194 else
3195 to = SET_DEST (PATTERN (insn_computes_expr));
3196 changed = validate_change (insn, &SET_SRC (pat), to, 0);
3198 /* We should be able to ignore the return code from validate_change but
3199 to play it safe we check. */
3200 if (changed)
3202 gcse_subst_count++;
3203 if (gcse_file != NULL)
3205 fprintf (gcse_file, "GCSE: Replacing the source in insn %d with",
3206 INSN_UID (insn));
3207 fprintf (gcse_file, " reg %d %s insn %d\n",
3208 REGNO (to), use_src ? "from" : "set in",
3209 INSN_UID (insn_computes_expr));
3214 /* The register that the expr is computed into is set more than once. */
3215 else if (1 /*expensive_op(this_pattern->op) && do_expensive_gcse)*/)
3217 /* Insert an insn after INSN_COMPUTES_EXPR that copies the register
3218 set there into a new pseudo register; call this new register REGN.
3219 The source of INSN is then replaced by REGN. */
3221 rtx new_insn;
3223 to = gen_reg_rtx (GET_MODE (SET_DEST (PATTERN (insn_computes_expr))));
3225 /* Generate the new insn. */
3226 /* ??? If the change fails, we return 0, even though we created
3227 an insn. I think this is ok. */
3228 new_insn
3229 = emit_insn_after (gen_rtx_SET (VOIDmode, to,
3230 SET_DEST (PATTERN
3231 (insn_computes_expr))),
3232 insn_computes_expr);
3234 /* Keep block number table up to date. */
3235 set_block_num (new_insn, BLOCK_NUM (insn_computes_expr));
3237 /* Keep register set table up to date. */
3238 record_one_set (REGNO (to), new_insn);
3240 gcse_create_count++;
3241 if (gcse_file != NULL)
3243 fprintf (gcse_file, "GCSE: Creating insn %d to copy value of reg %d",
3244 INSN_UID (NEXT_INSN (insn_computes_expr)),
3245 REGNO (SET_SRC (PATTERN (NEXT_INSN (insn_computes_expr)))));
3246 fprintf (gcse_file, ", computed in insn %d,\n",
3247 INSN_UID (insn_computes_expr));
3248 fprintf (gcse_file, " into newly allocated reg %d\n",
3249 REGNO (to));
3252 pat = PATTERN (insn);
3254 /* Do register replacement for INSN. */
3255 changed = validate_change (insn, &SET_SRC (pat),
3256 SET_DEST (PATTERN
3257 (NEXT_INSN (insn_computes_expr))),
3258 0);
3260 /* We should be able to ignore the return code from validate_change but
3261 to play it safe we check. */
3262 if (changed)
3264 gcse_subst_count++;
3265 if (gcse_file != NULL)
3267 fprintf (gcse_file,
3268 "GCSE: Replacing the source in insn %d with reg %d ",
3269 INSN_UID (insn),
3270 REGNO (SET_DEST (PATTERN (NEXT_INSN
3271 (insn_computes_expr)))));
3272 fprintf (gcse_file, "set in insn %d\n",
3273 INSN_UID (insn_computes_expr));
3278 return changed;
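/* A hypothetical illustration of the two substitution cases above
   (register numbers invented): with
     insn 10: (set (reg 100) (plus (reg 90) (reg 91)))
   as the only computation of (plus (reg 90) (reg 91)) reaching
     insn 20: (set (reg 101) (plus (reg 90) (reg 91)))
   the source of insn 20 simply becomes (reg 100).  If reg 100 is set
   more than once, a fresh copy
     insn 11: (set (reg 102) (reg 100))
   is emitted right after insn 10 and insn 20 uses (reg 102) instead.  */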
3281 /* Perform classic GCSE. This is called by one_classic_gcse_pass after all
3282 the dataflow analysis has been done.
3284 The result is non-zero if a change was made. */
3286 static int
3287 classic_gcse ()
3289 int bb, changed;
3290 rtx insn;
3292 /* Note we start at block 1. */
3294 changed = 0;
3295 for (bb = 1; bb < n_basic_blocks; bb++)
3297 /* Reset tables used to keep track of what's still valid [since the
3298 start of the block]. */
3299 reset_opr_set_tables ();
3301 for (insn = BLOCK_HEAD (bb);
3302 insn != NULL && insn != NEXT_INSN (BLOCK_END (bb));
3303 insn = NEXT_INSN (insn))
3305 /* Is insn of form (set (pseudo-reg) ...)? */
3306 if (GET_CODE (insn) == INSN
3307 && GET_CODE (PATTERN (insn)) == SET
3308 && GET_CODE (SET_DEST (PATTERN (insn))) == REG
3309 && REGNO (SET_DEST (PATTERN (insn))) >= FIRST_PSEUDO_REGISTER)
3311 rtx pat = PATTERN (insn);
3312 rtx src = SET_SRC (pat);
3313 struct expr *expr;
3315 if (want_to_gcse_p (src)
3316 /* Is the expression recorded? */
3317 && ((expr = lookup_expr (src)) != NULL)
3318 /* Is the expression available [at the start of the
3319 block]? */
3320 && TEST_BIT (ae_in[bb], expr->bitmap_index)
3321 /* Are the operands unchanged since the start of the
3322 block? */
3323 && oprs_not_set_p (src, insn))
3324 changed |= handle_avail_expr (insn, expr);
3327 /* Keep track of everything modified by this insn. */
3328 /* ??? Need to be careful w.r.t. mods done to INSN. */
3329 if (INSN_P (insn))
3330 mark_oprs_set (insn);
3334 return changed;
3337 /* Top level routine to perform one classic GCSE pass.
3339 Return non-zero if a change was made. */
3341 static int
3342 one_classic_gcse_pass (pass)
3343 int pass;
3345 int changed = 0;
3347 gcse_subst_count = 0;
3348 gcse_create_count = 0;
3350 alloc_expr_hash_table (max_cuid);
3351 alloc_rd_mem (n_basic_blocks, max_cuid);
3352 compute_expr_hash_table ();
3353 if (gcse_file)
3354 dump_hash_table (gcse_file, "Expression", expr_hash_table,
3355 expr_hash_table_size, n_exprs);
3357 if (n_exprs > 0)
3359 compute_kill_rd ();
3360 compute_rd ();
3361 alloc_avail_expr_mem (n_basic_blocks, n_exprs);
3362 compute_ae_gen ();
3363 compute_ae_kill (ae_gen, ae_kill);
3364 compute_available (ae_gen, ae_kill, ae_out, ae_in);
3365 changed = classic_gcse ();
3366 free_avail_expr_mem ();
3369 free_rd_mem ();
3370 free_expr_hash_table ();
3372 if (gcse_file)
3374 fprintf (gcse_file, "\n");
3375 fprintf (gcse_file, "GCSE of %s, pass %d: %d bytes needed, %d substs,",
3376 current_function_name, pass, bytes_used, gcse_subst_count);
3377 fprintf (gcse_file, "%d insns created\n", gcse_create_count);
3380 return changed;
3383 /* Compute copy/constant propagation working variables. */
3385 /* Local properties of assignments. */
3386 static sbitmap *cprop_pavloc;
3387 static sbitmap *cprop_absaltered;
3389 /* Global properties of assignments (computed from the local properties). */
3390 static sbitmap *cprop_avin;
3391 static sbitmap *cprop_avout;
3393 /* Allocate vars used for copy/const propagation. N_BLOCKS is the number of
3394 basic blocks. N_SETS is the number of sets. */
3396 static void
3397 alloc_cprop_mem (n_blocks, n_sets)
3398 int n_blocks, n_sets;
3400 cprop_pavloc = sbitmap_vector_alloc (n_blocks, n_sets);
3401 cprop_absaltered = sbitmap_vector_alloc (n_blocks, n_sets);
3403 cprop_avin = sbitmap_vector_alloc (n_blocks, n_sets);
3404 cprop_avout = sbitmap_vector_alloc (n_blocks, n_sets);
3407 /* Free vars used by copy/const propagation. */
3409 static void
3410 free_cprop_mem ()
3412 free (cprop_pavloc);
3413 free (cprop_absaltered);
3414 free (cprop_avin);
3415 free (cprop_avout);
3418 /* For each block, compute whether X is transparent. X is either an
3419 expression or an assignment [though we don't care which, for this context
3420 an assignment is treated as an expression]. For each block where an
3421 element of X is modified, set (SET_P == 1) or reset (SET_P == 0) the INDX
3422 bit in BMAP. */
3424 static void
3425 compute_transp (x, indx, bmap, set_p)
3426 rtx x;
3427 int indx;
3428 sbitmap *bmap;
3429 int set_p;
3431 int bb, i, j;
3432 enum rtx_code code;
3433 reg_set *r;
3434 const char *fmt;
3436 /* repeat is used to turn tail-recursion into iteration since GCC
3437 can't do it when there's no return value. */
3438 repeat:
3440 if (x == 0)
3441 return;
3443 code = GET_CODE (x);
3444 switch (code)
3446 case REG:
3447 if (set_p)
3449 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3451 for (bb = 0; bb < n_basic_blocks; bb++)
3452 if (TEST_BIT (reg_set_in_block[bb], REGNO (x)))
3453 SET_BIT (bmap[bb], indx);
3455 else
3457 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3458 SET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3461 else
3463 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3465 for (bb = 0; bb < n_basic_blocks; bb++)
3466 if (TEST_BIT (reg_set_in_block[bb], REGNO (x)))
3467 RESET_BIT (bmap[bb], indx);
3469 else
3471 for (r = reg_set_table[REGNO (x)]; r != NULL; r = r->next)
3472 RESET_BIT (bmap[BLOCK_NUM (r->insn)], indx);
3476 return;
3478 case MEM:
3479 if (set_p)
3481 for (bb = 0; bb < n_basic_blocks; bb++)
3482 if (mem_set_in_block[bb])
3483 SET_BIT (bmap[bb], indx);
3485 else
3487 for (bb = 0; bb < n_basic_blocks; bb++)
3488 if (mem_set_in_block[bb])
3489 RESET_BIT (bmap[bb], indx);
3492 x = XEXP (x, 0);
3493 goto repeat;
3495 case PC:
3496 case CC0: /*FIXME*/
3497 case CONST:
3498 case CONST_INT:
3499 case CONST_DOUBLE:
3500 case SYMBOL_REF:
3501 case LABEL_REF:
3502 case ADDR_VEC:
3503 case ADDR_DIFF_VEC:
3504 return;
3506 default:
3507 break;
3510 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3512 if (fmt[i] == 'e')
3514 /* If we are about to do the last recursive call
3515 needed at this level, change it into iteration.
3516 This function is called enough to be worth it. */
3517 if (i == 0)
3519 x = XEXP (x, i);
3520 goto repeat;
3523 compute_transp (XEXP (x, i), indx, bmap, set_p);
3525 else if (fmt[i] == 'E')
3526 for (j = 0; j < XVECLEN (x, i); j++)
3527 compute_transp (XVECEXP (x, i, j), indx, bmap, set_p);
3531 /* Top level routine to do the dataflow analysis needed by copy/const
3532 propagation. */
3534 static void
3535 compute_cprop_data ()
3537 compute_local_properties (cprop_absaltered, cprop_pavloc, NULL, 1);
3538 compute_available (cprop_pavloc, cprop_absaltered,
3539 cprop_avout, cprop_avin);
3542 /* Copy/constant propagation. */
3544 /* Maximum number of register uses in an insn that we handle. */
3545 #define MAX_USES 8
3547 /* Table of uses found in an insn.
3548 Allocated statically to avoid alloc/free complexity and overhead. */
3549 static struct reg_use reg_use_table[MAX_USES];
3551 /* Index into `reg_use_table' while building it. */
3552 static int reg_use_count;
3554 /* Set up a list of register numbers used in INSN. The found uses are stored
3555 in `reg_use_table'. `reg_use_count' is initialized to zero before entry,
3556 and contains the number of uses in the table upon exit.
3558 ??? If a register appears multiple times we will record it multiple times.
3559 This doesn't hurt anything but it will slow things down. */
3561 static void
3562 find_used_regs (x)
3563 rtx x;
3565 int i, j;
3566 enum rtx_code code;
3567 const char *fmt;
3569 /* repeat is used to turn tail-recursion into iteration since GCC
3570 can't do it when there's no return value. */
3571 repeat:
3573 if (x == 0)
3574 return;
3576 code = GET_CODE (x);
3577 switch (code)
3579 case REG:
3580 if (reg_use_count == MAX_USES)
3581 return;
3583 reg_use_table[reg_use_count].reg_rtx = x;
3584 reg_use_count++;
3585 return;
3587 case MEM:
3588 x = XEXP (x, 0);
3589 goto repeat;
3591 case PC:
3592 case CC0:
3593 case CONST:
3594 case CONST_INT:
3595 case CONST_DOUBLE:
3596 case SYMBOL_REF:
3597 case LABEL_REF:
3598 case CLOBBER:
3599 case ADDR_VEC:
3600 case ADDR_DIFF_VEC:
3601 case ASM_INPUT: /*FIXME*/
3602 return;
3604 case SET:
3605 if (GET_CODE (SET_DEST (x)) == MEM)
3606 find_used_regs (SET_DEST (x));
3607 x = SET_SRC (x);
3608 goto repeat;
3610 default:
3611 break;
3614 /* Recursively scan the operands of this expression. */
3616 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
3618 if (fmt[i] == 'e')
3620 /* If we are about to do the last recursive call
3621 needed at this level, change it into iteration.
3622 This function is called enough to be worth it. */
3623 if (i == 0)
3625 x = XEXP (x, 0);
3626 goto repeat;
3629 find_used_regs (XEXP (x, i));
3631 else if (fmt[i] == 'E')
3632 for (j = 0; j < XVECLEN (x, i); j++)
3633 find_used_regs (XVECEXP (x, i, j));
3637 /* Try to replace all non-SET_DEST occurrences of FROM in INSN with TO.
3638 Returns non-zero if successful. */
3640 static int
3641 try_replace_reg (from, to, insn)
3642 rtx from, to, insn;
3644 rtx note = find_reg_equal_equiv_note (insn);
3645 rtx src = 0;
3646 int success = 0;
3647 rtx set = single_set (insn);
3649 /* If this is a single set, try to simplify the source of the set given
3650 our substitution. We could perhaps try this for multiple SETs, but
3651 it probably won't buy us anything. */
3652 if (set != 0)
3654 src = simplify_replace_rtx (SET_SRC (set), from, to);
3656 /* Try this two ways: first just replace SET_SRC. If that doesn't
3657 work and this is a PARALLEL, try to replace the whole pattern
3658 with a new SET. */
3659 if (validate_change (insn, &SET_SRC (set), src, 0))
3660 success = 1;
3661 else if (GET_CODE (PATTERN (insn)) == PARALLEL
3662 && validate_change (insn, &PATTERN (insn),
3663 gen_rtx_SET (VOIDmode, SET_DEST (set),
3664 src),
3665 0))
3666 success = 1;
3669 /* Otherwise, try to do a global replacement within the insn. */
3670 if (!success)
3671 success = validate_replace_src (from, to, insn);
3673 /* If we've failed to do replacement, have a single SET, and don't already
3674 have a note, add a REG_EQUAL note to not lose information. */
3675 if (!success && note == 0 && set != 0)
3676 note = REG_NOTES (insn)
3677 = gen_rtx_EXPR_LIST (REG_EQUAL, src, REG_NOTES (insn));
3679 /* If there is already a NOTE, update the expression in it with our
3680 replacement. */
3681 else if (note != 0)
3682 XEXP (note, 0) = simplify_replace_rtx (XEXP (note, 0), from, to);
3684 /* The REG_EQUAL note may get simplified into a bare register,
3685 which we don't allow; remove such a note. This ought not to
3686 happen, because the code above ought to have synthesized a
3687 reg-reg move instead, but be on the safe side. */
3688 if (note && REG_P (XEXP (note, 0)))
3689 remove_note (insn, note);
3691 return success;
3694 /* Find a set of register REGNO that is available on entry to INSN's
3695 block. Returns NULL if no such set is found. */
3697 static struct expr *
3698 find_avail_set (regno, insn)
3699 int regno;
3700 rtx insn;
3702 /* SET1 contains the last set found that can be returned to the caller for
3703 use in a substitution. */
3704 struct expr *set1 = 0;
3706 /* Loops are not possible here. To get a loop we would need two sets
3707 available at the start of the block containing INSN, i.e. we would
3708 need two sets like this available at the start of the block:
3710 (set (reg X) (reg Y))
3711 (set (reg Y) (reg X))
3713 This can not happen since the set of (reg Y) would have killed the
3714 set of (reg X) making it unavailable at the start of this block. */
3715 while (1)
3717 rtx src;
3718 struct expr *set = lookup_set (regno, NULL_RTX);
3720 /* Find a set that is available at the start of the block
3721 which contains INSN. */
3722 while (set)
3724 if (TEST_BIT (cprop_avin[BLOCK_NUM (insn)], set->bitmap_index))
3725 break;
3726 set = next_set (regno, set);
3729 /* If no available set was found we've reached the end of the
3730 (possibly empty) copy chain. */
3731 if (set == 0)
3732 break;
3734 if (GET_CODE (set->expr) != SET)
3735 abort ();
3737 src = SET_SRC (set->expr);
3739 /* We know the set is available.
3740 Now check that SRC is ANTLOC (i.e. none of the source operands
3741 have changed since the start of the block).
3743 If the source operand changed, we may still use it for the next
3744 iteration of this loop, but we may not use it for substitutions. */
3746 if (CONSTANT_P (src) || oprs_not_set_p (src, insn))
3747 set1 = set;
3749 /* If the source of the set is anything except a register, then
3750 we have reached the end of the copy chain. */
3751 if (GET_CODE (src) != REG)
3752 break;
3754 /* Follow the copy chain, ie start another iteration of the loop
3755 and see if we have an available copy into SRC. */
3756 regno = REGNO (src);
3759 /* SET1 holds the last set that was available and anticipatable at
3760 INSN. */
3761 return set1;
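/* A hypothetical copy chain (register numbers invented): with both
     (set (reg 120) (reg 110))
     (set (reg 110) (const_int 7))
   available on entry to INSN's block, a lookup of reg 120 first finds
   the copy from reg 110, then follows REGNO of the source to the
   constant set, so SET1 ends up as (set (reg 110) (const_int 7)) and
   the caller can substitute the constant for reg 120.  */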
3764 /* Subroutine of cprop_insn that tries to propagate constants into
3765 JUMP_INSNS. INSN must be a conditional jump. FROM is what we will try to
3766 replace, SRC is the constant we will try to substitute for it. Returns
3767 nonzero if a change was made. We know INSN has just a SET. */
3769 static int
3770 cprop_jump (insn, from, src)
3771 rtx insn;
3772 rtx from;
3773 rtx src;
3775 rtx set = PATTERN (insn);
3776 rtx new = simplify_replace_rtx (SET_SRC (set), from, src);
3778 /* If no simplification can be made, then try the next
3779 register. */
3780 if (rtx_equal_p (new, SET_SRC (set)))
3781 return 0;
3783 /* If this is now a no-op, leave it that way, but update LABEL_NUSES if
3784 necessary. */
3785 if (new == pc_rtx)
3787 SET_SRC (set) = new;
3789 if (JUMP_LABEL (insn) != 0)
3790 --LABEL_NUSES (JUMP_LABEL (insn));
3793 /* Otherwise, this must be a valid instruction. */
3794 else if (! validate_change (insn, &SET_SRC (set), new, 0))
3795 return 0;
3797 /* If this has turned into an unconditional jump,
3798 then put a barrier after it so that the unreachable
3799 code will be deleted. */
3800 if (GET_CODE (SET_SRC (set)) == LABEL_REF)
3801 emit_barrier_after (insn);
3803 run_jump_opt_after_gcse = 1;
3805 const_prop_count++;
3806 if (gcse_file != NULL)
3808 fprintf (gcse_file,
3809 "CONST-PROP: Replacing reg %d in insn %d with constant ",
3810 REGNO (from), INSN_UID (insn));
3811 print_rtl (gcse_file, src);
3812 fprintf (gcse_file, "\n");
3815 return 1;
3818 #ifdef HAVE_cc0
3820 /* Subroutine of cprop_insn that tries to propagate constants into JUMP_INSNS
3821 for machines that have CC0. INSN is a single set that stores into CC0;
3822 the insn following it is a conditional jump. REG_USED is the use we will
3823 try to replace, SRC is the constant we will try to substitute for it.
3824 Returns nonzero if a change was made. */
3826 static int
3827 cprop_cc0_jump (insn, reg_used, src)
3828 rtx insn;
3829 struct reg_use *reg_used;
3830 rtx src;
3832 /* First substitute in the SET_SRC of INSN, then substitute that for
3833 CC0 in JUMP. */
3834 rtx jump = NEXT_INSN (insn);
3835 rtx new_src = simplify_replace_rtx (SET_SRC (PATTERN (insn)),
3836 reg_used->reg_rtx, src);
3838 if (! cprop_jump (jump, cc0_rtx, new_src))
3839 return 0;
3841 /* If we succeeded, delete the cc0 setter. */
3842 PUT_CODE (insn, NOTE);
3843 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED;
3844 NOTE_SOURCE_FILE (insn) = 0;
3846 return 1;
3848 #endif
3850 /* Perform constant and copy propagation on INSN.
3851 The result is non-zero if a change was made. */
3853 static int
3854 cprop_insn (insn, alter_jumps)
3855 rtx insn;
3856 int alter_jumps;
3858 struct reg_use *reg_used;
3859 int changed = 0;
3860 rtx note;
3862 /* Only propagate into SETs. Note that a conditional jump is a
3863 SET with pc_rtx as the destination. */
3864 if (GET_CODE (insn) != INSN && GET_CODE (insn) != JUMP_INSN)
3865 return 0;
3867 reg_use_count = 0;
3868 find_used_regs (PATTERN (insn));
3870 note = find_reg_equal_equiv_note (insn);
3872 /* We may win even when propagating constants into notes. */
3873 if (note)
3874 find_used_regs (XEXP (note, 0));
3876 for (reg_used = &reg_use_table[0]; reg_use_count > 0;
3877 reg_used++, reg_use_count--)
3879 unsigned int regno = REGNO (reg_used->reg_rtx);
3880 rtx pat, src;
3881 struct expr *set;
3883 /* Ignore registers created by GCSE; they postdate the hash tables
3884 and so have no entries in them. */
3885 if (regno >= max_gcse_regno)
3886 continue;
3888 /* If the register has already been set in this block, there's
3889 nothing we can do. */
3890 if (! oprs_not_set_p (reg_used->reg_rtx, insn))
3891 continue;
3893 /* Find an assignment that sets reg_used and is available
3894 at the start of the block. */
3895 set = find_avail_set (regno, insn);
3896 if (! set)
3897 continue;
3899 pat = set->expr;
3900 /* ??? We might be able to handle PARALLELs. Later. */
3901 if (GET_CODE (pat) != SET)
3902 abort ();
3904 src = SET_SRC (pat);
3906 /* Constant propagation. */
3907 if (GET_CODE (src) == CONST_INT || GET_CODE (src) == CONST_DOUBLE
3908 || GET_CODE (src) == SYMBOL_REF)
3910 /* Handle normal insns first. */
3911 if (GET_CODE (insn) == INSN
3912 && try_replace_reg (reg_used->reg_rtx, src, insn))
3914 changed = 1;
3915 const_prop_count++;
3916 if (gcse_file != NULL)
3918 fprintf (gcse_file, "CONST-PROP: Replacing reg %d in ",
3919 regno);
3920 fprintf (gcse_file, "insn %d with constant ",
3921 INSN_UID (insn));
3922 print_rtl (gcse_file, src);
3923 fprintf (gcse_file, "\n");
3926 /* The original insn setting reg_used may or may not now be
3927 deletable. We leave the deletion to flow. */
3930 /* Try to propagate a CONST_INT into a conditional jump.
3931 We're pretty specific about what we will handle in this
3932 code; we can extend it as necessary over time.
3934 Right now the insn in question must look like
3935 (set (pc) (if_then_else ...)) */
3936 else if (alter_jumps
3937 && GET_CODE (insn) == JUMP_INSN
3938 && condjump_p (insn)
3939 && ! simplejump_p (insn))
3940 changed |= cprop_jump (insn, reg_used->reg_rtx, src);
3942 #ifdef HAVE_cc0
3943 /* Similar code for machines that use a pair of CC0 setter and
3944 conditional jump insn. */
3945 else if (alter_jumps
3946 && GET_CODE (PATTERN (insn)) == SET
3947 && SET_DEST (PATTERN (insn)) == cc0_rtx
3948 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
3949 && condjump_p (NEXT_INSN (insn))
3950 && ! simplejump_p (NEXT_INSN (insn))
3951 && cprop_cc0_jump (insn, reg_used, src))
3953 changed = 1;
3954 break;
3956 #endif
3958 else if (GET_CODE (src) == REG
3959 && REGNO (src) >= FIRST_PSEUDO_REGISTER
3960 && REGNO (src) != regno)
3962 if (try_replace_reg (reg_used->reg_rtx, src, insn))
3964 changed = 1;
3965 copy_prop_count++;
3966 if (gcse_file != NULL)
3968 fprintf (gcse_file, "COPY-PROP: Replacing reg %d in insn %d",
3969 regno, INSN_UID (insn));
3970 fprintf (gcse_file, " with reg %d\n", REGNO (src));
3973 /* The original insn setting reg_used may or may not now be
3974 deletable. We leave the deletion to flow. */
3975 /* FIXME: If it turns out that the insn isn't deletable,
3976 then we may have unnecessarily extended register lifetimes
3977 and made things worse. */
3982 return changed;
3985 /* Forward propagate copies. This includes copies and constants. Return
3986 non-zero if a change was made. */
3988 static int
3989 cprop (alter_jumps)
3990 int alter_jumps;
3992 int bb, changed;
3993 rtx insn;
3995 /* Note we start at block 1. */
3997 changed = 0;
3998 for (bb = 1; bb < n_basic_blocks; bb++)
4000 /* Reset tables used to keep track of what's still valid [since the
4001 start of the block]. */
4002 reset_opr_set_tables ();
4004 for (insn = BLOCK_HEAD (bb);
4005 insn != NULL && insn != NEXT_INSN (BLOCK_END (bb));
4006 insn = NEXT_INSN (insn))
4007 if (INSN_P (insn))
4009 changed |= cprop_insn (insn, alter_jumps);
4011 /* Keep track of everything modified by this insn. */
4012 /* ??? Need to be careful w.r.t. mods done to INSN. Don't
4013 call mark_oprs_set if we turned the insn into a NOTE. */
4014 if (GET_CODE (insn) != NOTE)
4015 mark_oprs_set (insn);
4019 if (gcse_file != NULL)
4020 fprintf (gcse_file, "\n");
4022 return changed;
4025 /* Perform one copy/constant propagation pass.
4026 F is the first insn in the function.
4027 PASS is the pass count. */
4029 static int
4030 one_cprop_pass (pass, alter_jumps)
4031 int pass;
4032 int alter_jumps;
4034 int changed = 0;
4036 const_prop_count = 0;
4037 copy_prop_count = 0;
4039 alloc_set_hash_table (max_cuid);
4040 compute_set_hash_table ();
4041 if (gcse_file)
4042 dump_hash_table (gcse_file, "SET", set_hash_table, set_hash_table_size,
4043 n_sets);
4044 if (n_sets > 0)
4046 alloc_cprop_mem (n_basic_blocks, n_sets);
4047 compute_cprop_data ();
4048 changed = cprop (alter_jumps);
4049 free_cprop_mem ();
4052 free_set_hash_table ();
4054 if (gcse_file)
4056 fprintf (gcse_file, "CPROP of %s, pass %d: %d bytes needed, ",
4057 current_function_name, pass, bytes_used);
4058 fprintf (gcse_file, "%d const props, %d copy props\n\n",
4059 const_prop_count, copy_prop_count);
4062 return changed;
4065 /* Compute PRE+LCM working variables. */
4067 /* Local properties of expressions. */
4068 /* Nonzero for expressions that are transparent in the block. */
4069 static sbitmap *transp;
4071 /* Nonzero for expressions that are transparent at the end of the block.
4072 This is only zero for expressions killed by an abnormal critical
4073 edge created by a call. */
4074 static sbitmap *transpout;
4076 /* Nonzero for expressions that are computed (available) in the block. */
4077 static sbitmap *comp;
4079 /* Nonzero for expressions that are locally anticipatable in the block. */
4080 static sbitmap *antloc;
4082 /* Nonzero for expressions where this block is an optimal computation
4083 point. */
4084 static sbitmap *pre_optimal;
4086 /* Nonzero for expressions which are redundant in a particular block. */
4087 static sbitmap *pre_redundant;
4089 /* Nonzero for expressions which should be inserted on a specific edge. */
4090 static sbitmap *pre_insert_map;
4092 /* Nonzero for expressions which should be deleted in a specific block. */
4093 static sbitmap *pre_delete_map;
4095 /* Contains the edge_list returned by pre_edge_lcm. */
4096 static struct edge_list *edge_list;
4098 /* Redundant insns. */
4099 static sbitmap pre_redundant_insns;
4101 /* Allocate vars used for PRE analysis. */
4103 static void
4104 alloc_pre_mem (n_blocks, n_exprs)
4105 int n_blocks, n_exprs;
4107 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
4108 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
4109 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
4111 pre_optimal = NULL;
4112 pre_redundant = NULL;
4113 pre_insert_map = NULL;
4114 pre_delete_map = NULL;
4115 ae_in = NULL;
4116 ae_out = NULL;
4117 ae_kill = sbitmap_vector_alloc (n_blocks, n_exprs);
4119 /* pre_insert and pre_delete are allocated later. */
4122 /* Free vars used for PRE analysis. */
4124 static void
4125 free_pre_mem ()
4127 free (transp);
4128 free (comp);
4130 /* ANTLOC and AE_KILL are freed just after pre_lcm finishes. */
4132 if (pre_optimal)
4133 free (pre_optimal);
4134 if (pre_redundant)
4135 free (pre_redundant);
4136 if (pre_insert_map)
4137 free (pre_insert_map);
4138 if (pre_delete_map)
4139 free (pre_delete_map);
4141 if (ae_in)
4142 free (ae_in);
4143 if (ae_out)
4144 free (ae_out);
4146 transp = comp = NULL;
4147 pre_optimal = pre_redundant = pre_insert_map = pre_delete_map = NULL;
4148 ae_in = ae_out = NULL;
4151 /* Top level routine to do the dataflow analysis needed by PRE. */
4153 static void
4154 compute_pre_data ()
4156 sbitmap trapping_expr;
4157 int i;
4158 unsigned int ui;
4160 compute_local_properties (transp, comp, antloc, 0);
4161 sbitmap_vector_zero (ae_kill, n_basic_blocks);
4163 /* Collect expressions which might trap. */
4164 trapping_expr = sbitmap_alloc (n_exprs);
4165 sbitmap_zero (trapping_expr);
4166 for (ui = 0; ui < expr_hash_table_size; ui++)
4168 struct expr *e;
4169 for (e = expr_hash_table[ui]; e != NULL; e = e->next_same_hash)
4170 if (may_trap_p (e->expr))
4171 SET_BIT (trapping_expr, e->bitmap_index);
4174 /* Compute ae_kill for each basic block using:
4176 ~(TRANSP | COMP)
4178 This is significantly faster than compute_ae_kill. */
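/* A sketch of why this is safe: availability is solved as

       AVOUT(b) = COMP(b) | (AVIN(b) & ~KILL(b))

   so for an expression in COMP(b) the kill bit is irrelevant -- COMP is
   ORed back in.  Clearing the kill bit for computed expressions
   therefore cannot change the fixed point, and

       KILL(b) = ~(TRANSP(b) | COMP(b))

   i.e. "killed only if neither transparent nor computed" yields the
   same solution as the per-insn scan in compute_ae_kill.  */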
4180 for (i = 0; i < n_basic_blocks; i++)
4182 edge e;
4184 /* If the current block is the destination of an abnormal edge, we
4185 kill all trapping expressions because we won't be able to properly
4186 place the instruction on the edge. So make them neither
4187 anticipatable nor transparent. This is fairly conservative. */
4188 for (e = BASIC_BLOCK (i)->pred; e ; e = e->pred_next)
4189 if (e->flags & EDGE_ABNORMAL)
4191 sbitmap_difference (antloc[i], antloc[i], trapping_expr);
4192 sbitmap_difference (transp[i], transp[i], trapping_expr);
4193 break;
4196 sbitmap_a_or_b (ae_kill[i], transp[i], comp[i]);
4197 sbitmap_not (ae_kill[i], ae_kill[i]);
4200 edge_list = pre_edge_lcm (gcse_file, n_exprs, transp, comp, antloc,
4201 ae_kill, &pre_insert_map, &pre_delete_map);
4202 free (antloc);
4203 antloc = NULL;
4204 free (ae_kill);
4205 ae_kill = NULL;
4206 free (trapping_expr);
4209 /* PRE utilities */
4211 /* Return non-zero if an occurrence of expression EXPR in OCCR_BB would reach
4212 block BB.
4214 VISITED is a pointer to a working buffer for tracking which BB's have
4215 been visited. It is NULL for the top-level call.
4217 We treat reaching expressions that go through blocks containing the same
4218 reaching expression as "not reaching". E.g. if EXPR is generated in blocks
4219 2 and 3, INSN is in block 4, and 2->3->4, we treat the expression in block
4220 2 as not reaching. The intent is to improve the probability of finding
4221 only one reaching expression and to reduce register lifetimes by picking
4222 the closest such expression. */
4224 static int
4225 pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited)
4226 int occr_bb;
4227 struct expr *expr;
4228 int bb;
4229 char *visited;
4231 edge pred;
4233 for (pred = BASIC_BLOCK (bb)->pred; pred != NULL; pred = pred->pred_next)
4235 int pred_bb = pred->src->index;
4237 if (pred->src == ENTRY_BLOCK_PTR
4238 /* Has this predecessor already been visited? */
4239 || visited[pred_bb])
4240 ;/* Nothing to do. */
4242 /* Does this predecessor generate this expression? */
4243 else if (TEST_BIT (comp[pred_bb], expr->bitmap_index))
4245 /* Is this the occurrence we're looking for?
4246 Note that there's only one generating occurrence per block
4247 so we just need to check the block number. */
4248 if (occr_bb == pred_bb)
4249 return 1;
4251 visited[pred_bb] = 1;
4253 /* Ignore this predecessor if it kills the expression. */
4254 else if (! TEST_BIT (transp[pred_bb], expr->bitmap_index))
4255 visited[pred_bb] = 1;
4257 /* Neither gen nor kill. */
4258 else
4260 visited[pred_bb] = 1;
4261 if (pre_expr_reaches_here_p_work (occr_bb, expr, pred_bb, visited))
4262 return 1;
4266 /* All paths have been checked. */
4267 return 0;
4270 /* The wrapper for pre_expr_reaches_here_p_work that ensures that any
4271 memory allocated for that function is returned. */
4273 static int
4274 pre_expr_reaches_here_p (occr_bb, expr, bb)
4275 int occr_bb;
4276 struct expr *expr;
4277 int bb;
4279 int rval;
4280 char *visited = (char *) xcalloc (n_basic_blocks, 1);
4282 rval = pre_expr_reaches_here_p_work (occr_bb, expr, bb, visited);
4284 free (visited);
4285 return rval;
4289 /* Given an expr, generate RTL which we can insert at the end of a BB,
4290 or on an edge. Set the block number of any insns generated to
4291 the value of BB. */
4293 static rtx
4294 process_insert_insn (expr)
4295 struct expr *expr;
4297 rtx reg = expr->reaching_reg;
4298 rtx exp = copy_rtx (expr->expr);
4299 rtx pat;
4301 start_sequence ();
4303 /* If the expression is something that's an operand, like a constant,
4304 just copy it to a register. */
4305 if (general_operand (exp, GET_MODE (reg)))
4306 emit_move_insn (reg, exp);
4308 /* Otherwise, make a new insn to compute this expression and make sure the
4309 insn will be recognized (this also adds any needed CLOBBERs). Copy the
4310 expression to make sure we don't have any sharing issues. */
4311 else if (insn_invalid_p (emit_insn (gen_rtx_SET (VOIDmode, reg, exp))))
4312 abort ();
4314 pat = gen_sequence ();
4315 end_sequence ();
4317 return pat;
4320 /* Add EXPR to the end of basic block BB.
4322 This is used by both PRE and code hoisting.
4324 For PRE, we want to verify that the expr is either transparent
4325 or locally anticipatable in the target block. This check makes
4326 no sense for code hoisting. */
4328 static void
4329 insert_insn_end_bb (expr, bb, pre)
4330 struct expr *expr;
4331 int bb;
4332 int pre;
4334 rtx insn = BLOCK_END (bb);
4335 rtx new_insn;
4336 rtx reg = expr->reaching_reg;
4337 int regno = REGNO (reg);
4338 rtx pat;
4339 int i;
4341 pat = process_insert_insn (expr);
4343 /* If the last insn is a jump, insert EXPR in front [taking care to
4344 handle cc0, etc. properly]. */
4346 if (GET_CODE (insn) == JUMP_INSN)
4348 #ifdef HAVE_cc0
4349 rtx note;
4350 #endif
4352 /* If this is a jump table, then we can't insert stuff here. Since
4353 we know the previous real insn must be the tablejump, we insert
4354 the new instruction just before the tablejump. */
4355 if (GET_CODE (PATTERN (insn)) == ADDR_VEC
4356 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC)
4357 insn = prev_real_insn (insn);
4359 #ifdef HAVE_cc0
4360 /* FIXME: 'twould be nice to call prev_cc0_setter here but it aborts
4361 if cc0 isn't set. */
4362 note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
4363 if (note)
4364 insn = XEXP (note, 0);
4365 else
4367 rtx maybe_cc0_setter = prev_nonnote_insn (insn);
4368 if (maybe_cc0_setter
4369 && INSN_P (maybe_cc0_setter)
4370 && sets_cc0_p (PATTERN (maybe_cc0_setter)))
4371 insn = maybe_cc0_setter;
4373 #endif
4374 /* FIXME: What if something in cc0/jump uses value set in new insn? */
4375 new_insn = emit_block_insn_before (pat, insn, BASIC_BLOCK (bb));
4378 /* Likewise if the last insn is a call, as will happen in the presence
4379 of exception handling. */
4380 else if (GET_CODE (insn) == CALL_INSN)
4382 HARD_REG_SET parm_regs;
4383 int nparm_regs;
4384 rtx p;
4386 /* Keeping in mind SMALL_REGISTER_CLASSES and parameters in registers,
4387 we search backward and place the instructions before the first
4388 parameter is loaded. Do this everywhere for consistency and on the
4389 presumption that we'll get better code elsewhere as well.
4391 It should always be the case that we can put these instructions
4392 anywhere in the basic block when performing PRE optimizations.
4393 Check this. */
4395 if (pre
4396 && !TEST_BIT (antloc[bb], expr->bitmap_index)
4397 && !TEST_BIT (transp[bb], expr->bitmap_index))
4398 abort ();
4400 /* Since different machines initialize their parameter registers
4401 in different orders, assume nothing. Collect the set of all
4402 parameter registers. */
4403 CLEAR_HARD_REG_SET (parm_regs);
4404 nparm_regs = 0;
4405 for (p = CALL_INSN_FUNCTION_USAGE (insn); p ; p = XEXP (p, 1))
4406 if (GET_CODE (XEXP (p, 0)) == USE
4407 && GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
4409 if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
4410 abort ();
4412 SET_HARD_REG_BIT (parm_regs, REGNO (XEXP (XEXP (p, 0), 0)));
4413 nparm_regs++;
4416 /* Search backward for the first set of a register in this set. */
4417 while (nparm_regs && BLOCK_HEAD (bb) != insn)
4419 insn = PREV_INSN (insn);
4420 p = single_set (insn);
4421 if (p && GET_CODE (SET_DEST (p)) == REG
4422 && REGNO (SET_DEST (p)) < FIRST_PSEUDO_REGISTER
4423 && TEST_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p))))
4425 CLEAR_HARD_REG_BIT (parm_regs, REGNO (SET_DEST (p)));
4426 nparm_regs--;
4430 /* If we found all the parameter loads, then we want to insert
4431 before the first parameter load.
4433 If we did not find all the parameter loads, then we might have
4434 stopped on the head of the block, which could be a CODE_LABEL.
4435 If we inserted before the CODE_LABEL, then we would be putting
4436 the insn in the wrong basic block. In that case, put the insn
4437 after the CODE_LABEL. Also, respect NOTE_INSN_BASIC_BLOCK. */
4438 while (GET_CODE (insn) == CODE_LABEL
4439 || NOTE_INSN_BASIC_BLOCK_P (insn))
4440 insn = NEXT_INSN (insn);
4442 new_insn = emit_block_insn_before (pat, insn, BASIC_BLOCK (bb));
4444 else
4446 new_insn = emit_insn_after (pat, insn);
4447 BLOCK_END (bb) = new_insn;
4450 /* Keep block number table up to date.
4451 Note, PAT could be a multiple insn sequence; we have to make
4452 sure that each insn in the sequence is handled. */
4453 if (GET_CODE (pat) == SEQUENCE)
4455 for (i = 0; i < XVECLEN (pat, 0); i++)
4457 rtx insn = XVECEXP (pat, 0, i);
4459 set_block_num (insn, bb);
4460 if (INSN_P (insn))
4461 add_label_notes (PATTERN (insn), new_insn);
4463 note_stores (PATTERN (insn), record_set_info, insn);
4466 else
4468 add_label_notes (SET_SRC (pat), new_insn);
4469 set_block_num (new_insn, bb);
4471 /* Keep register set table up to date. */
4472 record_one_set (regno, new_insn);
4475 gcse_create_count++;
4477 if (gcse_file)
4479 fprintf (gcse_file, "PRE/HOIST: end of bb %d, insn %d, ",
4480 bb, INSN_UID (new_insn));
4481 fprintf (gcse_file, "copying expression %d to reg %d\n",
4482 expr->bitmap_index, regno);
4486 /* Insert partially redundant expressions on edges in the CFG to make
4487 the expressions fully redundant. */
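/* A sketch of the idea (block numbers made up): if blocks 2 and 4 both
   flow into block 5 and the expression is computed on the path through
   2 but not through 4, LCM will have set the bit for the edge 4->5 in
   PRE_INSERT_MAP.  Emitting a computation of the expression into its
   reaching register on that edge supplies the value on the missing
   path, so the occurrence in block 5 (already rewritten by pre_delete
   to read the reaching register) becomes fully redundant.  */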
4489 static int
4490 pre_edge_insert (edge_list, index_map)
4491 struct edge_list *edge_list;
4492 struct expr **index_map;
4494 int e, i, j, num_edges, set_size, did_insert = 0;
4495 sbitmap *inserted;
4497 /* Where PRE_INSERT_MAP is nonzero, we add the expression on that edge
4498 if it reaches any of the deleted expressions. */
4500 set_size = pre_insert_map[0]->size;
4501 num_edges = NUM_EDGES (edge_list);
4502 inserted = sbitmap_vector_alloc (num_edges, n_exprs);
4503 sbitmap_vector_zero (inserted, num_edges);
4505 for (e = 0; e < num_edges; e++)
4507 int indx;
4508 basic_block pred = INDEX_EDGE_PRED_BB (edge_list, e);
4509 int bb = pred->index;
4511 for (i = indx = 0; i < set_size; i++, indx += SBITMAP_ELT_BITS)
4513 SBITMAP_ELT_TYPE insert = pre_insert_map[e]->elms[i];
4515 for (j = indx; insert && j < n_exprs; j++, insert >>= 1)
4516 if ((insert & 1) != 0 && index_map[j]->reaching_reg != NULL_RTX)
4518 struct expr *expr = index_map[j];
4519 struct occr *occr;
4521 /* Now look at each deleted occurrence of this expression. */
4522 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4524 if (! occr->deleted_p)
4525 continue;
4527 /* Insert this expression on this edge if it would reach the
4528 deleted occurrence in BB. */
4529 if (!TEST_BIT (inserted[e], j))
4531 rtx insn;
4532 edge eg = INDEX_EDGE (edge_list, e);
4534 /* We can't insert anything on an abnormal and
4535 critical edge, so we insert the insn at the end of
4536 the previous block. There are several alternatives
4537 detailed in Morgan's book, p. 277 (sec. 10.5), for
4538 handling this situation. This one is easiest for
4539 now. */
4541 if ((eg->flags & EDGE_ABNORMAL) == EDGE_ABNORMAL)
4542 insert_insn_end_bb (index_map[j], bb, 0);
4543 else
4545 insn = process_insert_insn (index_map[j]);
4546 insert_insn_on_edge (insn, eg);
4549 if (gcse_file)
4551 fprintf (gcse_file, "PRE/HOIST: edge (%d,%d), ",
4552 bb,
4553 INDEX_EDGE_SUCC_BB (edge_list, e)->index);
4554 fprintf (gcse_file, "copy expression %d\n",
4555 expr->bitmap_index);
4558 SET_BIT (inserted[e], j);
4559 did_insert = 1;
4560 gcse_create_count++;
4567 free (inserted);
4568 return did_insert;
4571 /* Copy the result of INSN to REG. INDX is the expression number. */
4573 static void
4574 pre_insert_copy_insn (expr, insn)
4575 struct expr *expr;
4576 rtx insn;
4578 rtx reg = expr->reaching_reg;
4579 int regno = REGNO (reg);
4580 int indx = expr->bitmap_index;
4581 rtx set = single_set (insn);
4582 rtx new_insn;
4583 int bb = BLOCK_NUM (insn);
4585 if (!set)
4586 abort ();
4588 new_insn = emit_insn_after (gen_rtx_SET (VOIDmode, reg, SET_DEST (set)),
4589 insn);
4591 /* Keep block number table up to date. */
4592 set_block_num (new_insn, bb);
4594 /* Keep register set table up to date. */
4595 record_one_set (regno, new_insn);
4596 if (insn == BLOCK_END (bb))
4597 BLOCK_END (bb) = new_insn;
4599 gcse_create_count++;
4601 if (gcse_file)
4602 fprintf (gcse_file,
4603 "PRE: bb %d, insn %d, copy expression %d in insn %d to reg %d\n",
4604 BLOCK_NUM (insn), INSN_UID (new_insn), indx,
4605 INSN_UID (insn), regno);
4608 /* Copy available expressions that reach the redundant expression
4609 to `reaching_reg'. */
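/* A sketch (register numbers made up): if

       (set (reg 5) (plus (reg 2) (reg 3)))

   is an available occurrence whose value reaches a deleted occurrence,
   pre_insert_copy_insn appends

       (set (reg 107) (reg 5))

   immediately after it, where reg 107 is the expression's reaching_reg,
   so the value is carried to the rewritten occurrence on that path.  */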
4611 static void
4612 pre_insert_copies ()
4614 unsigned int i;
4615 struct expr *expr;
4616 struct occr *occr;
4617 struct occr *avail;
4619 /* For each available expression in the table, copy the result to
4620 `reaching_reg' if the expression reaches a deleted one.
4622 ??? The current algorithm is rather brute force.
4623 Need to do some profiling. */
4625 for (i = 0; i < expr_hash_table_size; i++)
4626 for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
4628 /* If the basic block isn't reachable, PPOUT will be TRUE. However,
4629 we don't want to insert a copy here because the expression may not
4630 really be redundant. So only insert an insn if the expression was
4631 deleted. This test also avoids further processing if the
4632 expression wasn't deleted anywhere. */
4633 if (expr->reaching_reg == NULL)
4634 continue;
4636 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4638 if (! occr->deleted_p)
4639 continue;
4641 for (avail = expr->avail_occr; avail != NULL; avail = avail->next)
4643 rtx insn = avail->insn;
4645 /* No need to handle this one if handled already. */
4646 if (avail->copied_p)
4647 continue;
4649 /* Don't handle this one if it's a redundant one. */
4650 if (TEST_BIT (pre_redundant_insns, INSN_CUID (insn)))
4651 continue;
4653 /* Or if the expression doesn't reach the deleted one. */
4654 if (! pre_expr_reaches_here_p (BLOCK_NUM (avail->insn), expr,
4655 BLOCK_NUM (occr->insn)))
4656 continue;
4658 /* Copy the result of avail to reaching_reg. */
4659 pre_insert_copy_insn (expr, insn);
4660 avail->copied_p = 1;
4666 /* Delete redundant computations.
4667 Deletion is done by changing the insn to copy the `reaching_reg' of
4668 the expression into the result of the SET. It is left to later passes
4669 (cprop, cse2, flow, combine, regmove) to propagate the copy or eliminate it.
4671 Returns non-zero if a change is made. */
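/* A sketch (register numbers made up): a redundant

       (set (reg 5) (plus (reg 2) (reg 3)))

   is rewritten in place to

       (set (reg 5) (reg 107))

   where reg 107 is the pseudo created to hold the reaching expression.
   The copy itself is left for the later passes named above to propagate
   or eliminate.  */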
4673 static int
4674 pre_delete ()
4676 unsigned int i;
4677 int changed;
4678 struct expr *expr;
4679 struct occr *occr;
4681 changed = 0;
4682 for (i = 0; i < expr_hash_table_size; i++)
4683 for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
4685 int indx = expr->bitmap_index;
4687 /* We only need to search antic_occr since we require
4688 ANTLOC != 0. */
4690 for (occr = expr->antic_occr; occr != NULL; occr = occr->next)
4692 rtx insn = occr->insn;
4693 rtx set;
4694 int bb = BLOCK_NUM (insn);
4696 if (TEST_BIT (pre_delete_map[bb], indx))
4698 set = single_set (insn);
4699 if (! set)
4700 abort ();
4702 /* Create a pseudo-reg to store the result of reaching
4703 expressions into. Get the mode for the new pseudo from
4704 the mode of the original destination pseudo. */
4705 if (expr->reaching_reg == NULL)
4706 expr->reaching_reg
4707 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
4709 /* In theory this should never fail since we're creating
4710 a reg->reg copy.
4712 However, on the x86 some of the movXX patterns actually
4713 contain clobbers of scratch regs. This may cause the
4714 insn created by validate_change to not match any pattern
4715 and thus cause validate_change to fail. */
4716 if (validate_change (insn, &SET_SRC (set),
4717 expr->reaching_reg, 0))
4719 occr->deleted_p = 1;
4720 SET_BIT (pre_redundant_insns, INSN_CUID (insn));
4721 changed = 1;
4722 gcse_subst_count++;
4725 if (gcse_file)
4727 fprintf (gcse_file,
4728 "PRE: redundant insn %d (expression %d) in ",
4729 INSN_UID (insn), indx);
4730 fprintf (gcse_file, "bb %d, reaching reg is %d\n",
4731 bb, REGNO (expr->reaching_reg));
4737 return changed;
4740 /* Perform GCSE optimizations using PRE.
4741 This is called by one_pre_gcse_pass after all the dataflow analysis
4742 has been done.
4744 This is based on the original Morel-Renvoise paper, Fred Chow's thesis, and
4745 lazy code motion from Knoop, Ruthing and Steffen as described in Advanced
4746 Compiler Design and Implementation.
4748 ??? A new pseudo reg is created to hold the reaching expression. The nice
4749 thing about the classical approach is that it would try to use an existing
4750 reg. If the register can't be adequately optimized [i.e. we introduce
4751 reload problems], one could add a pass here to propagate the new register
4752 through the block.
4754 ??? We don't handle single sets in PARALLELs because we're [currently] not
4755 able to copy the rest of the parallel when we insert copies to create full
4756 redundancies from partial redundancies. However, there's no reason why we
4757 can't handle PARALLELs in the cases where there are no partial
4758 redundancies. */
4760 static int
4761 pre_gcse ()
4763 unsigned int i;
4764 int did_insert, changed;
4765 struct expr **index_map;
4766 struct expr *expr;
4768 /* Compute a mapping from expression number (`bitmap_index') to
4769 hash table entry. */
4771 index_map = (struct expr **) xcalloc (n_exprs, sizeof (struct expr *));
4772 for (i = 0; i < expr_hash_table_size; i++)
4773 for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
4774 index_map[expr->bitmap_index] = expr;
4776 /* Reset bitmap used to track which insns are redundant. */
4777 pre_redundant_insns = sbitmap_alloc (max_cuid);
4778 sbitmap_zero (pre_redundant_insns);
4780 /* Delete the redundant insns first so that
4781 - we know what register to use for the new insns and for the other
4782 ones with reaching expressions
4783 - we know which insns are redundant when we go to create copies */
4785 changed = pre_delete ();
4787 did_insert = pre_edge_insert (edge_list, index_map);
4789 /* In other places with reaching expressions, copy the expression to the
4790 specially allocated pseudo-reg that reaches the redundant expr. */
4791 pre_insert_copies ();
4792 if (did_insert)
4794 commit_edge_insertions ();
4795 changed = 1;
4798 free (index_map);
4799 free (pre_redundant_insns);
4800 return changed;
4803 /* Top level routine to perform one PRE GCSE pass.
4805 Return non-zero if a change was made. */
4807 static int
4808 one_pre_gcse_pass (pass)
4809 int pass;
4811 int changed = 0;
4813 gcse_subst_count = 0;
4814 gcse_create_count = 0;
4816 alloc_expr_hash_table (max_cuid);
4817 add_noreturn_fake_exit_edges ();
4818 compute_expr_hash_table ();
4819 if (gcse_file)
4820 dump_hash_table (gcse_file, "Expression", expr_hash_table,
4821 expr_hash_table_size, n_exprs);
4823 if (n_exprs > 0)
4825 alloc_pre_mem (n_basic_blocks, n_exprs);
4826 compute_pre_data ();
4827 changed |= pre_gcse ();
4828 free_edge_list (edge_list);
4829 free_pre_mem ();
4832 remove_fake_edges ();
4833 free_expr_hash_table ();
4835 if (gcse_file)
4837 fprintf (gcse_file, "\nPRE GCSE of %s, pass %d: %d bytes needed, ",
4838 current_function_name, pass, bytes_used);
4839 fprintf (gcse_file, "%d substs, %d insns created\n",
4840 gcse_subst_count, gcse_create_count);
4843 return changed;
4846 /* If X contains any LABEL_REF's, add REG_LABEL notes for them to INSN.
4847 If notes are added to an insn which references a CODE_LABEL, the
4848 LABEL_NUSES count is incremented. We have to add REG_LABEL notes,
4849 because the following loop optimization pass requires them. */
4851 /* ??? This is very similar to the loop.c add_label_notes function. We
4852 could probably share code here. */
4854 /* ??? If there was a jump optimization pass after gcse and before loop,
4855 then we would not need to do this here, because jump would add the
4856 necessary REG_LABEL notes. */
4858 static void
4859 add_label_notes (x, insn)
4860 rtx x;
4861 rtx insn;
4863 enum rtx_code code = GET_CODE (x);
4864 int i, j;
4865 const char *fmt;
4867 if (code == LABEL_REF && !LABEL_REF_NONLOCAL_P (x))
4869 /* This code used to ignore labels that referred to dispatch tables to
4870 avoid flow generating (slightly) worse code.
4872 We no longer ignore such label references (see LABEL_REF handling in
4873 mark_jump_label for additional information). */
4875 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_LABEL, XEXP (x, 0),
4876 REG_NOTES (insn));
4877 if (LABEL_P (XEXP (x, 0)))
4878 LABEL_NUSES (XEXP (x, 0))++;
4879 return;
4882 for (i = GET_RTX_LENGTH (code) - 1, fmt = GET_RTX_FORMAT (code); i >= 0; i--)
4884 if (fmt[i] == 'e')
4885 add_label_notes (XEXP (x, i), insn);
4886 else if (fmt[i] == 'E')
4887 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
4888 add_label_notes (XVECEXP (x, i, j), insn);
4892 /* Compute transparent outgoing information for each block.
4894 An expression is transparent to an edge unless it is killed by
4895 the edge itself. This can only happen with abnormal control flow,
4896 when the edge is traversed through a call. This happens with
4897 non-local labels and exceptions.
4899 This would not be necessary if we split the edge. While this is
4900 normally impossible for abnormal critical edges, with some effort
4901 it should be possible with exception handling, since we still have
4902 control over which handler should be invoked. But due to increased
4903 EH table sizes, this may not be worthwhile. */
4905 static void
4906 compute_transpout ()
4908 int bb;
4909 unsigned int i;
4910 struct expr *expr;
4912 sbitmap_vector_ones (transpout, n_basic_blocks);
4914 for (bb = 0; bb < n_basic_blocks; ++bb)
4916 /* Note that flow inserted a nop at the end of basic blocks that
4917 end in call instructions for reasons other than abnormal
4918 control flow. */
4919 if (GET_CODE (BLOCK_END (bb)) != CALL_INSN)
4920 continue;
4922 for (i = 0; i < expr_hash_table_size; i++)
4923 for (expr = expr_hash_table[i]; expr ; expr = expr->next_same_hash)
4924 if (GET_CODE (expr->expr) == MEM)
4926 if (GET_CODE (XEXP (expr->expr, 0)) == SYMBOL_REF
4927 && CONSTANT_POOL_ADDRESS_P (XEXP (expr->expr, 0)))
4928 continue;
4930 /* ??? Optimally, we would use interprocedural alias
4931 analysis to determine if this mem is actually killed
4932 by this call. */
4933 RESET_BIT (transpout[bb], expr->bitmap_index);
4938 /* Removal of useless null pointer checks */
4940 /* Called via note_stores. X is set by SETTER. If X is a register we must
4941 invalidate nonnull_local and set nonnull_killed. DATA is really a
4942 `null_pointer_info *'.
4944 We ignore hard registers. */
4946 static void
4947 invalidate_nonnull_info (x, setter, data)
4948 rtx x;
4949 rtx setter ATTRIBUTE_UNUSED;
4950 void *data;
4952 unsigned int regno;
4953 struct null_pointer_info *npi = (struct null_pointer_info *) data;
4955 while (GET_CODE (x) == SUBREG)
4956 x = SUBREG_REG (x);
4958 /* Ignore anything that is not a register or is a hard register. */
4959 if (GET_CODE (x) != REG
4960 || REGNO (x) < npi->min_reg
4961 || REGNO (x) >= npi->max_reg)
4962 return;
4964 regno = REGNO (x) - npi->min_reg;
4966 RESET_BIT (npi->nonnull_local[npi->current_block], regno);
4967 SET_BIT (npi->nonnull_killed[npi->current_block], regno);
4970 /* Do null-pointer check elimination for the registers indicated in
4971 NPI. NONNULL_AVIN and NONNULL_AVOUT are pre-allocated sbitmaps;
4972 they are not our responsibility to free. */
4974 static void
4975 delete_null_pointer_checks_1 (block_reg, nonnull_avin, nonnull_avout, npi)
4976 unsigned int *block_reg;
4977 sbitmap *nonnull_avin;
4978 sbitmap *nonnull_avout;
4979 struct null_pointer_info *npi;
4981 int bb;
4982 int current_block;
4983 sbitmap *nonnull_local = npi->nonnull_local;
4984 sbitmap *nonnull_killed = npi->nonnull_killed;
4986 /* Compute local properties, nonnull and killed. A register will have
4987 the nonnull property if at the end of the current block its value is
4988 known to be nonnull. The killed property indicates that somewhere in
4989 the block any information we had about the register is killed.
4991 Note that a register can have both properties in a single block. That
4992 indicates that it's killed, then later in the block a new value is
4993 computed. */
4994 sbitmap_vector_zero (nonnull_local, n_basic_blocks);
4995 sbitmap_vector_zero (nonnull_killed, n_basic_blocks);
4997 for (current_block = 0; current_block < n_basic_blocks; current_block++)
4999 rtx insn, stop_insn;
5001 /* Set the current block for invalidate_nonnull_info. */
5002 npi->current_block = current_block;
5004 /* Scan each insn in the basic block looking for memory references and
5005 register sets. */
5006 stop_insn = NEXT_INSN (BLOCK_END (current_block));
5007 for (insn = BLOCK_HEAD (current_block);
5008 insn != stop_insn;
5009 insn = NEXT_INSN (insn))
5011 rtx set;
5012 rtx reg;
5014 /* Ignore anything that is not a normal insn. */
5015 if (! INSN_P (insn))
5016 continue;
5018 /* Basically ignore anything that is not a simple SET. We do have
5019 to make sure to invalidate nonnull_local and set nonnull_killed
5020 for such insns though. */
5021 set = single_set (insn);
5022 if (!set)
5024 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
5025 continue;
5028 /* See if we've got a usable memory load. We handle it first
5029 in case it uses its address register as a dest (which kills
5030 the nonnull property). */
5031 if (GET_CODE (SET_SRC (set)) == MEM
5032 && GET_CODE ((reg = XEXP (SET_SRC (set), 0))) == REG
5033 && REGNO (reg) >= npi->min_reg
5034 && REGNO (reg) < npi->max_reg)
5035 SET_BIT (nonnull_local[current_block],
5036 REGNO (reg) - npi->min_reg);
5038 /* Now invalidate stuff clobbered by this insn. */
5039 note_stores (PATTERN (insn), invalidate_nonnull_info, npi);
5041 /* And handle stores; we do these last since any sets in INSN can
5042 not kill the nonnull property if it is derived from a MEM
5043 appearing in a SET_DEST. */
5044 if (GET_CODE (SET_DEST (set)) == MEM
5045 && GET_CODE ((reg = XEXP (SET_DEST (set), 0))) == REG
5046 && REGNO (reg) >= npi->min_reg
5047 && REGNO (reg) < npi->max_reg)
5048 SET_BIT (nonnull_local[current_block],
5049 REGNO (reg) - npi->min_reg);
5053 /* Now compute global properties based on the local properties. This
5054 is a classic global availability algorithm. */
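/* In equation form (a sketch; ENTRY contributes an empty AVIN):

       AVOUT(b) = NONNULL_LOCAL(b) | (AVIN(b) & ~NONNULL_KILLED(b))
       AVIN(b)  = intersection over predecessors p of AVOUT(p)

   iterated to a fixed point; a register's bit in AVOUT(b) means it is
   known nonnull on every path reaching the end of block b.  */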
5055 compute_available (nonnull_local, nonnull_killed,
5056 nonnull_avout, nonnull_avin);
5058 /* Now look at each bb and see if it ends with a compare of a value
5059 against zero. */
5060 for (bb = 0; bb < n_basic_blocks; bb++)
5062 rtx last_insn = BLOCK_END (bb);
5063 rtx condition, earliest;
5064 int compare_and_branch;
5066 /* Since MIN_REG is always at least FIRST_PSEUDO_REGISTER, and
5067 since BLOCK_REG[BB] is zero if this block did not end with a
5068 comparison against zero, this condition works. */
5069 if (block_reg[bb] < npi->min_reg
5070 || block_reg[bb] >= npi->max_reg)
5071 continue;
5073 /* LAST_INSN is a conditional jump. Get its condition. */
5074 condition = get_condition (last_insn, &earliest);
5076 /* If we can't determine the condition then skip. */
5077 if (! condition)
5078 continue;
5080 /* Is the register known to have a nonzero value? */
5081 if (!TEST_BIT (nonnull_avout[bb], block_reg[bb] - npi->min_reg))
5082 continue;
5084 /* Try to compute whether the compare/branch at the end of the
5085 block is one or two instructions. */
5086 if (earliest == last_insn)
5087 compare_and_branch = 1;
5088 else if (earliest == prev_nonnote_insn (last_insn))
5089 compare_and_branch = 2;
5090 else
5091 continue;
5093 /* We know the register in this comparison is nonnull at exit from
5094 this block. We can optimize this comparison. */
5095 if (GET_CODE (condition) == NE)
5097 rtx new_jump;
5099 new_jump = emit_jump_insn_before (gen_jump (JUMP_LABEL (last_insn)),
5100 last_insn);
5101 JUMP_LABEL (new_jump) = JUMP_LABEL (last_insn);
5102 LABEL_NUSES (JUMP_LABEL (new_jump))++;
5103 emit_barrier_after (new_jump);
5105 delete_insn (last_insn);
5106 if (compare_and_branch == 2)
5107 delete_insn (earliest);
5109 /* Don't check this block again. (Note that BLOCK_END is
5110 invalid here; we deleted the last instruction in the
5111 block.) */
5112 block_reg[bb] = 0;
5116 /* Find EQ/NE comparisons against zero which can be (indirectly) evaluated
5117 at compile time.
5119 This is conceptually similar to global constant/copy propagation and
5120 classic global CSE (it even uses the same dataflow equations as cprop).
5122 If a register is used as a memory address with the form (mem (reg)), then
5123 we know that REG cannot be zero at that point in the program. Any instruction
5124 which sets REG "kills" this property.
5126 So, if every path leading to a conditional branch has an available memory
5127 reference of that form, then we know the register can not have the value
5128 zero at the conditional branch.
5130 So we merely need to compute the local properties and propagate that data
5131 around the cfg, then optimize where possible.
5133 We run this pass two times. Once before CSE, then again after CSE. This
5134 has proven to be the most profitable approach. It is rare for new
5135 optimization opportunities of this nature to appear after the first CSE
5136 pass.
5138 This could probably be integrated with global cprop with a little work. */
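/* A source-level sketch of the transformation (hypothetical C):

       x = p->f;     // emits a load through (mem (reg P)); had P been
                     // zero, this load would have trapped, so from here
                     // on P is known nonnull
       if (p == 0)   // this EQ/NE test against zero can therefore be
         abort ();   // resolved at compile time and the check removed

   Any intervening store to P kills the property, as does joining with a
   path on which no such load occurred.  */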
5140 void
5141 delete_null_pointer_checks (f)
5142 rtx f ATTRIBUTE_UNUSED;
5144 sbitmap *nonnull_avin, *nonnull_avout;
5145 unsigned int *block_reg;
5146 int bb;
5147 int reg;
5148 int regs_per_pass;
5149 int max_reg;
5150 struct null_pointer_info npi;
5152 /* If we have only a single block, then there's nothing to do. */
5153 if (n_basic_blocks <= 1)
5154 return;
5156 /* Trying to perform global optimizations on flow graphs which have
5157 a high connectivity will take a long time and is unlikely to be
5158 particularly useful.
5160 In normal circumstances a cfg should have about twice as many edges
5161 as blocks. But we do not want to punish small functions which have
5162 a couple of switch statements.
5163 of basic blocks and the ratio of edges to blocks to be high. */
5164 if (n_basic_blocks > 1000 && n_edges / n_basic_blocks >= 20)
5165 return;
5167 /* We need four bitmaps, each with a bit for each register in each
5168 basic block. */
5169 max_reg = max_reg_num ();
5170 regs_per_pass = get_bitmap_width (4, n_basic_blocks, max_reg);
5172 /* Allocate bitmaps to hold local and global properties. */
5173 npi.nonnull_local = sbitmap_vector_alloc (n_basic_blocks, regs_per_pass);
5174 npi.nonnull_killed = sbitmap_vector_alloc (n_basic_blocks, regs_per_pass);
5175 nonnull_avin = sbitmap_vector_alloc (n_basic_blocks, regs_per_pass);
5176 nonnull_avout = sbitmap_vector_alloc (n_basic_blocks, regs_per_pass);
5178 /* Go through the basic blocks, seeing whether or not each block
5179 ends with a conditional branch whose condition is a comparison
5180 against zero. Record the register compared in BLOCK_REG. */
5181 block_reg = (unsigned int *) xcalloc (n_basic_blocks, sizeof (int));
5182 for (bb = 0; bb < n_basic_blocks; bb++)
5184 rtx last_insn = BLOCK_END (bb);
5185 rtx condition, earliest, reg;
5187 /* We only want conditional branches. */
5188 if (GET_CODE (last_insn) != JUMP_INSN
5189 || !any_condjump_p (last_insn)
5190 || !onlyjump_p (last_insn))
5191 continue;
5193 /* LAST_INSN is a conditional jump. Get its condition. */
5194 condition = get_condition (last_insn, &earliest);
5196 /* If we were unable to get the condition, or it is not an equality
5197 comparison against zero, then there's nothing we can do. */
5198 if (!condition
5199 || (GET_CODE (condition) != NE && GET_CODE (condition) != EQ)
5200 || GET_CODE (XEXP (condition, 1)) != CONST_INT
5201 || (XEXP (condition, 1)
5202 != CONST0_RTX (GET_MODE (XEXP (condition, 0)))))
5203 continue;
5205 /* We must be checking a register against zero. */
5206 reg = XEXP (condition, 0);
5207 if (GET_CODE (reg) != REG)
5208 continue;
5210 block_reg[bb] = REGNO (reg);
5213 /* Go through the algorithm for each block of registers. */
5214 for (reg = FIRST_PSEUDO_REGISTER; reg < max_reg; reg += regs_per_pass)
5216 npi.min_reg = reg;
5217 npi.max_reg = MIN (reg + regs_per_pass, max_reg);
5218 delete_null_pointer_checks_1 (block_reg, nonnull_avin,
5219 nonnull_avout, &npi);
5222 /* Free the table of registers compared at the end of every block. */
5223 free (block_reg);
5225 /* Free bitmaps. */
5226 free (npi.nonnull_local);
5227 free (npi.nonnull_killed);
5228 free (nonnull_avin);
5229 free (nonnull_avout);
5232 /* Code Hoisting variables and subroutines. */
5234 /* Very busy expressions. */
5235 static sbitmap *hoist_vbein;
5236 static sbitmap *hoist_vbeout;
5238 /* Hoistable expressions. */
5239 static sbitmap *hoist_exprs;
5241 /* Dominator bitmaps. */
5242 static sbitmap *dominators;
5244 /* ??? We could compute post dominators and run this algorithm in
5245 reverse to perform tail merging; doing so would probably be
5246 more effective than the tail merging code in jump.c.
5248 It's unclear if tail merging could be run in parallel with
5249 code hoisting. It would be nice. */
5251 /* Allocate vars used for code hoisting analysis. */
5253 static void
5254 alloc_code_hoist_mem (n_blocks, n_exprs)
5255 int n_blocks, n_exprs;
5257 antloc = sbitmap_vector_alloc (n_blocks, n_exprs);
5258 transp = sbitmap_vector_alloc (n_blocks, n_exprs);
5259 comp = sbitmap_vector_alloc (n_blocks, n_exprs);
5261 hoist_vbein = sbitmap_vector_alloc (n_blocks, n_exprs);
5262 hoist_vbeout = sbitmap_vector_alloc (n_blocks, n_exprs);
5263 hoist_exprs = sbitmap_vector_alloc (n_blocks, n_exprs);
5264 transpout = sbitmap_vector_alloc (n_blocks, n_exprs);
5266 dominators = sbitmap_vector_alloc (n_blocks, n_blocks);
5269 /* Free vars used for code hoisting analysis. */
5271 static void
5272 free_code_hoist_mem ()
5274 free (antloc);
5275 free (transp);
5276 free (comp);
5278 free (hoist_vbein);
5279 free (hoist_vbeout);
5280 free (hoist_exprs);
5281 free (transpout);
5283 free (dominators);
5286 /* Compute the very busy expressions at entry/exit from each block.
5288 An expression is very busy if all paths from a given point
5289 compute the expression. */
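/* In equation form (a sketch; the EXIT block has no successors, so its
   VBEOUT stays empty):

       VBEIN(b)  = ANTLOC(b) | (VBEOUT(b) & TRANSP(b))
       VBEOUT(b) = intersection over successors s of b of VBEIN(s)

   which is what the loop below iterates to a fixed point with sbitmap
   operations.  */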
5291 static void
5292 compute_code_hoist_vbeinout ()
5294 int bb, changed, passes;
5296 sbitmap_vector_zero (hoist_vbeout, n_basic_blocks);
5297 sbitmap_vector_zero (hoist_vbein, n_basic_blocks);
5299 passes = 0;
5300 changed = 1;
5302 while (changed)
5304 changed = 0;
5306 /* We scan the blocks in reverse order to speed up
5307 convergence. */
5308 for (bb = n_basic_blocks - 1; bb >= 0; bb--)
5310 changed |= sbitmap_a_or_b_and_c (hoist_vbein[bb], antloc[bb],
5311 hoist_vbeout[bb], transp[bb]);
5312 if (bb != n_basic_blocks - 1)
5313 sbitmap_intersection_of_succs (hoist_vbeout[bb], hoist_vbein, bb);
5316 passes++;
5319 if (gcse_file)
5320 fprintf (gcse_file, "hoisting vbeinout computation: %d passes\n", passes);
5323 /* Top level routine to do the dataflow analysis needed by code hoisting. */
5325 static void
5326 compute_code_hoist_data ()
5328 compute_local_properties (transp, comp, antloc, 0);
5329 compute_transpout ();
5330 compute_code_hoist_vbeinout ();
5331 calculate_dominance_info (NULL, dominators, CDI_DOMINATORS);
5332 if (gcse_file)
5333 fprintf (gcse_file, "\n");
5336 /* Determine if the expression identified by EXPR_INDEX would
5337 reach BB unimpaired if it were placed at the end of EXPR_BB.
5339 It's unclear exactly what Muchnick meant by "unimpaired". It seems
5340 to me that the expression must either be computed or transparent in
5341 *every* block in the path(s) from EXPR_BB to BB. Any other definition
5342 would allow the expression to be hoisted out of loops, even if
5343 the expression wasn't a loop invariant.
5345 Contrast this to reachability for PRE where an expression is
5346 considered reachable if *any* path reaches instead of *all*
5347 paths. */
5349 static int
5350 hoist_expr_reaches_here_p (expr_bb, expr_index, bb, visited)
5351 int expr_bb;
5352 int expr_index;
5353 int bb;
5354 char *visited;
5356 edge pred;
5357 int visited_allocated_locally = 0;
5360 if (visited == NULL)
5362 visited_allocated_locally = 1;
5363 visited = xcalloc (n_basic_blocks, 1);
5366 for (pred = BASIC_BLOCK (bb)->pred; pred != NULL; pred = pred->pred_next)
5368 int pred_bb = pred->src->index;
5370 if (pred->src == ENTRY_BLOCK_PTR)
5371 break;
5372 else if (visited[pred_bb])
5373 continue;
5375 /* Does this predecessor generate this expression? */
5376 else if (TEST_BIT (comp[pred_bb], expr_index))
5377 break;
5378 else if (! TEST_BIT (transp[pred_bb], expr_index))
5379 break;
5381 /* Not killed. */
5382 else
5384 visited[pred_bb] = 1;
5385 if (! hoist_expr_reaches_here_p (expr_bb, expr_index,
5386 pred_bb, visited))
5387 break;
5390 if (visited_allocated_locally)
5391 free (visited);
5393 return (pred == NULL);
5396 /* Actually perform code hoisting. */
5398 static void
5399 hoist_code ()
5401 int bb, dominated;
5402 unsigned int i;
5403 struct expr **index_map;
5404 struct expr *expr;
5406 sbitmap_vector_zero (hoist_exprs, n_basic_blocks);
5408 /* Compute a mapping from expression number (`bitmap_index') to
5409 hash table entry. */
5411 index_map = (struct expr **) xcalloc (n_exprs, sizeof (struct expr *));
5412 for (i = 0; i < expr_hash_table_size; i++)
5413 for (expr = expr_hash_table[i]; expr != NULL; expr = expr->next_same_hash)
5414 index_map[expr->bitmap_index] = expr;
5416 /* Walk over each basic block looking for potentially hoistable
5417 expressions; nothing gets hoisted from the entry block. */
5418 for (bb = 0; bb < n_basic_blocks; bb++)
5420 int found = 0;
5421 int insn_inserted_p;
5423 /* Examine each expression that is very busy at the exit of this
5424 block. These are the potentially hoistable expressions. */
5425 for (i = 0; i < hoist_vbeout[bb]->n_bits; i++)
5427 int hoistable = 0;
5429 if (TEST_BIT (hoist_vbeout[bb], i) && TEST_BIT (transpout[bb], i))
5431 /* We've found a potentially hoistable expression, now
5432 we look at every block BB dominates to see if it
5433 computes the expression. */
5434 for (dominated = 0; dominated < n_basic_blocks; dominated++)
5436 /* Ignore self dominance. */
5437 if (bb == dominated
5438 || ! TEST_BIT (dominators[dominated], bb))
5439 continue;
5441 /* We've found a dominated block, now see if it computes
5442 the busy expression and whether or not moving that
5443 expression to the "beginning" of that block is safe. */
5444 if (!TEST_BIT (antloc[dominated], i))
5445 continue;
5447 /* Note if the expression would reach the dominated block
5448 unimpaired if it were placed at the end of BB.
5450 Keep track of how many times this expression is hoistable
5451 from a dominated block into BB. */
5452 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
5453 hoistable++;
5456 /* If we found more than one hoistable occurrence of this
5457 expression, then note it in the bitmap of expressions to
5458 hoist. It makes no sense to hoist things which are computed
5459 in only one BB, and doing so tends to pessimize register
5460 allocation. One could increase this value to try harder
5461 to avoid any possible code expansion due to register
5462 allocation issues; however experiments have shown that
5463 the vast majority of hoistable expressions are only movable
5464 from two successors, so raising this threshold is likely
5465 to nullify any benefit we get from code hoisting. */
5466 if (hoistable > 1)
5468 SET_BIT (hoist_exprs[bb], i);
5469 found = 1;
5474 /* If we found nothing to hoist, then quit now. */
5475 if (! found)
5476 continue;
5478 /* Loop over all the hoistable expressions. */
5479 for (i = 0; i < hoist_exprs[bb]->n_bits; i++)
5481 /* We want to insert the expression into BB only once, so
5482 note when we've inserted it. */
5483 insn_inserted_p = 0;
5485 /* These tests should be the same as the tests above. */
5486 if (TEST_BIT (hoist_vbeout[bb], i))
5488 /* We've found a potentially hoistable expression, now
5489 we look at every block BB dominates to see if it
5490 computes the expression. */
5491 for (dominated = 0; dominated < n_basic_blocks; dominated++)
5493 /* Ignore self dominance. */
5494 if (bb == dominated
5495 || ! TEST_BIT (dominators[dominated], bb))
5496 continue;
5498 /* We've found a dominated block, now see if it computes
5499 the busy expression and whether or not moving that
5500 expression to the "beginning" of that block is safe. */
5501 if (!TEST_BIT (antloc[dominated], i))
5502 continue;
5504 /* The expression is computed in the dominated block and
5505 it would be safe to compute it at the start of the
5506 dominated block. Now we have to determine if the
5507 expression would reach the dominated block if it were placed
5508 placed at the end of BB. */
5509 if (hoist_expr_reaches_here_p (bb, i, dominated, NULL))
5511 struct expr *expr = index_map[i];
5512 struct occr *occr = expr->antic_occr;
5513 rtx insn;
5514 rtx set;
5516 /* Find the right occurrence of this expression. */
5517 while (occr && BLOCK_NUM (occr->insn) != dominated)
5518 occr = occr->next;
5520 /* Should never happen. */
5521 if (!occr)
5522 abort ();
5524 insn = occr->insn;
5526 set = single_set (insn);
5527 if (! set)
5528 abort ();
5530 /* Create a pseudo-reg to store the result of reaching
5531 expressions into. Get the mode for the new pseudo
5532 from the mode of the original destination pseudo. */
5533 if (expr->reaching_reg == NULL)
5534 expr->reaching_reg
5535 = gen_reg_rtx (GET_MODE (SET_DEST (set)));
5537 /* In theory this should never fail since we're creating
5538 a reg->reg copy.
5540 However, on the x86 some of the movXX patterns
5541 actually contain clobbers of scratch regs. This may
5542 cause the insn created by validate_change to not
5543 match any pattern and thus cause validate_change to
5544 fail. */
5545 if (validate_change (insn, &SET_SRC (set),
5546 expr->reaching_reg, 0))
5548 occr->deleted_p = 1;
5549 if (!insn_inserted_p)
5551 insert_insn_end_bb (index_map[i], bb, 0);
5552 insn_inserted_p = 1;
5561 free (index_map);
5564 /* Top level routine to perform one code hoisting (aka unification) pass.
5566 Return non-zero if a change was made. */
5568 static int
5569 one_code_hoisting_pass ()
5571 int changed = 0;
5573 alloc_expr_hash_table (max_cuid);
5574 compute_expr_hash_table ();
5575 if (gcse_file)
5576 dump_hash_table (gcse_file, "Code Hosting Expressions", expr_hash_table,
5577 expr_hash_table_size, n_exprs);
5579 if (n_exprs > 0)
5581 alloc_code_hoist_mem (n_basic_blocks, n_exprs);
5582 compute_code_hoist_data ();
5583 hoist_code ();
5584 free_code_hoist_mem ();
5587 free_expr_hash_table ();
5589 return changed;